code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1
value | license stringclasses 15
values | size int64 5 1M |
|---|---|---|---|---|---|
package com.arcusys.valamis.certificate.storage
import com.arcusys.valamis.certificate.model.goal.{GoalType, GoalStatuses, CertificateGoalState}
import org.joda.time.DateTime
/** Persistence operations for per-user certificate goal states. */
trait CertificateGoalStateRepository {

  /** Returns the statuses of a certificate's optional (or mandatory) goals for a user. */
  def getStatusesBy(userId: Long, certificateId: Long, isOptional: Boolean): Seq[GoalStatuses.Value]

  /** Returns the state of a single goal for a user, if any. */
  def getBy(userId: Long, goalId: Long): Option[CertificateGoalState]

  /** Returns the statuses of the given goals for a user. */
  def getStatusesByIds(userId: Long, goalIds: Seq[Long]): Seq[GoalStatuses.Value]

  /** Returns a goal state of the certificate for a user, if any. */
  def getByCertificate(userId: Long, certificateId: Long): Option[CertificateGoalState]

  /** Persists a new goal state and returns the stored entity. */
  def create(entity: CertificateGoalState): CertificateGoalState

  /** Updates the status and modification date of a goal state and returns the updated entity. */
  def modify(goalId: Long,
             userId: Long,
             status: GoalStatuses.Value,
             modifiedDate: DateTime): CertificateGoalState

  // Explicit `: Unit` result types below: the original declarations omitted the
  // result type (deprecated procedure-like style), which is easy to misread as
  // returning a value.

  /** Deletes all goal states of a certificate. */
  def deleteBy(certificateId: Long): Unit

  /** Deletes all goal states of a certificate for a single user. */
  def deleteBy(certificateId: Long, userId: Long): Unit
}
| igor-borisov/valamis | valamis-certificate/src/main/scala/com/arcusys/valamis/certificate/storage/CertificateGoalStateRepository.scala | Scala | gpl-3.0 | 864 |
package com.getjenny.starchat.resources
import akka.http.scaladsl.model.StatusCodes
import com.getjenny.starchat.TestEnglishBase
import com.getjenny.starchat.entities.io._
import com.getjenny.starchat.entities.persistents._
class TermExtractionResourceTest extends TestEnglishBase {

  // Seeds the knowledge base with two documents of the same conversation;
  // they supply the "observed" frequencies checked by the extraction tests below.
  "StarChat" should {
    s"return an HTTP code 201 when populating knowledge base" in {
      val documents = List(QADocument(
        id = "id1",
        conversation = "conv_id_1",
        indexInConversation = 1,
        coreData = Some(QADocumentCore(
          question = Some("term1 term2"),
          answer = Some("term1 term3")
        )),
        annotations = Some(QADocumentAnnotations(
          doctype = Some(Doctypes.NORMAL),
          agent = Some(Agent.STARCHAT)
        ))
      ), QADocument(
        id = "id2",
        conversation = "conv_id_1",
        indexInConversation = 2,
        coreData = Some(QADocumentCore(
          question = Some("term3 term4"),
          answer = Some("term1")
        )),
        annotations = Some(QADocumentAnnotations(
          doctype = Some(Doctypes.NORMAL),
          agent = Some(Agent.HUMAN_REPLY)
        ))
      ))
      for(document <- documents) {
        Post(s"/index_getjenny_english_0/knowledgebase?refresh=1", document) ~> addCredentials(testUserCredentials) ~> routes ~> check {
          status shouldEqual StatusCodes.Created
          // Value unused, but unmarshalling here fails the test if the response shape changed.
          val response = responseAs[IndexDocumentResult]
        }
      }
    }
  }

  // Seeds prior_data in both the index-specific and the common index; these
  // documents supply the "prior" frequencies used by the frequency test below.
  it should {
    "return an HTTP code 201 when populating prior_data" in {
      val document = QADocument(
        id = "id3",
        conversation = "conv_id_1",
        indexInConversation = 4,
        coreData = Some(QADocumentCore(
          question = Some("term6 term5 term1"),
          answer = Some("term1")
        )),
        annotations = Some(QADocumentAnnotations(
          doctype = Some(Doctypes.NORMAL),
          agent = Some(Agent.HUMAN_REPLY)
        ))
      )
      Post(s"/index_getjenny_english_0/prior_data?refresh=1", document) ~> addCredentials(testUserCredentials) ~> routes ~> check {
        status shouldEqual StatusCodes.Created
        val response = responseAs[IndexDocumentResult]
      }
      Post(s"/index_getjenny_english_common_0/prior_data?refresh=1", document.copy(id = "id4")) ~> addCredentials(testUserCredentials) ~> routes ~> check {
        status shouldEqual StatusCodes.Created
        val response = responseAs[IndexDocumentResult]
      }
    }
  }

  it should {
    "return an HTTP code 200 when extracting term frequencies" in {
      val request = TermsExtractionRequest(
        text = "term1 term2 term3 term4 term5 term1",
        fieldsObserved = Some(TermCountFields.all),
        commonOrSpecificSearchPrior = Some(CommonOrSpecificSearch.IDXSPECIFIC),
        observedDataSource = Some(ObservedDataSources.KNOWLEDGEBASE)
      )
      Post("/index_getjenny_english_0/extraction/frequencies", request) ~> addCredentials(testUserCredentials) ~> routes ~> check {
        status shouldEqual StatusCodes.OK
        val response = responseAs[TokenFrequency]
        // Expected prior/observed counts derive from the documents indexed in
        // the two populate tests above.
        response.tokensFreq shouldEqual List(
          TokenFrequencyItem(
            token = "term1", priorFrequency = 2, observedFrequency = 3
          ), TokenFrequencyItem(
            token = "term2", priorFrequency = 0, observedFrequency = 1
          ), TokenFrequencyItem(
            token = "term3", priorFrequency = 0, observedFrequency = 2
          ), TokenFrequencyItem(
            token = "term4", priorFrequency = 0, observedFrequency = 1
          ), TokenFrequencyItem(
            token = "term5", priorFrequency = 1, observedFrequency = 0
          )
        )
      }
    }
  }

  it should {
    "return an HTTP code 200 when extracting term keywords" in {
      // Thresholds are set to 0 so even this tiny corpus yields keywords.
      val request = TermsExtractionRequest(text = "term1 term2 term3 term4",
        fieldsObserved = Some(TermCountFields.all),
        commonOrSpecificSearchPrior = Some(CommonOrSpecificSearch.IDXSPECIFIC),
        observedDataSource = Some(ObservedDataSources.KNOWLEDGEBASE),
        minWordsPerSentence = Some(0),
        minSentenceInfoBit = Some(0),
        minKeywordInfo = Some(0)
      )
      Post("/index_getjenny_english_0/extraction/keywords", request) ~> addCredentials(testUserCredentials) ~> routes ~> check {
        status shouldEqual StatusCodes.OK
        val response = responseAs[Map[String, Double]]
        response shouldNot be (Map.empty)
      }
    }
  }

  it should {
    "return an HTTP code 200 when extracting term synonyms" in {
      val request = TermsExtractionRequest(text = "term1 term2")
      Post("/index_getjenny_english_0/extraction/synonyms", request) ~> addCredentials(testUserCredentials) ~> routes ~> check {
        status shouldEqual StatusCodes.OK
        val response = responseAs[List[SynonymExtractionItem]]
        response.map(_.token.token) shouldBe List("term1", "term2")
      }
    }
  }
}
| GetJenny/starchat | src/test/scala/com/getjenny/starchat/resources/TermExtractionResourceTest.scala | Scala | gpl-2.0 | 4,929 |
package nl.soqua.lcpi.ast.lambda
/** Smart constructors for the lambda-calculus AST. */
object Expression {

  /** Builds a variable node from its symbol name. */
  def V(symbol: String): Variable = Variable(symbol)

  /** Builds a lambda abstraction binding `variable` over `body`. */
  def λ(variable: Variable, body: Expression): LambdaAbstraction = LambdaAbstraction(variable, body)

  /** Builds the application of `t` to `s`.
    *
    * The result type is the concrete `Application` for consistency with `V`
    * and `λ`, which also return their concrete node types (covariant with the
    * previous `Expression` result type, so existing callers are unaffected).
    */
  def A(t: Expression, s: Expression): Application = Application(t, s)
}

/** A node of the untyped lambda-calculus AST. */
sealed trait Expression

/** A variable reference. */
case class Variable(symbol: String) extends Expression

/** A lambda abstraction `λ variable . body`. */
case class LambdaAbstraction(variable: Variable, body: Expression) extends Expression

/** The application of expression `t` to expression `s`. */
case class Application(t: Expression, s: Expression) extends Expression
| kevinvandervlist/lcpi | ast/src/main/scala/nl/soqua/lcpi/ast/lambda/Expression.scala | Scala | mit | 524 |
object Test extends App {

  // A fresh Array("a", "b", "c") per call: the two matches below operate on
  // independent instances, exactly as in the original code.
  private def sample: Array[String] = Array("a", "b", "c")

  // None of these patterns match the sample, so the match must throw a
  // MatchError, which is expected and swallowed.
  try {
    sample match {
      case Array("a", "x", "c") => println("x")
      case Array("a", "b", "x") => println("a")
      case Array("a", "d", _*)  => println("wrongly positive")
    }
    assert(false, "match succeeded")
  } catch {
    case _: MatchError => // okay: no pattern matched
  }

  // Here the vararg pattern matches the trailing "c".
  sample match {
    case Array("a", "x", "c") => println("x")
    case Array("a", "b", "x") => println("a")
    case Array("a", "b", _*)  => // okay
  }
}
| som-snytt/dotty | tests/pending/run/t6695.scala | Scala | apache-2.0 | 473 |
package com.alanjz.microstrike.gear
/** Marker trait for armor-type gear; carries no members yet. */
trait Armor
| spacenut/microstrike | src/com/alanjz/microstrike/gear/Armor.scala | Scala | gpl-2.0 | 54 |
package dotty.tools
package dotc.interactive
import dotc.ast.tpd
import dotc.{CompilationUnit, Compiler, Run}
import dotc.core.Contexts.Context
import dotc.core.Mode
import dotc.reporting.StoreReporter
import dotc.util.{SourceFile, SourcePosition}
import dotc.util.Spans.Span
import org.junit.Test
class CustomCompletionTests extends DottyTest:

  /** Wraps `input` in a dummy `object Wrapper { val expr = { ... } }`,
    * compiles it up to the typer, and requests completions at the position
    * right after `input`.
    *
    * @return the completion offset translated back into `input` coordinates,
    *         together with the proposed completions
    */
  private def completions(
    input: String,
    dependencyCompleter: Option[String => (Int, Seq[String])] = None,
    deep: Boolean = false,
    extraDefinitions: String = ""
  ): (Int, Seq[Completion]) =
    val prefix = extraDefinitions + """
object Wrapper {
  val expr = {
"""
    val suffix = """
  }
}
"""
    val allCode = prefix + input + suffix
    // Completion point: the character immediately following `input`.
    val index = prefix.length + input.length

    // Minimal interactive run that stops after the typer; diagnostics are
    // collected in a StoreReporter and discarded.
    val run = new Run(
      new Compiler,
      initialCtx.fresh
        .addMode(Mode.ReadPositions | Mode.Interactive)
        // discard errors - comment out this line to print them in the console
        .setReporter(new StoreReporter(null))
        .setSetting(initialCtx.settings.YstopAfter, List("typer"))
    )
    val file = SourceFile.virtual("<completions>", allCode, maybeIncomplete = true)
    given ctx: Context = run.runContext.withSource(file)
    val unit = CompilationUnit(file)
    ctx
      .run.nn
      .compileUnits(unit :: Nil, ctx)
    // ignoring compilation errors here - the input code
    // to complete likely doesn't compile
    // Narrow the typed tree down to the `expr` ValDef inside the wrapper so
    // that completion operates on the user input only.
    unit.tpdTree = {
      import tpd._
      unit.tpdTree match {
        case PackageDef(_, p) =>
          p.reverseIterator.collectFirst {
            case TypeDef(_, tmpl: Template) =>
              tmpl.body
                .collectFirst { case dd: ValDef if dd.name.show == "expr" => dd }
                .getOrElse(sys.error("Unexpected tree shape"))
          }
          .getOrElse(sys.error("Unexpected tree shape"))
        case _ => sys.error("Unexpected tree shape")
      }
    }
    val ctx1 = ctx.fresh.setCompilationUnit(unit)
    val srcPos = SourcePosition(file, Span(index))
    // CustomCompletion is only needed for deep or dependency completion;
    // the standard Completion covers the rest.
    val (offset0, completions) =
      if (deep || dependencyCompleter.nonEmpty)
        CustomCompletion.completions(srcPos, dependencyCompleteOpt = dependencyCompleter, enableDeep = deep)(using ctx1)
      else
        Completion.completions(srcPos)(using ctx1)
    // Translate the offset from allCode coordinates back into input coordinates.
    val offset = offset0 - prefix.length
    (offset, completions)

  @Test def simple(): Unit =
    val prefix = "scala.collection.immutable."
    val input = prefix + "Ma"

    val (offset, completions0) = completions(input)
    val labels = completions0.map(_.label)

    assert(offset == prefix.length)
    assert(labels.contains("Map"))

  @Test def custom(): Unit =
    val prefix = "import $ivy."
    val input = prefix + "scala"

    val dependencies = Seq(
      "scalaCompiler",
      "scalaLibrary",
      "other"
    )
    val (offset, completions0) = completions(
      input,
      dependencyCompleter = Some { dep =>
        val matches = dependencies.filter(_.startsWith(dep))
        (0, matches)
      }
    )
    val labels = completions0.map(_.label)

    assert(offset == prefix.length)
    assert(labels.contains("scalaCompiler"))
    assert(labels.contains("scalaLibrary"))
    assert(labels.length == 2)

  @Test def backTicks(): Unit =
    val prefix = "Foo."
    val input = prefix + "a"

    val extraDefinitions =
      """object Foo { def a1 = 2; def `a-b` = 3 }
        |""".stripMargin
    val (offset, completions0) = completions(
      input,
      extraDefinitions = extraDefinitions,
      deep = true // Enables CustomCompleter
    )
    val labels = completions0.map(_.label)

    assert(offset == prefix.length)
    assert(labels.contains("a1"))
    assert(labels.contains("`a-b`"))

  @Test def backTicksDependencies(): Unit =
    val prefix = "import $ivy."
    val input = prefix + "`org.scala-lang:scala-`"

    val dependencies = Seq(
      "org.scala-lang:scala-compiler",
      "org.scala-lang:scala-library",
      "other"
    )
    val (offset, completions0) = completions(
      input,
      dependencyCompleter = Some { dep =>
        val matches = dependencies.filter(_.startsWith(dep))
        (0, matches)
      }
    )
    val labels = completions0.map(_.label)

    // Seems backticks mess with that for now...
    // assert(offset == prefix.length)
    assert(labels.contains("`org.scala-lang:scala-compiler`"))
    assert(labels.contains("`org.scala-lang:scala-library`"))
    assert(labels.length == 2)

  @Test def deep(): Unit =
    val prefix = ""
    val input = prefix + "ListBuf"

    val (offset, completions0) = completions(input, deep = true)
    val labels = completions0.map(_.label)

    assert(offset == prefix.length)
    assert(labels.contains("scala.collection.mutable.ListBuffer"))

  @Test def deepType(): Unit =
    val prefix = ""
    val input = prefix + "Function2"

    val (offset, completions0) = completions(input, deep = true)
    val labels = completions0.map(_.label)

    assert(offset == prefix.length)
    assert(labels.contains("scala.Function2"))
| dotty-staging/dotty | compiler/test/dotty/tools/dotc/interactive/CustomCompletionTests.scala | Scala | apache-2.0 | 5,295 |
package com.evojam.mongodb.evolutions.model.evolution
/** The kinds of action that can be applied to a mongo evolution.
  *
  * NOTE(review): `scala.Enumeration` is generally discouraged in favour of a
  * sealed ADT, but replacing it here would break the public `Action.Value`
  * interface, so it is kept as-is.
  */
object Action extends Enumeration {
  type Action = Value
  val Update, ApplyUp, ApplyDown = Value
}
package org.tuubes.core.blocks
import com.electronwill.utils.Vec3i
/**
 * A directly accessible Area: blocks can be read and written by coordinates.
 *
 * @author TheElectronWill
 */
trait DirectArea extends Area {
  /**
   * @return the total number of blocks (including air) in this area
   */
  def size: Int

  /**
   * @return the minimum coordinates included in the area
   */
  def min: Vec3i

  /**
   * @return the maximum coordinates included in the area
   */
  def max: Vec3i

  /** @return the type of the block at the given coordinates */
  def apply(x: Int, y: Int, z: Int): BlockType

  /** Convenience overload of [[apply]] taking a position vector. */
  final def apply(p: Vec3i): BlockType = {
    apply(p.x, p.y, p.z)
  }

  /** Sets the type of the block at the given coordinates. */
  def update(x: Int, y: Int, z: Int, t: BlockType): Unit

  /** Convenience overload of [[update]] taking a position vector. */
  final def update(p: Vec3i, t: BlockType): Unit = {
    update(p.x, p.y, p.z, t)
  }

  /** Replaces every block of type `replace` with `replacement`. */
  def replace(replace: BlockType, replacement: BlockType): Unit

  /** Replaces blocks of the given types with replacements
    * (presumably paired by array index — implementations should confirm).
    */
  def replace(replace: Array[BlockType], replacements: Array[BlockType]): Unit

  /** Like the array overload of `replace`, restricted to the region
    * delimited by `from` and `to`.
    */
  def replace(from: Vec3i,
              to: Vec3i,
              replace: Array[BlockType],
              replacements: Array[BlockType]): Unit

  /** Fills the whole area with the given block type. */
  def fill(block: BlockType): Unit

  /** Fills the whole area, skipping blocks whose type is in `exceptions`. */
  def fill(block: BlockType, exceptions: Array[BlockType]): Unit

  /** Fills the region from `from` to `to` with `fillWith`, skipping blocks
    * whose type is in `exceptions`.
    */
  def fill(from: Vec3i, to: Vec3i, fillWith: BlockType, exceptions: Array[BlockType]): Unit
}
| mcphoton/Photon-Server | core/src/main/scala/org/tuubes/core/blocks/DirectArea.scala | Scala | lgpl-3.0 | 1,192 |
package be.wegenenverkeer.atomium.server.slick.models
import org.joda.time.DateTime
/** Row model for an atom feed entry persisted via Slick.
  *
  * @param id        optional database id; None for a not-yet-inserted row
  *                  (presumably assigned by the database — verify against the schema)
  * @param uuid      identifier of the entry
  * @param value     entry payload
  * @param timestamp time associated with the entry
  */
case class EntryModel(
  id: Option[Long],
  uuid: String,
  value: String,
  timestamp: DateTime)
| joachimvda/atomium | modules/server-slick/src/main/scala/be/wegenenverkeer/atomium/server/slick/models/EntryModel.scala | Scala | mit | 185 |
package de.tudresden.inf.lat.tabulas.ext.renderer
import java.io.{BufferedWriter, FileReader, FileWriter}
import java.util.Objects
import de.tudresden.inf.lat.tabulas.ext.parser.{JsonParser, MultiParser, YamlParser}
import de.tudresden.inf.lat.tabulas.extension.Extension
import de.tudresden.inf.lat.tabulas.parser.{ParserConstant, SimpleFormatParser}
import scala.util.Try
/** This extension exports the metadata of a table as a JSON Schema file.
  * (The previous comment said "Rx YAML schema", which contradicted both the
  * help text and the `JsonSchemaRenderer` used below.)
  */
case class JsonSchemaExtension() extends Extension {

  final val Name: String = "jsonschema"

  final val Help: String = "(input) (output) : given a Tabula.JSON file with exactly one table, " +
    "this extension exports the metadata of that table only as a JSON Schema file. " +
    "See " + ParserConstant.DeprecationOfMultipleTables + "."

  final val RequiredArguments: Int = 2

  override val getExtensionName: String = Name

  override val getHelp: String = Help

  override val getRequiredArguments: Int = RequiredArguments

  /** Parses the input file and, when it contains exactly one table, writes
    * that table's metadata as a JSON Schema to the output file.
    *
    * @param arguments exactly two arguments: input file name, output file name
    * @return Success(true) if the schema was written; Success(false) on a wrong
    *         argument count or when the input holds more than one table;
    *         Failure when parsing or writing threw
    */
  override def process(arguments: Seq[String]): Try[Boolean] = Try {
    if (Objects.isNull(arguments) || arguments.size != RequiredArguments) {
      false
    } else {
      val inputFileName = arguments(0)
      val outputFileName = arguments(1)
      // Close the reader once parsing is done (the original leaked it).
      val input = new FileReader(inputFileName)
      val tableMap =
        try {
          MultiParser(Seq(YamlParser(), JsonParser(), SimpleFormatParser()))
            .parse(input).get
        } finally {
          input.close()
        }
      if (tableMap.getTableIds.length == 1) {
        // Close the writer so buffered output is flushed to disk; the original
        // never closed it, which risked a truncated or empty output file.
        val output = new BufferedWriter(new FileWriter(outputFileName))
        try {
          JsonSchemaRenderer().render(output, tableMap)
        } finally {
          output.close()
        }
        true
      } else {
        false
      }
    }
  }
}

object JsonSchemaExtension {}
| julianmendez/tabulas | tabulas-ext/src/main/scala/de/tudresden/inf/lat/tabulas/ext/renderer/JsonSchemaExtension.scala | Scala | apache-2.0 | 1,684 |
package com.gx.cake
/**
* Copyright 2017 josephguan
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
//-------------------
// Engine Component
//-------------------
trait EngineComponent {
  // Concrete engine instance; must be supplied by whatever mixes in this
  // component (cake-pattern dependency declaration).
  val engine: Engine

  /** An engine that can be started. */
  trait Engine {
    def start(): Unit
  }

  /** V6 engine; starting it prints a message. */
  class V6Engine extends Engine {
    override def start(): Unit = println("Vroom Vroom Vroom... V6 Engine started.")
  }

  /** V8 engine; starting it prints a message. */
  class V8Engine extends Engine {
    override def start(): Unit = println("Vroom Vroom Vroom... V8 Engine started.")
  }
}
//-------------------
// Wheel Component
//-------------------
trait WheelComponent {
  // Concrete wheel instance; must be supplied by whatever mixes in this
  // component (cake-pattern dependency declaration).
  val wheel: Wheel

  /** A wheel that can rotate. */
  trait Wheel {
    def rotate(): Unit
  }

  /** Michelin wheel; rotating it prints a message. */
  class MichelinWheel extends Wheel {
    override def rotate(): Unit = println("Michelin wheel rotated.")
  }

  /** Dunlop wheel; rotating it prints a message. */
  class DunlopWheel extends Wheel {
    override def rotate(): Unit = println("Dunlop wheel rotated.")
  }
}
//-------------------
// Brand Component
//-------------------
trait BrandComponent {
  // Concrete brand instance; must be supplied by whatever mixes in this
  // component (cake-pattern dependency declaration).
  val brand: Brand

  /** A car brand whose badge can light up. */
  trait Brand {
    def light(): Unit
  }

  /** Audi brand; lighting it prints a message. */
  class AudiBrand extends Brand {
    override def light(): Unit = println("I am Audi.")
  }

  /** BMW brand; lighting it prints a message. */
  class BMWBrand extends Brand {
    override def light(): Unit = println("I am BMW.")
  }
}
| josephguan/scala-design-patterns | creational/cake/src/main/scala/com/gx/cake/CarComponents.scala | Scala | apache-2.0 | 1,747 |
package jp.relx.models
import org.elasticsearch.client.transport.TransportClient
import org.elasticsearch.common.transport.InetSocketTransportAddress
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest
import org.elasticsearch.action.index.IndexRequest
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest
/** Thin wrapper around an Elasticsearch transport client for the
  * "livedoor-gourmet" sample index.
  *
  * NOTE(review): the `TransportClient` is never closed in this object; it is
  * presumably intended to live for the whole application — confirm that a
  * shutdown hook elsewhere releases it.
  *
  * This revision also removed dataset residue that had been fused onto the
  * closing brace, and replaced deprecated procedure syntax (`def f { }`) with
  * explicit `: Unit =` on `createIndex` and `indexDoc`.
  */
object ElasticSearch {
  val host = "localhost"
  val port = 9300
  val indexName = "livedoor-gourmet"

  // Shared transport client pointing at the single configured node.
  val client = new TransportClient().addTransportAddress(
    new InetSocketTransportAddress(host, port))

  /** Refreshes the index so freshly indexed documents become searchable. */
  def refreshIndex = client.admin().indices().refresh(
    new RefreshRequest(ElasticSearch.indexName)).actionGet()

  /** Deletes the whole index, including all of its documents. */
  def deleteIndex = client.admin().indices().delete(
    new DeleteIndexRequest(ElasticSearch.indexName)).actionGet()

  /** Creates the index with its n-gram/kuromoji analysis settings and the
    * `restaurant` mapping. Note: the create request is issued without
    * `actionGet`, i.e. this method does not wait for completion (unchanged
    * from the original behaviour).
    */
  def createIndex: Unit = {
    val body = """{
  "settings": {
    "index": {
      "number_of_shards": 5,
      "number_of_replicas": 1
    },
    "analysis": {
      "tokenizer": {
        "ngram_tokenizer": {
          "type": "nGram",
          "min_gram": 2,
          "max_gram": 3,
          "token_chars": ["letter", "digit"]
        }
      },
      "filter": {
      },
      "analyzer": {
        "ngram_analyzer": {
          "type": "custom",
          "tokenizer": "ngram_tokenizer",
          "filter": ["lowercase", "stop"]
        }
      }
    }
  },
  "mappings": {
    "restaurant": {
      "_id": {"path": "id"},
      "properties": {
        "id": {"type": "integer", "index": "not_analyzed"},
        "name": {
          "type": "multi_field",
          "fields": {
            "name": {"type": "string", "analyzer": "ngram_analyzer"},
            "suggest": {"type": "string", "analyzer": "kuromoji"},
            "completion": {"type": "completion", "analyzer": "ngram_analyzer"}
          }
        },
        "property": {"type": "string", "analyzer": "ngram_analyzer"},
        "alphabet": {"type": "string", "analyzer": "ngram_analyzer"},
        "name_kana": {"type": "string", "analyzer": "ngram_analyzer"},
        "pref_id": {"type": "integer", "index": "not_analyzed"},
        "category_ids": {"type": "integer", "index": "not_analyzed"},
        "zip": {"type": "string", "index": "not_analyzed"},
        "address": {"type": "string", "analyzer": "kuromoji"},
        "description": {"type": "string", "analyzer": "kuromoji"}
      }
    }
  }
}"""
    client.admin().indices().create(new CreateIndexRequest(indexName).settings(body))
  }

  /** Indexes one JSON document of the given type and waits for completion. */
  def indexDoc(typeName: String, json: String): Unit = {
    client.prepareIndex(indexName, typeName).setSource(json).execute.actionGet
  }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.net.SocketTimeoutException
import kafka.cluster.BrokerEndPoint
import org.apache.kafka.clients._
import org.apache.kafka.common.metrics.Metrics
import org.apache.kafka.common.network._
import org.apache.kafka.common.requests.AbstractRequest
import org.apache.kafka.common.security.JaasContext
import org.apache.kafka.common.utils.{LogContext, Time}
import org.apache.kafka.clients.{ApiVersions, ClientResponse, ManualMetadataUpdater, NetworkClient}
import org.apache.kafka.common.Node
import org.apache.kafka.common.requests.AbstractRequest.Builder
import scala.collection.JavaConverters._
/** A blocking, synchronous request/response channel to a single broker. */
trait BlockingSend {

  /** Sends the request and blocks until a response arrives or the send fails. */
  def sendRequest(requestBuilder: AbstractRequest.Builder[_ <: AbstractRequest]): ClientResponse

  /** Releases the underlying network resources.
    * (Explicit `: Unit` replaces the original's deprecated missing-result-type style.)
    */
  def close(): Unit
}
/** [[BlockingSend]] used by replica fetchers to issue blocking requests to
  * the source (leader) broker, secured with the inter-broker settings.
  */
class ReplicaFetcherBlockingSend(sourceBroker: BrokerEndPoint,
                                 brokerConfig: KafkaConfig,
                                 metrics: Metrics,
                                 time: Time,
                                 fetcherId: Int,
                                 clientId: String,
                                 logContext: LogContext) extends BlockingSend {

  private val sourceNode = new Node(sourceBroker.id, sourceBroker.host, sourceBroker.port)
  private val socketTimeout: Int = brokerConfig.replicaSocketTimeoutMs

  // Dedicated NetworkClient for this fetcher: channel built from the
  // inter-broker security protocol, selector tagged with broker/fetcher ids
  // for metrics.
  private val networkClient = {
    val channelBuilder = ChannelBuilders.clientChannelBuilder(
      brokerConfig.interBrokerSecurityProtocol,
      JaasContext.Type.SERVER,
      brokerConfig,
      brokerConfig.interBrokerListenerName,
      brokerConfig.saslMechanismInterBrokerProtocol,
      brokerConfig.saslInterBrokerHandshakeRequestEnable
    )
    val selector = new Selector(
      NetworkReceive.UNLIMITED,
      brokerConfig.connectionsMaxIdleMs,
      metrics,
      time,
      "replica-fetcher",
      Map("broker-id" -> sourceBroker.id.toString, "fetcher-id" -> fetcherId.toString).asJava,
      false,
      channelBuilder,
      logContext
    )
    new NetworkClient(
      selector,
      new ManualMetadataUpdater(),
      clientId,
      1,
      0,
      0,
      Selectable.USE_DEFAULT_BUFFER_SIZE,
      brokerConfig.replicaSocketReceiveBufferBytes,
      brokerConfig.requestTimeoutMs,
      time,
      false,
      new ApiVersions,
      logContext
    )
  }

  /** Waits for the broker connection to become ready (up to the configured
    * socket timeout), sends the request and blocks for the response. On any
    * failure the connection is closed before the exception is rethrown.
    */
  override def sendRequest(requestBuilder: Builder[_ <: AbstractRequest]): ClientResponse = {
    try {
      if (!NetworkClientUtils.awaitReady(networkClient, sourceNode, time, socketTimeout))
        throw new SocketTimeoutException(s"Failed to connect within $socketTimeout ms")
      else {
        val clientRequest = networkClient.newClientRequest(sourceBroker.id.toString, requestBuilder,
          time.milliseconds(), true)
        NetworkClientUtils.sendAndReceive(networkClient, clientRequest, time)
      }
    }
    catch {
      case e: Throwable =>
        // Tear down the (possibly corrupted) connection before propagating.
        networkClient.close(sourceBroker.id.toString)
        throw e
    }
  }

  def close(): Unit = {
    networkClient.close()
  }
}
| sebadiaz/kafka | core/src/main/scala/kafka/server/ReplicaFetcherBlockingSend.scala | Scala | apache-2.0 | 3,832 |
package sbt
package plugins
import Def.Setting
/**
 * Plugin that enables resolving artifacts via ivy.
 *
 * Core Tasks
 *  - `update`
 *  - `makePom`
 *  - `publish`
 *  - `artifacts`
 *  - `publishedArtifacts`
 */
object IvyPlugin extends AutoPlugin {
  // We are automatically included on everything that has the global module,
  // which is automatically included on everything.
  override def requires = CorePlugin
  override def trigger = allRequirements

  // Per-project settings: ivy publishing plus the base ivy configuration.
  override lazy val projectSettings: Seq[Setting[_]] =
    Classpaths.ivyPublishSettings ++ Classpaths.ivyBaseSettings

  // Build-global defaults for the ivy machinery.
  override lazy val globalSettings: Seq[Setting[_]] =
    Defaults.globalIvyCore
}
| pdalpra/sbt | main/src/main/scala/sbt/plugins/IvyPlugin.scala | Scala | bsd-3-clause | 663 |
package teleporter.integration.component.mongo
import akka.stream.scaladsl.Source
import akka.stream.{Attributes, TeleporterAttributes}
import akka.{Done, NotUsed}
import org.mongodb.scala.{Document, MongoClient, MongoCollection}
import teleporter.integration.component.SourceRoller.RollerContext
import teleporter.integration.component._
import teleporter.integration.core._
import teleporter.integration.metrics.Metrics
import teleporter.integration.script.Template
import teleporter.integration.utils.Converters._
import teleporter.integration.utils.MapBean
import scala.concurrent.{ExecutionContext, Future}
/**
 * Akka-stream sources backed by a mongo collection.
 *
 * Created by joker on 15/12/07
 */
object Mongo {

  /** Like [[source]], but flattens each page into individual documents and
    * routes them through the [[SourceAck]] acknowledgement flow.
    */
  def sourceAck(sourceKey: String)
               (implicit center: TeleporterCenter): Source[AckMessage[MapBean, MongoMessage], NotUsed] = {
    val sourceContext = center.context.getContext[SourceContext](sourceKey)
    source(sourceKey).mapConcat(m ⇒ m.data.map { d ⇒
      SourceMessage(RollerContext.merge(sourceContext.config, m.coordinate), d)
    }.toIndexedSeq)
      .via(SourceAck.flow[MongoMessage](sourceContext.id, sourceContext.config))
  }

  /** Builds a rolling mongo source for the configured database/collection.
    *
    * The client is registered against the address on materialization and
    * unregistered again when the source closes; throughput is counted via the
    * metrics registry under `sourceKey`.
    */
  def source(sourceKey: String)
            (implicit center: TeleporterCenter): Source[SourceMessage[RollerContext, Seq[MongoMessage]], NotUsed] = {
    val sourceContext = center.context.getContext[SourceContext](sourceKey)
    val mongoSourceConfig = sourceContext.config.mapTo[MongoSourceMetaBean]
    // Binding name defaults to the source key when no explicit addressBind is set.
    val bind = Option(sourceContext.config.addressBind).getOrElse(sourceKey)
    val addressKey = sourceContext.address().key
    Source.fromGraph(new MongoSourceAsync(
      name = sourceKey,
      filter = mongoSourceConfig.filter,
      rollerContext = RollerContext(sourceContext.config),
      _create = (ec) ⇒ Future {
        val mongoClient = center.context.register(addressKey, bind, () ⇒ address(addressKey)).client
        val database = mongoClient.getDatabase(mongoSourceConfig.database)
        database.getCollection(mongoSourceConfig.collection)
      }(ec),
      _close = {
        (_, _) ⇒
          center.context.unRegister(addressKey, bind)
          Future.successful(Done)
      })).addAttributes(Attributes(TeleporterAttributes.SupervisionStrategy(sourceKey, sourceContext.config)))
      .via(Metrics.count[SourceMessage[RollerContext, Seq[MongoMessage]]](sourceKey)(center.metricsRegistry))
  }

  /** Creates an auto-closeable mongo client for the address configured at `key`. */
  def address(key: String)(implicit center: TeleporterCenter): AutoCloseClientRef[MongoClient] = {
    val config = center.context.getContext[AddressContext](key).config
    val mongoMetaBean = config.client.mapTo[MongoAddressMetaBean]
    val mongoClient = MongoClient(config[String](mongoMetaBean.url))
    new AutoCloseClientRef[MongoClient](key, mongoClient)
  }
}
/** Address-configuration view exposing the mongo connection `url`. */
class MongoAddressMetaBean(override val underlying: Map[String, Any]) extends AddressMetaBean(underlying) {
  val FUrl = "url"

  /** Mongo connection string (required key `url`). */
  def url: String = client[String](FUrl)
}
/** Source-configuration view: target database/collection and an optional filter. */
class MongoSourceMetaBean(override val underlying: Map[String, Any]) extends SourceMetaBean(underlying) {
  val FDatabase = "database"
  val FCollection = "collection"
  val FFilter = "filter"

  /** Target database name (required key `database`). */
  def database: String = client[String](FDatabase)

  /** Target collection name (required key `collection`). */
  def collection: String = client[String](FCollection)

  /** Optional filter template; rendered against the roller context by MongoSourceAsync. */
  def filter: Option[String] = client.get[String](FFilter)
}
/** Rolling source stage that emits the documents matching `filter`, one
  * result set per roller condition (optionally paginated).
  */
class MongoSourceAsync(name: String = "mongo.source",
                       filter: Option[String],
                       rollerContext: RollerContext,
                       _create: (ExecutionContext) ⇒ Future[MongoCollection[MongoMessage]],
                       _close: (MongoCollection[MongoMessage], ExecutionContext) ⇒ Future[Done])
  extends RollerSourceAsync[SourceMessage[RollerContext, Seq[MongoMessage]], MongoCollection[MongoMessage]](name, rollerContext) {

  // Flips to true once the current roller condition's query has run, so the
  // following readData call signals exhaustion (None) and resets the flag.
  var isCurrConditionExec: Boolean = false

  override def readData(client: MongoCollection[MongoMessage], rollerContext: RollerContext,
                        executionContext: ExecutionContext): Future[Option[SourceMessage[RollerContext, Seq[MongoMessage]]]] = {
    implicit val ec = executionContext
    if (isCurrConditionExec) {
      // The query for this condition already ran: report end-of-data.
      isCurrConditionExec = false
      Future.successful(None)
    } else {
      isCurrConditionExec = true
      // Render the optional filter template against the roller context;
      // an absent filter selects all documents.
      val filterDoc = filter.map(s ⇒ Document(Template(s, rollerContext.toMap))).getOrElse(Document())
      val query = client.find(filterDoc)
      // Apply skip/limit only when the roller context defines pagination.
      val filterQuery = rollerContext.pagination.map { page ⇒
        query.skip(page.offset.toInt).limit(page.pageSize)
      }.getOrElse(query)
      filterQuery.toFuture().map(m ⇒ Some(SourceMessage(rollerContext, m)))
    }
  }

  override def create(executionContext: ExecutionContext): Future[MongoCollection[MongoMessage]] = _create(executionContext)

  override def close(client: MongoCollection[MongoMessage], executionContext: ExecutionContext): Future[Done] = _close(client, executionContext)
}
| huanwuji/teleporter | src/main/scala/teleporter/integration/component/mongo/Mongo.scala | Scala | agpl-3.0 | 4,851 |
import scala.reflect.macros.blackbox.Context
object Impls1 {
  /** Macro implementation: expands to a `println("hello")` call. */
  def foo[U <: String](c: Context): c.Expr[Unit] = {
    import c.universe._
    val body = q"""println("hello")"""
    c.Expr[Unit](body)
  }
}
// Minimal class hierarchy; `C` serves as the type bound of Impls2.foo.
class C
class D extends C
object Impls2 {
  /** Macro implementation bounded by `C`: expands to a `println("hello")` call. */
  def foo[U <: C](c: Context): c.Expr[Unit] = {
    import c.universe._
    val body = q"""println("hello")"""
    c.Expr[Unit](body)
  }
}
| yusuke2255/dotty | tests/disabled/macro/run/macro-expand-tparams-bounds/Impls_1.scala | Scala | bsd-3-clause | 333 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs105.boxes
import uk.gov.hmrc.ct.accounts.frs105.retriever.Frs105AccountsBoxRetriever
import uk.gov.hmrc.ct.box._
/** CT accounts box AC421: depreciation and other amounts written off assets
  * for the previous period of account (a debit amount).
  */
case class AC421(value: Option[Int]) extends CtBoxIdentifier(name = "Depreciation and other amounts written off assets (previous PoA)")
  with CtOptionalInteger
  with Input
  with ValidatableBox[Frs105AccountsBoxRetriever]
  with Debit {

  // The only constraint: when present, the amount must be a valid money value.
  override def validate(boxRetriever: Frs105AccountsBoxRetriever): Set[CtValidation] = {
    collectErrors(
      validateMoney(value)
    )
  }
}
| liquidarmour/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frs105/boxes/AC421.scala | Scala | apache-2.0 | 1,143 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import org.apache.spark.annotation.Experimental
import org.apache.spark.ml.UnaryTransformer
import org.apache.spark.ml.param.{DoubleParam, ParamValidators}
import org.apache.spark.ml.util.Identifiable
import org.apache.spark.mllib.feature
import org.apache.spark.mllib.linalg.{Vector, VectorUDT}
import org.apache.spark.sql.types.DataType
/**
 * Feature-vector normalization.
 *
 * (Translated from the original Chinese comment:) Normalization rescales
 * numeric features into a standard range. "Normalizing a feature" transforms
 * a single feature across the data set — e.g. subtracting the mean, or
 * standardizing so the feature has mean 0 and standard deviation 1 — while
 * "normalizing a feature vector" transforms all features of one row so that
 * the transformed vector has a standardized (unit) length.
 *
 * :: Experimental ::
 * Normalize a vector to have unit norm using the given p-norm.
 */
@Experimental
class Normalizer(override val uid: String) extends UnaryTransformer[Vector, Vector, Normalizer] {

  def this() = this(Identifiable.randomUID("normalizer"))

  /**
   * Normalization in L^p^ space. Must be >= 1.
   * (default: p = 2)
   * @group param
   */
  val p = new DoubleParam(this, "p", "the p norm value", ParamValidators.gtEq(1))

  setDefault(p -> 2.0)

  /** @group getParam */
  def getP: Double = $(p)

  /** @group setParam */
  def setP(value: Double): this.type = set(p, value)

  // Delegates the per-vector work to the mllib Normalizer with the current p.
  override protected def createTransformFunc: Vector => Vector = {
    val normalizer = new feature.Normalizer($(p))
    normalizer.transform
  }

  override protected def outputDataType: DataType = new VectorUDT()
}
| tophua/spark1.52 | mllib/src/main/scala/org/apache/spark/ml/feature/Normalizer.scala | Scala | apache-2.0 | 2,468 |
package com.github.play2war.plugin.runners
import com.github.play2war.plugin.it._
import java.io.File
/**
 * Starts a Tomcat 8 container via Cargo and deploys the servlet31 sample WAR.
 */
object Tomcat8xRunner extends App {

  // Cargo-managed Tomcat 8.0.22 with Servlet 3.1 / Java 8 support.
  val tomcat8CargoContainer = new CargoContainerManager with Servlet31Container with Java8 {
    val tomcatVersion = "8.0.22"

    override def containerUrl = s"http://archive.apache.org/dist/tomcat/tomcat-8/v$tomcatVersion/bin/apache-tomcat-$tomcatVersion.tar.gz"

    override def containerName = "tomcat8x"
  }

  // Absolute path of the sample WAR that is deployed into the container.
  val servlet31SampleWarPath = new File("../sample/servlet31/target", "a-play2war-sample-servlet31-1.0-SNAPSHOT.war").getAbsolutePath

  tomcat8CargoContainer.startContainer(servlet31SampleWarPath, stopOnExit = true)
}
| swatikiran123/play2-war-plugin | project-code/integration-tests/src/test/scala/com/github/play2war/plugin/runners/Tomcat8xRunner.scala | Scala | apache-2.0 | 699 |
package co.blocke.scalajack
package yaml
package parameters
//--- Basic Parameterized Case Class
//--- Basic Parameterized Case Class
case class Foo1[A](x: A, b: Int)
case class Bar1(name: String)
//--- Advanced Parameterized Case Class
case class Bar2[X](id: X)
// Value class wrapper (serializes as its underlying String).
case class VC1(s: String) extends AnyVal
case class Foo2[A](x: Bar2[A], b: Int)
case class Foo3[A](x: Bar2[A], b: A)
case class Bar3[X, Y](id: X, isIt: Y)
case class Foo4[A](x: List[Bar3[A, Boolean]], b: A)
//--- Very Advanced Parameterized Case Class
case class Foo5[A, B](x: List[Bar3[A, B]], b: A)
// Type parameters deliberately used out of declaration order to exercise resolution.
case class Foo6[A, B, C, D](x: Bar4[C, D, A], y: B)
case class Bar4[X, Y, Z](id: X, thing1: Z, thing2: Y)
case class Blah[T, U](t: T, u: U)
case class Foo7[A, B, C, D](x: Bar5[C, D, A], y: B)
case class Bar5[X, Y, Z](id: Y, blah: Blah[Z, X])
//--- Basic Parameterized Trait
trait T1[X] { val x: X }
case class TFoo1[A](x: A, b: Int) extends T1[A]
trait T2 { val name: String }
case class TBar1(name: String) extends T2
//--- Advanced Parameterized Trait
trait T3[X] { val thing: X }
// Concrete (fully applied) trait instantiation.
case class TBar2(thing: Boolean) extends T3[Boolean]
case class TBar3[T](thing: T) extends T3[T]
trait T4[X] { val x: TBar3[X] }
case class TFoo2[A](x: TBar3[A], b: A) extends T4[A]
trait T5[X, Y] { val thing1: X; val thing2: Y }
case class TBar4[T](thing1: T, thing2: String) extends T5[T, String]
trait T6[X] { val x: List[T5[X, String]] }
case class TFoo3[A](x: List[T5[A, String]]) extends T6[A]
//--- Very Advanced Parameterized Trait
trait T7[X, Y] { val x: T5[X, Y]; val b: X }
case class TBar5[T, U](thing1: T, thing2: U) extends T5[T, U]
case class TFoo4[A, B](x: T5[A, B], b: A) extends T7[A, B]
trait T8[W, X, Y, Z] { val x: T9[Y, Z, W]; val y: X }
trait T9[T, U, V] { val pi: T; val po: U; val pu: V }
case class TBar6[A, B, C](pi: A, po: B, pu: C) extends T9[A, B, C]
case class TFoo5[A, B, C, D](x: T9[C, D, A], y: B) extends T8[A, B, C, D]
// Foo[A,B,C,D](x:Bar[C,Blah[D,A]], y:B)
trait T10[X, Y] { val x: X; val y: Y }
trait T11[W, Z] { val w: W; val z: Z }
case class TBlah1[A, B](w: A, z: B) extends T11[A, B]
case class TBar7[A, B](thing1: A, thing2: B) extends T5[A, B]
case class TFoo6[A, B, C, D](x: T11[C, T5[D, A]], y: B) extends T10[T11[C, T5[D, A]], B] | gzoller/ScalaJack | core/src/test/scala/co.blocke.scalajack/yaml/parameters/Model.scala | Scala | mit | 2,243 |
package com.vngrs.json
import annotation.implicitNotFound
@implicitNotFound(msg = "Cannot find JsonReaderT or JsonFormat type class for ${T}")
trait JsonReaderT[T] {

  /** Reads a value of `T` from the current position of `in`. */
  def read(in: JsonReader): T

  /** Reads a `T` wrapped in a top-level JSON object: `{ ... }`. */
  private[json] def rootRead(in: JsonReader): T = {
    in.beginObject
    val value = read(in)
    in.endObject
    value
  }
}
@implicitNotFound(msg = "Cannot find JsonWriterT or JsonFormat type class for ${T}")
trait JsonWriterT[T] {

  /** Writes `t` to `out`, returning the writer for chaining. */
  def write(t: T, out: JsonWriter): JsonWriter

  /** Writes `t` as a top-level JSON object: `{ ... }`. */
  private[json] def rootWrite(t: T, out: JsonWriter): JsonWriter = {
    out.beginObject
    val result = write(t, out)
    out.endObject
    result
  }

  /** Writes `t` as the value of field `name` inside an enclosing object. */
  private[json] def namedWrite(name: String, t: T, out: JsonWriter): JsonWriter = {
    out.name(name)
    rootWrite(t, out)
  }
}
/** Combined read/write type class for `T`; satisfies both reader and writer lookups. */
trait JsonFormat[T] extends JsonReaderT[T] with JsonWriterT[T]
| csenol/jsonDroid | json/src/main/scala/com/vngrs/json/JsonFormat.scala | Scala | lgpl-3.0 | 818 |
package binconcifartests
import chisel3._
import chisel3.iotesters.{PeekPokeTester, Driver, ChiselFlatSpec}
import scala.util.Random
import binconcifar.MuxLayer
import scala.collection.mutable.ArrayBuffer
/**
 * Drives random fixed-point vectors into a MuxLayer and checks that the
 * serialized output stream replays each input vector group by group.
 */
class MuxComputeTests( c : MuxLayer ) extends PeekPokeTester( c ) {
  val myRand = new Random
  // Number of simulation cycles (and prepared input vectors).
  val cycs = 500
  // Random fixed-point value in roughly [-16, 16] (4 fractional bits).
  def getRndFP() : BigInt = {
    val x = 2 * myRand.nextDouble() - 1
    BigInt( math.round( x * ( 1 << 4 ) ).toInt )
  }
  val inputs = List.fill( cycs ) { List.fill( c.inSize ){ getRndFP() } }
  // val inputs = List.fill( cycs ) { ( 0 until c.inSize ).map( idx => BigInt( idx ) ).toList }
  poke( c.io.dataOut.ready, true.B )
  // Start at c.noGrps so the very first ready cycle advances to the next input vector.
  var input_cntr = c.noGrps
  var output_cyc_cntr = 0
  var output_grp_cntr = 0
  var input_cyc_cntr = 0
  poke( c.io.dataIn.valid, true.B )
  for ( cyc <- 0 until cycs ) {
    val rdy = peek( c.io.dataIn.ready ) == 1
    // Present the current input vector on every cycle until accepted.
    for ( i <- 0 until c.inSize )
      poke( c.io.dataIn.bits(i), inputs( input_cyc_cntr )( i ) )
    if ( input_cntr >= c.noGrps - 1 && rdy ) {
      input_cntr = 0
      input_cyc_cntr += 1
    } else {
      input_cntr += 1
    }
    step( 1 )
    val vld = peek( c.io.dataOut.valid ) == 1
    if ( vld ) {
      // The mux emits outSize elements per cycle; they must equal the next
      // slice of the input vector currently being replayed.
      for ( i <- 0 until c.outSize ) {
        expect( c.io.dataOut.bits( i ), inputs( output_cyc_cntr )( output_grp_cntr ) )
        output_grp_cntr += 1
      }
      if ( output_grp_cntr >= c.inSize - 1 ) {
        output_grp_cntr = 0
        output_cyc_cntr += 1
      }
    }
  }
}
class MuxLayerSuite extends ChiselFlatSpec {
  behavior of "MuxLayer"
  backends foreach { backend =>
    it should s"correctly compute the mux $backend" in {
      // (inSize, outSize) pairs to exercise; other sizes left disabled upstream.
      val sizeCases = List[(Int, Int)](
        /*( 128, 8 )*/
        ( 256, 4 )
        /*( 1024, 1 )*/
      )
      for ( (inSize, outSize) <- sizeCases ) {
        Driver(() => {
          new MuxLayer( SInt( 16.W ), inSize, outSize )
        }, "verilator", true )( c => new MuxComputeTests( c ) ) should be (true)
      }
    }
  }
}
| da-steve101/binary_connect_cifar | src/test/scala/MuxLayerSuite.scala | Scala | gpl-3.0 | 1,984 |
package com.criteo.slab.core
import com.criteo.slab.utils.Jsonable._
import org.scalatest.{FlatSpec, Matchers}
class LayoutSpec extends FlatSpec with Matchers {

  "Layout" should "be serializable to JSON" in {
    // One 50%-wide column holding a single 25%-high row with one box.
    val box = Box[String]("box1", check1 :: Nil, (vs, _) => vs.head._2.view)
    val layout = Layout(Column(50, Row("A", 25, List(box))))
    val expected =
      """{"columns":[{"percentage":50.0,"rows":[{"title":"A","percentage":25.0,"boxes":[{"title":"box1","labelLimit":64}]}]}]}"""
    layout.toJSON shouldEqual expected
  }
}
| criteo/slab | src/test/scala/com/criteo/slab/core/LayoutSpec.scala | Scala | apache-2.0 | 521 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.physical.couchbase
import slamdata.Predef._
import quasar.{Planner => _, _}
import quasar.contrib.pathy._
import quasar.contrib.scalaz.eitherT._
import quasar.effect.MonotonicSeq
import quasar.fp._
import quasar.fp.ski.ι
import quasar.frontend.logicalplan.LogicalPlan
import quasar.Planner.PlannerError
import quasar.qscript.{Map => _, Read => _, _}
import quasar.sql._
import scala.collection.JavaConverters._
import com.couchbase.client._, core._, java._, java.env._
import eu.timepit.refined.auto._
import matryoshka._, data._
import matryoshka.data.Fix
import matryoshka.implicits._
import org.specs2.execute.Pending
import org.specs2.specification.core.Fragment
import pathy.Path._
import scalaz._, Scalaz._
import scalaz.concurrent.Task
// NB: These tests are incredibly fragile and should be removed once the
// implementation is sufficient to support existing query integration tests.
// TODO: Roll with N1QL AST and RenderN1QL tests instead
/**
 * Pins the N1QL text produced for a handful of SQL² queries and hand-built
 * QScript trees against a stubbed Couchbase backend (no live cluster needed).
 */
class BasicQueryEnablementSpec
  extends Qspec
  with QScriptHelpers
  with CompilerHelpers {
  import common._, planner._
  sequential
  // Couchbase backend whose directory listing is stubbed to two fixed files.
  object CB extends Couchbase {
    override val QueryFileModule = new fs.queryfile with QueryFileModule {
      override def listContents(dir: ADir): Backend[Set[PathSegment]] =
        // NOTE(review): `FileName("beer").right` is an explicit disjunction but
        // `FileName("brewery")` is bare -- presumably an implicit widening makes
        // these the same PathSegment type; confirm the asymmetry is intentional.
        Set[PathSegment](FileName("beer").right, FileName("brewery")).η[Backend]
    }
  }
  val cbEnv = DefaultCouchbaseEnvironment.builder.build
  val docTypeKey = DocTypeKey("type")
  // Client config pointing at a locally constructed (never connected) bucket.
  val cfg =
    Config(
      ClientContext(
        new CouchbaseBucket(
          cbEnv,
          new CouchbaseCore(cbEnv),
          "beer-sample",
          "",
          List[transcoder.Transcoder[_, _]]().asJava),
        docTypeKey,
        ListContentsView(docTypeKey)),
      CouchbaseCluster.create(cbEnv))
  def compileLogicalPlan(query: Fix[Sql]): Fix[LogicalPlan] =
    compile(query).map(optimizer.optimize).fold(e => scala.sys.error(e.shows), ι)
  def interp: CB.Eff ~> Task = fs.interp.unsafePerformSync
  // Full pipeline: SQL² -> logical plan -> backend repr -> explained N1QL string.
  def n1qlFromSql2(sql2: Fix[Sql]): String =
    (CB.lpToRepr(compileLogicalPlan(sql2)) ∘ (_.repr) >>= (CB.QueryFileModule.explain))
      .run.value.run(cfg)
      .foldMap(interp)
      .flatMap(_.fold(e => Task.fail(new RuntimeException(e.shows)), Task.now))
      .unsafePerformSync
  // Plans a raw QScript tree directly to compact N1QL text.
  def n1qlFromQS(qs: Fix[QST]): String =
    qs.cataM(Planner[Fix, EitherT[Kleisli[Free[MonotonicSeq, ?], Context, ?], PlannerError, ?], QST].plan)
      .flatMapF(RenderQuery.compact(_).η[Kleisli[Free[MonotonicSeq, ?], Context, ?]])
      .run(Context(BucketName(cfg.ctx.bucket.name), cfg.ctx.docTypeKey))
      .foldMap(MonotonicSeq.from(0L).unsafePerformSync)
      .unsafePerformSync
      .valueOr(e => scala.sys.error(e.shows))
  def testSql2ToN1ql(sql2: Fix[Sql], n1ql: String): Fragment =
    pprint(sql2) in (n1qlFromSql2(sql2) must_= n1ql)
  def testSql2ToN1qlPending(sql2: String, p: Pending): Fragment =
    sql2 in p
  "SQL² to N1QL" should {
    testSql2ToN1ql(
      sqlE"select * from `beer`",
      """select v from (select value `_1` from (select value ifmissing(`_0`.['value'], `_0`) from `beer-sample` as `_0` where (`type` = 'beer')) as `_1`) v""")
    testSql2ToN1ql(
      sqlE"select name from `beer`",
      """select v from (select value `_1`.['name'] from (select value ifmissing(`_0`.['value'], `_0`) from `beer-sample` as `_0` where (`type` = 'beer')) as `_1`) v""")
    testSql2ToN1ql(
      sqlE"select name, type from `beer`",
      """select v from (select value {'name': `_1`.['name'], 'type': `_1`.['type']} from (select value ifmissing(`_0`.['value'], `_0`) from `beer-sample` as `_0` where (`type` = 'beer')) as `_1`) v""")
    testSql2ToN1ql(
      sqlE"select name from `beer` offset 1",
      """select v from (select value `_7`.['name'] from (select value `_4` from (select (select value ifmissing(`_5`.['value'], `_5`) from `beer-sample` as `_5` where (`type` = 'beer')) as `_1`, (select value 1 from (select value (select value [])) as `_6`) as `_2` from (select value []) as `_0`) as `_3` unnest `_1`[`_2`[0]:] as `_4`) as `_7`) v""")
    testSql2ToN1ql(
      sqlE"select count(*) from `beer`",
      """select v from (select value `_2` from (select count(`_1`) as `_2` from (select value ifmissing(`_0`.['value'], `_0`) from `beer-sample` as `_0` where (`type` = 'beer')) as `_1` group by null) as `_3` where (`_2` is not null)) v""")
    testSql2ToN1ql(
      sqlE"select count(name) from `beer`",
      """select v from (select value `_2` from (select count(`_1`.['name']) as `_2` from (select value ifmissing(`_0`.['value'], `_0`) from `beer-sample` as `_0` where (`type` = 'beer')) as `_1` group by null) as `_3` where (`_2` is not null)) v""")
    testSql2ToN1ql(
      sqlE"select geo.lat + geo.lon from `brewery`",
      """select v from (select value (`_1`.['geo'].['lat'] + `_1`.['geo'].['lon']) from (select value ifmissing(`_0`.['value'], `_0`) from `beer-sample` as `_0` where (`type` = 'brewery')) as `_1`) v""")
  }
  "QScript to N1QL" should {
    "read followed by a map" in {
      import qstdsl._
      // select (a + b) from foo
      val qs =
        fix.Map(
          fix.ShiftedRead[AFile](rootDir </> file("foo"), ExcludeId),
          func.Add(
            func.ProjectKeyS(func.Hole, "a"),
            func.ProjectKeyS(func.Hole, "b")))
      val n1ql = n1qlFromQS(qs)
      n1ql must_= """select v from (select value (`_1`.['a'] + `_1`.['b']) from (select value ifmissing(`_0`.['value'], `_0`) from `beer-sample` as `_0` where (`type` = 'foo')) as `_1`) v"""
    }
  }
}
| jedesah/Quasar | couchbase/src/test/scala/quasar/physical/couchbase/BasicQueryEnablementSpec.scala | Scala | apache-2.0 | 6,158 |
/*
* Copyright 2011 Andlabs, GbR.
*
* This is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* OsmRouting is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Foobar. If not, see <http://www.gnu.org/licenses/>.
*/
package de.andlabs.routing
import org.scalatra._
import java.net.URL
import scalate.ScalateSupport
import de.andlabs.routing.Graph.Node
/**
 * Scalatra filter exposing the routing demo: /route.kml computes a shortest
 * path between two node ids and renders it as KML; unmatched paths fall back
 * to Scaml templates.
 */
class WebApi extends ScalatraFilter with ScalateSupport {
  Graph.load // when starting server
  // Computes a Dijkstra shortest path between node ids `from` and `to`.
  // NOTE(review): the missing-parameter guard is commented out, so absent or
  // non-numeric `from`/`to` values will throw -- confirm whether that is intended.
  get("/route.kml") {
    //if (!params.contains("from") || !params.contains("to"))
    //  <h1>please specify from and to parameters</h1>
    //else {
      val start = System.currentTimeMillis()
      val path = new Dijkstra(params("from").toInt, params("to").toInt).getPath
      // Log elapsed time and path length for rough performance tracking.
      println((System.currentTimeMillis()-start)+"ms ("+path.size+" nodes)\n")
      contentType = "application/vnd.google-earth.kml+xml"
      kml.build(path)
    //}
  }
  // Simple liveness/demo endpoint rendering a Java-interop greeting.
  get("/hello") {
    contentType = "text/html"
    <h2>{MyJavaClass.sayHello}</h2>
  }
  notFound {
    // If no route matches, then try to render a Scaml template
    val templateBase = requestPath match {
      case s if s.endsWith("/") => s + "index"
      case s => s
    }
    val templatePath = "/WEB-INF/scalate/templates/" + templateBase + ".scaml"
    servletContext.getResource(templatePath) match {
      case url: URL =>
        contentType = "text/html"
        templateEngine.layout(templatePath)
      case _ =>
        // No template either: hand the request to the next filter in the chain.
        filterChain.doFilter(request, response)
    }
  }
}
| orangeman/osm_routing | src/main/scala/WebAPI.scala | Scala | gpl-3.0 | 1,993 |
/***
* Copyright 2014 Rackspace US, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rackspace.com.papi.components.checker.step.startend
import javax.servlet.FilterChain
import javax.xml.namespace.QName
import javax.xml.validation.Schema
import com.rackspace.com.papi.components.checker.servlet._
import com.rackspace.com.papi.components.checker.step.XSDStringValidator
import com.rackspace.com.papi.components.checker.step.base.StepContext
import com.rackspace.com.papi.components.checker.step.results._
import scala.util.matching.Regex
//
// Like URLFailMatch, but fails only if the current uri path is not
// matched against any of a number of simple XSD types
//
/**
 * Like URLFailMatch, but fails only if the current URI segment neither matches
 * the URI regex nor validates against any of the given simple XSD types.
 */
class URLFailXSDMatch(id : String, label : String, uri : Regex, types : Array[QName], schema : Schema, priority : Long) extends URLFailMatch(id, label, uri, priority) {
  //
  // One string validator per simple XSD type; the URI segment is accepted if
  // any of them validates it.
  //
  val validators : Array[XSDStringValidator] = types.map (t => new XSDStringValidator(t, schema, id))

  /**
   * Returns None (no failure) when the parent step accepts the segment OR any
   * XSD validator does; otherwise returns a URLFailResult whose message is the
   * parent failure message with every validator error appended.
   */
  override def check(req : CheckerServletRequest,
                     resp : CheckerServletResponse,
                     chain : FilterChain,
                     context : StepContext) : Option[Result] = {
    super.check(req, resp, chain, context) match {
      case None => None
      case Some(parentFail) =>
        val in = req.URISegment(context.uriLevel)
        // Walk the validators in order, accumulating error messages and
        // stopping at the first one that accepts the segment. (The previous
        // implementation used a non-local `return` from inside a for-yield
        // closure, which is implemented via a control-flow exception.)
        @scala.annotation.tailrec
        def collectErrors(idx: Int, acc: List[String]): Option[List[String]] =
          if (idx >= validators.length) Some(acc.reverse)
          else validators(idx).validate(in) match {
            case None => None // a type matched: segment is valid, no failure
            case Some(err) => collectErrors(idx + 1, err.getMessage :: acc)
          }
        collectErrors(0, Nil).map { msgs =>
          val message = msgs.foldLeft(parentFail.message)(_ + " and "+_)
          new URLFailResult (message, context, id, priority)
        }
    }
  }
}
| wdschei/api-checker | core/src/main/scala/com/rackspace/com/papi/components/checker/step/startend/URLFailXSDMatch.scala | Scala | apache-2.0 | 2,218 |
package io.youi.material
import io.youi.component.Component
import io.youi.component.support.{ContentSupport, InternalContainerSupport}
import io.youi.component.types.{Display, Prop}
import io.youi.dom
import io.youi.dom._
import io.youi.event.EventSupport
import io.youi.material.impl.{MDCRipple, MDCRippleImplementation}
import org.scalajs.dom.html
import reactify.Var
/**
 * Material Design button component: wraps an HTML <button>, wires the MDC
 * ripple behavior, and exposes reactive props for label, variant flags,
 * disabled state and optional leading/trailing icons.
 */
class MDCButton extends Component(dom.create.button) with InternalContainerSupport[Component] {
  classes := List("mdc-button")
  protected def button: html.Button = element.asInstanceOf[html.Button]
  // Button caption; re-measures the component whenever it changes.
  val label: Var[String] = Var("")
  // MDC variant toggles, each backed by the presence of a CSS class.
  val raised: Prop[Boolean] = classes.toggle("mdc-button--raised")
  val unelevated: Prop[Boolean] = classes.toggle("mdc-button--unelevated")
  val outlined: Prop[Boolean] = classes.toggle("mdc-button--outlined")
  val disabled: Prop[Boolean] = new Prop(button.disabled, button.disabled_=)
  val leading: ButtonIcon = new ButtonIcon
  val trailing: ButtonIcon = new ButtonIcon
  label.on(measure.trigger())
  // Convenience constructor setting label and both icons in one call.
  def this(label: String, leading: MaterialIcon = Material.Icons.Empty, trailing: MaterialIcon = Material.Icons.Empty) = {
    this()
    this.label @= label
    this.leading.value @= leading
    this.trailing.value @= trailing
  }
  // Internal DOM nodes required by the MDC button markup structure.
  private object elements {
    val ripple: html.Div = {
      val div = dom.create.div
      div.addClasses("mdc-button__ripple")
      div
    }
    val label: html.Span = {
      val span = dom.create.span
      span.addClasses("mdc-button__label")
      // Keep the span's HTML in sync with the reactive label.
      MDCButton.this.label.attach(span.innerHTML_=)
      span
    }
  }
  // MDC markup order: ripple, leading icon, label, trailing icon.
  children += elements.ripple
  children += leading
  children += elements.label
  children += trailing
  private val adapter: MDCRippleImplementation = MaterialComponents.verified(MDCRipple.attachTo(element))
  /** Icon slot that hides itself while its value is the empty icon. */
  class ButtonIcon extends Component(dom.create.i) with EventSupport with ContentSupport {
    val value: Var[MaterialIcon] = Var(Material.Icons.Empty)
    classes ++= List("material-icons", "mdc-button__icon")
    color := MDCButton.this.color
    display := (if (value().isEmpty) Display.None else Display.InlineBlock)
    content := value().name
  }
} | outr/youi | gui/src/main/scala/io/youi/material/MDCButton.scala | Scala | mit | 2,146 |
package edu.gemini.pit.ui.action
import edu.gemini.pit.model.Model
import edu.gemini.ui.workspace.scala.RichShell
import edu.gemini.pit.ui.binding._
import edu.gemini.model.p1.immutable._
import edu.gemini.pit.ui.editor.TargetExporter
import swing.{UIElement, Action}
class TargetExportAction(shell: RichShell[Model]) extends Action("Export Targets...") with Bound[Model, List[Target]] {

  enabled = false

  // Focus on the proposal's target list.
  def lens = Model.proposal andThen Proposal.targets

  /** Enable the action only when at least one target can actually be exported. */
  override def refresh(m: Option[List[Target]]) {
    enabled = m.exists(_.exists(TargetExporter.isExportable))
  }

  /** Open the export dialog for the bound targets (no-op when unbound). */
  def apply() {
    model.foreach(ts => TargetExporter.open(UIElement.wrap(shell.peer), ts))
  }
}
| arturog8m/ocs | bundle/edu.gemini.pit/src/main/scala/edu/gemini/pit/ui/action/TargetExportAction.scala | Scala | bsd-3-clause | 766 |
/*
* Copyright (c) 2014. Webtrends (http://www.webtrends.com)
* @author cuthbertm on 11/20/14 12:23 PM
*/
package com.webtrends.harness.component.spotifyapi
import akka.actor._
import akka.pattern._
import com.webtrends.harness.app.HActor
import com.webtrends.harness.component.spotifyapi.SpotifyAPIActor.Messages._
import com.webtrends.harness.component.spotifyapi.SpotifyType.SpotifyType
import com.webtrends.harness.component.spotifyapi.data.{Track, Artist, Album}
import com.webtrends.harness.health.HealthComponent
import dispatch._, Defaults._
import net.liftweb.json._
import net.liftweb.json.Extraction._
import scala.concurrent.{Promise, Future}
import scala.util.{Success, Failure}
object SpotifyAPIActor {
  // Factory for the actor's Props.
  def props = Props(classOf[SpotifyAPIActor])
  // Message protocol accepted by SpotifyAPIActor.
  object Messages {
    case class GetAlbums(ids:List[String])
    case class GetAlbumTracks(id:String)
    case class GetArtists(ids:List[String])
    case class GetArtistAlbums(id:String)
    case class GetArtistTopTracks(id:String)
    case class GetArtistRelatedArtists(id:String)
    case class GetTracks(id:List[String])
    case class Search(`type`:SpotifyType, searchString:String, limit:Int, offset:Int)
  }
  // Base host for all API requests.
  val hostname = host("spotify.com")
  // URL path segments for the three supported resource types.
  val PARAM_ALBUMS = "albums"
  val PARAM_ARTISTS = "artists"
  val PARAM_TRACKS = "tracks"
}
/**
 * Actor wrapping a few read-only Spotify Web API endpoints; replies are piped
 * back to the sender as Futures of deserialized data objects.
 */
class SpotifyAPIActor(apiVersion:String="v1") extends HActor {
  // Base request: https://spotify.com/<apiVersion>/...
  val apiPrefix = SpotifyAPIActor.hostname / apiVersion
  // NOTE(review): several cases have empty bodies, so those messages are
  // silently consumed without a reply -- presumably unimplemented stubs; confirm.
  override def receive = super.receive orElse {
    case GetAlbums(ids) => pipe(getAlbums(ids)) to sender
    case GetAlbumTracks(id) =>
    case GetArtists(ids) => pipe(getArtists(ids)) to sender
    case GetArtistAlbums(id) =>
    case GetArtistTopTracks(id) =>
    case GetArtistRelatedArtists(id) =>
    case GetTracks(ids:List[String]) => pipe(getTracks(ids)) to sender
    case Search(t, searchString, limit, offset) =>
  }
  // Executes `req`, completing the returned Future with the parsed body or the
  // HTTP/transport failure.
  private[spotifyapi] def service[T](req:Req, key:Option[String]=None) : Future[List[T]] = {
    val p = Promise[List[T]]()
    Http(req OK as.String).either onComplete {
      case Success(s) => s match {
        case Right(resp) => p success objectExtraction[T](resp, key)
        case Left(t) => p failure t
      }
      case Failure(f) => p failure f
    }
    p.future
  }
  // Parses the JSON body; when `key` is given, extracts the list under that
  // field, otherwise treats the whole document as a single T.
  private[spotifyapi] def objectExtraction[T](resp:String, key:Option[String]=None) : List[T] = {
    val json = parse(resp)
    key match {
      case Some(k) => extract[List[T]](json \\ k)
      case None => List(extract[T](json))
    }
  }
  def getAlbums(ids:List[String]) : Future[List[Album]] = _get[Album](ids, SpotifyAPIActor.PARAM_ALBUMS)
  def getArtists(ids:List[String]) : Future[List[Artist]] = _get[Artist](ids, SpotifyAPIActor.PARAM_ARTISTS)
  def getTracks(ids:List[String]) : Future[List[Track]] = _get[Track](ids, SpotifyAPIActor.PARAM_TRACKS)
  // Shared GET helper: one id uses the /<key>/<id> form, several ids use the
  // ?ids=a,b,c form (whose response nests the list under `key`).
  private[spotifyapi] def _get[T](ids:List[String], key:String) : Future[List[T]] = {
    if (ids == null || ids.length == 0) {
      Future.failed(new IllegalArgumentException("Minimum one id required"))
    } else {
      val albumsReq = apiPrefix / key
      val reqVals = ids.length match {
        case 1 => (albumsReq / ids.head, None)
        case _ => (albumsReq <<? Map("ids" -> ids.mkString(",")), Some(key))
      }
      service[T](reqVals._1, reqVals._2)
    }
  }
  // This should probably be overriden to get some custom information about the health of this actor
  override protected def getHealth: Future[HealthComponent] = super.getHealth
} | Crashfreak/SpotifyAPI | src/main/scala/com/webtrends/harness/component/spotifyapi/SpotifyAPIActor.scala | Scala | apache-2.0 | 3,476 |
package org.jetbrains.plugins.scala.statistics
import java.util.concurrent.ConcurrentHashMap
import com.intellij.openapi.diagnostic.Logger
import com.intellij.util.containers.ContainerUtil
import org.apache.log4j.Level
import org.github.jamm.MemoryMeter
import scala.collection.mutable
import scala.ref.WeakReference
/**
* Author: Svyatoslav Ilinskiy
* Date: 10/9/15.
*/
/**
 * Gathers hit/miss counts, calculation timings and (optionally) memory usage
 * for one named cache. Instances are created only via the companion's apply.
 */
class CacheStatistics private(id: String, name: String) {
  // NOTE(review): `@volatile` makes reads/writes visible across threads, but
  // `+= 1` below is still a non-atomic read-modify-write, so concurrent
  // callers can lose increments -- these counters are approximate.
  @volatile
  var cachedAreaEntrances: Long = 0
  @volatile
  var cachesRecalculated: Long = 0
  // Weak references so tracked cache values can still be garbage collected.
  val objectsToKeepTrackOf = ContainerUtil.newConcurrentSet[WeakReference[AnyRef]]
  val calculationTimes = ContainerUtil.newConcurrentSet[Long]()
  val memoryMeter = new MemoryMeter()
  //we could ask time of entrance to measure time locality
  //also, we could find out whether multiple threads are calculating this cache at the same time
  def aboutToEnterCachedArea(): Unit = {
    cachedAreaEntrances += 1
  }
  def recalculatingCache(): Unit = {
    cachesRecalculated += 1
  }
  def reportTimeToCalculate(time: Long): Unit = {
    calculationTimes.add(time)
  }
  // Entrances that did not trigger recalculation count as hits.
  def hits: Long = cachedAreaEntrances - cachesRecalculated
  def misses: Long = cachesRecalculated
  def addCacheObject(obj: Any): Unit = obj match {
    case ref: AnyRef => objectsToKeepTrackOf.add(new WeakReference[AnyRef](ref))
    case _ => //it's a primitive, its size is so tiny, so let's ignore it for now
  }
  // Removes the weak reference wrapping `obj`; expired references found along
  // the way are pruned as a side effect. Returns true if `obj` was tracked.
  def removeCacheObject(obj: Any): Boolean = {
    import scala.collection.JavaConversions._
    var res = false
    objectsToKeepTrackOf.foreach {
      case WeakReference(el) if el.equals(obj) => res = objectsToKeepTrackOf.remove(el)
      case WeakReference(el) =>
      case t => objectsToKeepTrackOf.remove(t) //weak refernce has expired
    }
    res
  }
  // Dereferences the still-live tracked objects.
  def objectsToKeepTrackOfNormalReferences: mutable.Set[Any] = {
    import scala.collection.JavaConversions._
    objectsToKeepTrackOf.collect {
      case WeakReference(ref) => ref
    }
  }
  //this method may take a while time to run
  def spaceTakenByCache: Long = {
    -1 //turned off counting space taken by cache, it causes errors to happen and doesn't work overall
    /*try {
      objectsToKeepTrackOfNormalReferences.map(memoryMeter.measureDeep).sum
    } catch {
      case e@(_: AssertionError | _: IllegalStateException) =>
        println(e.getMessage) //message is probably: Instrumentation is not set; Jamm must be set as -javaagent
        print("Not counting size of cache")
        -1
    }*/
  }
  override def toString: String = {
    import scala.collection.JavaConversions._
    // NOTE(review): converting to a Set dedupes identical durations, which
    // skews min/max/average when several calculations took the same time --
    // confirm whether a Seq snapshot was intended.
    val calcTimes: Set[Long] = calculationTimes.toSet //efficient because not conccurent
    if (calculationTimes.nonEmpty) {
      val (maxTime, minTime, averageTime) = (calcTimes.max, calcTimes.min, calcTimes.sum.toDouble / calcTimes.size)
      val timeSaved = hits * averageTime
      s"""
        |****************************
        |$name
        |hits: $hits, misses: $misses
        |*approximate* spaceTaken: $spaceTakenByCache
        |maxTime: $maxTime, minTime: $minTime, averageTime: $averageTime
        |time saved (hits * averageTime): $timeSaved
        |****************************
      """.stripMargin
    } else {
      s"""
        |**************************
        |$name not used
        |**************************
      """.stripMargin
    }
  }
}
object CacheStatistics {
  import scala.collection.JavaConverters._

  // All registered per-cache statistics, keyed by cache id.
  private val caches = new ConcurrentHashMap[String, CacheStatistics]()

  /** Logs every registered cache's statistics at INFO level. */
  def printStats(): Unit = {
    val logger = Logger.getInstance(this.getClass)
    logger.setLevel(Level.INFO)
    caches.values().asScala.foreach (c => logger.info(c.toString))
  }

  /**
   * Returns the statistics holder registered under `id`, creating and
   * registering one on first use. The previous hand-rolled double-checked
   * locking is replaced by `ConcurrentHashMap.putIfAbsent`, which guarantees a
   * single published instance per id without an explicit lock (under a race a
   * loser's freshly built instance is discarded; construction is cheap and
   * side-effect free).
   */
  def apply(id: String, name: String): CacheStatistics = {
    val existing = caches.get(id)
    if (existing != null) existing
    else {
      val fresh = new CacheStatistics(id, name)
      val raced = caches.putIfAbsent(id, fresh)
      if (raced != null) raced else fresh
    }
  }
}
| LPTK/intellij-scala | src/org/jetbrains/plugins/scala/statistics/CacheStatistics.scala | Scala | apache-2.0 | 4,017 |
package actorAPI
import java.awt.image.BufferedImage
import java.io._
import java.util.Calendar
import javax.imageio.ImageIO
import akka.actor.Actor
import akka.actor.Props
import akka.dispatch._
import akka.event.Logging
// Base actor requiring a bounded mailbox, with a ready-made logger.
abstract class genericActor extends Actor
with RequiresMessageQueue[BoundedMessageQueueSemantics] {
  import context._
  val log = Logging(context.system, this)
  // NOTE(review): these Props reference this *abstract* class, so creating an
  // actor from them would fail at runtime; presumably a concrete subclass's
  // class object was intended -- confirm before use.
  def actorProperties(name: String): Props = Props(classOf[genericActor], name)
}
// Mixin providing a timestamp-based PNG filename and the write-request message.
trait ImageTraits {
  val calendar = Calendar.getInstance()
  // NOTE(review): evaluated once at instantiation, so every image written via
  // the same instance reuses one filename -- confirm that is intended.
  val filename = (calendar.getTimeInMillis + ".png").toString
  // Request to persist the wrapped image.
  case class WriteableImage(image: BufferedImage)
}
/**
 * Writes incoming images to a timestamp-named PNG file and replies with
 * ImageIO.write's Boolean result (false when no suitable writer was found).
 */
object AsyncImageWriter extends genericActor with ImageTraits {
  def receive: Receive = {
    // Deconstruct the WriteableImage instance. The previous pattern
    // `case WriteableImage =>` matched only the *companion object*, so real
    // messages were never handled, and its body sent the sender a partially
    // applied function instead of actually writing the image.
    case WriteableImage(image) =>
      sender ! ImageIO.write(image, "png", new File(filename))
  }
}
| pensivearchitect/sweeper-redux | src/main/scala/actorAPI.scala | Scala | bsd-3-clause | 847 |
package gettingstarted
import fpinscala.gettingstarted.PolymorphicFunctions.isSorted
/** Exercises isSorted with ascending/descending comparators over Ints and Chars. */
class IsSortedTest extends org.specs2.mutable.Specification{
  "A SortedArray" should {
    "be fully sorted" in {
      // (a, b) => a > b treats descending order as sorted; a < b treats ascending as sorted.
      isSorted(Array(2, 3, 6, 7, 8, 9), (a: Int, b: Int) => a > b) must beFalse
      isSorted(Array(9, 8, 2, 6, 3, 7), (a: Int, b: Int) => a > b) must beFalse
      isSorted(Array(2, 9, 6, 3, 7, 8), (a: Int, b: Int) => a > b) must beFalse
      isSorted(Array(9, 8, 7, 6, 3, 2), (a: Int, b: Int) => a > b) must beTrue
      isSorted(Array(2, 3, 6, 7, 8, 9), (a: Int, b: Int) => a < b) must beTrue
      isSorted(Array(9, 8, 2, 6, 3, 7), (a: Int, b: Int) => a < b) must beFalse
      isSorted(Array(2, 9, 6, 3, 7, 8), (a: Int, b: Int) => a < b) must beFalse
      isSorted(Array(9, 8, 7, 6, 3, 2), (a: Int, b: Int) => a < b) must beFalse
      // Same checks over characters.
      isSorted(Array('a', 'b', 'd', 'e', 'h', 'z'), (a: Char, b: Char) => a > b) must beFalse
      isSorted(Array('z', 'a', 'b', 'd', 'e', 'h'), (a: Char, b: Char) => a > b) must beFalse
      isSorted(Array('a', 'd','b', 'e', 'h'), (a: Char, b: Char) => a > b) must beFalse
      isSorted(Array('z', 'x','w', 'e', 'c', 'a'), (a: Char, b: Char) => a > b) must beTrue
    }
  }
}
| coughlac/fpinscala | exercises/src/test/scala/gettingstarted/IsSortedTest.scala | Scala | mit | 1,222 |
// Compiler regression test (scala/bug t3530): fixed-arity vs. varargs List
// patterns. Its printed output is compared against a checkfile, so the code
// (and which cases remain commented out) must stay exactly as written.
object Test {
  def f(x: Any) = println(x match {
    case List(_, _) => "two"
    case List(_, _, _) => "three"
    case xs @ List(_*) => "list: " + xs.length
    case _ => "not a list"
  })
  def f2[T](x: List[T]) = println(x match {
    case List(_, _) => "two"
    case List(_, _, _) => "three"
    case List(xs @ _*) => "list: " + xs.length
    // bug: the default case is marked unreachable
    // case _ => "not a list"
  })
  def main(args: Array[String]): Unit = {
    f(List(1, 2))
    f(List('a', 'b', 'c'))
    f(List('a', 'b', 'c', 'd'))
    f(Nil)
    f(List(1,2,3,4,5))
    f(null)
    println
    f2(List(1, 2))
    f2(List('a', 'b', 'c'))
    f2(List('a', 'b', 'c', 'd'))
    f2(Nil)
    f2(List(1,2,3,4,5))
    // bug: this NPEs on xs.length
    // f2(null)
  }
}
| martijnhoekstra/scala | test/files/run/t3530.scala | Scala | apache-2.0 | 828 |
package strd.net.http
import io.netty.buffer.{ByteBuf, Unpooled}
import io.netty.util.CharsetUtil
/**
* @author Kirill chEbba Chebunin
*/
object Content {

  /** Wraps the UTF-8 bytes of `str` in an unpooled Netty buffer. */
  def apply( str : String ): ByteBuf =
    apply(str.getBytes(CharsetUtil.UTF_8))

  /** Wraps `bytes` in an unpooled Netty buffer (no copy is made). */
  def apply( bytes : Array[Byte] ): ByteBuf =
    Unpooled.wrappedBuffer(bytes)
}
| lembrd/strd-net-http | src/main/scala/strd/net/http/Content.scala | Scala | gpl-3.0 | 357 |
package client.appstate.groups.members
import autowire._
import client.MessageFeedback
import client.appstate.{GroupMemberFeedbackReporting, GroupMembers}
import client.services.AjaxClient
import diode.data._
import diode.{Effect, _}
import shared._
import shared.requests.groups.members._
import boopickle.Default._
import shared.responses.groups.members.MemberAssociatedToGroupInfo
import shared.utils.constants._
import scala.scalajs.concurrent.JSExecutionContext.Implicits.queue
import scala.concurrent.Future
// Actions
// Group members
// Clears the member list and resets totals/pagination.
case object ResetGroupMembers extends Action
// Requests one page of members for a group.
case class FetchGroupMembers(request: MemberGroupRequest) extends Action
// Carries the fetch result (total count + member page, or a Foulkon error).
case class SetGroupMembers(groupMembers: Either[FoulkonError, (TotalGroupMembers, List[MemberAssociatedToGroupInfo])]) extends Action
case class UpdateTotalGroupMembersAndPages(totalGroupMembers: TotalGroupMembers) extends Action
case class UpdateSelectedPage(selectedPage: SelectedPage) extends Action
// Associate / disassociate a user with a group.
case class AddGroupMember(organizationId: String, groupName: String, userId: String) extends Action
case class RemoveGroupMember(organizationId: String, groupName: String, userId: String) extends Action
// Publishes the outcome of an add/remove as user-visible feedback.
case class UpdateGroupMemberFeedbackReporting(organizationId: String, name: String, feedback: Either[FoulkonError, MessageFeedback])
    extends Action
case object RemoveGroupMemberFeedbackReporting extends Action
// Handlers
/**
 * Diode handler owning the (Pot-wrapped) group-member list: fetching pages,
 * storing results, and issuing add/remove calls whose outcome is reported via
 * UpdateGroupMemberFeedbackReporting.
 */
class GroupMemberHandler[M](modelRW: ModelRW[M, Pot[GroupMembers]]) extends ActionHandler(modelRW) {
  override protected def handle: PartialFunction[Any, ActionResult[M]] = {
    case ResetGroupMembers =>
      // Empty the list, then zero the totals and the selected page.
      updated(Empty,
        Effect(Future(UpdateTotalGroupMembersAndPages(0)))
          >> Effect(Future(UpdateSelectedPage(0)))
      )
    case FetchGroupMembers(request) =>
      effectOnly(
        Effect(
          AjaxClient[Api]
            .readMemberGroup(request)
            .call
            .map(SetGroupMembers)
        )
      )
    case SetGroupMembers(groupMembers) =>
      groupMembers match {
        case rightResult @ Right((total, _)) =>
          // Store the member page and propagate the new total for pagination.
          updated(
            Ready(GroupMembers(rightResult.map(_._2))),
            Effect(Future(UpdateTotalGroupMembersAndPages(total)))
          )
        case leftResult @ Left(_) =>
          // On error: keep the error in the model and reset pagination.
          updated(
            Ready(GroupMembers(leftResult.map(_._2))),
            Effect(Future(UpdateTotalGroupMembersAndPages(0))) >> Effect (Future(UpdateSelectedPage(0)))
          )
      }
    case AddGroupMember(organizationId, name, userId) =>
      val request = AddMemberGroupRequest(
        AddMemberGroupRequestPathParams(organizationId, name, userId)
      )
      effectOnly(
        Effect(
          AjaxClient[Api]
            .addMemberGroup(request)
            .call
            .map {
              case Left(foulkonError) => UpdateGroupMemberFeedbackReporting(organizationId, name, Left(foulkonError))
              case Right(_)           => UpdateGroupMemberFeedbackReporting(organizationId, name, Right(s"member $userId associated successfully!"))
            }
        )
      )
    case RemoveGroupMember(organizationId, name, userId) =>
      val request = RemoveMemberGroupRequest(
        RemoveMemberGroupRequestPathParams(organizationId, name, userId)
      )
      effectOnly(
        Effect(
          AjaxClient[Api]
            .removeMemberGroup(request)
            .call
            .map {
              case Left(foulkonError) => UpdateGroupMemberFeedbackReporting(organizationId, name, Left(foulkonError))
              case Right(_)           => UpdateGroupMemberFeedbackReporting(organizationId, name, Right(s"member $userId disassociated successfully!"))
            }
        )
      )
  }
}
/**
 * Handler for the optional group-member feedback message. Storing new feedback
 * also triggers a reset and a reload of the first page of members for the
 * affected group; removing the feedback simply clears the slice.
 */
class GroupMemberFeedbackHandler[M](modelRW: ModelRW[M, Option[GroupMemberFeedbackReporting]]) extends ActionHandler(modelRW) {
  override protected def handle: PartialFunction[Any, ActionResult[M]] = {
    case UpdateGroupMemberFeedbackReporting(org, name, feedback) =>
      // Effects chained with >> run sequentially: first clear the current list,
      // then fetch the first page (offset 0) of members again.
      val resetMembers = Effect(Future(ResetGroupMembers))
      val reloadMembers = Effect(
        Future(FetchGroupMembers(MemberGroupRequest(MemberGroupRequestPathParams(org, name), offset = 0)))
      )
      updated(Some(GroupMemberFeedbackReporting(feedback)), resetMembers >> reloadMembers)
    case RemoveGroupMemberFeedbackReporting =>
      // Dismiss whatever feedback is currently shown.
      updated(None)
  }
}
/**
 * Keeps the (total members, total pages, selected page) triple in sync with the
 * member list. Page count is derived from the total and the global PageSize.
 */
class GroupMembersPagesAndTotalHandler[M](modelRW: ModelRW[M, (TotalGroupMembers, TotalPages, SelectedPage)]) extends ActionHandler(modelRW) {
  override protected def handle: PartialFunction[Any, ActionResult[M]] = {
    case UpdateTotalGroupMembersAndPages(totalGroupMembers) =>
      // Pages needed to display every member at PageSize entries per page.
      val totalPages = (totalGroupMembers.toFloat / PageSize.toFloat).ceil.toInt
      val (_, _, currentPage) = modelRW()
      updated((totalGroupMembers, totalPages, currentPage))
    case UpdateSelectedPage(selectedPage) =>
      // Only the third component of the triple changes.
      val (total, pages, _) = modelRW()
      updated((total, pages, selectedPage))
  }
}
| beikern/foulkon-ui | client/src/main/scala/client/appstate/groups/members/DiodeGroupMember.scala | Scala | apache-2.0 | 4,955 |
package akka.io
import akka.testkit.{ImplicitSender, TestKit}
import akka.actor.{Props, ActorSystem}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import java.nio.file.Paths
import akka.io.FileWriter.{WriteLine, Write}
import akka.util.ByteString
import akka.io.File.{Closed, Close, Written}
/**
 * Integration-style spec for the FileWriter actor: writes, line writes, and
 * appending are exercised against a real file under /tmp.
 *
 * NOTE(review): all three tests share the hard-coded path /tmp/test-file.txt,
 * so they must not run concurrently — confirm the test runner executes them
 * sequentially.
 */
class FileWriterSpec extends TestKit(ActorSystem("system")) with WordSpecLike with Matchers with ImplicitSender with BeforeAndAfterAll {
  // Tear down the shared actor system after the last test.
  override def afterAll() = system.shutdown()
  "A FileWriter" should {
    "write to a file in correct order" in {
      try {
        val writer = system.actorOf(Props(classOf[FileWriter], Paths.get("/tmp", "test-file.txt")))
        watch(writer)
        writer ! Write(ByteString("test"))
        writer ! Write(ByteString("foo"))
        writer ! Write(ByteString("bar"))
        // One Written(byteCount) ack per Write message.
        expectMsgAllOf(Written(4), Written(3), Written(3))
        writer ! Close
        expectMsg(Closed)
        // The writer terminates itself after Close is acknowledged.
        expectTerminated(writer)
        io.Source.fromFile("/tmp/test-file.txt").mkString should be("testfoobar")
      } finally new java.io.File("/tmp/test-file.txt").delete()
    }
    "correctly add newlines on WriteLine" in {
      try {
        val writer = system.actorOf(Props(classOf[FileWriter], Paths.get("/tmp", "test-file.txt")))
        watch(writer)
        writer ! WriteLine(ByteString("test"))
        writer ! WriteLine(ByteString("foo"))
        writer ! Write(ByteString("bar"))
        // Ack sizes vary with the platform line separator, so just drain them.
        receiveN(3)
        writer ! Close
        expectMsg(Closed)
        expectTerminated(writer)
        io.Source.fromFile("/tmp/test-file.txt").mkString should be(Seq("test", "foo", "bar").mkString(System.lineSeparator()))
      } finally new java.io.File("/tmp/test-file.txt").delete()
    }
    "be able append to a file" in {
      try {
        // printToFile is not defined in this file — presumably a helper from a
        // package object; verify it exists before refactoring. It pre-seeds the file.
        printToFile(new java.io.File("/tmp/test-file.txt")) { p =>
          p.print("foobar")
        }
        // Third constructor argument `true` selects append mode.
        val writer = system.actorOf(Props(classOf[FileWriter], Paths.get("/tmp", "test-file.txt"), true))
        watch(writer)
        writer ! Write(ByteString("baz"))
        expectMsg(Written(3))
        writer ! Close
        expectMsg(Closed)
        expectTerminated(writer)
        io.Source.fromFile("/tmp/test-file.txt").mkString should be("foobarbaz")
      } finally new java.io.File("/tmp/test-file.txt").delete()
    }
  }
}
| drexin/akka-io-file | src/test/scala/akka/io/FileWriterSpec.scala | Scala | apache-2.0 | 2,347 |
/*
* Copyright 2017 Anton Wierenga
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package activemq.cli.command
import activemq.cli.ActiveMQCLI
import activemq.cli.util.Console._
import activemq.cli.util.Implicits._
import javax.management.MBeanServerConnection
import javax.management.MBeanServerInvocationHandler
import javax.management.ObjectName
import org.apache.activemq.broker.jmx.BrokerViewMBean
import org.apache.activemq.broker.jmx.QueueViewMBean
import org.springframework.shell.core.annotation.CliAvailabilityIndicator
import org.springframework.shell.core.annotation.CliCommand
import org.springframework.shell.core.annotation.CliOption
import org.springframework.stereotype.Component
@Component
class QueueCommands extends Commands {

  @CliAvailabilityIndicator(Array("add-queue", "purge-queue", "purge-all-queues", "remove-queue", "remove-all-queues", "list-queues"))
  def isBrokerAvailable: Boolean = ActiveMQCLI.broker.isDefined

  /**
   * Returns whether the destination behind `objectName` passes the optional include
   * (`filter`) and exclude (`excludeFilter`) name filters. Matching is a
   * case-insensitive substring test; an absent filter (falsy via the implicit
   * String conversion from Implicits._) imposes no restriction.
   */
  private def matchesNameFilters(objectName: ObjectName, filter: String, excludeFilter: String): Boolean = {
    val destination = getDestinationKeyProperty(objectName).toLowerCase
    if (filter && excludeFilter) {
      destination.contains(Option(filter).getOrElse("").toLowerCase) &&
        !destination.contains(Option(excludeFilter).getOrElse("").toLowerCase)
    } else if (filter && !excludeFilter) {
      destination.contains(Option(filter).getOrElse("").toLowerCase)
    } else if (excludeFilter) {
      !destination.contains(Option(excludeFilter).getOrElse("").toLowerCase)
    } else {
      true
    }
  }

  /** Creates a queue on the broker; fails if a queue with that name already exists. */
  @CliCommand(value = Array("add-queue"), help = "Adds a queue")
  def addQueue(@CliOption(key = Array("name"), mandatory = true, help = "The name of the queue") name: String): String = {
    withBroker((brokerViewMBean: BrokerViewMBean, mBeanServerConnection: MBeanServerConnection) ⇒ {
      validateQueueNotExists(brokerViewMBean, name)
      brokerViewMBean.addQueue(name)
      info(s"Queue '$name' added")
    })
  }

  /** Removes all messages from an existing queue (after an optional confirmation prompt). */
  @CliCommand(value = Array("purge-queue"), help = "Purges a queue")
  def purgeQueue(
    @CliOption(key = Array("name"), mandatory = true, help = "The name of the queue") name: String,
    @CliOption(key = Array("force"), specifiedDefaultValue = "yes", mandatory = false, help = "No prompt") force: String
  ): String = {
    withBroker((brokerViewMBean: BrokerViewMBean, mBeanServerConnection: MBeanServerConnection) ⇒ {
      confirm(force)
      MBeanServerInvocationHandler.newProxyInstance(mBeanServerConnection, validateQueueExists(brokerViewMBean, name), classOf[QueueViewMBean], true).purge()
      info(s"Queue '$name' purged")
    })
  }

  /** Pauses message dispatching on an existing queue. */
  @CliCommand(value = Array("pause-queue"), help = "Pauses a queue")
  def pauseQueue(
    @CliOption(key = Array("name"), mandatory = true, help = "The name of the queue") name: String,
    @CliOption(key = Array("force"), specifiedDefaultValue = "yes", mandatory = false, help = "No prompt") force: String
  ): String = {
    withBroker((brokerViewMBean: BrokerViewMBean, mBeanServerConnection: MBeanServerConnection) ⇒ {
      confirm(force)
      MBeanServerInvocationHandler.newProxyInstance(mBeanServerConnection, validateQueueExists(brokerViewMBean, name), classOf[QueueViewMBean], true).pause()
      info(s"Queue '$name' paused")
    })
  }

  /** Resumes message dispatching on a previously paused queue. */
  @CliCommand(value = Array("resume-queue"), help = "Resumes a queue")
  def resumeQueue(
    @CliOption(key = Array("name"), mandatory = true, help = "The name of the queue") name: String,
    @CliOption(key = Array("force"), specifiedDefaultValue = "yes", mandatory = false, help = "No prompt") force: String
  ): String = {
    withBroker((brokerViewMBean: BrokerViewMBean, mBeanServerConnection: MBeanServerConnection) ⇒ {
      confirm(force)
      MBeanServerInvocationHandler.newProxyInstance(mBeanServerConnection, validateQueueExists(brokerViewMBean, name), classOf[QueueViewMBean], true).resume()
      info(s"Queue '$name' resumed")
    })
  }

  /** Deletes an existing queue (after an optional confirmation prompt). */
  @CliCommand(value = Array("remove-queue"), help = "Removes a queue")
  def removeQueue(
    @CliOption(key = Array("name"), mandatory = true, help = "The name of the queue") name: String,
    @CliOption(key = Array("force"), specifiedDefaultValue = "yes", mandatory = false, help = "No prompt") force: String
  ): String = {
    withBroker((brokerViewMBean: BrokerViewMBean, mBeanServerConnection: MBeanServerConnection) ⇒ {
      validateQueueExists(brokerViewMBean, name)
      confirm(force)
      brokerViewMBean.removeQueue(name)
      info(s"Queue '$name' removed")
    })
  }

  /** Deletes every queue that matches the given name/statistics filters (supports dry-run). */
  @CliCommand(value = Array("remove-all-queues"), help = "Removes all queues")
  def removeAllQueues(
    @CliOption(key = Array("force"), specifiedDefaultValue = "yes", mandatory = false, help = "No prompt") force: String,
    @CliOption(key = Array("filter"), mandatory = false, help = "The query") filter: String,
    @CliOption(key = Array("exclude-filter"), mandatory = false, help = "Only queues with a name that does not contain the value specified by exclude-filter are listed") excludeFilter: String, //scalastyle:ignore
    @CliOption(key = Array("dry-run"), specifiedDefaultValue = "yes", mandatory = false, help = "Dry run") dryRun: String,
    @CliOption(key = Array("pending"), mandatory = false, help = "Only queues that meet the pending filter are listed") pending: String,
    @CliOption(key = Array("enqueued"), mandatory = false, help = "Only queues that meet the enqueued filter are listed") enqueued: String,
    @CliOption(key = Array("dequeued"), mandatory = false, help = "Only queues that meet the dequeued filter are listed") dequeued: String,
    @CliOption(key = Array("consumers"), mandatory = false, help = "Only queues that meet the consumers filter are listed") consumers: String
  ): String = {
    withFilteredQueues("removed", force, filter, excludeFilter, dryRun, pending, enqueued, dequeued, consumers,
      (queueViewMBean: QueueViewMBean, brokerViewMBean: BrokerViewMBean, dryRun: Boolean, pending: String, enqueued: String, dequeued: String,
        consumers: String) ⇒ {
        brokerViewMBean.removeQueue(queueViewMBean.getName)
      })
  }

  /** Purges every queue that matches the given name/statistics filters (supports dry-run). */
  @CliCommand(value = Array("purge-all-queues"), help = "Purges all queues")
  def purgeAllQueues(
    @CliOption(key = Array("force"), specifiedDefaultValue = "yes", mandatory = false, help = "No prompt") force: String,
    @CliOption(key = Array("filter"), mandatory = false, help = "The query") filter: String,
    @CliOption(key = Array("exclude-filter"), mandatory = false, help = "Only queues with a name that does not contain the value specified by exclude-filter are listed") excludeFilter: String, //scalastyle:ignore
    @CliOption(key = Array("dry-run"), specifiedDefaultValue = "yes", mandatory = false, help = "Dry run") dryRun: String,
    @CliOption(key = Array("pending"), mandatory = false, help = "Only queues that meet the pending filter are listed") pending: String,
    @CliOption(key = Array("enqueued"), mandatory = false, help = "Only queues that meet the enqueued filter are listed") enqueued: String,
    @CliOption(key = Array("dequeued"), mandatory = false, help = "Only queues that meet the dequeued filter are listed") dequeued: String,
    @CliOption(key = Array("consumers"), mandatory = false, help = "Only queues that meet the consumers filter are listed") consumers: String
  ): String = {
    withFilteredQueues("purged", force, filter, excludeFilter, dryRun, pending, enqueued, dequeued, consumers,
      (queueViewMBean: QueueViewMBean, brokerViewMBean: BrokerViewMBean, dryRun: Boolean, pending: String, enqueued: String, dequeued: String,
        consumers: String) ⇒ {
        queueViewMBean.purge()
      })
  }

  /**
   * Renders a table of the queues that match the optional name and statistics
   * filters, ordered by the column configured under command.queues.order.field
   * (default: the queue name).
   */
  @CliCommand(value = Array("list-queues"), help = "Displays queues")
  def listQueues( //scalastyle:ignore
    @CliOption(key = Array("filter"), mandatory = false, help = "Only queues with a name that contains the value specified by filter are listed") filter: String, //scalastyle:ignore
    @CliOption(key = Array("exclude-filter"), mandatory = false, help = "Only queues with a name that does not contain the value specified by exclude-filter are listed") excludeFilter: String, //scalastyle:ignore
    @CliOption(key = Array("pending"), mandatory = false, help = "Only queues that meet the pending filter are listed") pending: String,
    @CliOption(key = Array("enqueued"), mandatory = false, help = "Only queues that meet the enqueued filter are listed") enqueued: String,
    @CliOption(key = Array("dequeued"), mandatory = false, help = "Only queues that meet the dequeued filter are listed") dequeued: String,
    @CliOption(key = Array("consumers"), mandatory = false, help = "Only queues that meet the consumers filter are listed") consumers: String
  ): String = {
    val headers = List("Queue Name", "Pending", "Consumers", "Enqueued", "Dequeued")
    withBroker((brokerViewMBean: BrokerViewMBean, mBeanServerConnection: MBeanServerConnection) ⇒ {
      val pendingCount = parseFilterParameter(pending, "pending")
      val enqueuedCount = parseFilterParameter(enqueued, "enqueued")
      val dequeuedCount = parseFilterParameter(dequeued, "dequeued")
      val consumersCount = parseFilterParameter(consumers, "consumers")
      val queueViewMBeans = brokerViewMBean.getQueues.filter(matchesNameFilters(_, filter, excludeFilter)).par.map({ objectName ⇒
        MBeanServerInvocationHandler.newProxyInstance(mBeanServerConnection, objectName, classOf[QueueViewMBean], true)
      }).filter(queueViewMBean ⇒ applyFilterParameter(pending, queueViewMBean.getQueueSize, pendingCount) &&
        applyFilterParameter(enqueued, queueViewMBean.getEnqueueCount, enqueuedCount) &&
        applyFilterParameter(dequeued, queueViewMBean.getDequeueCount, dequeuedCount) &&
        applyFilterParameter(consumers, queueViewMBean.getConsumerCount, consumersCount))
      // Numeric columns are zero-padded to 15 digits so the lexicographic sort
      // below coincides with numeric order.
      val rows = queueViewMBeans.par.map(queueViewMBean ⇒ List(queueViewMBean.getName, queueViewMBean.getQueueSize, queueViewMBean.getConsumerCount,
        queueViewMBean.getEnqueueCount, queueViewMBean.getDequeueCount))
        .seq.sortBy(ActiveMQCLI.Config.getOptionalString(s"command.queues.order.field") match {
          case Some("Pending") ⇒ (row: Seq[Any]) ⇒ { "%015d".format(row(headers.indexOf("Pending"))) }
          case Some("Consumers") ⇒ (row: Seq[Any]) ⇒ { "%015d".format(row(headers.indexOf("Consumers"))) }
          case Some("Enqueued") ⇒ (row: Seq[Any]) ⇒ { "%015d".format(row(headers.indexOf("Enqueued"))) }
          case Some("Dequeued") ⇒ (row: Seq[Any]) ⇒ { "%015d".format(row(headers.indexOf("Dequeued"))) }
          // Bug fix: the previous code looked up headers.indexOf("Queue Name" + 1),
          // i.e. the non-existent column "Queue Name1", which yields -1 and made
          // row(-1) throw IndexOutOfBoundsException whenever no (or an unknown)
          // order field was configured.
          case _ ⇒ (row: Seq[Any]) ⇒ { row(headers.indexOf("Queue Name")).asInstanceOf[String] }
        })(ActiveMQCLI.Config.getOptionalString(s"command.queues.order.direction") match {
          case Some("reverse") ⇒ Ordering[String].reverse
          case _ ⇒ Ordering[String]
        })
      if (rows.nonEmpty) {
        renderTable(rows, headers) + s"\nTotal queues: ${rows.size}"
      } else {
        warn(s"No queues found")
      }
    })
  }

  /**
   * Applies `callback` to every queue that matches the name and statistics filters,
   * returning a sorted report of the affected queues. When `dryRun` is set the
   * callback is not invoked and the report only lists what would happen. The
   * confirmation prompt (`force` to skip) is shown before any queue is touched,
   * but only for real runs.
   */
  def withFilteredQueues(action: String, force: String, filter: String, excludeFilter: String, dryRun: Boolean, pending: String, enqueued: String, dequeued: String, //scalastyle:ignore
    consumers: String, callback: (QueueViewMBean, BrokerViewMBean, Boolean, String, String, String, String) ⇒ Unit): String = {
    withBroker((brokerViewMBean: BrokerViewMBean, mBeanServerConnection: MBeanServerConnection) ⇒ {
      // Parse the statistics filters first so an invalid filter fails fast,
      // before the user is prompted for confirmation.
      val pendingCount = parseFilterParameter(pending, "pending")
      val enqueuedCount = parseFilterParameter(enqueued, "enqueued")
      val dequeuedCount = parseFilterParameter(dequeued, "dequeued")
      val consumersCount = parseFilterParameter(consumers, "consumers")
      if (!dryRun) confirm(force)
      val rows = brokerViewMBean.getQueues.filter(matchesNameFilters(_, filter, excludeFilter)).par.map({ objectName ⇒
        MBeanServerInvocationHandler.newProxyInstance(mBeanServerConnection, objectName, classOf[QueueViewMBean], true)
      }).filter(queueViewMBean ⇒ applyFilterParameter(pending, queueViewMBean.getQueueSize, pendingCount) &&
        applyFilterParameter(enqueued, queueViewMBean.getEnqueueCount, enqueuedCount) &&
        applyFilterParameter(dequeued, queueViewMBean.getDequeueCount, dequeuedCount) &&
        applyFilterParameter(consumers, queueViewMBean.getConsumerCount, consumersCount)).par.map(queueViewMBean ⇒ {
        val queueName = queueViewMBean.getName
        if (dryRun) {
          s"Queue to be ${action}: '${queueName}'"
        } else {
          callback(queueViewMBean, brokerViewMBean, dryRun, pending, enqueued, dequeued, consumers)
          s"Queue ${action}: '${queueName}'"
        }
      })
      if (rows.nonEmpty) {
        val dryRunText = if (dryRun) "to be " else ""
        (rows.seq.sorted :+ s"Total queues ${dryRunText}${action}: ${rows.size}").mkString("\n")
      } else {
        warn(s"No queues found")
      }
    })
  }
}
| antonwierenga/activemq-cli | src/main/scala/activemq/cli/command/QueueCommands.scala | Scala | apache-2.0 | 14,244 |
package sri.test.components
import sri.core.ElementFactory._
import sri.core.{React, ReactComponent}
import sri.universal.ReactUniversal
import scala.scalajs.js
import scala.scalajs.js.Dynamic.{literal => json}
import scala.scalajs.js.annotation.{JSName, ScalaJSDefined}
object HelloLifeCycle {

  // Mutable probes flipped by the lifecycle callbacks below so tests can verify
  // which React lifecycle hooks were invoked.
  var willMount = false
  var willUnMount = false
  var didMount = false
  var willUpdate = false
  var didUpdate = false
  var willReceiveProps = false
  var rendered = false
  var shouldUpdate = false

  @ScalaJSDefined
  class Component extends ReactComponent[String, String] {

    initialState("")

    override def componentWillMount(): Unit = {
      willMount = true
    }

    override def componentDidMount(): Unit = {
      println(s"component did mount")
      didMount = true
    }

    @JSName("sComponentWillReceiveProps")
    override def componentWillReceiveProps(nextProps: => String): Unit = {
      willReceiveProps = true
    }

    @JSName("sComponentWillUpdate")
    override def componentWillUpdate(nextProps: => String, nextState: => String): Unit = {
      willUpdate = true
    }

    // Renders nothing; only records that render was called.
    def render() = {
      rendered = true
      null
    }

    @JSName("sShouldComponentUpdate")
    override def shouldComponentUpdate(nextProps: => String, nextState: => String): Boolean = {
      shouldUpdate = true
      true
    }

    @JSName("sComponentDidUpdate")
    override def componentDidUpdate(prevProps: => String, prevState: => String): Unit = {
      didUpdate = true
    }

    override def componentWillUnmount(): Unit = {
      println(s"*********************** unmount")
      // Fix: the willUnMount probe was declared but never set, so tests could
      // not observe that this hook actually ran.
      willUnMount = true
    }

    // Helper used by tests to drive a state change (triggers the update hooks).
    def updateState() = {
      setState("newState")
    }
  }

  // NOTE(review): the `key` parameter is accepted but the element is always
  // created with the hard-coded key "hkey" — presumably intentional for these
  // tests; confirm before passing `key` through.
  def apply(props: String = "hi", key: js.UndefOr[String] = js.undefined, ref: js.Function1[Component, Unit] = null) = makeElement[Component](props, key = "hkey", ref = ref)
}
| chandu0101/sri | test/src/main/scala/sri/test/components/HelloLifeCycle.scala | Scala | apache-2.0 | 1,860 |
package com.geeksville.dapi
import com.github.aselab.activerecord.ActiveRecord
import org.scalatra.swagger.Swagger
import com.geeksville.dapi.model.CRUDOperations
import org.json4s.Formats
import com.geeksville.json.ActiveRecordSerializer
import com.github.aselab.activerecord.dsl._
import com.geeksville.dapi.model.DapiRecordCompanion
import com.github.aselab.activerecord.ActiveRecordException
import org.json4s.JsonAST.JValue
import com.geeksville.dapi.model.DapiRecord
/**
 * A controller that assumes the backing object comes from ActiveRecord (allows easy field finding).
 *
 * Adds a GET /:id/:param route for reading a single field of a record, and
 * implements query/paging/ordering on top of the companion's ActiveRecord relation.
 */
class ActiveRecordController[T <: DapiRecord: Manifest, JsonT <: Product: Manifest](aName: String, swagger: Swagger, protected val myCompanion: DapiRecordCompanion[T])
  extends ApiController[T, JsonT](aName, swagger, myCompanion) {

  /// Fields we never want to share with clients
  /// FIXME - add annotations for this?
  def blacklist = Set[String]()

  // Swagger metadata for the GET /:id/:param route below.
  private val findParamOp =
    (apiOperation[JValue]("getParam") // FIXME - this is not correct - the type should depend on the type of the a param itself
      summary "Get a parameter from an object"
      parameters (
        pathParam[String]("id").description(s"Id of $aName that needs to be fetched"),
        pathParam[String]("param").description(s"The parameter to read from the object")))

  // Reads one field of the record. Blacklisted fields are treated the same as
  // missing ones, so clients cannot distinguish hidden from nonexistent fields.
  get("/:id/:param", operation(findParamOp)) {
    // use for comprehension to stack up all the possibly missing values
    (for {
      param <- params.get("param")
      pval <- if (blacklist.contains(param)) None else findById.toMap.get(param)
    } yield {
      pval
    }).getOrElse(haltNotFound("object or parameter not found"))
  }

  /// Subclasses can override if they want to make finding fields smarter
  // Converts the LogicalBoolean expressions into (column, value) equality pairs
  // and applies them with findAllBy; an empty filter list returns the relation unchanged.
  protected def applyFilterExpressions(r: myCompanion.Relation, whereExp: Seq[LogicalBoolean]) = {
    val eqFilters = whereExp.map { l => (l.colName, l.cmpValue) }
    if (!eqFilters.isEmpty)
      r.findAllBy(eqFilters.head, eqFilters.tail: _*)
    else
      r
  }

  /**
   * Use activerecord methods to find our records
   *
   * @param whereExp, tuples of the form (fieldname, opcode, value)
   */
  // Builds the query in order: subclass filtering, where expressions, ordering,
  // then paging (capped at 100 rows per page). ActiveRecord errors become HTTP 400.
  final override protected def getWithQuery(pageOffset: Option[Int] = None,
    pagesizeOpt: Option[Int] = None,
    orderBy: Option[String] = None,
    orderDir: Option[String] = None,
    whereExp: Iterable[LogicalBoolean] = Iterable.empty) = {
    try {
      var r = getFiltered

      // FIXME - use the where expression more correctly
      if (!whereExp.isEmpty) {
        debug("Applying filter expressions: " + whereExp.mkString(", "))
        r = applyFilterExpressions(r, whereExp.toSeq)
      }

      // Apply ordering (direction defaults to ascending when only orderBy is given)
      for {
        orderOn <- orderBy
      } yield {
        val dir = orderDir.getOrElse("asc")
        r = r.orderBy(orderOn, dir)
      }

      // Apply paging restriction - to prevent casual scraping
      val maxPageSize = 100
      val pagesize = pagesizeOpt.getOrElse(maxPageSize)
      if (pagesize > maxPageSize)
        haltBadRequest("page_size is too large")

      val offset = pageOffset.getOrElse(0)
      r = r.page(offset, pagesize)

      r.toIterable
    } catch {
      case ex: ActiveRecordException =>
        haltBadRequest(ex.getMessage)
    }
  }

  /**
   * Subclasses should override to 'chain' any parameter based filters.
   * This baseclass provides support for paging and ordering
   */
  protected def getFiltered() = {
    myCompanion.collection
  }

  /// Subclasses can provide suitable behavior if they want to allow DELs to /:id to result in deleting objects
  // Captures the description before deletion so the response can still mention it.
  override protected def doDelete(o: T): Any = {
    val desc = o.toString
    o.delete()
    s"Deleted $desc"
  }
}
} | dronekit/dronekit-server | src/main/scala/com/geeksville/dapi/ActiveRecordController.scala | Scala | gpl-3.0 | 3,708 |
/**
 * Returns the smallest element of `a` according to `comparator`.
 * Throws if `a` is empty (reduceLeft on an empty sequence). Ties keep the
 * later element, matching a left fold with a strict `< 0` comparison.
 */
def min[T](a: Seq[T])(implicit comparator: Comparator[T]): T =
  a.reduceLeft { (best, candidate) =>
    if (comparator.compare(best, candidate) < 0) best else candidate
  }
| lkuczera/scalatypeclasses | steps/Comparatormin.scala | Scala | mit | 130 |
package loaders
import org.apache.spark.rdd.RDD
import scala.reflect.ClassTag
/**
* A case class containing an RDD of labeled data
* @tparam Label The type of the labels
* @tparam Datum The type of the data
*/
case class LabeledData[Label : ClassTag, Datum : ClassTag](labeledData: RDD[(Label, Datum)]) {
val data: RDD[Datum] = labeledData.map(_._2)
val labels: RDD[Label] = labeledData.map(_._1)
}
| dongjoon-hyun/keystone | src/main/scala/loaders/LabeledData.scala | Scala | apache-2.0 | 412 |
package com.mesosphere.universe
/**
 * Conforms to: https://github.com/mesosphere/universe/blob/version-2.x/repo/meta/schema/package-schema.json
 *
 * The first five fields are required by the schema; all remaining fields are
 * optional and default to None (tags defaults to an empty list).
 */
case class PackageDetails(
  packagingVersion: PackagingVersion,
  name: String,
  version: PackageDetailsVersion,
  maintainer: String,
  description: String,
  tags: List[String] = Nil, //TODO: pattern: "^[^\s]+$"
  selected: Option[Boolean] = None,
  scm: Option[String] = None,
  website: Option[String] = None,
  framework: Option[Boolean] = None,
  preInstallNotes: Option[String] = None,
  postInstallNotes: Option[String] = None,
  postUninstallNotes: Option[String] = None,
  licenses: Option[List[License]] = None
)
| movicha/cosmos | cosmos-model/src/main/scala/com/mesosphere/universe/PackageDetails.scala | Scala | apache-2.0 | 687 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
/**
* Superclass for the possible outcomes of running a test.
*
* <p>
* <code>Outcome</code> is the result type of the <code>withFixture</code> methods of traits
* <a href="Suite.html#withFixture"><code>Suite</code></a> and <a href="fixture/Suite.html#withFixture"><code>fixture.Suite</code></a>, as well as their
* <a href="Suite$NoArgTest.html"><code>NoArgTest</code></a> and <a href="fixture/Suite$OneArgTest.html"><code>OneArgTest</code></a> function types.
* The four possible outcomes are:
* </p>
*
* <ul>
* <li><a href="Succeeded$.html"><code>Succeeded</code></a> - indicates a test succeeded</li>
* <li><a href="Failed.html"><code>Failed</code></a> - indicates a test failed and contains an exception describing the failure</li>
* <li><a href="Canceled.html"><code>Canceled</code></a> - indicates a test was canceled and contains an exception describing the cancelation</li>
* <li><a href="Pending$.html"><code>Pending</code></a> - indicates a test was pending</li>
* </ul>
*
* <p>
* Note that "ignored" does not appear as a type of <code>Outcome</code>, because tests are
* marked as ignored on the outside and skipped over as the suite executes. So an ignored test never runs, and therefore
* never has an outcome. By contrast, a test is determined to be pending by running the test
* and observing the actual outcome. If the test body completes abruptly with a <code>TestPendingException</code>,
* then the outcome was that the test was pending.
* </p>
*/
sealed abstract class Outcome {

  /**
   * Indicates whether this <code>Outcome</code> represents a test that succeeded.
   *
   * <p>
   * This class's implementation of this method always returns <code>false</code>.
   * </p>
   *
   * @return true if this <code>Outcome</code> is an instance of <code>Succeeded</code>.
   */
  val isSucceeded: Boolean = false

  /**
   * Indicates whether this <code>Outcome</code> represents a test that failed.
   *
   * <p>
   * This class's implementation of this method always returns <code>false</code>.
   * </p>
   *
   * @return true if this <code>Outcome</code> is an instance of <code>Failed</code>.
   */
  val isFailed: Boolean = false

  /**
   * Indicates whether this <code>Outcome</code> represents a test that was canceled.
   *
   * <p>
   * This class's implementation of this method always returns <code>false</code>.
   * </p>
   *
   * @return true if this <code>Outcome</code> is an instance of <code>Canceled</code>.
   */
  val isCanceled: Boolean = false

  /**
   * Indicates whether this <code>Outcome</code> represents a test that was pending.
   *
   * <p>
   * This class's implementation of this method always returns <code>false</code>.
   * </p>
   *
   * @return true if this <code>Outcome</code> is an instance of <code>Pending</code>.
   */
  val isPending: Boolean = false

  /**
   * Indicates whether this <code>Outcome</code> represents a test that either failed or was canceled, in which case this <code>Outcome</code> will contain an exception.
   *
   * @return true if this <code>Outcome</code> is an instance of either <code>Failed</code> or <code>Canceled</code>.
   */
  val isExceptional: Boolean = false

  /**
   * Converts this <code>Outcome</code> to an <code>Option[Throwable]</code>.
   *
   * <p>
   * This class's implementation of this method always returns <code>None</code>.
   * </p>
   *
   * @return a <code>Some</code> wrapping the contained exception if this <code>Outcome</code> is an instance of either <code>Failed</code> or <code>Canceled</code>.
   */
  def toOption: Option[Throwable] = None

  /**
   * Converts this <code>Outcome</code> to a <code>Succeeded</code>.
   *
   * <p>
   * When this <code>Outcome</code> instance is not Succeeded, it behaves as followed:
   * </p>
   *
   * <ul>
   * <li>Failed(ex) - throws ex</li>
   * <li>Canceled(tce) - throws tce</li>
   * <li>Pending - throws TestPendingException</li>
   * </ul>
   *
   * @return Succeeded if this <code>Outcome</code> instance is a Succeeded.
   */
  def toSucceeded: Succeeded.type

  // Used internally to resuse the old code that was catching these exceptions when running tests. Eventually I would
  // like to rewrite that old code to use the result type, but it will still needs to catch and handle these exceptions
  // in the same way in case they come back from a user's withFixture implementation.
  // Note: the match is exhaustive because Outcome is sealed; Exceptional covers
  // both Failed and Canceled.
  private[scalatest] def toUnit {
    this match {
      case Succeeded =>
      case Exceptional(e) => throw e
      case Pending => throw new exceptions.TestPendingException
    }
  }
}
/**
 * Companion object for trait <code>Outcome</code> that contains an implicit method that enables
 * collections of <code>Outcome</code>s to be flattened into a collection of contained exceptions.
 */
object Outcome {

  import scala.language.implicitConversions

  /**
   * Enables collections of <code>Outcome</code>s to be flattened into a collection of contained exceptions.
   *
   * <p>
   * Given a collection of <code>Outcome</code>s, such as the result of running an assertion on
   * each row of a table with <code>outcomeOf</code>:
   * </p>
   *
   * <pre class="stREPL">
   * scala&gt; val outcomes = for ((x, square) &lt;- squares) yield outcomeOf { square shouldEqual x * x }
   * outcomes: IndexedSeq[org.scalatest.Outcome] =
   *   Vector(Succeeded, Failed(org.scalatest.exceptions.TestFailedException: 8 did not equal 9), ...)
   * </pre>
   *
   * <p>
   * this implicit conversion lets you obtain just the contained exceptions (from
   * <code>Failed</code> and <code>Canceled</code> outcomes) by invoking <code>flatten</code>
   * on the collection:
   * </p>
   *
   * <pre class="stREPL">
   * scala&gt; outcomes.flatten
   * res0: IndexedSeq[Throwable] =
   *   Vector(org.scalatest.exceptions.TestFailedException: 8 did not equal 9, ...)
   * </pre>
   *
   * @param outcome the <code>Outcome</code> to view as an <code>Iterator[Throwable]</code>
   * @return a one-element iterator containing the exception if <code>outcome</code> is
   *         <code>Failed</code> or <code>Canceled</code>, otherwise an empty iterator
   */
  implicit def convertOutcomeToIterator(outcome: Outcome): Iterator[Throwable] =
    outcome match {
      // Exceptional matches both Failed and Canceled. Use the standard library's
      // Iterator.single/Iterator.empty instead of the previous hand-rolled
      // anonymous Iterator implementations; behavior is identical, including
      // NoSuchElementException from next() once exhausted.
      case Exceptional(ex) => Iterator.single(ex)
      case _ => Iterator.empty
    }
}
/**
 * Common superclass of the two exception-carrying test outcomes: <code>Failed</code> and <code>Canceled</code>.
 *
 * <p>
 * Instances always answer <code>true</code> from <code>isExceptional</code> and surface the wrapped
 * exception via <code>toOption</code>. The companion object supplies an extractor so one pattern can
 * match a test that either failed or was canceled:
 * </p>
 *
 * <pre>
 * outcome match {
 *   case Exceptional(ex) => // handle failed or canceled case
 *   case _ => // handle succeeded, pending, or omitted case
 * }
 * </pre>
 *
 * @param ex the <code>Throwable</code> wrapped by this <code>Exceptional</code>.
 */
sealed abstract class Exceptional(ex: Throwable) extends Outcome {

  /**
   * Indicates that this <code>Outcome</code> represents a test that either failed or was canceled.
   *
   * @return true
   */
  override val isExceptional: Boolean = true

  /**
   * Wraps the contained exception in a <code>Some</code>.
   *
   * @return a <code>Some</code> holding the exception carried by this <code>Exceptional</code>.
   */
  override def toOption: Option[Throwable] = Some(ex)
}
/**
 * Companion object to class <code>Exceptional</code> providing a factory method and an extractor
 * that matches both <code>Failed</code> and <code>Canceled</code> outcomes, extracting the
 * contained exception.
 */
object Exceptional {

  /**
   * Creates an <code>Exceptional</code> from the given <code>Throwable</code>.
   *
   * <p>
   * A <code>TestCanceledException</code> is wrapped in a <code>Canceled</code>; any other
   * throwable is wrapped in a <code>Failed</code>. Trait
   * <a href="SeveredStackTraces.html"><code>SeveredStackTraces</code></a>, for example, uses this
   * factory to rebuild an outcome after severing the contained exception's stack trace:
   * </p>
   *
   * <pre>
   * abstract override def withFixture(test: NoArgTest): Outcome = {
   *   super.withFixture(test) match {
   *     case Exceptional(e: StackDepth) =&gt; Exceptional(e.severedAtStackDepth)
   *     case o =&gt; o
   *   }
   * }
   * </pre>
   *
   * @return a <code>Canceled</code> or <code>Failed</code> containing the passed exception.
   */
  def apply(e: Throwable): Exceptional =
    e match {
      case tce: exceptions.TestCanceledException => Canceled(tce)
      case other => Failed(other)
    }

  /**
   * Extractor enabling patterns that match both <code>Failed</code> and <code>Canceled</code>
   * outcomes, extracting the contained exception.
   *
   * <p>
   * See the <code>apply</code> documentation above for a usage example from
   * <a href="SeveredStackTraces.html"><code>SeveredStackTraces</code></a>.
   * </p>
   *
   * @param res the <code>Outcome</code> to extract the throwable from.
   * @return a <code>Some</code> wrapping the contained throwable if <code>res</code> is an
   *     instance of either <code>Failed</code> or <code>Canceled</code>, else <code>None</code>.
   */
  def unapply(res: Outcome): Option[Throwable] =
    res match {
      // Failed and Canceled are the only subclasses of the sealed Exceptional,
      // and both yield Some(exception) from toOption.
      case exceptional: Exceptional => exceptional.toOption
      case _ => None
    }
}
/**
 * Outcome for a test that succeeded.
 *
 * <p>
 * Note: do not confuse this object with the similarly named
 * <a href="SucceededStatus$.html"><code>SucceededStatus</code></a>. This object reports that one
 * individual test succeeded (a possible result of <code>withFixture</code>), whereas
 * <code>SucceededStatus</code> reports the absence of failed tests or aborted suites across an
 * entire run (a possible result of <code>run</code>, <code>runNestedSuites</code>,
 * <code>runTests</code>, or <code>runTest</code>).
 * </p>
 */
case object Succeeded extends Outcome {

  /**
   * Indicates that this <code>Outcome</code> represents a test that succeeded.
   *
   * <p>
   * Always <code>true</code> for this object.
   * </p>
   *
   * @return true
   */
  override val isSucceeded: Boolean = true

  /**
   * Converts this <code>Outcome</code> to <code>Succeeded</code>.
   *
   * @return this <code>Succeeded</code> singleton.
   */
  def toSucceeded: Succeeded.type = Succeeded
}
/**
 * Outcome for a test that failed, containing an exception describing the cause of the failure.
 *
 * <p>
 * Note: do not confuse this class with the similarly named
 * <a href="FailedStatus$.html"><code>FailedStatus</code></a>. An instance of this class reports
 * that one individual test failed (a possible result of <code>withFixture</code>), whereas
 * <code>FailedStatus</code> reports that one or more tests failed and/or one or more suites
 * aborted during a run (a possible result of <code>run</code>, <code>runNestedSuites</code>,
 * <code>runTests</code>, or <code>runTest</code>).
 * </p>
 *
 * @param exception the <code>Throwable</code> contained in this <code>Failed</code>; must not be
 *     a <code>TestCanceledException</code> or <code>TestPendingException</code>.
 */
case class Failed(exception: Throwable) extends Exceptional(exception) {

  // Cancel/pending are modeled by their own outcomes, so their control-flow
  // exceptions must never be smuggled into a Failed.
  require(!exception.isInstanceOf[exceptions.TestCanceledException], "a TestCanceledException was passed to Failed's constructor")
  require(!exception.isInstanceOf[exceptions.TestPendingException], "a TestPendingException was passed to Failed's constructor")

  /**
   * Indicates that this <code>Outcome</code> represents a test that failed.
   *
   * <p>
   * Always <code>true</code> for this class.
   * </p>
   *
   * @return true
   */
  override val isFailed: Boolean = true

  /**
   * Attempts to convert this <code>Outcome</code> to a <code>Succeeded</code>.
   *
   * <p>
   * A failed test cannot be treated as a success, so this implementation re-throws the
   * contained exception.
   * </p>
   */
  def toSucceeded: Succeeded.type = throw exception
}
/**
 * Companion object to class <code>Failed</code> providing factory methods for building
 * <code>Failed</code> outcomes from messages and/or causes.
 */
object Failed {

  /** Creates a <code>Failed</code> wrapping a message-less <code>TestFailedException</code>. */
  def apply(): Failed = new Failed(new exceptions.TestFailedException(1))

  /** Creates a <code>Failed</code> whose <code>TestFailedException</code> carries the given message. */
  def apply(message: String): Failed = new Failed(new exceptions.TestFailedException(message, 1))

  /**
   * Creates a <code>Failed</code> carrying both a message and a cause.
   *
   * <p>
   * The cause is always wrapped in a <code>TestFailedException</code>, because that is the only
   * way to attach the message to it.
   * </p>
   *
   * @param message the failure message
   * @param cause the underlying cause; must not be a cancel/pending control-flow exception
   */
  def apply(message: String, cause: Throwable): Failed = {
    rejectCanceledOrPending(cause, "a factory method in object Failed")
    new Failed(new exceptions.TestFailedException(message, cause, 1))
  }

  /**
   * Creates a <code>Failed</code> whose <code>TestFailedException</code> reuses the cause's own
   * message when it has one, and is message-less otherwise.
   *
   * @param cause the underlying cause; must not be a cancel/pending control-flow exception
   */
  def here(cause: Throwable): Failed = {
    rejectCanceledOrPending(cause, "the \"here\" factory method in object Failed")
    new Failed(
      if (cause.getMessage != null)
        new exceptions.TestFailedException(cause.getMessage, cause, 1)
      else
        new exceptions.TestFailedException(cause, 1)
    )
  }

  // Shared guard: Failed must never wrap the control-flow exceptions used to
  // signal cancellation or pending-ness (those have their own outcomes).
  private def rejectCanceledOrPending(cause: Throwable, where: String): Unit = {
    require(!cause.isInstanceOf[exceptions.TestCanceledException], "a TestCanceledException was passed to " + where)
    require(!cause.isInstanceOf[exceptions.TestPendingException], "a TestPendingException was passed to " + where)
  }
}
/**
 * Outcome for a test that was canceled, containing an exception describing the cause of the
 * cancelation.
 *
 * @param exception the <code>TestCanceledException</code> contained in this <code>Canceled</code>.
 */
case class Canceled(exception: exceptions.TestCanceledException) extends Exceptional(exception) {

  /**
   * Indicates that this <code>Outcome</code> represents a test that was canceled.
   *
   * <p>
   * Always <code>true</code> for this class.
   * </p>
   *
   * @return true
   */
  override val isCanceled: Boolean = true

  /**
   * Attempts to convert this <code>Outcome</code> to a <code>Succeeded</code>.
   *
   * <p>
   * A canceled test cannot be treated as a success, so this implementation re-throws the
   * contained <code>TestCanceledException</code>.
   * </p>
   */
  def toSucceeded: Succeeded.type = throw exception
}
/**
 * Companion object to class <code>Canceled</code> that provides, in addition to the extractor and
 * factory method generated by the compiler for the case class, further factory methods that build
 * a <code>Canceled</code> outcome from a message, a cause, or both.
 */
object Canceled {

  /** Creates a <code>Canceled</code> wrapping a message-less <code>TestCanceledException</code>. */
  def apply(): Canceled = new Canceled(new exceptions.TestCanceledException(1))

  /** Creates a <code>Canceled</code> carrying both a message and a cause. */
  def apply(message: String, cause: Throwable): Canceled = // TODO write tests for NPEs
    new Canceled(new exceptions.TestCanceledException(message, cause, 1))

  /**
   * Creates a <code>Canceled</code> from an arbitrary throwable: a
   * <code>TestCanceledException</code> is wrapped as-is; anything else is wrapped in a new
   * <code>TestCanceledException</code> that reuses its message when present.
   */
  def apply(ex: Throwable): Canceled = // TODO write tests for NPEs
    ex match {
      case tce: exceptions.TestCanceledException =>
        new Canceled(tce)
      case other =>
        // Option(null) is None, so this mirrors an explicit null-message check.
        Option(other.getMessage) match {
          case Some(msg) => new Canceled(new exceptions.TestCanceledException(msg, other, 1))
          case None => new Canceled(new exceptions.TestCanceledException(other, 1))
        }
    }

  /**
   * Creates a <code>Canceled</code> outcome given a string message.
   *
   * <p>
   * For example, trait <code>CancelAfterFailure</code> uses this factory method to create
   * a <code>Canceled</code> status if a <code>cancelRemaining</code> flag is set, which will
   * be the case if a test failed previously while running the suite:
   * </p>
   *
   * <pre>
   * abstract override def withFixture(test: NoArgTest): Outcome = {
   *   if (cancelRemaining)
   *     Canceled("Canceled by CancelOnFailure because a test failed previously")
   *   else
   *     super.withFixture(test) match {
   *       case failed: Failed =&gt;
   *         cancelRemaining = true
   *         failed
   *       case outcome =&gt; outcome
   *     }
   * }
   * </pre>
   */
  def apply(message: String): Canceled = {
    if (message == null)
      throw new NullPointerException("message was null")
    val tce = new exceptions.TestCanceledException(message, 1)
    tce.fillInStackTrace()
    Canceled(tce)
  }

  /**
   * Creates a <code>Canceled</code> whose <code>TestCanceledException</code> reuses the cause's
   * own message when it has one, and is message-less otherwise.
   */
  def here(cause: Throwable): Canceled =
    new Canceled(
      Option(cause.getMessage) match {
        case Some(msg) => new exceptions.TestCanceledException(msg, cause, 1)
        case None => new exceptions.TestCanceledException(cause, 1)
      }
    )
}
/**
 * Outcome for a test that was pending.
 *
 * <p>
 * (This is a parameterless singleton; any description of why the test is pending travels with
 * the <code>TestPendingException</code> raised when it is converted to a success.)
 * </p>
 */
case object Pending extends Outcome {

  /**
   * Indicates that this <code>Outcome</code> represents a test that was pending.
   *
   * <p>
   * Always <code>true</code> for this object.
   * </p>
   *
   * @return true
   */
  override val isPending: Boolean = true

  /**
   * Attempts to convert this <code>Outcome</code> to a <code>Succeeded</code>.
   *
   * <p>
   * A pending test cannot be treated as a success, so this implementation throws
   * <code>TestPendingException</code>.
   * </p>
   */
  def toSucceeded: Succeeded.type = throw new exceptions.TestPendingException
}
| travisbrown/scalatest | src/main/scala/org/scalatest/Outcome.scala | Scala | apache-2.0 | 21,075 |
/**
* Copyright (C) 2010-2012 LShift Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.lshift.diffa.kernel.config.migrations
import org.hibernate.cfg.Configuration
import java.sql.Types
import scala.collection.JavaConversions._
import net.lshift.hibernate.migrations.MigrationBuilder
import net.lshift.diffa.kernel.config.{ConfigOption, DefinePartitionInformationTable, Domain, HibernateMigrationStep}
/**
 * This Step 'migrates' a schema/database to version 22 -
 * that is, it creates the base schema from scratch: every table, foreign key,
 * index, and seed row a fresh installation needs.
 */
object Step0022 extends HibernateMigrationStep {
  // Schema version number this step brings the database up to.
  def versionId = 22
  // Human-readable name reported for this migration step.
  def name = "Create schema"
  /**
   * Builds the complete migration: table definitions first, then the optional
   * diffs partitioning, foreign keys, unique constraints, indexes, and
   * finally the seed-data inserts.
   */
  def createMigration(config: Configuration) = {
    val migration = new MigrationBuilder(config)
    // ---- Table definitions ------------------------------------------------
    // Shared constraint metadata row; the prefix/range/set descriptor tables
    // below reference it by category_id.
    migration.createTable("category_descriptor").
      column("category_id", Types.INTEGER, false).
      column("constraint_type", Types.VARCHAR, 20, false).
      column("prefix_length", Types.INTEGER, true).
      column("max_length", Types.INTEGER, true).
      column("step", Types.INTEGER, true).
      pk("category_id").
      withNativeIdentityGenerator()
    // Per-domain configuration key/value pairs.
    migration.createTable("config_options").
      column("domain", Types.VARCHAR, 50, false, Domain.DEFAULT_DOMAIN.name).
      column("opt_key", Types.VARCHAR, 255, false).
      column("opt_val", Types.VARCHAR, 255, true).
      pk("opt_key", "domain")
    // Difference events; kept in a val so list partitioning can be added below.
    val diffsTable = migration.createTable("diffs").
      column("seq_id", Types.INTEGER, false).
      column("domain", Types.VARCHAR, 50, false).
      column("pair", Types.VARCHAR, 50, false).
      column("entity_id", Types.VARCHAR, 255, false).
      column("is_match", Types.BIT, false).
      column("detected_at", Types.TIMESTAMP, false).
      column("last_seen", Types.TIMESTAMP, false).
      column("upstream_vsn", Types.VARCHAR, 255, true).
      column("downstream_vsn", Types.VARCHAR, 255, true).
      column("ignored", Types.BIT, false).
      pk("seq_id", "domain", "pair").
      withNativeIdentityGenerator()
    // N.B. include the partition info table on all DBs (support may be added in future)
    DefinePartitionInformationTable.defineTable(migration)
    // Partition the diffs table by domain/pair where the database supports it.
    if (migration.canUseListPartitioning) {
      diffsTable.virtualColumn("partition_name", Types.VARCHAR, 512, "domain || '_' || pair").
        listPartitioned("partition_name").
        listPartition("part_dummy_default", "default")
      DefinePartitionInformationTable.applyPartitionVersion(migration, "diffs", versionId)
      migration.executeDatabaseScript("sync_pair_diff_partitions", "net.lshift.diffa.kernel.config.procedures")
    }
    migration.createTable("domains").
      column("name", Types.VARCHAR, 50, false).
      pk("name")
    migration.createTable("endpoint").
      column("domain", Types.VARCHAR, 50, false, Domain.DEFAULT_DOMAIN.name).
      column("name", Types.VARCHAR, 50, false).
      column("scan_url", Types.VARCHAR, 1024, true).
      column("content_retrieval_url", Types.VARCHAR, 1024, true).
      column("version_generation_url", Types.VARCHAR, 1024, true).
      column("inbound_url", Types.VARCHAR, 1024, true).
      pk("domain", "name")
    migration.createTable("endpoint_categories").
      column("domain", Types.VARCHAR, 50, false, Domain.DEFAULT_DOMAIN.name).
      column("id", Types.VARCHAR, 50, false).
      column("category_descriptor_id", Types.INTEGER, false).
      column("name", Types.VARCHAR, 50, false).
      pk("id", "name")
    migration.createTable("endpoint_views").
      column("domain", Types.VARCHAR, 50, false).
      column("endpoint", Types.VARCHAR, 50, false).
      column("name", Types.VARCHAR, 50, false).
      pk("domain", "endpoint", "name")
    migration.createTable("endpoint_views_categories").
      column("domain", Types.VARCHAR, 50, false).
      column("endpoint", Types.VARCHAR, 50, false).
      column("name", Types.VARCHAR, 50, false).
      column("category_name", Types.VARCHAR, 50, false).
      column("category_descriptor_id", Types.INTEGER, false).
      pk("domain", "endpoint", "category_name", "name")
    migration.createTable("escalations").
      column("domain", Types.VARCHAR, 50, false, Domain.DEFAULT_DOMAIN.name).
      column("name", Types.VARCHAR, 50, false).
      column("pair_key", Types.VARCHAR, 50, false).
      column("action", Types.VARCHAR, 50, false).
      column("action_type", Types.VARCHAR, 255, false).
      column("event", Types.VARCHAR, 255, false).
      column("origin", Types.VARCHAR, 255, true).
      pk("pair_key", "name")
    // Domain membership (user <-> domain join table).
    migration.createTable("members").
      column("domain_name", Types.VARCHAR, 50, false).
      column("user_name", Types.VARCHAR, 50, false).
      pk("domain_name", "user_name")
    migration.createTable("pair").
      column("domain", Types.VARCHAR, 50, false, Domain.DEFAULT_DOMAIN.name).
      column("pair_key", Types.VARCHAR, 50, false).
      column("upstream", Types.VARCHAR, 50, false).
      column("downstream", Types.VARCHAR, 50, false).
      column("version_policy_name", Types.VARCHAR, 50, true).
      column("matching_timeout", Types.INTEGER, true).
      column("scan_cron_spec", Types.VARCHAR, 50, true).
      column("allow_manual_scans", Types.BIT, 1, true, 0).
      column("events_to_log", Types.INTEGER, 11, false, 0).
      column("max_explain_files", Types.INTEGER, 11, false, 0).
      pk("domain", "pair_key")
    migration.createTable("pair_reports").
      column("domain", Types.VARCHAR, 50, false).
      column("pair_key", Types.VARCHAR, 50, false).
      column("name", Types.VARCHAR, 50, false).
      column("report_type", Types.VARCHAR, 50, false).
      column("target", Types.VARCHAR, 1024, false).
      pk("domain", "pair_key", "name")
    migration.createTable("pair_views").
      column("domain", Types.VARCHAR, 50, false).
      column("pair", Types.VARCHAR, 50, false).
      column("name", Types.VARCHAR, 50, false).
      column("scan_cron_spec", Types.VARCHAR, 50, true).
      pk("domain", "pair", "name")
    migration.createTable("pending_diffs").
      column("oid", Types.INTEGER, false).
      column("domain", Types.VARCHAR, 50, false).
      column("pair", Types.VARCHAR, 50, false).
      column("entity_id", Types.VARCHAR, 50, false).
      column("detected_at", Types.TIMESTAMP, false).
      column("last_seen", Types.TIMESTAMP, false).
      column("upstream_vsn", Types.VARCHAR, 255, true).
      column("downstream_vsn", Types.VARCHAR, 255, true).
      pk("oid").
      withNativeIdentityGenerator()
    // Subtype tables sharing category_descriptor's primary key.
    migration.createTable("prefix_category_descriptor").
      column("id", Types.INTEGER, false).
      pk("id")
    migration.createTable("range_category_descriptor").
      column("id", Types.INTEGER, false).
      column("data_type", Types.VARCHAR, 20, true).
      column("upper_bound", Types.VARCHAR, 255, true).
      column("lower_bound", Types.VARCHAR, 255, true).
      column("max_granularity", Types.VARCHAR, 20, true).
      pk("id")
    migration.createTable("repair_actions").
      column("domain", Types.VARCHAR, 50, false, Domain.DEFAULT_DOMAIN.name).
      column("pair_key", Types.VARCHAR, 50, false).
      column("name", Types.VARCHAR, 50, false).
      column("url", Types.VARCHAR, 1024, true).
      column("scope", Types.VARCHAR, 20, true).
      pk("pair_key", "name")
    migration.createTable("schema_version").
      column("version", Types.INTEGER, false).
      pk("version")
    migration.createTable("set_category_descriptor").
      column("id", Types.INTEGER, false).
      pk("id")
    migration.createTable("set_constraint_values").
      column("value_id", Types.INTEGER, false).
      column("value_name", Types.VARCHAR, 255, false).
      pk("value_id", "value_name")
    migration.createTable("store_checkpoints").
      column("domain", Types.VARCHAR, 50, false).
      column("pair", Types.VARCHAR, 50, false).
      column("latest_version", Types.BIGINT, false).
      pk("domain", "pair")
    // System-wide (non-domain) configuration key/value pairs.
    migration.createTable("system_config_options").
      column("opt_key", Types.VARCHAR, 255, false).
      column("opt_val", Types.VARCHAR, 255, false).
      pk("opt_key")
    migration.createTable("users").
      column("name", Types.VARCHAR, 50, false).
      column("email", Types.VARCHAR, 1024, true).
      column("password_enc", Types.VARCHAR, 100, false, "LOCKED").
      column("superuser", Types.BIT, 1, false, 0).
      column("token", Types.VARCHAR, 50, true).
      pk("name")
    // ---- Foreign keys and constraints -------------------------------------
    migration.alterTable("config_options").
      addForeignKey("fk_cfop_dmns", "domain", "domains", "name")
    migration.alterTable("diffs")
      .addForeignKey("fk_diff_pair", Array("domain", "pair"), "pair", Array("domain", "pair_key"))
    migration.alterTable("endpoint").
      addForeignKey("fk_edpt_dmns", "domain", "domains", "name")
    migration.alterTable("endpoint_categories").
      addForeignKey("fk_epct_edpt", Array("domain", "id"), "endpoint", Array("domain", "name")).
      addForeignKey("fk_epct_ctds", "category_descriptor_id", "category_descriptor", "category_id")
    migration.alterTable("endpoint_views").
      addForeignKey("fk_epvw_edpt", Array("domain", "endpoint"), "endpoint", Array("domain", "name"))
    migration.alterTable("endpoint_views_categories").
      addForeignKey("fk_epvc_ctds", Array("category_descriptor_id"), "category_descriptor", Array("category_id"))
    migration.alterTable("escalations").
      addForeignKey("fk_escl_pair", Array("domain", "pair_key"), "pair", Array("domain", "pair_key"))
    migration.alterTable("pair").
      addForeignKey("fk_pair_dmns", "domain", "domains", "name").
      addForeignKey("fk_pair_upstream_edpt", Array("domain", "upstream"), "endpoint", Array("domain", "name")).
      addForeignKey("fk_pair_downstream_edpt", Array("domain", "downstream"), "endpoint", Array("domain", "name"))
    migration.alterTable("pair_reports").
      addForeignKey("fk_prep_pair", Array("domain", "pair_key"), "pair", Array("domain", "pair_key"))
    migration.alterTable("pair_views").
      addForeignKey("fk_prvw_pair", Array("domain", "pair"), "pair", Array("domain", "pair_key"))
    migration.alterTable("members").
      addForeignKey("fk_mmbs_dmns", "domain_name", "domains", "name").
      addForeignKey("fk_mmbs_user", "user_name", "users", "name")
    migration.alterTable("pending_diffs")
      .addForeignKey("fk_pddf_pair", Array("domain", "pair"), "pair", Array("domain", "pair_key"))
    migration.alterTable("prefix_category_descriptor").
      addForeignKey("fk_pfcd_ctds", "id", "category_descriptor", "category_id")
    migration.alterTable("range_category_descriptor").
      addForeignKey("fk_rctd_ctds", "id", "category_descriptor", "category_id")
    migration.alterTable("repair_actions").
      addForeignKey("fk_rpac_pair", Array("domain", "pair_key"), "pair", Array("domain", "pair_key"))
    migration.alterTable("set_category_descriptor").
      addForeignKey("fk_sctd_ctds", "id", "category_descriptor", "category_id")
    migration.alterTable("set_constraint_values").
      addForeignKey("fk_sctv_ctds", "value_id", "category_descriptor", "category_id")
    migration.alterTable("store_checkpoints").
      addForeignKey("fk_stcp_pair", Array("domain", "pair"), "pair", Array("domain", "pair_key"))
    // API tokens must be unique per user.
    migration.alterTable("users").
      addUniqueConstraint("token")
    // ---- Indexes ----------------------------------------------------------
    migration.createIndex("diff_last_seen", "diffs", "last_seen")
    migration.createIndex("diff_detection", "diffs", "detected_at")
    migration.createIndex("rdiff_is_matched", "diffs", "is_match")
    migration.createIndex("rdiff_domain_idx", "diffs", "entity_id", "domain", "pair")
    migration.createIndex("pdiff_domain_idx", "pending_diffs", "entity_id", "domain", "pair")
    // ---- Seed data --------------------------------------------------------
    migration.insert("domains").values(Map("name" -> Domain.DEFAULT_DOMAIN.name))
    migration.insert("config_options").
      values(Map("domain" -> Domain.DEFAULT_DOMAIN.name, "opt_key" -> "configStore.schemaVersion", "opt_val" -> "0"))
    migration.insert("system_config_options").values(Map(
      "opt_key" -> ConfigOption.eventExplanationLimitKey,
      "opt_val" -> "100"))
    migration.insert("system_config_options").values(Map(
      "opt_key" -> ConfigOption.explainFilesLimitKey,
      "opt_val" -> "20"))
    // Bootstrap superuser account (password stored as a hash).
    migration.insert("users").
      values(Map(
        "name" -> "guest", "email" -> "guest@diffa.io",
        "password_enc" -> "84983c60f7daadc1cb8698621f802c0d9f9a3c3c295c810748fb048115c186ec",
        "superuser" -> Boolean.box(true)))
    // Record the schema version this step produced.
    migration.insert("schema_version").
      values(Map("version" -> new java.lang.Integer(versionId)))
    if (migration.canAnalyze) {
      migration.analyzeTable("diffs");
    }
    migration
  }
}
| aprescott/diffa | kernel/src/main/scala/net/lshift/diffa/kernel/config/migrations/Step0022.scala | Scala | apache-2.0 | 13,187 |
package injector
import java.io.{File, PrintWriter}
import scala.annotation.tailrec
import scala.reflect.macros.blackbox
class InjectorMacro[C <: blackbox.Context](val c: C) {
  import c.universe._

  /**
   * Expands `resolve[T]` on an injector: looks up the accessor function
   * registered under T's fully qualified type-symbol name and casts the
   * produced value back to T.
   */
  def resolve[T: c.WeakTypeTag]: c.Tree = {
    val self = c.prefix
    val tpe = weakTypeOf[T]
    val name = tpe.typeSymbol.fullName
    q"""
      val fun = $self.accessors($name)
      fun().asInstanceOf[$tpe]
    """
  }

  /**
   * Expands a named configuration. `name` must be a string literal; its value
   * becomes the configuration (and generated dot-file) name.
   */
  def configure(name: c.Tree)(xs: Seq[c.Tree]): c.Tree = {
    val configName = name match { case q"${s: String}" ⇒ s }
    configureBase(configName, xs)
  }

  /** Expands the unnamed variant, using the configuration name "default". */
  def configureDefault(xs: Seq[c.Tree]): c.Tree = {
    configureBase("default", xs)
  }

  /**
   * Core of the macro: parses the configuration DSL entries in `xs` into a
   * dependency graph, topologically sorts it, emits one member per binding,
   * optionally writes a Graphviz file, and returns the generated Injector tree.
   */
  def configureBase(configName: String, xs: Seq[c.Tree]): c.Tree = {
    // Wrap types in CT because `c.Type` lacks the equals/hashCode
    // implementations needed to compare and diff types reliably;
    // identity is defined by the type symbol's fully qualified name.
    case class CT(tpe: c.Type) {
      def name = tpe.typeSymbol.name.toString
      def fullName = tpe.typeSymbol.fullName
      override def hashCode(): Int = fullName.hashCode
      override def equals(obj: scala.Any): Boolean = obj match {
        case ct: CT ⇒ ct.fullName == fullName
        case _ ⇒ super.equals(obj)
      }
    }
    // One node of the dependency graph per configuration entry; `n` is the
    // entry's position and feeds the generated member name.
    sealed trait Node {
      def n: Int
      def ct: CT
    }
    sealed trait NodeWithDependencies extends Node {
      def dependencies: List[CT]
    }
    case class Singleton(
      n: Int,
      ct: CT,
      dependencies: List[CT],
      impl: Option[CT],
      lzy: Boolean) extends NodeWithDependencies
    case class Prototype(
      n: Int,
      ct: CT,
      dependencies: List[CT],
      impl: Option[CT]) extends NodeWithDependencies
    case class Instance(
      n: Int,
      ct: CT,
      value: c.Tree
    ) extends Node
    // Abort compilation if an abstract type is bound without `.use[Impl]`.
    def checkIsNotAbstract(t: c.Type, pos: c.Position): Unit = {
      if (t.typeSymbol.isAbstract) {
        c.abort(pos, s"`$t` shouldn't be abstract")
      }
    }
    // Abort compilation if `cls` does not actually inherit `trt`.
    def checkIsSuper(trt: c.Type, cls: c.Type, pos: c.Position): Unit = {
      if (!cls.baseClasses.contains(trt.typeSymbol)) {
        c.abort(pos, s"`$cls` should inherit `$trt`")
      }
    }
    // Take type of all arguments of constructor of `t`
    def extractDependencies(t: c.Type): List[c.Type] = {
      t.decls.toList flatMap {
        case m: MethodSymbol if m.isConstructor ⇒
          m.paramLists.flatten.map(_.typeSignature)
        case _ ⇒ Nil
      }
    }
    // Topological sort: emit a node once it has no outstanding dependencies,
    // removing it from the remaining nodes' dependency lists; nodes that still
    // have dependencies are rotated to the back of the list.
    // NOTE(review): appears not to terminate on cyclic dependencies (the tail
    // keeps rotating forever) — confirm cycles are rejected before this point.
    def topSort[T](gs: List[(T, List[T])]): List[T] = gs match {
      case (value, Nil) :: tail ⇒
        val diff = tail.diff(List(value)) map {
          case (fst, snd) ⇒ (fst, snd.diff(List(value)))
        }
        value :: topSort(diff)
      case node :: tail ⇒ topSort(tail :+ node)
      case _ => Nil
    }
    // Parse every DSL entry (singleton / lazySingleton / prototype / instance,
    // optionally with `.use[Impl]`) into a Node.
    val configuration: List[Node] = {
      // Matches a bare binding constructor, yielding the bound type plus a
      // partially applied Node factory to be completed with index/deps/impl.
      def matchConstructor(tree: c.Tree) = tree match {
        case q"injector.`package`.lazySingleton[${tpe: c.Type}]" ⇒
          (tpe, Singleton(_: Int, CT(tpe), _: List[CT], _: Option[CT], lzy = true))
        case q"injector.`package`.singleton[${tpe: c.Type}]" ⇒
          (tpe, Singleton(_: Int, CT(tpe), _: List[CT], _: Option[CT], lzy = false))
        case q"injector.`package`.prototype[${tpe: c.Type}]" ⇒
          (tpe, Prototype(_: Int, CT(tpe), _: List[CT], _: Option[CT]))
      }
      xs.toList.zipWithIndex map {
        case (q"injector.`package`.instance[${tpe: c.Type}](${expr: c.Tree})", n) ⇒
          Instance(n, CT(tpe), expr)
        case (tree @ q"${expr: c.Tree}.use[${impl: c.Type}]", n) ⇒
          // `.use[Impl]` binds an interface type to a concrete implementation;
          // dependencies come from the implementation's constructor.
          val (tpe, f) = matchConstructor(expr)
          checkIsNotAbstract(impl, tree.pos)
          checkIsSuper(tpe, impl, tree.pos)
          f(n, extractDependencies(impl).map(CT), Some(CT(impl)))
        case (tree, n) ⇒
          val (tpe, f) = matchConstructor(tree)
          checkIsNotAbstract(tpe, tree.pos)
          f(n, extractDependencies(tpe).map(CT), None)
      }
    }
    val configMap = configuration.map(node ⇒ (node.ct, node)).toMap
    // Stable synthetic member names: one `depN` val/def per entry index.
    def nodeName(node: Node): TermName = TermName(s"dep${node.n.toString}")
    def ctName(ct: CT): TermName = nodeName(configMap(ct))
    // Generate one member per node, in topological order so each definition
    // only refers to members already defined above it.
    val definitions = {
      val withDeps = configuration map {
        case x: NodeWithDependencies ⇒ (x.ct, x.dependencies)
        case x ⇒ (x.ct, Nil)
      }
      topSort(withDeps).map(configMap) map {
        case node: NodeWithDependencies ⇒
          // Distributes the dependency argument trees over the (possibly
          // multiple) parameter lists of the target constructor.
          def depsCode(tpe: c.Type) = {
            val arguments = node.dependencies.map(d ⇒ q"${ctName(d)}")
            val sizeOfConstructors = tpe.decls.toList flatMap {
              case m: MethodSymbol if m.isConstructor ⇒
                m.paramLists.map(_.length)
              case _ ⇒ Nil
            }
            // Split the flat argument list into chunks matching the
            // constructor's parameter-list sizes.
            @tailrec def rec(acc: List[List[c.Tree]], sizes: List[Int], args: List[c.Tree]): List[List[c.Tree]] = {
              sizes match {
                case Nil ⇒ acc
                case n :: ns ⇒
                  val (argsPart, tl) = args.splitAt(n)
                  rec(argsPart :: acc, ns, tl)
              }
            }
            rec(Nil, sizeOfConstructors, arguments).reverse
          }
          node match {
            // Eager singleton: plain val, constructed when the injector is built.
            case node @ Singleton(_, value, deps, impl, false) ⇒
              val constructor = impl.getOrElse(value).tpe
              q"val ${nodeName(node)} = new $constructor(...${depsCode(constructor)})"
            // Lazy singleton: constructed on first access.
            case node @ Singleton(_, value, deps, impl, true) ⇒
              val constructor = impl.getOrElse(value).tpe
              q"lazy val ${nodeName(node)} = new $constructor(...${depsCode(constructor)})"
            // Prototype: a def, so each resolution builds a fresh instance.
            case node @ Prototype(_, value, deps, impl) ⇒
              val constructor = impl.getOrElse(value).tpe
              q"def ${nodeName(node)} = new $constructor(...${depsCode(constructor)})"
          }
        case node: Instance ⇒
          // Pre-built instance: bind the user-supplied expression directly.
          q"val ${nodeName(node)} = ${node.value}"
      }
    }
    // Side effect at compile time: emit a Graphviz description of the
    // dependency graph so configurations can be visualised.
    def generateDotFile() = {
      val content = {
        def typeToShape(x: Node): String = x match {
          case _: Singleton ⇒ "box"
          case _: Prototype ⇒ "ellipse"
          case _: Instance ⇒ "component"
        }
        val defs = configuration.map(x ⇒ s"${x.ct.name} [shape=${typeToShape(x)}];")
        def arrows = configuration.
          collect { case x: NodeWithDependencies ⇒ x}.
          flatMap(node ⇒ node.dependencies.map(dep ⇒ s"${dep.name} -> ${node.ct.name};"))
        s"""digraph $configName {
           | ${defs.mkString("\n ")}
           | ${arrows.mkString("\n ")}
           |}
           |""".stripMargin
      }
      // Write only when a ./dot folder already exists.
      val dotFolder = new File("dot")
      if (dotFolder.exists()) {
        val source = new File(dotFolder, configName + ".dot")
        new PrintWriter(source) {
          write(content)
          close()
        }
      }
    }
    generateDotFile()
    // The generated injector: all binding members plus a string-keyed
    // accessor map that `resolve` looks types up in.
    q"""
      new Injector {
        ..$definitions
        val accessors = Map[String, () ⇒ Any](..${
          configuration map { x ⇒
            val dep = nodeName(x)
            q"(${x.ct.fullName}, () => $dep)"
          }
        })
      }
    """
  }
}
/**
 * Macro entry points. Each bundles the incoming macro context into an
 * `InjectorMacro` helper instance and delegates to the matching method.
 */
object InjectorMacro {

  def resolve[T: c.WeakTypeTag](c: blackbox.Context): c.Tree =
    new InjectorMacro[c.type](c).resolve[T]

  def configure(c: blackbox.Context)(name: c.Tree)(xs: c.Tree*): c.Tree =
    new InjectorMacro[c.type](c).configure(name)(xs)

  def configureDefault(c: blackbox.Context)(xs: c.Tree*): c.Tree =
    new InjectorMacro[c.type](c).configureDefault(xs)
}
| fomkin/injector | src/main/scala/injector/InjectorMacro.scala | Scala | apache-2.0 | 7,582 |
package functional
import models._
import play.api.i18n.MessagesApi
import play.api.i18n.{Lang, Messages, MessagesImpl, MessagesProvider}
import java.util.concurrent.TimeUnit
import play.api.test._
import play.api.test.Helpers._
import java.sql.Connection
import helpers.Helper._
import org.specs2.mutable.Specification
import play.api.test.{Helpers, TestServer}
import play.api.i18n.{Lang, Messages}
import play.api.test.TestServer
import java.time.Instant
import play.api.{Application => PlayApp}
import play.api.inject.guice.GuiceApplicationBuilder
import helpers.InjectorSupport
import play.api.db.Database
class CategoryMaintenanceSpec extends Specification with InjectorSupport {
"Category maintenance" should {
"List nothing when there are no categories." in new WithBrowser(
WebDriverFactory(CHROME), appl()
) {
inject[Database].withConnection { implicit conn =>
val currencyInfo = inject[CurrencyRegistry]
val localeInfo = inject[LocaleInfoRepo]
import localeInfo.{En, Ja}
implicit val lang = Lang("ja")
implicit val storeUserRepo = inject[StoreUserRepo]
val Messages = inject[MessagesApi]
implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)
val user = loginWithTestUser(browser)
browser.goTo(
controllers.routes.CategoryMaintenance.editCategory(None).url + "?lang=" + lang.code
)
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.webDriver.getTitle === Messages("commonTitle", Messages("editCategoryTitle"))
browser.find("#langSpec option").size === localeInfo.registry.size
browser.find(".categoryTableBody").size === 0
}
}
"Can query all categories in order." in new WithBrowser(
WebDriverFactory(CHROME), appl()
) {
inject[Database].withConnection { implicit conn =>
val currencyInfo = inject[CurrencyRegistry]
val localeInfo = inject[LocaleInfoRepo]
import localeInfo.{En, Ja}
implicit val lang = Lang("ja")
implicit val storeUserRepo = inject[StoreUserRepo]
val Messages = inject[MessagesApi]
implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)
val user = loginWithTestUser(browser)
browser.goTo(
controllers.routes.CategoryMaintenance.startCreateNewCategory().url + "?lang=" + lang.code
)
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.webDriver.getTitle === Messages("commonTitle", Messages("createNewCategoryTitle"))
browser.find("#langId option").size === localeInfo.registry.size
browser.find("#langId option[value='" + localeInfo.Ja.id + "']").click()
browser.find("#categoryName").fill().`with`("カテゴリ001")
browser.find("#createNewCategoryButton").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
11 to 2 by -1 foreach { i =>
browser.find("#langId option[value='" + localeInfo.Ja.id + "']").click()
browser.find("#categoryName").fill().`with`(f"カテゴリ$i%03d")
browser.find("#createNewCategoryButton").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
}
browser.goTo(
controllers.routes.CategoryMaintenance.editCategory(None).url + "?lang=" + lang.code
)
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.find("tr.categoryTableBody").size === 10
browser.find(".categoryTableName").index(0).text === "カテゴリ001"
browser.find(".categoryTableName").index(1).text === "カテゴリ011"
browser.find(".categoryTableName").index(2).text === "カテゴリ010"
browser.find(".categoryTableName").index(3).text === "カテゴリ009"
browser.find(".categoryTableName").index(4).text === "カテゴリ008"
browser.find(".categoryTableName").index(5).text === "カテゴリ007"
browser.find(".categoryTableName").index(6).text === "カテゴリ006"
browser.find(".categoryTableName").index(7).text === "カテゴリ005"
browser.find(".categoryTableName").index(8).text === "カテゴリ004"
browser.find(".categoryTableName").index(9).text === "カテゴリ003"
browser.find(".nextPageButton").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.find("tr.categoryTableBody").size === 1
browser.find(".categoryTableName").text === "カテゴリ002"
browser.find(".categoryTableHeaderId .orderColumn").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
// Reverse order by id
browser.find(".categoryTableName").index(0).text === "カテゴリ002"
browser.find(".categoryTableName").index(1).text === "カテゴリ003"
browser.find(".categoryTableName").index(2).text === "カテゴリ004"
browser.find(".categoryTableName").index(3).text === "カテゴリ005"
browser.find(".categoryTableName").index(4).text === "カテゴリ006"
browser.find(".categoryTableName").index(5).text === "カテゴリ007"
browser.find(".categoryTableName").index(6).text === "カテゴリ008"
browser.find(".categoryTableName").index(7).text === "カテゴリ009"
browser.find(".categoryTableName").index(8).text === "カテゴリ010"
browser.find(".categoryTableName").index(9).text === "カテゴリ011"
browser.find(".nextPageButton").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.find("tr.categoryTableBody").size === 1
browser.find(".categoryTableName").text === "カテゴリ001"
browser.find(".categoryTableHeaderName .orderColumn").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.find(".categoryTableName").index(0).text === "カテゴリ001"
browser.find(".categoryTableName").index(1).text === "カテゴリ002"
browser.find(".categoryTableName").index(2).text === "カテゴリ003"
browser.find(".categoryTableName").index(3).text === "カテゴリ004"
browser.find(".categoryTableName").index(4).text === "カテゴリ005"
browser.find(".categoryTableName").index(5).text === "カテゴリ006"
browser.find(".categoryTableName").index(6).text === "カテゴリ007"
browser.find(".categoryTableName").index(7).text === "カテゴリ008"
browser.find(".categoryTableName").index(8).text === "カテゴリ009"
browser.find(".categoryTableName").index(9).text === "カテゴリ010"
browser.find(".nextPageButton").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.find("tr.categoryTableBody").size === 1
browser.find(".categoryTableName").text === "カテゴリ011"
}
}
"Can change category name." in new WithBrowser(
WebDriverFactory(CHROME), appl()
) {
inject[Database].withConnection { implicit conn =>
val currencyInfo = inject[CurrencyRegistry]
val localeInfo = inject[LocaleInfoRepo]
import localeInfo.{En, Ja}
implicit val lang = Lang("ja")
implicit val storeUserRepo = inject[StoreUserRepo]
val Messages = inject[MessagesApi]
implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)
val user = loginWithTestUser(browser)
browser.goTo(
controllers.routes.CategoryMaintenance.startCreateNewCategory().url + "?lang=" + lang.code
)
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.webDriver.getTitle === Messages("commonTitle", Messages("createNewCategoryTitle"))
browser.find("#langId option").size === localeInfo.registry.size
browser.find("#langId option[value='" + localeInfo.Ja.id + "']").click()
browser.find("#categoryName").fill().`with`("カテゴリ001")
browser.find("#createNewCategoryButton").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
val page: Page[(Category, CategoryName)] = inject[CategoryRepo].list(page = 0, pageSize = 10, locale = Ja)
page.total === 1
page.list.head._1.id.get.toString === page.list.head._1.categoryCode
browser.goTo(
controllers.routes.CategoryMaintenance.editCategory(None).url + "?lang=" + lang.code
)
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.find(".categoryTableName").index(0).text === "カテゴリ001"
browser.find("#langSpec option[value='" + localeInfo.En.id + "']").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.find(".categoryTableName").index(0).text === "-"
browser.find(".editCategoryNameLink").index(0).click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.webDriver.getTitle === Messages("commonTitle", Messages("editCategoryNameTitle"))
browser.find(".langName").text === Messages("lang.ja")
browser.find("#categoryNames_0_name").attribute("value") === "カテゴリ001"
browser.find("#categoryNames_0_name").fill().`with`("カテゴリ999")
browser.find("#submitCategoryNameUpdate").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.webDriver.getTitle === Messages("commonTitle", Messages("editCategoryTitle"))
browser.find(".editCategoryNameLink").index(0).click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.webDriver.getTitle === Messages("commonTitle", Messages("editCategoryNameTitle"))
browser.find("#createCategoryNameForm #localeId option[value='" + localeInfo.En.id + "']").click()
browser.find("#createCategoryNameForm #name").fill().`with`("category999")
browser.find("#submitCategoryNameCreate").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.webDriver.getTitle === Messages("commonTitle", Messages("editCategoryNameTitle"))
if (browser.find(".langName").index(0).text == Messages("lang.ja")) {
browser.find("#categoryNames_0_name").attribute("value") === "カテゴリ999"
browser.find("#categoryNames_1_name").attribute("value") === "category999"
browser.find(".updateCategoryName button").index(0).click()
}
else {
browser.find("#categoryNames_0_name").attribute("value") === "category999"
browser.find("#categoryNames_1_name").attribute("value") === "カテゴリ999"
browser.find(".updateCategoryName button").index(1).click()
}
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.find(".langName").size === 1
browser.find(".langName").text == Messages("lang.en")
}
}
"Can change category code." in new WithBrowser(
WebDriverFactory(CHROME), appl()
) {
inject[Database].withConnection { implicit conn =>
val currencyInfo = inject[CurrencyRegistry]
val localeInfo = inject[LocaleInfoRepo]
import localeInfo.{En, Ja}
implicit val lang = Lang("ja")
implicit val storeUserRepo = inject[StoreUserRepo]
val Messages = inject[MessagesApi]
implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)
val user = loginWithTestUser(browser)
val cat01 = inject[CategoryRepo].createNew(Map(Ja -> "Cat01"))
val cat02 = inject[CategoryRepo].createNew(Map(Ja -> "Cat02"))
browser.goTo(
controllers.routes.CategoryMaintenance.editCategory(None).url + "?lang=" + lang.code
)
browser.find(".editCategoryNameLink").index(0).text === cat01.id.get.toString
browser.find(".editCategoryNameLink").index(1).text === cat02.id.get.toString
browser.find(".editCategoryCodeLink").index(0).text === cat01.categoryCode
browser.find(".editCategoryCodeLink").index(1).text === cat02.categoryCode
browser.find(".editCategoryCodeLink").index(0).click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.find("#categoryCode_field dd[class='info']").index(0).text === Messages("constraint.required")
browser.find("#categoryCode_field dd[class='info']").index(1).text === Messages("constraint.maxLength", 20)
browser.find("#categoryCode_field dd[class='info']").index(2).text === Messages("categoryCodePattern")
browser.find("#submitCategoryCodeUpdate").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.find("#categoryCode_field dd[class='error']").index(0).text === Messages("error.required")
browser.find("#categoryCode_field dd[class='error']").index(1).text === Messages("categoryCodePatternError")
browser.find("#categoryCode").fill().`with`("#")
browser.find("#submitCategoryCodeUpdate").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.find("#categoryCode_field dd[class='error']").index(0).text === Messages("categoryCodePatternError")
browser.find("#categoryCode").fill().`with`("123456789012345678901")
browser.find("#submitCategoryCodeUpdate").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.find("#categoryCode_field dd[class='error']").index(0).text === Messages("error.maxLength", 20)
browser.find("#categoryCode").fill().`with`("ABCD")
browser.find("#submitCategoryCodeUpdate").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.find(".editCategoryCodeLink").index(0).text === "ABCD"
}
}
}
}
| ruimo/store2 | test/functional/CategoryMaintenanceSpec.scala | Scala | apache-2.0 | 14,081 |
package org.k33nteam.jade.propagation.methodplugins
import org.k33nteam.jade.propagation.base.IAPICheckPlugin
import org.k33nteam.jade.helpers.SyntaxHelpers._
import org.k33nteam.jade.bean.VulnResult
import soot.Scene
import soot.SootMethod
import scala.collection.mutable.Map
import soot.jimple.InstanceInvokeExpr
import soot.jimple.IntConstant
import soot.jimple.Stmt
import scala.collection.JavaConversions._
object WebviewFileDomainPlugin extends IAPICheckPlugin{
private val ALLOW_FILE_ACCESS: Int = 0x1
private val ALLOW_UNIVERSAL_ACCESS_FROM_FILE_URLS: Int = 0x10
private val ALLOW_FILE_ACCESS_FROM_FILE_URLS: Int = 0x100
private val collections: Map[String, Integer] = Map(
"void setAllowFileAccess(boolean)" -> 0x1,
"void setAllowFileAccessFromFileURLs(boolean)" -> 0x10,
"void setAllowUniversalAccessFromFileURLs(boolean)" -> 0x100
)
private def translateLevelToString(level: Int): String = {
val builder: StringBuilder = new StringBuilder
if ((level & ALLOW_FILE_ACCESS) != 0) {
builder.append(" allow-file-access\\n")
}
if ((level & ALLOW_UNIVERSAL_ACCESS_FROM_FILE_URLS) != 0) {
builder.append(" allow-universal-access-from-file-urls\\n")
}
if ((level & ALLOW_FILE_ACCESS_FROM_FILE_URLS) != 0) {
builder.append(" allow-file-access-from-file-urls\\n")
}
return builder.toString
}
def check(scene: Scene) : Map[SootMethod, (String,Int)] = {
val results= Map[SootMethod, (String,Int)]()
for (sootClass <- scene.getApplicationClasses) {
for (sootMethod <- sootClass.getMethods; if sootMethod.hasActiveBody) {
var ret: Int = 0
for (stmt <- sootMethod.getActiveBody.getUnits) {
if (stmt.containsInvokeExpr && stmt.getInvokeExpr.isInstanceOf[InstanceInvokeExpr]) {
val instanceInvokeExpr: InstanceInvokeExpr = stmt.getInvokeExpr.asInstanceOf[InstanceInvokeExpr]
val invokeName: String = instanceInvokeExpr.getMethod.getSubSignature
if ((collections contains invokeName) && instanceInvokeExpr.getArgCount == 1 && instanceInvokeExpr.getArg(0).isInstanceOf[IntConstant]) {
val intConstant: IntConstant = instanceInvokeExpr.getArg(0).asInstanceOf[IntConstant]
if (intConstant.value == 1) {
ret = ret | collections.getOrElse[Integer](invokeName, 0)
}
}
}
}
if (ret != 0) {
results += (sootMethod -> (translateLevelToString(ret),ret))
}
}
}
results
}
override def getResult(scene: Scene): Iterable[VulnResult] =
{
val methodmaps = check(scene)
methodmaps.map({
case (method, (string, weight)) => VulnResult.toMethodAPIVulnResult(method, getDesc, string, score = weight*0.1f)
})
}
override def getDesc(): String = "Webview js file access misconfigurations"
}
| flankerhqd/JAADAS | jade/src/main/scala/org/k33nteam/jade/propagation/methodplugins/WebviewFileDomainPlugin.scala | Scala | gpl-3.0 | 2,875 |
package com.tpl.hamcraft.machines.incubator
import net.bdew.lib.machine.{ProcessorMachine, Machine}
import net.bdew.lib.gui.GuiProvider
import cpw.mods.fml.relauncher.{Side, SideOnly}
import net.minecraft.entity.player.EntityPlayer
import net.minecraft.client.gui.inventory.GuiContainer
import net.minecraft.inventory.Container
class MachineIncubator extends Machine("Incubator", new BlockIncubator(_)) with GuiProvider with ProcessorMachine {
def guiId = 3
type TEClass = TileIncubator
lazy val tankSize = tuning.getInt("TankSize")
lazy val feedPerCycle = tuning.getInt("feedPerCycle")
@SideOnly(Side.CLIENT)
def getGui(te: TileIncubator, player: EntityPlayer): GuiContainer = new GuiIncubator(te, player)
def getContainer(te: TileIncubator, player: EntityPlayer): Container = new ContainerIncubator(te, player)
}
| piotrb/hamcraft | src/main/scala/com/tpl/hamcraft/machines/incubator/MachineIncubator.scala | Scala | bsd-2-clause | 833 |
package org.skycastle.network.protocol.binary
import java.math.BigInteger
import java.nio.ByteBuffer
/**
* Utility methods for dealing with packed numbers.
*
* @author Hans Haggstrom
*/
// TODO: Implement the packing algorithm directly, instead of instantiating a BigInteger.
object PackedNumbers {
private val MAX_NUMBER_OF_NUMBER_BYTES = 10
/**
* Calculates the number of bytes a given number will use when packed.
*/
def length( value : Long ) : Int = {
if (value > Math.MIN_BYTE + MAX_NUMBER_OF_NUMBER_BYTES && value <= Math.MAX_BYTE) {
// The number fits in one byte, above the number-of-bytes indicator area
1
}
else {
val bytes = BigInteger.valueOf( value ).toByteArray
val numBytes = bytes.length
if (numBytes > MAX_NUMBER_OF_NUMBER_BYTES) throw new IllegalStateException( "Problem when encoding packed number "+value+", way too big BigInteger representation." )
else if (numBytes <= 0) throw new IllegalStateException( "Problem when encoding packed number "+value+", empty representation." )
numBytes + 1 // The bytes used to store the number + indicator byte.
}
}
/**
* Encodes values between around -110 to 127 in one byte, and larger values in as many bytes as necessary + 1
*/
def encode( buffer : ByteBuffer, value : Long ) {
if (value > Math.MIN_BYTE + MAX_NUMBER_OF_NUMBER_BYTES && value <= Math.MAX_BYTE) {
// The number fits in one byte, above the number-of-bytes indicator area
buffer.put(value.toByte)
}
else {
val bytes = BigInteger.valueOf( value ).toByteArray
val numBytes = bytes.length
if (numBytes > MAX_NUMBER_OF_NUMBER_BYTES) throw new IllegalStateException( "Problem when encoding packed number "+value+", way too big BigInteger representation." )
else if (numBytes <= 0) throw new IllegalStateException( "Problem when encoding packed number "+value+", empty representation." )
// Encode number of bytes used near the negative lower range of a byte
val indicatorByte : Byte = (Math.MIN_BYTE + numBytes).toByte
buffer.put( indicatorByte )
buffer.put( bytes )
}
}
/**
* Extracts an encoded packed number.
*/
def decode( buffer : ByteBuffer ) : Long = {
val indicatorByte : Byte = buffer.get
if (indicatorByte > Math.MIN_BYTE + MAX_NUMBER_OF_NUMBER_BYTES) {
// The number is small, was stored in the first byte
indicatorByte.toLong
}
else {
// Extract number of bytes in representation
val numBytes = (indicatorByte.toInt) - Math.MIN_BYTE
if (numBytes > MAX_NUMBER_OF_NUMBER_BYTES) throw new IllegalStateException( "Problem when decoding packed number, too many bytes in representation ("+numBytes+")." )
else if (numBytes <= 0 ) throw new IllegalStateException( "Problem when decoding packed number, no bytes in representation." )
// Read representation
val bytes = new Array[Byte](numBytes)
buffer.get( bytes )
// Initialize to big integer, and get Long value
new BigInteger( bytes ).longValue
}
}
}
| weimingtom/skycastle | src/main/scala/org/skycastle/network/protocol/binary/PackedNumbers.scala | Scala | gpl-2.0 | 3,109 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.shuffle.sort
import java.util.concurrent.ConcurrentHashMap
import org.apache.spark._
import org.apache.spark.serializer.Serializer
import org.apache.spark.shuffle._
/**
* In sort-based shuffle, incoming records are sorted according to their target partition ids, then
* written to a single map output file. Reducers fetch contiguous regions of this file in order to
* read their portion of the map output. In cases where the map output data is too large to fit in
* memory, sorted subsets of the output can are spilled to disk and those on-disk files are merged
* to produce the final output file.
*
* Sort-based shuffle has two different write paths for producing its map output files:
*
* - Serialized sorting: used when all three of the following conditions hold:
* 1. The shuffle dependency specifies no aggregation or output ordering.
* 2. The shuffle serializer supports relocation of serialized values (this is currently
* supported by KryoSerializer and Spark SQL's custom serializers).
* 3. The shuffle produces fewer than 16777216 output partitions.
* - Deserialized sorting: used to handle all other cases.
*
* -----------------------
* Serialized sorting mode
* -----------------------
*
* In the serialized sorting mode, incoming records are serialized as soon as they are passed to the
* shuffle writer and are buffered in a serialized form during sorting. This write path implements
* several optimizations:
*
* - Its sort operates on serialized binary data rather than Java objects, which reduces memory
* consumption and GC overheads. This optimization requires the record serializer to have certain
* properties to allow serialized records to be re-ordered without requiring deserialization.
* See SPARK-4550, where this optimization was first proposed and implemented, for more details.
*
* - It uses a specialized cache-efficient sorter ([[ShuffleExternalSorter]]) that sorts
* arrays of compressed record pointers and partition ids. By using only 8 bytes of space per
* record in the sorting array, this fits more of the array into cache.
*
* - The spill merging procedure operates on blocks of serialized records that belong to the same
* partition and does not need to deserialize records during the merge.
*
* - When the spill compression codec supports concatenation of compressed data, the spill merge
* simply concatenates the serialized and compressed spill partitions to produce the final output
* partition. This allows efficient data copying methods, like NIO's `transferTo`, to be used
* and avoids the need to allocate decompression or copying buffers during the merge.
*
* For more details on these optimizations, see SPARK-7081.
*/
private[spark] class SortShuffleManager(conf: SparkConf) extends ShuffleManager with Logging {
if (!conf.getBoolean("spark.shuffle.spill", true)) {
logWarning(
"spark.shuffle.spill was set to false, but this configuration is ignored as of Spark 1.6+." +
" Shuffle will continue to spill to disk when necessary.")
}
/**
* A mapping from shuffle ids to the number of mappers producing output for those shuffles.
*/
private[this] val numMapsForShuffle = new ConcurrentHashMap[Int, Int]()
override val shuffleBlockResolver = new IndexShuffleBlockResolver(conf)
/**
* Register a shuffle with the manager and obtain a handle for it to pass to tasks.
*/
override def registerShuffle[K, V, C](
shuffleId: Int,
numMaps: Int,
dependency: ShuffleDependency[K, V, C]): ShuffleHandle = {
if (SortShuffleWriter.shouldBypassMergeSort(SparkEnv.get.conf, dependency)) {
// If there are fewer than spark.shuffle.sort.bypassMergeThreshold partitions and we don't
// need map-side aggregation, then write numPartitions files directly and just concatenate
// them at the end. This avoids doing serialization and deserialization twice to merge
// together the spilled files, which would happen with the normal code path. The downside is
// having multiple files open at a time and thus more memory allocated to buffers.
new BypassMergeSortShuffleHandle[K, V](
shuffleId, numMaps, dependency.asInstanceOf[ShuffleDependency[K, V, V]])
} else if (SortShuffleManager.canUseSerializedShuffle(dependency)) {
// Otherwise, try to buffer map outputs in a serialized form, since this is more efficient:
new SerializedShuffleHandle[K, V](
shuffleId, numMaps, dependency.asInstanceOf[ShuffleDependency[K, V, V]])
} else {
// Otherwise, buffer map outputs in a deserialized form:
new BaseShuffleHandle(shuffleId, numMaps, dependency)
}
}
/**
* Get a reader for a range of reduce partitions (startPartition to endPartition-1, inclusive).
* Called on executors by reduce tasks.
*/
override def getReader[K, C](
handle: ShuffleHandle,
startPartition: Int,
endPartition: Int,
context: TaskContext): ShuffleReader[K, C] = {
new BlockStoreShuffleReader(
handle.asInstanceOf[BaseShuffleHandle[K, _, C]], startPartition, endPartition, context)
}
/** Get a writer for a given partition. Called on executors by map tasks. */
override def getWriter[K, V](
handle: ShuffleHandle,
mapId: Int,
context: TaskContext): ShuffleWriter[K, V] = {
numMapsForShuffle.putIfAbsent(
handle.shuffleId, handle.asInstanceOf[BaseShuffleHandle[_, _, _]].numMaps)
val env = SparkEnv.get
handle match {
case unsafeShuffleHandle: SerializedShuffleHandle[K @unchecked, V @unchecked] =>
new UnsafeShuffleWriter(
env.blockManager,
shuffleBlockResolver.asInstanceOf[IndexShuffleBlockResolver],
context.taskMemoryManager(),
unsafeShuffleHandle,
mapId,
context,
env.conf)
case bypassMergeSortHandle: BypassMergeSortShuffleHandle[K @unchecked, V @unchecked] =>
new BypassMergeSortShuffleWriter(
env.blockManager,
shuffleBlockResolver.asInstanceOf[IndexShuffleBlockResolver],
bypassMergeSortHandle,
mapId,
context,
env.conf)
case other: BaseShuffleHandle[K @unchecked, V @unchecked, _] =>
new SortShuffleWriter(shuffleBlockResolver, other, mapId, context)
}
}
/** Remove a shuffle's metadata from the ShuffleManager. */
override def unregisterShuffle(shuffleId: Int): Boolean = {
Option(numMapsForShuffle.remove(shuffleId)).foreach { numMaps =>
(0 until numMaps).foreach { mapId =>
shuffleBlockResolver.removeDataByMap(shuffleId, mapId)
}
}
true
}
/** Shut down this ShuffleManager. */
override def stop(): Unit = {
shuffleBlockResolver.stop()
}
}
private[spark] object SortShuffleManager extends Logging {
/**
* The maximum number of shuffle output partitions that SortShuffleManager supports when
* buffering map outputs in a serialized form. This is an extreme defensive programming measure,
* since it's extremely unlikely that a single shuffle produces over 16 million output partitions.
* */
val MAX_SHUFFLE_OUTPUT_PARTITIONS_FOR_SERIALIZED_MODE =
PackedRecordPointer.MAXIMUM_PARTITION_ID + 1
/**
* Helper method for determining whether a shuffle should use an optimized serialized shuffle
* path or whether it should fall back to the original path that operates on deserialized objects.
*/
def canUseSerializedShuffle(dependency: ShuffleDependency[_, _, _]): Boolean = {
val shufId = dependency.shuffleId
val numPartitions = dependency.partitioner.numPartitions
val serializer = Serializer.getSerializer(dependency.serializer)
if (!serializer.supportsRelocationOfSerializedObjects) {
log.debug(s"Can't use serialized shuffle for shuffle $shufId because the serializer, " +
s"${serializer.getClass.getName}, does not support object relocation")
false
} else if (dependency.aggregator.isDefined) {
log.debug(
s"Can't use serialized shuffle for shuffle $shufId because an aggregator is defined")
false
} else if (numPartitions > MAX_SHUFFLE_OUTPUT_PARTITIONS_FOR_SERIALIZED_MODE) {
log.debug(s"Can't use serialized shuffle for shuffle $shufId because it has more than " +
s"$MAX_SHUFFLE_OUTPUT_PARTITIONS_FOR_SERIALIZED_MODE partitions")
false
} else {
log.debug(s"Can use serialized shuffle for shuffle $shufId")
true
}
}
}
/**
* Subclass of [[BaseShuffleHandle]], used to identify when we've chosen to use the
* serialized shuffle.
*/
private[spark] class SerializedShuffleHandle[K, V](
shuffleId: Int,
numMaps: Int,
dependency: ShuffleDependency[K, V, V])
extends BaseShuffleHandle(shuffleId, numMaps, dependency) {
}
/**
* Subclass of [[BaseShuffleHandle]], used to identify when we've chosen to use the
* bypass merge sort shuffle path.
*/
private[spark] class BypassMergeSortShuffleHandle[K, V](
shuffleId: Int,
numMaps: Int,
dependency: ShuffleDependency[K, V, V])
extends BaseShuffleHandle(shuffleId, numMaps, dependency) {
}
| chenc10/Spark-PAF | core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleManager.scala | Scala | apache-2.0 | 9,997 |
class C(private val x: Any) extends AnyVal
// Checking that makeNotPrivate(paramAccessor) doesn't make this visible during typer.
// The output is identical with/without `extends AnyVal`.
object Test {
new p1.A(x).x
new B(x).x
new C(x).x
}
| yusuke2255/dotty | tests/untried/neg/t7859/B_2.scala | Scala | bsd-3-clause | 247 |
package net.sansa_stack.rdf.flink.qualityassessment.metrics.relevancy
import net.sansa_stack.rdf.common.qualityassessment.utils.NodeUtils._
import org.apache.flink.api.scala._
import org.apache.jena.graph.{Node, Triple}
/**
* @author Gezim Sejdiu
*/
object CoverageScope {
/**
* This metric calculate the coverage of a dataset referring to the covered scope.
* This covered scope is expressed as the number of 'instances' statements are made about.
*/
def assessCoverageScope(dataset: DataSet[Triple]): Double = {
val triples = dataset.count().toDouble
// ?o a rdfs:Class UNION ?o a owl:Class
val instances = dataset.filter(f => isRDFSClass(f.getPredicate)).map(_.getObject).distinct(_.hashCode())
.union(dataset.filter(f => isOWLClass(f.getPredicate)).map(_.getObject).distinct(_.hashCode()))
.count().toDouble
val value = if (triples > 0.0) {
instances / triples
} else 0
value
}
}
| SANSA-Stack/Spark-RDF | sansa-rdf-flink/src/main/scala/net/sansa_stack/rdf/flink/qualityassessment/metrics/relevancy/CoverageScope.scala | Scala | gpl-3.0 | 955 |
package scalaxy.streams
import scala.collection.breakOut
trait TuploidValues extends Utils with Tuploids
{
val global: scala.reflect.api.Universe
import global._
import definitions._
// private[this] def isTupleType(tpe: Type): Boolean =
// Option(tpe).exists(t => isTupleSymbol(t.typeSymbol))
// private[this] def isTupleSymbol(sym: Symbol): Boolean =
// Option(sym).exists(_.fullName.toString.matches("scala\\\\.Tuple\\\\d+"))
object TupleType {
def unapply(tpe: Type): Boolean = isTupleType(tpe)
}
private[this] lazy val primTypes =
Set(IntTpe, LongTpe, ShortTpe, CharTpe, BooleanTpe, DoubleTpe, FloatTpe, ByteTpe)
private[this] def isPrimitiveType(tpe: Type) =
Option(tpe).map(normalize).exists(primTypes.contains)
private[this] def getTupleComponentTypes(tpe: Type): List[Type] = {
tpe match {
case ref @ TypeRef(pre, sym, args @ (_ :: _)) if isTupleTypeRef(ref) => args
}
}
// def isTupleTypeRef(ref: TypeRef): Boolean = {
// !ref.args.isEmpty &&
// ref.pre.typeSymbol == ScalaPackageClass &&
// isTupleSymbol(ref.sym)
// }
def isValOrVar(s: Symbol): Boolean =
s.isTerm && !s.isMethod && !s.isJava
def isStableNonLazyVal(ts: TermSymbol): Boolean =
ts.isStable && ts.isVal && !ts.isLazy
def isImmutableClassMember(s: Symbol): Boolean = {
//println(s + " <- " + s.owner + " overrides " + s.allOverriddenSymbols)
//println(s"\\tisFinal = ${s.isFinal}, isMethod = ${s.isMethod}, isTerm = ${s.isTerm}")
if (isValOrVar(s)) {
isStableNonLazyVal(s.asTerm)
} else {
// Either a method or a sub-type
true
}
}
// A tuploid is a scalar, a tuple of tuploids or an immutable case class with tuploid fields.
def isTuploidType(tpe: Type): Boolean = tpe != null && {
isPrimitiveType(tpe) ||
isTupleType(tpe) && getTupleComponentTypes(tpe).forall(isTuploidType _) ||
{
tpe.decls.exists(isValOrVar _) &&
tpe.decls.forall(isImmutableClassMember _)
}
}
  /** Extractor for tuple-construction trees whose static type is a tuple type,
    * yielding the component sub-trees in order. Matches both the direct form
    * `TupleN[..Ts](..args)` and the factory form `TupleN.apply[..Ts](..args)`. */
  object TupleCreation {
    def unapply(tree: Tree): Option[List[Tree]] =
      // Null-safe: only consider trees whose type is a tuple type.
      Option(tree).filter(tree => TupleType.unapply(tree.tpe)) collect {
        // Constructor-style call, e.g. Tuple2[Int, String](a, b).
        case q"$tup[..${_}](..$subs)" if isTupleSymbol(tup.symbol) =>
          subs
        // Factory call, e.g. Tuple2.apply[Int, String](a, b).
        case q"$tup.apply[..${_}](..$subs)" if isTupleSymbol(tup.symbol) =>
          subs
      }
  }
  /** Path from the root of a tuploid down to one of its components, as
    * 0-based component indices at successive tuple levels
    * (e.g. `List(0, 1)` designates `x._1._2`). */
  type TuploidPath = List[Int]
  /** The empty path: designates the tuploid value itself. */
  val RootTuploidPath = Nil
  /** Result of `createTuploidPathsExtractionDecls`:
    * `statements` are the generated local var declarations and assignments that
    * pull the requested tuple components out of the target;
    * `value` is the tuploid view of those components, aliased to the fresh vars;
    * `coercionSuccessVarDefRef` is the (definition, reference) pair of the
    * coercion-success flag, kept only when the generated code actually guards
    * on it. */
  case class TuploidPathsExtractionDecls(
    statements: List[Tree],
    value: TuploidValue[Tree],
    coercionSuccessVarDefRef: (Option[Tree], Option[Tree]))
  /** Generates statements that extract the tuple components designated by
    * `paths` out of `target` into fresh local vars, and returns a tuploid
    * value whose aliases point at those vars.
    *
    * @param tpe static type reported for the resulting root TupleValue
    * @param target typed tree the components are read from
    * @param paths set of tuploid paths to extract
    * @param fresh generator of collision-free local names
    * @param typed type-checking callback; generated trees are typed before use
    * @param coercionSuccessVarDefRef optional (definition, reference) of a
    *        boolean var used to guard extraction behind a null/success check
    * @return the extraction statements, the aliased tuploid value, and the
    *         success-var pair (dropped unless the guard was actually emitted)
    */
  def createTuploidPathsExtractionDecls(
      tpe: Type,
      target: Tree,
      paths: Set[TuploidPath],
      fresh: String => TermName,
      typed: Tree => Tree,
      coercionSuccessVarDefRef: (Option[Tree], Option[Tree]) = (None, None))
        : TuploidPathsExtractionDecls =
  {
    // Set to true iff the guarded (null-checked) assignment form is generated below.
    var coerces = false
    // Recursively builds (var declarations, assignments, tuploid value) for the
    // sub-paths of `paths` relative to `target`.
    def aux(tpe: Type, target: Tree, paths: Set[TuploidPath])
        : (List[Tree], List[Tree], TuploidValue[Tree]) = {
      // Group the non-empty sub-paths by their first component index (0-based).
      val headToSubs = for ((head, pathsWithSameHead) <- paths.filter(_.nonEmpty).groupBy(_.head)) yield {
        val subPaths = pathsWithSameHead.map(_.tail)
        // Tuple accessor name: component 0 -> "_1", component 1 -> "_2", ...
        val selector = "_" + (head + 1)
        val name = fresh(selector)
        val rhs = typed(q"$target.${TermName(selector)}")
        val subTpe = rhs.tpe
        // Type-check "var x; x = rhs; x" as one block, then split it to recover
        // the typed declaration, the assignment and a reference to the var.
        val Block(List(decl, assign), ref) = typed(q"""
          ${newVar(name, subTpe)};
          $name = $rhs;
          $name
        """)
        // Recurse on the remaining path tails, reading from the fresh var.
        val (subDecls, subAssigns, subValue) =
          aux(rhs.tpe, ref, subPaths)
        (decl :: subDecls, assign :: subAssigns, head -> subValue)
      }
      val subDecls: List[Tree] = headToSubs.flatMap(_._1).toList
      val subAssigns: List[Tree] = headToSubs.flatMap(_._2).toList
      // If a success var was supplied and there is anything to assign, wrap the
      // assignments in a `target ne null && success` guard that clears the flag
      // on failure; otherwise emit the assignments unguarded.
      val assigns: List[Tree] = coercionSuccessVarDefRef match {
        case (Some(successVarDef), Some(successVarRef))
            if subAssigns != Nil =>
          coerces = true
          // Trailing `null` is a dummy expression; only the statements are kept.
          val Block(statements, _) = typed(q"""
            $successVarDef;
            if ((${target.duplicate} ne null) &&
                ${successVarRef.duplicate}) {
              ..$subAssigns
            } else {
              ${successVarRef.duplicate} = false;
            };
            null
          """)
          statements
        case _ =>
          subAssigns
      }
      (
        subDecls,
        assigns,
        TupleValue[Tree](
          tpe = tpe,//target.tpe,
          values = headToSubs.map(_._3).toMap,
          alias = target.asOption,
          couldBeNull = false)
      )
    }
    val (defs, assigns, value) = aux(tpe, target, paths)
    // Type-check declarations + assignments together; the dummy "" result of
    // the block is discarded, only the statement list is kept.
    val statements =
      if (defs.isEmpty && assigns.isEmpty)
        Nil
      else {
        val Block(list, _) = typed(q"""
          ..${defs ++ assigns};
          ""
        """)
        list
      }
    // Only surface the success var pair if the guard was actually generated.
    val ret = TuploidPathsExtractionDecls(
      statements = statements,
      value = value,
      if (coerces) coercionSuccessVarDefRef else (None, None))
    // println(s"""
    // createTuploidPathsExtractionDecls
    //   target: $target
    //   paths: $paths
    //   ret: $ret
    //     defs: $defs
    //     assigns: $assigns
    //     statements: $statements
    //   coercionSuccessVarDefRef: $coercionSuccessVarDefRef
    // """)
    ret
  }
  /** A tuploid value is either a scalar or a tuple of tuploid values. */
  sealed trait TuploidValue[A]
  {
    // Collects every match of `pf` over (path, sub-value) pairs, as a Set.
    def collectSet[B](pf: PartialFunction[(TuploidPath, TuploidValue[A]), B]): Set[B] =
      collect(pf).toSet
    // Collects every match of `pf` over (path, sub-value) pairs, as a Map.
    def collectMap[B, C](pf: PartialFunction[(TuploidPath, TuploidValue[A]), (B, C)]): Map[B, C] =
      collect(pf).toMap
    // Depth-first collection: applies `pf` to every (path, sub-value) pair for
    // which it is defined, in traversal order (see foreachDefined).
    def collect[B](pf: PartialFunction[(TuploidPath, TuploidValue[A]), B]): List[B] = {
      val res = collection.mutable.ListBuffer[B]()
      foreachDefined(pf andThen {
        case a =>
          res += a
      })
      res.result
    }
    // Runs `pf` for its side effects on every (path, sub-value) pair where it is
    // defined, via a one-off TuploidTraverser (parent visited before children).
    def foreachDefined(pf: PartialFunction[(TuploidPath, TuploidValue[A]), Unit]) {
      new TuploidTraverser[A] {
        override def traverse(path: TuploidPath, t: TuploidValue[A]) {
          pf.applyOrElse((path, t), (_: (TuploidPath, TuploidValue[A])) => ())
          super.traverse(path, t)
        }
      } traverse (RootTuploidPath, this)
    }
    // Maps each aliased sub-value's path to its alias.
    def collectAliases: Map[TuploidPath, A] =
      collectMap {
        case (path, t) if t.alias.nonEmpty =>
          path -> t.alias.get
      }
    // Trees of all scalar sub-values that carry an actual value.
    def collectValues: Seq[Tree] =
      collect {
        case (_, ScalarValue(_, Some(t), _)) =>
          t
      }
    // Path of the sub-value aliased by `target`, if any.
    def find(target: A): Option[TuploidPath]
    // Sub-value at `path` (implementations fail on a non-existent path).
    def get(path: TuploidPath): TuploidValue[A]
    // Whether `path` designates an existing sub-value.
    def exists(path: TuploidPath): Boolean
    // Optional symbol/tree this value is bound to.
    def alias: Option[A]
    def tpe: Type
  }
case class ScalarValue[A](tpe: Type, value: Option[Tree] = None, alias: Option[A] = None)
extends TuploidValue[A]
{
assert((tpe + "") != "Null" && tpe != NoType)
override def find(target: A) =
alias.filter(_ == target).map(_ => RootTuploidPath)
override def get(path: TuploidPath) = {
val RootTuploidPath = path
this
}
override def exists(path: TuploidPath) =
path == RootTuploidPath
}
case class TupleValue[A](
tpe: Type,
values: Map[Int, TuploidValue[A]],
alias: Option[A] = None,
couldBeNull: Boolean = true)
extends TuploidValue[A]
{
assert((tpe + "") != "Null" && tpe != NoType, "Created tuple value with tpe " + tpe)
override def find(target: A) = {
if (alias.exists(_ == target))
Some(RootTuploidPath)
else
values.toIterator.map {
case (i, v) =>
v.find(target).map(i :: _)
} collectFirst {
case Some(path) =>
path
}
}
override def get(path: TuploidPath) = path match {
case RootTuploidPath =>
this
case i :: subPath =>
values(i).get(subPath)
}
override def exists(path: TuploidPath) = path match {
case RootTuploidPath =>
true
case i :: subPath =>
i < values.size &&
values(i).exists(subPath)
}
}
// object BindList {
// def unapply(trees: List[Tree]): Option[List[Bind]] = {
// var success = true
// val result = trees map {
// case b @ Bind(_, _) => b
// case _ =>
// success = false
// null
// }
// if (success)
// Some(result)
// else
// None
// }
// }
object MethodTypeTree {
def unapply(tree: Tree): Option[(List[Symbol], Type)] = tree match {
case TypeTree() =>
tree.tpe match {
case MethodType(params, restpe) =>
Some(params, restpe)
case _ =>
None
}
case _ =>
None
}
}
  object TuploidValue
  {
    // Builds a TuploidValue[Symbol] describing the shape of a pattern or
    // expression tree: tuple creations become TupleValue nodes, everything
    // else becomes scalars.
    // NOTE(review): the last case is guarded by !isInsideCasePattern, so an
    // unhandled pattern inside a case yields a MatchError — presumably relied
    // upon by `unapply` below via trySome; confirm against callers.
    def extractSymbols(tree: Tree, alias: Option[Symbol] = None, isInsideCasePattern: Boolean = false): TuploidValue[Symbol] = {
      // Indexes sub-trees by position, delegating binds to extractSymbolsFromBind.
      def sub(subs: List[Tree]): Map[Int, TuploidValue[Symbol]] =
        (subs.zipWithIndex.map {
          case (b @ Bind(_, _), i) =>
            i -> extractSymbolsFromBind(b)
          case (t, i) =>
            i -> extractSymbols(t, isInsideCasePattern = isInsideCasePattern)
        })(breakOut)
      tree match {
        case TupleCreation(subs) =>
          // Tuple construction; inside a case pattern the value may be null.
          TupleValue(tree.tpe, values = sub(subs), alias = alias, couldBeNull = isInsideCasePattern)
        case Ident(termNames.WILDCARD) | Typed(Ident(termNames.WILDCARD), _) =>
          // Wildcard (possibly typed): anonymous scalar.
          ScalarValue(tree.tpe, alias = alias)
        case Ident(n) if tree.symbol.name == n =>
          // Plain reference: scalar aliased by the referenced symbol.
          ScalarValue(tree.tpe, alias = tree.symbol.asOption)
        case Apply(MethodTypeTree(_, restpe @ TupleType()), binds)
            if binds.forall({ case Bind(_, _) => true case _ => false }) =>
          // Tuple-typed application whose arguments are all binds: index them.
          val values = for ((bind: Bind, i) <- binds.zipWithIndex) yield {
            i -> extractSymbolsFromBind(bind)
          }
          TupleValue(restpe, values.toMap)
        case UnApply(_, _) =>
          // Custom extractor patterns are not supported.
          sys.error("Cannot handle " + tree)
        case _ if !isInsideCasePattern =>
          // Any other expression: scalar carrying the tree itself as a value.
          ScalarValue(tree.tpe, value = Some(tree), alias = alias)
      }
    }
    // Value of the bound pattern, aliased by the bind's own symbol.
    def extractSymbolsFromBind(bind: Bind): TuploidValue[Symbol] = {
      extractSymbols(bind.body, bind.symbol.asOption, isInsideCasePattern = true)
    }
    // Extractor form: None whenever extraction throws.
    def unapply(tree: Tree): Option[TuploidValue[Symbol]] =
      trySome(extractSymbols(tree))
  }
  // Shared sentinel: a scalar of type Unit with no tree value and no alias.
  object UnitTreeScalarValue extends ScalarValue[Tree](typeOf[Unit])
class TuploidTraverser[A] {
def traverse(path: TuploidPath, t: TuploidValue[A]) {
t match {
case TupleValue(_, values, _, _) =>
for ((i, value) <- values) {
traverse(path :+ i, value)
}
case _ =>
}
}
}
  // Rewrites a tuploid value (with aliases of type A) into one with aliases of
  // type B; `path` locates `t` within the overall tuploid being transformed.
  trait TuploidTransformer[A, B] {
    def transform(path: TuploidPath, t: TuploidValue[A]): TuploidValue[B]
  }
  /** Extract TuploidValue from a CaseDef */
  object CaseTuploidValue {
    // Matches case defs shaped `case Tuple(..binds) => body` or
    // `case alias @ Tuple(..binds) => body` whose pattern type is a tuple,
    // yielding the tuploid view of the pattern plus the case body.
    // Any other shape — or an extraction failure — yields None (via tryOrNone).
    def unapply(caseDef: CaseDef): Option[(TuploidValue[Symbol], Tree)] = {
      // Indexes the tuple pattern's components.
      def sub(binds: List[Tree]): Map[Int, TuploidValue[Symbol]] =
        binds.zipWithIndex.map({
          case (b, i) =>
            i -> (b match {
              case bind: Bind =>
                TuploidValue.extractSymbolsFromBind(bind)
              case ident @ Ident(n) =>
                ScalarValue[Symbol](tpe = ident.tpe, alias = ident.symbol.asOption)
              case _ =>
                TuploidValue.extractSymbols(b, isInsideCasePattern = true)
            })
        })(breakOut)
      require(caseDef.tpe != null && caseDef.tpe != NoType)
      tryOrNone {
        Option(caseDef) collect {
          case cq"($tuple(..$binds)) => $body" if TupleType.unapply(caseDef.pat.tpe) =>
            TupleValue(
              tpe = caseDef.pat.tpe,
              values = sub(binds), alias = None) -> body
          case cq"($alias @ $tuple(..$binds)) => $body" if TupleType.unapply(caseDef.pat.tpe) =>
            TupleValue(
              tpe = caseDef.pat.tpe,
              values = sub(binds), alias = caseDef.pat.symbol.asOption) -> body
        }
      }
    }
  }
}
| nativelibs4java/scalaxy-streams | src/main/scala/streams/matchers/TuploidValues.scala | Scala | bsd-3-clause | 12,072 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.convert.text
import com.typesafe.config.ConfigFactory
import org.junit.runner.RunWith
import org.locationtech.geomesa.convert.SimpleFeatureConverters
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class CompositeTextConverterTest extends Specification {
  // Raw input: two routable records (leading "1"/"2"), one line matching
  // neither predicate, and one too-short line; blank lines are filtered below.
  val data =
    """
      |1 ,hello,badvalue,45.0
      |asfastofail,f
      |3
      |2 ,world,,90.0
    """.stripMargin
  // Composite converter: dispatches each line on its first two characters to
  // the "first" or "second" delimited-text converter. Note the two delegates
  // declare the lineNr field in different positions — results must agree.
  val conf = ConfigFactory.parseString(
    """
      | {
      |   type = "composite-converter",
      |   converters = [
      |     { converter = "first", predicate = "strEq('1', trim(substr($0, 0, 2)))" },
      |     { converter = "second", predicate = "strEq('2', trim(substr($0, 0, 2)))" }
      |   ]
      |   first = {
      |     type = "delimited-text",
      |     format = "DEFAULT",
      |     id-field = "concat('first', trim($1))",
      |     fields = [
      |       { name = "phrase", transform = "concat($1, $2)" }
      |       { name = "lineNr", transform = "lineNo()" }
      |       { name = "lat", transform = "stringToDouble($3, '0.0'::double)" }
      |       { name = "lon", transform = "stringToDouble($4, '0.0'::double)" }
      |       { name = "geom", transform = "point($lat, $lon)" }
      |     ]
      |   }
      |
      |   second = {
      |     type = "delimited-text",
      |     format = "DEFAULT",
      |     id-field = "concat('second', trim($1))",
      |     fields = [
      |       { name = "phrase", transform = "concat($1, $2)" }
      |       { name = "lat", transform = "stringToDouble($3, '0.0'::double)" }
      |       { name = "lon", transform = "stringToDouble($4, '0.0'::double)" }
      |       { name = "geom", transform = "point($lat, $lon)" }
      |       { name = "lineNr", transform = "lineNo()" }
      |     ]
      |   }
      | }
    """.stripMargin)
  "be built from a conf" >> {
    val sft = SimpleFeatureTypes.createType(ConfigFactory.load("sft_testsft.conf"))
    val converter = SimpleFeatureConverters.build[String](sft, conf)
    converter must not beNull
    // Drop blank lines before feeding the converter.
    val res = converter.processInput(data.split("\n").toIterator.filterNot( s => "^\\s*$".r.findFirstIn(s).size > 0)).toList
    "and process some data" >> {
      // Only the two lines matching a predicate produce features.
      res.size must be equalTo 2
      res(0).getID must be equalTo "first1"
      res(1).getID must be equalTo "second2"
    }
    "and get correct line numbers" >> {
      // Line numbers count all non-blank input lines, including unmatched ones.
      res(0).getAttribute("lineNr").asInstanceOf[Long] must be equalTo 1
      res(1).getAttribute("lineNr").asInstanceOf[Long] must be equalTo 4
    }
    "testing string2 function defaults" >> {
      // "badvalue" and the empty field both fall back to the 0.0 default.
      res(0).getAttribute("lat").asInstanceOf[Double] must be equalTo 0.0
      res(1).getAttribute("lat").asInstanceOf[Double] must be equalTo 0.0
    }
  }
}
| jahhulbert-ccri/geomesa | geomesa-convert/geomesa-convert-text/src/test/scala/org/locationtech/geomesa/convert/text/CompositeTextConverterTest.scala | Scala | apache-2.0 | 3,517 |
package reactivemongo.core.netty
import reactivemongo.io.netty.channel.{ Channel, EventLoopGroup }
import reactivemongo.io.netty.channel.nio.NioEventLoopGroup
import reactivemongo.io.netty.channel.socket.nio.NioSocketChannel
import reactivemongo.util.LazyLogger
/**
 * Bundles a Netty transport choice: a factory for its event loop group and
 * the concrete channel class to instantiate.
 *
 * @param eventLoopGroup the event loop group factory
 * @param channelClass the socket channel implementation class
 */
private[core] final class Pack(
  val eventLoopGroup: () => EventLoopGroup,
  val channelClass: Class[_ <: Channel]) {

  // Identity is determined by the channel class name alone; the group factory
  // is a function and carries no meaningful equality.
  override def equals(that: Any): Boolean = that match {
    case p: Pack => this.channelClass.getName == p.channelClass.getName
    case _       => false
  }

  override def hashCode: Int = channelClass.getName.hashCode

  override def toString = s"NettyPack(${channelClass.getName})"
}
private[core] object Pack {
  // True when the shaded Netty (classes relocated under `reactivemongo.io.netty`)
  // is on the classpath; false when plain `io.netty` is used.
  private val shaded: Boolean = try {
    // Type alias but no class if not shaded
    Class.forName("reactivemongo.io.netty.channel.Channel")
    true
  } catch {
    case _: Throwable => false
  }

  private val logger = LazyLogger("reactivemongo.core.netty.Pack")

  /** Returns the best available transport: KQueue, then EPoll, then NIO. */
  def apply(): Pack = {
    val pack = kqueue.orElse(epoll).getOrElse(nio)

    logger.info(s"Instantiated ${pack.getClass.getName}")

    pack
  }

  private val kqueuePkg: String = {
    if (shaded) "reactivemongo.io.netty.channel.kqueue"
    else "io.netty.channel.kqueue"
  }

  private[core] def kqueue: Option[Pack] =
    loadNative(kqueuePkg, "KQueue", "KQueue")

  private val epollPkg: String = {
    if (shaded) "reactivemongo.io.netty.channel.epoll"
    else "io.netty.channel.epoll"
  }

  // Class prefix is "Epoll" (EpollSocketChannel) but logs say "EPoll",
  // matching the original messages byte-for-byte.
  private[core] def epoll: Option[Pack] =
    loadNative(epollPkg, "Epoll", "EPoll")

  /**
   * Reflectively loads a native transport (factored from the formerly
   * duplicated kqueue/epoll loaders).
   *
   * @param pkg the (possibly shaded) package containing the transport
   * @param classPrefix prefix of the `*SocketChannel`/`*EventLoopGroup` classes
   * @param displayName transport name used in log messages
   */
  private def loadNative(
    pkg: String,
    classPrefix: String,
    displayName: String): Option[Pack] = try {
    Some(Class.forName(
      s"${pkg}.${classPrefix}SocketChannel")).map { cls =>
      val chanClass = cls.asInstanceOf[Class[_ <: Channel]]
      val groupClass = Class.forName(s"${pkg}.${classPrefix}EventLoopGroup").
        asInstanceOf[Class[_ <: EventLoopGroup]]

      val pack = new Pack(() =>
        groupClass.getDeclaredConstructor().newInstance(), chanClass)

      logger.info(s"Netty $displayName successfully loaded (shaded: $shaded)")

      pack
    }
  } catch {
    case cause: Exception =>
      logger.debug(s"Cannot use Netty $displayName (shaded: $shaded)", cause)
      None
  }

  @inline private[core] def nio = new Pack(
    () => new NioEventLoopGroup(), classOf[NioSocketChannel])
}
| cchantep/ReactiveMongo | driver/src/main/scala/core/netty/Pack.scala | Scala | apache-2.0 | 2,869 |
/*
* Copyright 2011 Chris de Vreeze
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.cdevreeze.springjdbc
package namedparam
import java.{ util => jutil }
import scala.collection.JavaConverters._
import org.springframework.jdbc.core.namedparam.{ SqlParameterSource, MapSqlParameterSource }
/**
 * Named typed query argument lists, used in the Scala `NamedParamJdbcTemplate`.
 * The types are integer constants from `java.sql.Types`.
 *
 * @author Chris de Vreeze
 */
final class TypedArgMap(val typedArgs: Map[String, TypedArg]) extends Immutable {
  require(typedArgs ne null)

  /** Copies every (name, value, sql-type) triple into a MapSqlParameterSource. */
  def toSqlParameterSource: SqlParameterSource = {
    val source = new MapSqlParameterSource
    typedArgs foreach {
      case (argName, typedArg) =>
        source.addValue(argName, typedArg.argValue, typedArg.argType)
    }
    source
  }
}
object TypedArgMap {
  /** Builds a TypedArgMap from name/typed-argument pairs. */
  def apply(typedArgs: (String, TypedArg)*): TypedArgMap =
    new TypedArgMap(typedArgs.toMap)
}
| dvreeze/spring-jdbc-scala-utils | src/main/scala/eu/cdevreeze/springjdbc/namedparam/TypedArgMap.scala | Scala | apache-2.0 | 1,539 |
package scala.collection
import org.junit.Assert._
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
@RunWith(classOf[JUnit4])
class IterableViewLikeTest {

  // take/takeRight with Int.MinValue must yield nothing; drop/dropRight with
  // Int.MinValue must keep everything. Dotted calls replace the original
  // postfix-operator style (no postfixOps import needed).
  @Test
  def hasCorrectDropAndTakeMethods(): Unit = {
    val xs = Iterable(1, 2, 3)

    assertEquals(Iterable.empty[Int], xs.view.take(Int.MinValue).force)
    assertEquals(Iterable.empty[Int], xs.view.takeRight(Int.MinValue).force)
    assertEquals(xs, xs.view.drop(Int.MinValue).force)
    assertEquals(xs, xs.view.dropRight(Int.MinValue).force)
  }
}
| martijnhoekstra/scala | test/junit/scala/collection/IterableViewLikeTest.scala | Scala | apache-2.0 | 599 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.geotools
import java.util.Date
import com.typesafe.config.Config
import org.apache.commons.lang.StringEscapeUtils
import org.geotools.feature.simple.SimpleFeatureTypeBuilder
import org.locationtech.geomesa.utils.geotools.AttributeSpec.GeomAttributeSpec
import org.locationtech.geomesa.utils.geotools.NameableFeatureTypeFactory.NameableSimpleFeatureType
import org.opengis.feature.`type`.{AttributeDescriptor, FeatureTypeFactory, GeometryDescriptor}
import org.opengis.feature.simple.SimpleFeatureType
import org.parboiled.errors.ParsingException
import scala.collection.JavaConversions._
object SimpleFeatureTypes {
  import org.locationtech.geomesa.utils.geotools.RichAttributeDescriptors._
  // Feature-type-level user-data keys controlling GeoMesa behavior.
  object Configs {
    val TABLE_SHARING_KEY = "geomesa.table.sharing"
    val DEFAULT_DATE_KEY = "geomesa.index.dtg"
    val IGNORE_INDEX_DTG = "geomesa.ignore.dtg"
    val VIS_LEVEL_KEY = "geomesa.visibility.level"
    val Z3_INTERVAL_KEY = "geomesa.z3.interval"
    val XZ_PRECISION_KEY = "geomesa.xz.precision"
    val TABLE_SPLITTER = "table.splitter.class"
    val TABLE_SPLITTER_OPTS = "table.splitter.options"
    val MIXED_GEOMETRIES = "geomesa.mixed.geometries"
    val RESERVED_WORDS = "override.reserved.words" // note: doesn't start with geomesa so we don't persist it
    val DEFAULT_DTG_JOIN = "override.index.dtg.join"
    val KEYWORDS_KEY = "geomesa.keywords"
    val ENABLED_INDICES = "geomesa.indices.enabled"
    // keep around old values for back compatibility
    val ENABLED_INDEX_OPTS = Seq(ENABLED_INDICES, "geomesa.indexes.enabled", "table.indexes.enabled")
    val ST_INDEX_SCHEMA_KEY = "geomesa.index.st.schema"
    val Z_SPLITS_KEY = "geomesa.z.splits"
    val ATTR_SPLITS_KEY = "geomesa.attr.splits"
    val ID_SPLITS_KEY = "geomesa.id.splits"
    val LOGICAL_TIME_KEY = "geomesa.logical.time"
    val COMPRESSION_ENABLED = "geomesa.table.compression.enabled"
    val COMPRESSION_TYPE = "geomesa.table.compression.type" // valid: snappy, lzo, gz(default), bzip2, lz4, zstd
    val FID_UUID_KEY = "geomesa.fid.uuid"
    val FID_UUID_ENCODED_KEY = "geomesa.fid.uuid-encoded"
  }
  // Internal (non-user-facing) user-data keys.
  private [geomesa] object InternalConfigs {
    val GEOMESA_PREFIX = "geomesa."
    val SHARING_PREFIX_KEY = "geomesa.table.sharing.prefix"
    val USER_DATA_PREFIX = "geomesa.user-data.prefix"
    val INDEX_VERSIONS = "geomesa.indices"
    val REMOTE_VERSION = "gm.remote.version" // note: doesn't start with geomesa so we don't persist it
    val KEYWORDS_DELIMITER = "\\u0000"
  }
  // Per-attribute option keys (appear after ':' in attribute specs).
  object AttributeOptions {
    val OPT_DEFAULT = "default"
    val OPT_SRID = "srid"
    val OPT_INDEX_VALUE = "index-value"
    val OPT_INDEX = "index"
    val OPT_STATS = "keep-stats"
    val OPT_CARDINALITY = "cardinality"
    val OPT_COL_GROUPS = "column-groups"
    val OPT_BIN_TRACK_ID = "bin-track-id"
    val OPT_CQ_INDEX = "cq-index"
    val OPT_JSON = "json"
  }
  // Per-attribute user-data keys for collection element types.
  private [geomesa] object AttributeConfigs {
    val USER_DATA_LIST_TYPE = "subtype"
    val USER_DATA_MAP_KEY_TYPE = "keyclass"
    val USER_DATA_MAP_VALUE_TYPE = "valueclass"
  }
  /**
   * Create a simple feature type from a specification. Extends DataUtilities.createType with
   * GeoMesa-specific functionality like list/map attributes, indexing options, etc.
   *
   * @param typeName type name - may include namespace
   * @param spec specification
   * @return
   */
  def createType(typeName: String, spec: String): SimpleFeatureType = {
    val (namespace, name) = parseTypeName(typeName)
    createType(namespace, name, spec)
  }
  /**
   * Create a simple feature type from a specification. Extends DataUtilities.createType with
   * GeoMesa-specific functionality like list/map attributes, indexing options, etc.
   *
   * @param namespace namespace
   * @param name name
   * @param spec specification
   * @return
   */
  def createType(namespace: String, name: String, spec: String): SimpleFeatureType = {
    // Surface parse failures as IllegalArgumentException for a friendlier API.
    val parsed = try { SimpleFeatureSpecParser.parse(spec) } catch {
      case e: ParsingException => throw new IllegalArgumentException(e.getMessage, e)
    }
    createFeatureType(namespace, name, parsed)
  }
  /**
   * Parse a SimpleFeatureType spec from a typesafe Config
   *
   * @param conf config
   * @param typeName optional typename to use for the sft. will be overridden if the config contains a type-name key
   * @param path optional config path to parse. defaults to 'sft'
   * @return
   */
  def createType(conf: Config,
                 typeName: Option[String] = None,
                 path: Option[String] = Some("sft")): SimpleFeatureType = {
    val (nameFromConf, spec) = SimpleFeatureSpecConfig.parse(conf, path)
    // Name embedded in the config wins over the explicitly supplied one.
    val (namespace, name) = parseTypeName(nameFromConf.orElse(typeName).getOrElse {
      throw new IllegalArgumentException("Unable to parse type name from provided argument or config")
    })
    createFeatureType(namespace, name, spec)
  }
  /**
   * Creates a type that can be renamed
   *
   * @param spec spec
   * @return
   */
  def createNameableType(spec: String): NameableSimpleFeatureType = {
    val parsed = try { SimpleFeatureSpecParser.parse(spec) } catch {
      case e: ParsingException => throw new IllegalArgumentException(e.getMessage, e)
    }
    // The NameableFeatureTypeFactory produces mutable-name feature types.
    createFeatureType(null, "", parsed, Some(new NameableFeatureTypeFactory())).asInstanceOf[NameableSimpleFeatureType]
  }
  /**
   * Create a single attribute descriptor
   *
   * @param spec attribute spec, e.g. 'foo:String'
   * @return
   */
  def createDescriptor(spec: String): AttributeDescriptor = {
    try { SimpleFeatureSpecParser.parseAttribute(spec).toDescriptor } catch {
      case e: ParsingException => throw new IllegalArgumentException(e.getMessage, e)
    }
  }
  /**
   * Encode a SimpleFeatureType as a comma-separated String
   *
   * @param sft - SimpleFeatureType to encode
   * @param includeUserData - defaults to false
   * @return a string representing a serialization of the sft
   */
  def encodeType(sft: SimpleFeatureType, includeUserData: Boolean = false): String = {
    // User data (if requested) is appended after the attributes, ';'-separated.
    val userData = if (includeUserData) { encodeUserData(sft) } else { "" }
    sft.getAttributeDescriptors.map(encodeDescriptor(sft, _)).mkString("", ",", userData)
  }
  // Spec-string form of a single attribute descriptor.
  def encodeDescriptor(sft: SimpleFeatureType, descriptor: AttributeDescriptor): String =
    AttributeSpec(sft, descriptor).toSpec
  // Encodes the persistable user-data entries (non-null values whose keys match
  // a registered prefix), prefixed with ';'; empty string when there are none.
  def encodeUserData(sft: SimpleFeatureType): String = {
    import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
    val prefixes = sft.getUserDataPrefixes
    val userData = sft.getUserData.filter { case (k, v) => v != null && prefixes.exists(k.toString.startsWith) }
    if (userData.isEmpty) { "" } else {
      userData.map { case (k, v) => encodeUserData(k, v) }.mkString(";", ",", "")
    }
  }
  // Single user-data entry as key='escaped-value'.
  def encodeUserData(key: AnyRef, value: AnyRef): String = s"$key='${StringEscapeUtils.escapeJava(value.toString)}'"
  def toConfig(sft: SimpleFeatureType,
               includeUserData: Boolean = true,
               includePrefix: Boolean = true): Config =
    SimpleFeatureSpecConfig.toConfig(sft, includeUserData, includePrefix)
  def toConfigString(sft: SimpleFeatureType,
                     includeUserData: Boolean = true,
                     concise: Boolean = false,
                     includePrefix: Boolean = true,
                     json: Boolean = false): String =
    SimpleFeatureSpecConfig.toConfigString(sft, includeUserData, concise, includePrefix, json)
  /**
   * Renames a simple feature type. Preserves user data
   *
   * @param sft simple feature type
   * @param newName new name
   * @return
   */
  def renameSft(sft: SimpleFeatureType, newName: String): SimpleFeatureType = {
    val builder = new SimpleFeatureTypeBuilder()
    builder.init(sft)
    builder.setName(newName)
    val renamed = builder.buildFeatureType()
    // buildFeatureType does not carry user data over; copy it explicitly.
    renamed.getUserData.putAll(sft.getUserData)
    renamed
  }
  // Builds the feature type from a parsed spec: resolves the default geometry
  // and default date attributes, then assembles descriptors and user data.
  private def createFeatureType(namespace: String,
                                name: String,
                                spec: SimpleFeatureSpec,
                                factory: Option[FeatureTypeFactory] = None): SimpleFeatureType = {
    import AttributeOptions.OPT_DEFAULT
    import Configs.{DEFAULT_DATE_KEY, IGNORE_INDEX_DTG}
    import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
    // Default geometry: attribute flagged 'default', else first geometry attribute.
    val defaultGeom = {
      val geomAttributes = spec.attributes.collect { case g: GeomAttributeSpec => g }
      geomAttributes.find(_.options.get(OPT_DEFAULT).exists(_.toBoolean))
          .orElse(geomAttributes.headOption)
          .map(_.name)
    }
    // Default date: explicit DEFAULT_DATE_KEY option, else 'default'-flagged date
    // attribute, else first date attribute — unless dtg indexing is ignored.
    val defaultDate = if (spec.options.get(IGNORE_INDEX_DTG).exists(toBoolean)) { None } else {
      val dateAttributes = spec.attributes.filter(_.clazz.isAssignableFrom(classOf[Date]))
      spec.options.get(DEFAULT_DATE_KEY).flatMap(dtg => dateAttributes.find(_.name == dtg))
          .orElse(dateAttributes.find(_.options.get(OPT_DEFAULT).exists(_.toBoolean)))
          .orElse(dateAttributes.headOption)
          .map(_.name)
    }
    val b = factory.map(new SimpleFeatureTypeBuilder(_)).getOrElse(new SimpleFeatureTypeBuilder())
    b.setNamespaceURI(namespace)
    b.setName(name)
    b.addAll(spec.attributes.map(_.toDescriptor))
    defaultGeom.foreach(b.setDefaultGeometry)
    val sft = b.buildFeatureType()
    sft.getUserData.putAll(spec.options)
    defaultDate.foreach(sft.setDtgField)
    sft
  }
  // Splits "namespace:name" on the last ':'; namespace is null when absent
  // (or when the ':' is the final character).
  private def parseTypeName(name: String): (String, String) = {
    val nsIndex = name.lastIndexOf(':')
    val (namespace, local) = if (nsIndex == -1 || nsIndex == name.length - 1) {
      (null, name)
    } else {
      (name.substring(0, nsIndex), name.substring(nsIndex + 1))
    }
    (namespace, local)
  }
  // Indexed, non-geometry attributes of the type.
  def getSecondaryIndexedAttributes(sft: SimpleFeatureType): Seq[AttributeDescriptor] =
    sft.getAttributeDescriptors.filter(ad => ad.isIndexed && !ad.isInstanceOf[GeometryDescriptor])
  // Lenient boolean coercion for user-data values (null -> false).
  private [utils] def toBoolean(value: AnyRef): Boolean = value match {
    case null => false
    case bool: java.lang.Boolean => bool.booleanValue
    case bool: String => java.lang.Boolean.valueOf(bool).booleanValue
    case bool => java.lang.Boolean.valueOf(bool.toString).booleanValue
  }
}
| ddseapy/geomesa | geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/geotools/SimpleFeatureTypes.scala | Scala | apache-2.0 | 10,916 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.commands
import org.neo4j.cypher.internal.compiler.v2_3._
import org.neo4j.cypher.internal.compiler.v2_3.commands.expressions.{SizeFunction, ExtractFunction, Identifier, LengthFunction}
import org.neo4j.cypher.internal.compiler.v2_3.pipes.QueryStateHelper
import org.neo4j.cypher.internal.frontend.v2_3.test_helpers.CypherFunSuite
class ExtractTest extends CypherFunSuite {
  test("canReturnSomethingFromAnIterable") {
    // Input collection, bound to identifier "l" in the execution context.
    val l = Seq("x", "xxx", "xx")
    // Inner expression applied per element: size of the loop variable "n".
    val expression = SizeFunction(Identifier("n"))
    val collection = Identifier("l")
    val m = ExecutionContext.from("l" -> l)
    // extract(n IN l | size(n)): maps each string to its length.
    val extract = ExtractFunction(collection, "n", expression)
    extract.apply(m)(QueryStateHelper.empty) should equal(Seq(1, 3, 2))
  }
}
| HuangLS/neo4j | community/cypher/cypher-compiler-2.3/src/test/scala/org/neo4j/cypher/internal/compiler/v2_3/commands/ExtractTest.scala | Scala | apache-2.0 | 1,574 |
/*
* Copyright (c) 2018 OVO Energy
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.ovoenergy.comms.model
import com.ovoenergy.comms.KafkaMessage
import com.ovoenergy.comms.model.email.EmailFeedback
import com.ovoenergy.comms.model.types.HasEventMetadata
import com.ovoenergy.kafka.common.event.EventMetadata
import com.sksamuel.avro4s.{AvroDoc, SchemaFor}
// Kafka event carrying delivery feedback for a single comm.
@KafkaMessage
case class Feedback(
    @AvroDoc(
      "An ID generated by the comms platform to link the requests and Kafka messages of a transaction.")
    commId: String,
    @AvroDoc("A detailed description of the comm")
    commDescription: Option[String] = None,
    @AvroDoc("The ID of the customer who is the target of the communication")
    customer: Option[Customer],
    @AvroDoc("The status of the comm")
    status: FeedbackStatus,
    @AvroDoc("A detailed description for the status")
    description: Option[String],
    @AvroDoc("Email specific feedback")
    email: Option[EmailFeedback],
    @AvroDoc("Channel associated with the given comm")
    channel: Option[Channel],
    templateManifest: Option[TemplateManifest] = None,
    metadata: EventMetadata)
    extends LoggableEvent
    with HasEventMetadata {
  // Pretty-printed form used when the event is logged.
  override def loggableString: Option[String] = prettyPrint(this)
  // MDC entries for structured logging: event metadata, optional customer id,
  // optional template-manifest entries, plus the comm id and status.
  override def mdcMap: Map[String, String] =
    combineMDCS(
      metadata.mdcMap,
      customer.map(_.customerId).map("customerId" -> _).toMap,
      templateManifest.map(_.mdcMap).fold(Map[String, String]())(identity),
      Map(
        "commId" -> commId,
        "status" -> status.toString
      )
    )
}
object Feedback {
  // Avro schema evidence for serializing Feedback events.
  implicit val schemaFor: SchemaFor[Feedback] = SchemaFor[Feedback]
}
| ovotech/comms-kafka-messages | modules/core/src/main/scala/com/ovoenergy/comms/model/Feedback.scala | Scala | mit | 2,702 |
package mesosphere.marathon
package core.deployment
import akka.Done
import akka.actor.{ ActorRef, Props }
import akka.event.EventStream
import akka.stream.Materializer
import mesosphere.marathon.core.deployment.impl.{ DeploymentActor, DeploymentManagerActor, DeploymentManagerDelegate }
import mesosphere.marathon.core.health.HealthCheckManager
import mesosphere.marathon.core.launchqueue.LaunchQueue
import mesosphere.marathon.core.leadership.LeadershipModule
import mesosphere.marathon.core.readiness.ReadinessCheckExecutor
import mesosphere.marathon.core.task.termination.KillService
import mesosphere.marathon.core.task.tracker.InstanceTracker
import mesosphere.marathon.storage.repository.DeploymentRepository
import scala.concurrent.Promise
/**
* Provides a [[DeploymentManager]] implementation that can be used to start and cancel a deployment and
* to list currently running deployments.
*/
class DeploymentModule(
    config: DeploymentConfig,
    leadershipModule: LeadershipModule,
    taskTracker: InstanceTracker,
    killService: KillService,
    launchQueue: LaunchQueue,
    scheduler: SchedulerActions,
    healthCheckManager: HealthCheckManager,
    eventBus: EventStream,
    readinessCheckExecutor: ReadinessCheckExecutor,
    deploymentRepository: DeploymentRepository,
    deploymentActorProps: (ActorRef, Promise[Done], KillService, SchedulerActions, DeploymentPlan, InstanceTracker, LaunchQueue, HealthCheckManager, EventStream, ReadinessCheckExecutor) => Props = DeploymentActor.props)(implicit val mat: Materializer) {
  // Construction-time side effect: creates the manager actor's Props and
  // registers them so the actor is (re)started whenever this node is leader.
  private[this] val deploymentManagerActorRef: ActorRef = {
    val props = DeploymentManagerActor.props(
      taskTracker: InstanceTracker,
      killService,
      launchQueue,
      scheduler,
      healthCheckManager,
      eventBus,
      readinessCheckExecutor,
      deploymentRepository,
      deploymentActorProps)
    leadershipModule.startWhenLeader(props, "deploymentManager")
  }
  // Public facade delegating deployment commands to the manager actor.
  val deploymentManager: DeploymentManager = new DeploymentManagerDelegate(config, deploymentManagerActorRef)
}
| Caerostris/marathon | src/main/scala/mesosphere/marathon/core/deployment/DeploymentModule.scala | Scala | apache-2.0 | 2,064 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding
import org.scalatest.WordSpec
import java.util.Calendar
/**
 * Unit tests for scalding's date utilities: RichDate parsing and arithmetic,
 * DateRange iteration, Duration unit conversions, and Globifier path expansion.
 */
class DateTest extends WordSpec {
  // Pin the time zone and parser so results are deterministic on any host.
  implicit val tz = DateOps.PACIFIC
  implicit def dateParser: DateParser = DateParser.default
  "A RichDate" should {
    "implicitly convert strings" in {
      val rd1: RichDate = "2011-10-20"
      val rd2: RichDate = "2011-10-20"
      assert(rd1 === rd2)
    }
    "implicitly convert calendars" in {
      val rd1: RichDate = "2011-10-20"
      val cal = Calendar.getInstance(tz)
      cal.setTime(rd1.value)
      val rd2: RichDate = cal
      assert(rd1 === rd2)
    }
    "deal with strings with spaces" in {
      val rd1: RichDate = " 2011-10-20 "
      val rd2: RichDate = "2011-10-20 "
      val rd3: RichDate = " 2011-10-20 "
      assert(rd1 === rd2)
      assert(rd1 === rd3)
    }
    "handle dates with slashes and underscores" in {
      val rd1: RichDate = "2011-10-20"
      val rd2: RichDate = "2011/10/20"
      val rd3: RichDate = "2011_10_20"
      assert(rd1 === rd2)
      assert(rd1 === rd3)
    }
    "be able to parse milliseconds" in {
      // rd1 is not compared below; its construction alone exercises the
      // ".0" millisecond format (parsing would throw on failure).
      val rd1: RichDate = "2011-10-20 20:01:11.0"
      val rd2: RichDate = "2011-10-20 22:11:24.23"
      val rd3: RichDate = "2011-10-20 22:11:24.023 "
      assert(rd2 === rd3)
    }
    "throw an exception when trying to parse illegal strings" in {
      // Natty is *really* generous about what it accepts
      intercept[IllegalArgumentException] { RichDate("jhbjhvhjv") }
      intercept[IllegalArgumentException] { RichDate("99-99-99") }
    }
    "be able to deal with arithmetic operations with whitespace" in {
      val rd1: RichDate = RichDate("2010-10-02") + Seconds(1)
      val rd2: RichDate = " 2010-10-02 T 00:00:01 "
      assert(rd1 === rd2)
    }
    "Have same equals & hashCode as Date (crazy?)" in {
      val rd1: RichDate = "2011-10-20"
      assert(rd1 === rd1.value)
      assert(rd1.hashCode === rd1.value.hashCode)
    }
    "be well ordered" in {
      val rd1: RichDate = "2011-10-20"
      val rd2: RichDate = "2011-10-21"
      assert(rd1 < rd2)
      assert(rd1 <= rd2)
      assert(rd2 > rd1)
      assert(rd2 >= rd1)
      assert(rd1 >= rd1)
      assert(rd2 >= rd2)
    }
    "implicitly convert from long" in {
      // This kind of implicit is not safe (what does the long mean?)
      implicit def longToDate(l: Long): RichDate = RichDate(l)
      //This is close to: Mon Oct 24 20:03:13 PDT 2011
      val long_val = 1319511818135L
      val rd1 = "2011-10-24T20:03:00"
      val rd2 = "2011-10-24T20:04:00"
      assert(DateRange(rd1, rd2).contains(RichDate(long_val)))
      //Check edge cases:
      assert(DateRange(rd1, long_val).contains(long_val))
      assert(DateRange(rd1, (long_val + 1)).contains(long_val))
      assert(DateRange(long_val, rd2).contains(long_val))
      assert(DateRange((long_val - 1), rd2).contains(long_val))
      assert(!DateRange(rd1, "2011-10-24T20:03:01").contains(long_val))
      assert(!DateRange(rd1, (long_val - 1)).contains(long_val))
      assert(!DateRange((long_val + 1), rd2).contains(long_val))
    }
    "roundtrip successfully" in {
      val start_str = "2011-10-24 20:03:00"
      //string -> date -> string
      assert(RichDate(start_str).toString(DateOps.DATETIME_HMS_WITH_DASH) === start_str)
      //long -> date == date -> long -> date
      val long_val = 1319511818135L
      val date = RichDate(long_val)
      val long2 = date.value.getTime
      val date2 = RichDate(long2)
      assert(date === date2)
      assert(long_val === long2)
    }
    "know the most recent time units" in {
      //10-25 is a Tuesday, earliest in week is a monday
      assert(Weeks(1).floorOf("2011-10-25") === RichDate("2011-10-24"))
      assert(Days(1).floorOf("2011-10-25 10:01") === RichDate("2011-10-25 00:00"))
      //Leaving off the time should give the same result:
      assert(Days(1).floorOf("2011-10-25 10:01") === RichDate("2011-10-25"))
      assert(Hours(1).floorOf("2011-10-25 10:01") === RichDate("2011-10-25 10:00"))
    }
    "correctly do arithmetic" in {
      // (d1 + u(n)) - d1 must round-trip back to u(n) for each unit and sign.
      val d1: RichDate = "2011-10-24"
      (-4 to 4).foreach { n =>
        List(Hours, Minutes, Seconds, Millisecs).foreach { u =>
          val d2 = d1 + u(n)
          assert((d2 - d1) === u(n))
        }
      }
    }
    "correctly calculate upperBound" in {
      assert(Seconds(1).floorOf(RichDate.upperBound("2010-10-01")) === Seconds(1).floorOf(RichDate("2010-10-01 23:59:59")))
      assert(Seconds(1).floorOf(RichDate.upperBound("2010-10-01 14")) === Seconds(1).floorOf(RichDate("2010-10-01 14:59:59")))
      assert(Seconds(1).floorOf(RichDate.upperBound("2010-10-01 14:15")) === Seconds(1).floorOf(RichDate("2010-10-01 14:15:59")))
    }
  }
  "A DateRange" should {
    "correctly iterate on each duration" in {
      // Every sub-range produced by each() must be contained in the parent range.
      def rangeContainTest(d1: DateRange, dur: Duration) = {
        assert(d1.each(dur).forall((d1r: DateRange) => d1.contains(d1r)))
      }
      rangeContainTest(DateRange("2010-10-01", "2010-10-13"), Weeks(1))
      rangeContainTest(DateRange("2010-10-01", "2010-10-13"), Weeks(2))
      rangeContainTest(DateRange("2010-10-01", "2010-10-13"), Days(1))
      //Prime non one:
      rangeContainTest(DateRange("2010-10-01", "2010-10-13"), Days(5))
      //Prime number of Minutes
      rangeContainTest(DateRange("2010-10-01", "2010-10-13"), Minutes(13))
      rangeContainTest(DateRange("2010-10-01", "2010-10-13"), Hours(13))
      assert(DateRange("2010-10-01", "2010-10-10").each(Days(1)).size === 10)
      assert(DateRange("2010-10-01 00:00", RichDate("2010-10-02") - Millisecs(1)).each(Hours(1)).size === 24)
      assert(DateRange("2010-10-01 00:00", RichDate("2010-10-02") + Millisecs(1)).each(Hours(1)).size === 25)
      assert(DateRange("2010-10-01", RichDate.upperBound("2010-10-20")).each(Days(1)).size === 20)
      assert(DateRange("2010-10-01", RichDate.upperBound("2010-10-01")).each(Hours(1)).size === 24)
      assert(DateRange("2010-10-31", RichDate.upperBound("2010-10-31")).each(Hours(1)).size === 24)
      assert(DateRange("2010-10-31", RichDate.upperBound("2010-10-31")).each(Days(1)).size === 1)
      assert(DateRange("2010-10-31 12:00", RichDate.upperBound("2010-10-31 13")).each(Minutes(1)).size === 120)
    }
    "have each partition disjoint and adjacent" in {
      // Consecutive sub-ranges must not overlap and must abut exactly
      // (end of one + 1ms == start of the next).
      def eachIsDisjoint(d: DateRange, dur: Duration) {
        val dl = d.each(dur)
        assert(dl.zip(dl.tail).forall {
          case (da, db) =>
            da.isBefore(db.start) && db.isAfter(da.end) && ((da.end + Millisecs(1)) == db.start)
        })
      }
      eachIsDisjoint(DateRange("2010-10-01", "2010-10-03"), Days(1))
      eachIsDisjoint(DateRange("2010-10-01", "2010-10-03"), Weeks(1))
      eachIsDisjoint(DateRange("2010-10-01", "2011-10-03"), Weeks(1))
      eachIsDisjoint(DateRange("2010-10-01", "2010-10-03"), Months(1))
      eachIsDisjoint(DateRange("2010-10-01", "2011-10-03"), Months(1))
      eachIsDisjoint(DateRange("2010-10-01", "2010-10-03"), Hours(1))
      eachIsDisjoint(DateRange("2010-10-01", "2010-10-03"), Hours(2))
      eachIsDisjoint(DateRange("2010-10-01", "2010-10-03"), Minutes(1))
    }
  }
  "Time units" should {
    // Two durations are "the same" if adding them to a fixed date agrees.
    def isSame(d1: Duration, d2: Duration) = {
      (RichDate("2011-12-01") + d1) == (RichDate("2011-12-01") + d2)
    }
    "have 1000 milliseconds in a sec" in {
      assert(isSame(Millisecs(1000), Seconds(1)))
      assert(Seconds(1).toMillisecs === 1000L)
      assert(Millisecs(1000).toSeconds === 1.0)
      assert(Seconds(2).toMillisecs === 2000L)
      assert(Millisecs(2000).toSeconds === 2.0)
    }
    "have 60 seconds in a minute" in {
      assert(isSame(Seconds(60), Minutes(1)))
      assert(Minutes(1).toSeconds === 60.0)
      assert(Minutes(1).toMillisecs === 60 * 1000L)
      assert(Minutes(2).toSeconds === 120.0)
      assert(Minutes(2).toMillisecs === 120 * 1000L)
    }
    "have 60 minutes in a hour" in {
      assert(isSame(Minutes(60), Hours(1)))
      assert(Hours(1).toSeconds === 60.0 * 60.0)
      assert(Hours(1).toMillisecs === 60 * 60 * 1000L)
      assert(Hours(2).toSeconds === 2 * 60.0 * 60.0)
      assert(Hours(2).toMillisecs === 2 * 60 * 60 * 1000L)
    }
    "have 7 days in a week" in { assert(isSame(Days(7), Weeks(1))) }
  }
  "AbsoluteDurations" should {
    "behave as comparable" in {
      assert(Hours(5) >= Hours(2))
      assert(Minutes(60) >= Minutes(60))
      assert(Hours(1) < Millisecs(3600001))
    }
    "add properly" in {
      assert((Hours(2) + Hours(1)).compare(Hours(3)) === 0)
    }
    "have a well behaved max function" in {
      assert(AbsoluteDuration.max(Hours(1), Hours(2)).compare(Hours(2)) === 0)
    }
  }
  "Globifiers" should {
    "handle specific hand crafted examples" in {
      // t1 expands to hourly paths, t2 to daily paths; '*' stands for a whole
      // day (t1) or a whole month of days (t2) fully covered by the range.
      val t1 = Globifier("/%1$tY/%1$tm/%1$td/%1$tH")
      val t2 = Globifier("/%1$tY/%1$tm/%1$td/")
      val testcases =
        (t1.globify(DateRange("2011-12-01T14", "2011-12-04")),
          List("/2011/12/01/14", "/2011/12/01/15", "/2011/12/01/16", "/2011/12/01/17", "/2011/12/01/18",
            "/2011/12/01/19", "/2011/12/01/20", "/2011/12/01/21", "/2011/12/01/22", "/2011/12/01/23",
            "/2011/12/02/*", "/2011/12/03/*", "/2011/12/04/00")) ::
          (t1.globify(DateRange("2011-12-01", "2011-12-01T23:59")),
            List("/2011/12/01/*")) ::
            (t1.globify(DateRange("2014-06-30T00", "2014-07-01T00")),
              List("/2014/06/30/*", "/2014/07/01/00")) ::
              (t1.globify(DateRange("2011-12-01T12", "2011-12-01T12:59")),
                List("/2011/12/01/12")) ::
                (t1.globify(DateRange("2011-12-01T12", "2011-12-01T14")),
                  List("/2011/12/01/12", "/2011/12/01/13", "/2011/12/01/14")) ::
                  (t2.globify(DateRange("2011-12-01T14", "2011-12-04")),
                    List("/2011/12/01/", "/2011/12/02/", "/2011/12/03/", "/2011/12/04/")) ::
                    (t2.globify(DateRange("2011-12-01", "2011-12-01T23:59")),
                      List("/2011/12/01/")) ::
                      (t2.globify(DateRange("2011-12-01T12", "2011-12-01T12:59")),
                        List("/2011/12/01/")) ::
                        (t2.globify(DateRange("2011-12-01T12", "2012-01-02T14")),
                          List("/2011/12/*/", "/2012/01/01/", "/2012/01/02/")) ::
                          (t2.globify(DateRange("2011-11-01T12", "2011-12-02T14")),
                            List("/2011/11/*/", "/2011/12/01/", "/2011/12/02/")) ::
                            Nil
      testcases.foreach { case (l, r) => assert(l === r) }
    }
    "The forward and reverser should match" in {
      // Round trip: globify a range, then recover a range from the globs;
      // the normalized ranges must agree.
      val globifierOps = GlobifierOps()
      val hourlyTestCases = List(
        DateRange("2011-12-01T14", "2011-12-04"),
        DateRange("2011-12-01", "2011-12-01T23:59"),
        DateRange("2014-06-30T00", "2014-07-01T00"),
        DateRange("2011-12-01T12", "2011-12-01T12:59"),
        DateRange("2011-12-01T12", "2011-12-01T14"))
      hourlyTestCases.foreach { dr =>
        val resultantDR = globifierOps.hourlyRtGlobifier(dr)
        assert(globifierOps.normalizeHrDr(dr) === globifierOps.normalizeHrDr(resultantDR))
      }
      val dailyTestCases = List(
        DateRange("2011-12-01T14", "2011-12-04"),
        DateRange("2011-12-01", "2011-12-01T23:59"),
        DateRange("2011-12-01T12", "2011-12-01T12:59"),
        DateRange("2011-12-01T12", "2012-01-02T14"),
        DateRange("2011-11-01T12", "2011-12-02T14"))
      dailyTestCases.foreach { dr =>
        val resultantDR = globifierOps.dailyRtGlobifier(dr)
        assert(globifierOps.normalizeDayDr(dr) === globifierOps.normalizeDayDr(resultantDR))
      }
    }
    def eachElementDistinct(dates: List[String]) = dates.size == dates.toSet.size
    def globMatchesDate(glob: String)(date: String) = {
      java.util.regex.Pattern.matches(glob.replaceAll("\\\\*", "[0-9]*"), date)
    }
    // Enumerate every path in the range explicitly, one per duration step.
    def bruteForce(pattern: String, dr: DateRange, dur: Duration)(implicit tz: java.util.TimeZone) = {
      dr.each(dur)
        .map { (dr: DateRange) => String.format(pattern, dr.start.toCalendar(tz)) }
    }
    "handle random test cases" in {
      // This kind of implicit is not safe (what does the long mean?)
      implicit def longToDate(l: Long): RichDate = RichDate(l)
      val pattern = "/%1$tY/%1$tm/%1$td/%1$tH"
      val t1 = Globifier(pattern)
      val r = new java.util.Random()
      (0 until 100) foreach { step =>
        val start = RichDate("2011-08-03").value.getTime + r.nextInt(Int.MaxValue)
        val dr = DateRange(start, start + r.nextInt(Int.MaxValue))
        val splits = bruteForce(pattern, dr, Hours(1))
        val globed = t1.globify(dr)
        assert(eachElementDistinct(globed))
        //See that each path is matched by exactly one glob:
        assert(splits.map { path => globed.filter { globMatchesDate(_)(path) }.size }
          .forall { _ == 1 })
      }
    }
  }
}
| nvoron23/scalding | scalding-date/src/test/scala/com/twitter/scalding/DateTest.scala | Scala | apache-2.0 | 13,473 |
package com.databricks.spark.sql.perf.mllib.feature
/**
 * Mixin holding the column-name constants shared by benchmark code for
 * feature transformers that read a single input column and write a single
 * output column.
 */
private[feature] trait UnaryTransformer {
  /** Name of the column the transformer under test reads from. */
  private[feature] val inputCol: String = "inputCol"
  /** Name of the column the transformer under test writes to. */
  private[feature] val outputCol: String = "outputCol"
}
| databricks/spark-sql-perf | src/main/scala/com/databricks/spark/sql/perf/mllib/feature/UnaryTransformer.scala | Scala | apache-2.0 | 274 |
/**
* Copyright (c) Lambda Innovation, 2013-2016
* This file is part of LambdaLib modding library.
* https://github.com/LambdaInnovation/LambdaLib
* Licensed under MIT, see project root for more information.
*/
package cn.lambdalib.cgui
import cn.lambdalib.cgui.gui.component.Component
import cn.lambdalib.cgui.gui.Widget
import cn.lambdalib.cgui.gui.event.{IGuiEventHandler, GuiEvent}
import scala.reflect.ClassTag
/**
 * Zero-allocation (value class) wrapper adding Scala-friendly sugar to
 * [[Widget]]: typed event-listener registration, `:+` for adding children
 * and components, and child/component lookup helpers.
 */
class RichWidget(val w: Widget) extends AnyVal {
  /**
   * Registers `handler` for events of type `T` (resolved via ClassTag) and
   * returns the widget so calls can be chained.
   */
  def listens[T <: GuiEvent](handler: (Widget, T) => Any, priority: Int = 0)(implicit evidence: ClassTag[T]): Widget = {
    w.listen[T](evidence.runtimeClass.asInstanceOf[Class[T]], new IGuiEventHandler[T] {
      override def handleEvent(w: Widget, event: T) = {
        handler(w, event)
      }
    }, priority)
    w
  }
  // Overload for handlers that ignore the widget; uses the default priority.
  def listens[T <: GuiEvent](handler: T => Any)(implicit evidence: ClassTag[T]): Widget = {
    listens((_, e: T) => handler(e))
    w
  }
  // Overload for handlers that ignore both widget and event.
  def listens[T <: GuiEvent](handler: () => Any)(implicit evidence: ClassTag[T]): Widget = {
    listens((_, _: T) => handler())
    w
  }
  /** Adds an anonymous child widget. */
  def :+(add: Widget): Unit = w.addWidget(add)
  /** Adds a named child widget from a (name, widget) pair. */
  def :+(pair: (String, Widget)): Unit = w.addWidget(pair._1, pair._2)
  /** Adds a component to this widget. */
  def :+(c: Component): Unit = w.addComponent(c)
  /** Looks up a component of this widget by its runtime class. */
  def component[T <: Component](implicit evidence: ClassTag[T]) = {
    w.getComponent(evidence.runtimeClass.asInstanceOf[Class[T]])
  }
  /** Looks up a child widget by name. */
  def child(name: String) = w.getWidget(name)
  /** Looks up a child widget by index. */
  def child(idx: Int) = w.getWidget(idx)
}
/**
 * Zero-allocation (value class) wrapper adding typed event-listener
 * registration to [[Component]], mirroring [[RichWidget.listens]] but
 * without a priority parameter.
 */
class RichComponent(val c: Component) extends AnyVal {
  /** Registers `handler` for events of type `T` on this component. */
  def listens[T <: GuiEvent](handler: (Widget, T) => Any)(implicit tag: ClassTag[T]): Unit = {
    c.listen[T](tag.runtimeClass.asInstanceOf[Class[T]], new IGuiEventHandler[T] {
      override def handleEvent(w: Widget, e: T) = handler(w, e)
    })
  }
  // Overload for handlers that ignore the widget.
  def listens[T <: GuiEvent](handler: T => Any)(implicit tag: ClassTag[T]): Unit = {
    listens((_, e:T) => handler(e))
  }
  // Overload for handlers that ignore both widget and event.
  def listens[T <: GuiEvent](handler: () => Any)(implicit tag: ClassTag[T]): Unit = {
    listens((_, _:T) => handler())
  }
}
/**
 * CGUI scala extensions to reduce syntax burden.
 *
 * Import the members of this object (`import ScalaCGUI._`) to get the
 * implicit conversions that enrich Widget and Component with the helper
 * methods defined in RichWidget and RichComponent.
 */
object ScalaCGUI {
  implicit def toWrapper(w: Widget): RichWidget = new RichWidget(w)
  implicit def toComponentWrapper(c: Component): RichComponent = new RichComponent(c)
} | LambdaInnovation/LambdaLib | src/main/scala/cn/lambdalib/cgui/ScalaCGUI.scala | Scala | mit | 2,276 |
/*
* Copyright (c) 2009 Sony Pictures Imageworks Inc.
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the
* distribution. Neither the name of Sony Pictures Imageworks nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.imageworks.migration
/**
 * The set of migrator operations that can be performed.
 *
 * This is a sealed ADT: the migrator pattern matches exhaustively on these
 * cases, so any new operation must be added in this file.
 */
sealed abstract class MigratorOperation
/**
 * Install all available migrations.
 */
case object InstallAllMigrations
  extends MigratorOperation
/**
 * Remove all installed migrations. This should effectively return
 * the database to a pristine state, except if any migration throws an
 * IrreversibleMigrationException.
 */
case object RemoveAllMigrations
  extends MigratorOperation
/**
 * Remove all migrations with versions greater than the given version
 * and install all migrations less than or equal to the given version.
 */
case class MigrateToVersion(version: Long)
  extends MigratorOperation
/**
 * Rollback 'count' migrations in the database. This is different
 * than using MigrateToVersion to migrate to the same version, as
 * MigrateToVersion will also install any missing migration with a
 * version less than the target version. This rollback operation only
 * removes migrations from the database.
 *
 * @param count the number of migrations to roll back; must be at least one
 * @throws IllegalArgumentException if count is less than one
 */
case class RollbackMigration(count: Int)
  extends MigratorOperation {
  // Fail fast at construction: rolling back a non-positive number of
  // migrations is a caller bug. require throws IllegalArgumentException,
  // the same exception type the previous hand-rolled check threw.
  require(count >= 1,
    "The number of migrations to rollback must be greater than zero.")
}
| imageworks/scala-migrations | src/main/scala/com/imageworks/migration/MigratorOperation.scala | Scala | bsd-3-clause | 2,858 |
/**
* Copyright (C) 2010-2011 LShift Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.lshift.diffa.agent.itest.support
import net.lshift.diffa.participants.ParticipantRpcServer
import concurrent.SyncVar
import org.slf4j.LoggerFactory
import net.lshift.diffa.participant.content.ContentParticipantHandler
import net.lshift.diffa.participant.scanning.ScanningParticipantRequestHandler
import net.lshift.diffa.participant.correlation.VersioningParticipantHandler
/**
 * Helper objects for creation of HTTP/AMQP RPC chain for remote-controlling participants
 */
trait Participants {
  // Endpoints exposed by the upstream participant.
  val upstreamScanUrl: String
  val upstreamContentUrl: String
  // Endpoints exposed by the downstream participant.
  val downstreamScanUrl: String
  val downstreamContentUrl: String
  val downstreamVersionUrl: String
  /** Starts the upstream participant serving scan and content requests. */
  def startUpstreamServer(scanning:ScanningParticipantRequestHandler, content:ContentParticipantHandler)
  /** Starts the downstream participant, which additionally serves version correlation. */
  def startDownstreamServer(scanning:ScanningParticipantRequestHandler, content:ContentParticipantHandler, versioning:VersioningParticipantHandler)
}
/**
 * HTTP implementation of [[Participants]]: forks a ParticipantRpcServer on
 * localhost for each participant and exposes the URLs it serves.
 *
 * @param usPort port for the upstream participant server
 * @param dsPort port for the downstream participant server
 */
class HttpParticipants(usPort: Int, dsPort: Int) extends Participants {
  val log = LoggerFactory.getLogger(getClass)
  val upstreamUrl = "http://localhost:" + usPort
  val upstreamScanUrl = upstreamUrl + "/scan"
  val upstreamContentUrl = upstreamUrl + "/content"
  val downstreamUrl = "http://localhost:" + dsPort
  val downstreamScanUrl = downstreamUrl + "/scan"
  val downstreamContentUrl = downstreamUrl + "/content"
  val downstreamVersionUrl = downstreamUrl + "/corr-version"
  // The upstream participant has no versioning handler, hence the null.
  def startUpstreamServer(scanning:ScanningParticipantRequestHandler, content:ContentParticipantHandler) =
    forkServer(usPort, scanning, content, null)
  def startDownstreamServer(scanning:ScanningParticipantRequestHandler, content:ContentParticipantHandler, versioning:VersioningParticipantHandler) =
    forkServer(dsPort, scanning, content, versioning)
  /**
   * Starts the server on a background thread and blocks until it signals
   * readiness through the SyncVar, or fails after a 5 second timeout.
   */
  private def forkServer(port: Int, scanning:ScanningParticipantRequestHandler, content:ContentParticipantHandler, versioning:VersioningParticipantHandler) {
    val server = new ParticipantRpcServer(port, scanning, content, versioning)
    val startupSync = new SyncVar[Boolean]
    new Thread {
      override def run = {
        try {
          server.start
        }
        catch {
          case x:Exception => {
            log.error("Cannot start server on port: " + port)
            throw x
          }
        }
        // Only reached when server.start returns normally; if it threw, the
        // SyncVar is never set and the get(5000) below reports the failure.
        startupSync.set(true)
      }
    }.start
    startupSync.get(5000) match {
      case None => throw new Exception("Forked server on " + port + " failed to start")
      case _ =>
    }
  }
}
| aprescott/diffa | agent/src/test/scala/net/lshift/diffa/agent/itest/support/Participants.scala | Scala | apache-2.0 | 3,117 |
package xitrum.routing
import scala.annotation.tailrec
import scala.collection.mutable.{ArrayBuffer, Map => MMap}
import io.netty.handler.codec.http.HttpMethod
import xitrum.{Action, Log}
import xitrum.annotation.Swagger
import xitrum.scope.request.{Params, PathInfo}
import xitrum.util.LocalLruCache
object RouteCollection {
  /**
   * Builds a runtime RouteCollection from the routes discovered at startup:
   * expands prefix-less SockJS handler routes under every registered SockJS
   * prefix, optionally strips the Swagger UI/JSON routes, and loads the
   * error-handler action classes.
   */
  def fromSerializable(acc: DiscoveredAcc, withSwagger: Boolean): RouteCollection = {
    val normal = acc.normalRoutes
    val sockJsWithoutPrefix = acc.sockJsWithoutPrefixRoutes
    val sockJsMap = acc.sockJsMap
    val swaggerMap: Map[Class[_ <: Action], Swagger] = if (withSwagger) acc.swaggerMap else Map.empty
    // Add prefixes to SockJS routes: every SockJS handler route is cloned
    // once per registered prefix and appended to the normal route tables.
    sockJsMap.keys.foreach { prefix =>
      sockJsWithoutPrefix.firstGETs      .foreach { r => normal.firstGETs      .append(r.addPrefix(prefix)) }
      sockJsWithoutPrefix.firstPOSTs     .foreach { r => normal.firstPOSTs     .append(r.addPrefix(prefix)) }
      sockJsWithoutPrefix.firstPUTs      .foreach { r => normal.firstPUTs      .append(r.addPrefix(prefix)) }
      sockJsWithoutPrefix.firstPATCHs    .foreach { r => normal.firstPATCHs    .append(r.addPrefix(prefix)) }
      sockJsWithoutPrefix.firstDELETEs   .foreach { r => normal.firstDELETEs   .append(r.addPrefix(prefix)) }
      sockJsWithoutPrefix.firstWEBSOCKETs.foreach { r => normal.firstWEBSOCKETs.append(r.addPrefix(prefix)) }
      sockJsWithoutPrefix.lastGETs      .foreach { r => normal.lastGETs      .append(r.addPrefix(prefix)) }
      sockJsWithoutPrefix.lastPOSTs     .foreach { r => normal.lastPOSTs     .append(r.addPrefix(prefix)) }
      sockJsWithoutPrefix.lastPUTs      .foreach { r => normal.lastPUTs      .append(r.addPrefix(prefix)) }
      sockJsWithoutPrefix.lastPATCHs    .foreach { r => normal.lastPATCHs    .append(r.addPrefix(prefix)) }
      sockJsWithoutPrefix.lastDELETEs   .foreach { r => normal.lastDELETEs   .append(r.addPrefix(prefix)) }
      sockJsWithoutPrefix.lastWEBSOCKETs.foreach { r => normal.lastWEBSOCKETs.append(r.addPrefix(prefix)) }
      sockJsWithoutPrefix.otherGETs      .foreach { r => normal.otherGETs      .append(r.addPrefix(prefix)) }
      sockJsWithoutPrefix.otherPOSTs     .foreach { r => normal.otherPOSTs     .append(r.addPrefix(prefix)) }
      sockJsWithoutPrefix.otherPUTs      .foreach { r => normal.otherPUTs      .append(r.addPrefix(prefix)) }
      sockJsWithoutPrefix.otherPATCHs    .foreach { r => normal.otherPATCHs    .append(r.addPrefix(prefix)) }
      sockJsWithoutPrefix.otherDELETEs   .foreach { r => normal.otherDELETEs   .append(r.addPrefix(prefix)) }
      sockJsWithoutPrefix.otherWEBSOCKETs.foreach { r => normal.otherWEBSOCKETs.append(r.addPrefix(prefix)) }
    }
    // Without Swagger enabled, drop the built-in Swagger UI/JSON GET routes.
    val firstGETs =
      if (withSwagger)
        normal.firstGETs
      else
        normal.firstGETs.filterNot { r =>
          val className = r.actionClass
          className == classOf[SwaggerJson].getName || className == classOf[SwaggerUi].getName
        }
    val cl = Thread.currentThread.getContextClassLoader
    new RouteCollection(
      firstGETs           .map(_.toRoute), normal.lastGETs      .map(_.toRoute), normal.otherGETs      .map(_.toRoute),
      normal.firstPOSTs   .map(_.toRoute), normal.lastPOSTs     .map(_.toRoute), normal.otherPOSTs     .map(_.toRoute),
      normal.firstPUTs    .map(_.toRoute), normal.lastPUTs      .map(_.toRoute), normal.otherPUTs      .map(_.toRoute),
      normal.firstPATCHs  .map(_.toRoute), normal.lastPATCHs    .map(_.toRoute), normal.otherPATCHs    .map(_.toRoute),
      normal.firstDELETEs .map(_.toRoute), normal.lastDELETEs   .map(_.toRoute), normal.otherDELETEs   .map(_.toRoute),
      normal.firstWEBSOCKETs.map(_.toRoute), normal.lastWEBSOCKETs.map(_.toRoute), normal.otherWEBSOCKETs.map(_.toRoute),
      new SockJsRouteMap(MMap(sockJsMap.toSeq: _*)),
      swaggerMap,
      normal.error404.map(cl.loadClass(_).asInstanceOf[Class[Action]]),
      normal.error500.map(cl.loadClass(_).asInstanceOf[Class[Action]])
    )
  }
}
/**
* Routes are grouped by methods.
* The routes are `ArrayBuffer` so that routes can be modified after collected.
*/
class RouteCollection(
val firstGETs: ArrayBuffer[Route],
val lastGETs: ArrayBuffer[Route],
val otherGETs: ArrayBuffer[Route],
val firstPOSTs: ArrayBuffer[Route],
val lastPOSTs: ArrayBuffer[Route],
val otherPOSTs: ArrayBuffer[Route],
val firstPUTs: ArrayBuffer[Route],
val lastPUTs: ArrayBuffer[Route],
val otherPUTs: ArrayBuffer[Route],
val firstPATCHs: ArrayBuffer[Route],
val lastPATCHs: ArrayBuffer[Route],
val otherPATCHs: ArrayBuffer[Route],
val firstDELETEs: ArrayBuffer[Route],
val lastDELETEs: ArrayBuffer[Route],
val otherDELETEs: ArrayBuffer[Route],
val firstWEBSOCKETs: ArrayBuffer[Route],
val lastWEBSOCKETs: ArrayBuffer[Route],
val otherWEBSOCKETs: ArrayBuffer[Route],
val sockJsRouteMap: SockJsRouteMap,
val swaggerMap: Map[Class[_ <: Action], Swagger],
// 404.html and 500.html are used by default
val error404: Option[Class[Action]],
val error500: Option[Class[Action]]
)
{
/**
* Class name -> ReverseRoute
*
* Use class name (String) instead of Class[_] becasuse we want to reload
* classes in development mode, but classes loaded by different class loaders
* can't be compared.
*/
lazy val reverseMappings: scala.collection.Map[String, ReverseRoute] = {
val mmap = MMap.empty[String, ArrayBuffer[Route]]
allFirsts(None).foreach { r => mmap.getOrElseUpdate(r.klass.getName, ArrayBuffer()).append(r) }
allOthers(None).foreach { r => mmap.getOrElseUpdate(r.klass.getName, ArrayBuffer()).append(r) }
allLasts (None).foreach { r => mmap.getOrElseUpdate(r.klass.getName, ArrayBuffer()).append(r) }
mmap.mapValues { routes => ReverseRoute(routes) }
}
//----------------------------------------------------------------------------
/**
* All routes in one place for ease of use. Elements are `ArrayBuffer` and can
* still be modified.
*/
val all: Seq[ArrayBuffer[Route]] = Seq(
firstGETs, firstPOSTs, firstPUTs, firstPATCHs, firstDELETEs, firstWEBSOCKETs,
otherGETs, otherPOSTs, otherPUTs, otherPATCHs, otherDELETEs, otherWEBSOCKETs,
lastGETs, lastPOSTs, lastPUTs, lastPATCHs, lastDELETEs, lastWEBSOCKETs
)
def allFlatten(): Seq[Route] = all.flatten
/**
* @param xitrumRoutes
* - None: No filter, return all routes
* - Some(true): Only return Xitrum internal routes
* - Some(false): Only return non Xitrum internal routes
*/
def allFirsts(xitrumRoutes: Option[Boolean]): Seq[Route] = {
xitrumRoutes match {
case None =>
val ret = ArrayBuffer.empty[Route]
ret.appendAll(firstGETs)
ret.appendAll(firstPOSTs)
ret.appendAll(firstPUTs)
ret.appendAll(firstDELETEs)
ret.appendAll(firstWEBSOCKETs)
ret
case Some(x) =>
val ret = ArrayBuffer.empty[Route]
ret.appendAll(firstGETs .filter(_.klass.getName.startsWith("xitrum") == x))
ret.appendAll(firstPOSTs .filter(_.klass.getName.startsWith("xitrum") == x))
ret.appendAll(firstPUTs .filter(_.klass.getName.startsWith("xitrum") == x))
ret.appendAll(firstDELETEs .filter(_.klass.getName.startsWith("xitrum") == x))
ret.appendAll(firstWEBSOCKETs.filter(_.klass.getName.startsWith("xitrum") == x))
ret
}
}
/** See allFirsts */
def allLasts(xitrumRoutes: Option[Boolean]): Seq[Route] = {
xitrumRoutes match {
case None =>
val ret = ArrayBuffer.empty[Route]
ret.appendAll(lastGETs)
ret.appendAll(lastPOSTs)
ret.appendAll(lastPUTs)
ret.appendAll(lastDELETEs)
ret.appendAll(lastWEBSOCKETs)
ret
case Some(x) =>
val ret = ArrayBuffer.empty[Route]
ret.appendAll(lastGETs .filter(_.klass.getName.startsWith("xitrum") == x))
ret.appendAll(lastPOSTs .filter(_.klass.getName.startsWith("xitrum") == x))
ret.appendAll(lastPUTs .filter(_.klass.getName.startsWith("xitrum") == x))
ret.appendAll(lastDELETEs .filter(_.klass.getName.startsWith("xitrum") == x))
ret.appendAll(lastWEBSOCKETs.filter(_.klass.getName.startsWith("xitrum") == x))
ret
}
}
/** See allFirsts */
def allOthers(xitrumRoutes: Option[Boolean]): Seq[Route] = {
xitrumRoutes match {
case None =>
val ret = ArrayBuffer.empty[Route]
ret.appendAll(otherGETs)
ret.appendAll(otherPOSTs)
ret.appendAll(otherPUTs)
ret.appendAll(otherPATCHs)
ret.appendAll(otherDELETEs)
ret.appendAll(otherWEBSOCKETs)
ret
case Some(x) =>
val ret = ArrayBuffer.empty[Route]
ret.appendAll(otherGETs .filter(_.klass.getName.startsWith("xitrum") == x))
ret.appendAll(otherPOSTs .filter(_.klass.getName.startsWith("xitrum") == x))
ret.appendAll(otherPUTs .filter(_.klass.getName.startsWith("xitrum") == x))
ret.appendAll(otherPATCHs .filter(_.klass.getName.startsWith("xitrum") == x))
ret.appendAll(otherDELETEs .filter(_.klass.getName.startsWith("xitrum") == x))
ret.appendAll(otherWEBSOCKETs.filter(_.klass.getName.startsWith("xitrum") == x))
ret
}
}
//----------------------------------------------------------------------------
// Run only at startup, speed is not a problem
def logAll() {
logRoutes(false)
sockJsRouteMap.logRoutes(false)
logErrorRoutes()
logRoutes(true)
sockJsRouteMap.logRoutes(true)
}
/** @param xitrumRoutes true: log only Xitrum routes, false: log only app routes */
def logRoutes(xitrumRoutes: Boolean) {
// This method is only run once on start, speed is not a problem
// method pattern target
val firsts = ArrayBuffer.empty[(String, String, String)]
val others = ArrayBuffer.empty[(String, String, String)]
val lasts = ArrayBuffer.empty[(String, String, String)]
val (rFirsts, rOthers,rLasts) = if (xitrumRoutes) {
// Filter out routes created for SockJS to avoid noisy log
// (they are logged separately by sockJsRouteMap.logRoutes)
(
allFirsts(Some(xitrumRoutes)).filter(!_.klass.getName.startsWith("xitrum.sockjs")),
allOthers(Some(xitrumRoutes)).filter(!_.klass.getName.startsWith("xitrum.sockjs")),
allLasts (Some(xitrumRoutes)).filter(!_.klass.getName.startsWith("xitrum.sockjs"))
)
} else {
(
allFirsts(Some(xitrumRoutes)),
allOthers(Some(xitrumRoutes)),
allLasts (Some(xitrumRoutes))
)
}
for (r <- rFirsts) firsts.append((r.httpMethod.name, RouteCompiler.decompile(r.compiledPattern), targetWithCache(r)))
for (r <- rOthers) others.append((r.httpMethod.name, RouteCompiler.decompile(r.compiledPattern), targetWithCache(r)))
for (r <- rLasts) lasts .append((r.httpMethod.name, RouteCompiler.decompile(r.compiledPattern), targetWithCache(r)))
// Sort by pattern
val all = firsts ++ others.sortBy(_._2) ++ lasts
val (methodHttpMaxLength, patternMaxLength) = all.foldLeft((0, 0)) { case ((mmax, pmax), (m, p, _)) =>
val mlen = m.length
val plen = p.length
val mmax2 = if (mmax < mlen) mlen else mmax
val pmax2 = if (pmax < plen) plen else pmax
(mmax2, pmax2)
}
val logFormat = "%-" + methodHttpMaxLength + "s %-" + patternMaxLength + "s %s"
val strings = all.map { case (m, p, cr) => logFormat.format(m, p, cr) }
if (xitrumRoutes)
Log.info("Xitrum routes:\\n" + strings.mkString("\\n"))
else
Log.info("Normal routes:\\n" + strings.mkString("\\n"))
}
def logErrorRoutes() {
val strings = ArrayBuffer.empty[String]
error404.foreach { klass => strings.append("404 " + klass.getName) }
error500.foreach { klass => strings.append("500 " + klass.getName) }
if (strings.nonEmpty) Log.info("Error routes:\\n" + strings.mkString("\\n"))
}
private def targetWithCache(route: Route): String = {
val target = route.klass.getName
val secs = route.cacheSecs
if (secs == 0)
target
else if (secs < 0)
s"$target (action cache: ${formatTime(-secs)})"
else
s"$target (page cache: ${formatTime(secs)})"
}
private def formatTime(seconds: Int): String = {
if (seconds < 60) {
"%d [sec]".format(seconds)
} else {
val minutes = seconds / 60
if (minutes < 60) {
"%d [min]".format(minutes)
} else {
val hours = minutes / 60
if (hours < 24) {
"%d [h]".format(hours)
} else {
val days = hours / 24
"%d [d]".format(days)
}
}
}
}
//----------------------------------------------------------------------------
// Cache recently matched routes to speed up route matching
private val matchedRouteCache = LocalLruCache[String, (Route, Params)](1024)
def route(httpMethod: HttpMethod, pathInfo: PathInfo): Option[(Route, Params)] = {
// This method is run for every request, thus should be fast
val key = httpMethod + pathInfo.encoded
val value = matchedRouteCache.get(key)
if (value != null) return Some(value)
val maybeCached = matchMethod(httpMethod) match {
case None => None
case Some((firsts, lasts, others)) =>
val tokens = pathInfo.tokens
matchAndExtractPathParams(tokens, firsts) match {
case None =>
matchAndExtractPathParams(tokens, others) match {
case None => matchAndExtractPathParams(tokens, lasts)
case some => some
}
case some => some
}
}
maybeCached.foreach { value => matchedRouteCache.put(key, value) }
maybeCached
}
/** @return Option[(firsts, lasts, others)] */
private def matchMethod(httpMethod: HttpMethod): Option[(Seq[Route], Seq[Route], Seq[Route])] = {
val methodName = httpMethod.name
if (methodName == "GET") return Some(firstGETs, lastGETs, otherGETs)
if (methodName == "POST") return Some(firstPOSTs, lastPOSTs, otherPOSTs)
if (methodName == "PUT") return Some(firstPUTs, lastPUTs, otherPUTs)
if (methodName == "PATCH") return Some(firstPATCHs, lastPATCHs, otherPATCHs)
if (methodName == "DELETE") return Some(firstDELETEs, lastDELETEs, otherDELETEs)
if (methodName == "WEBSOCKET") return Some(firstWEBSOCKETs, lastWEBSOCKETs, otherWEBSOCKETs)
None
}
/** @return Option[(Class[Action], cacheSecs, Params)] */
@tailrec
private def matchAndExtractPathParams(tokens: Array[String], routes: Seq[Route]): Option[(Route, Params)] = {
if (routes.isEmpty) return None
val route = routes.head
route.matchRoute(tokens) match {
case Some(params) => Some(route, params)
case None => matchAndExtractPathParams(tokens, routes.tail)
}
}
//----------------------------------------------------------------------------
/** Used at SetCORS & OPTIONSResponse. Returns every HTTP method that matches pathInfo. */
def tryAllMethods(pathInfo: PathInfo): Seq[HttpMethod] = {
  def matched(method: HttpMethod): Boolean = route(method, pathInfo).nonEmpty

  // A matching GET route implies HEAD is also supported.
  val getPart = if (matched(HttpMethod.GET)) Seq(HttpMethod.GET, HttpMethod.HEAD) else Seq.empty
  val rest    = Seq(HttpMethod.POST, HttpMethod.PUT, HttpMethod.PATCH, HttpMethod.DELETE).filter(matched)
  getPart ++ rest
}
//----------------------------------------------------------------------------
// Convenient methods for modifying routes.
/** removeByClass[ActionClassToRemove]() — drops every route whose action class matches. */
def removeByClass[A <: Action]()(implicit action: Manifest[A]) {
  val className = action.toString
  for (routes <- all) {
    val doomed = routes.filter(_.klass.getName == className)
    routes --= doomed
  }
}
/** removeByPrefix("/path/prefix") or removeByPrefix("path/prefix") */
def removeByPrefix(prefix: String) {
  val normalized = if (prefix.startsWith("/")) prefix.substring(1) else prefix
  for (routes <- all) {
    val doomed = routes.filter { r =>
      // Leading run of literal (non-placeholder) non-dot tokens of the compiled pattern.
      val literalTokens = r.compiledPattern.takeWhile {
        case nd: NonDotRouteToken => !nd.isPlaceholder
        case _                    => false
      }
      literalTokens.nonEmpty && {
        val literalPath = literalTokens.collect { case nd: NonDotRouteToken => nd.value }.mkString("/")
        literalPath.startsWith(normalized)
      }
    }
    routes --= doomed
  }
  sockJsRouteMap.removeByPrefix(normalized)
}
}
| caiiiycuk/xitrum | src/main/scala/xitrum/routing/RouteCollection.scala | Scala | mit | 17,025 |
package chana.reactor
import akka.actor.Actor
import akka.actor.ActorRef
import akka.actor.Terminated
import akka.cluster.pubsub.DistributedPubSubMediator.{ Publish, Subscribe, SubscribeAck, Unsubscribe, UnsubscribeAck }
import akka.event.LoggingAdapter
import akka.pattern.ask
import akka.routing.ActorRefRoutee
import akka.routing.BroadcastRoutingLogic
import akka.routing.ConsistentHashingRoutingLogic
import akka.routing.RandomRoutingLogic
import akka.routing.RoundRobinRoutingLogic
import akka.routing.Router
import scala.concurrent.duration._
trait Publisher { _: akka.actor.Actor =>
  // All subscribed queue ActorRefs; each is death-watched.
  var queues = Set[ActorRef]() // ActorRef of queue
  // group -> routees. Key None means every queue in that entry receives each
  // message; a named group receives each message on exactly one queue (routed).
  var groupToQueues: Map[Option[String], Set[ActorRefRoutee]] = Map.empty.withDefaultValue(Set.empty)

  def log: LoggingAdapter

  // Routing logic used to pick a single queue inside a named group.
  val groupRouter = Router(
    context.system.settings.config.getString("chana.reactor.publisher.routing-logic") match {
      case "random"             => RandomRoutingLogic()
      case "round-robin"        => RoundRobinRoutingLogic()
      case "consistent-hashing" => ConsistentHashingRoutingLogic(context.system)
      case "broadcast"          => BroadcastRoutingLogic()
      case other                => throw new IllegalArgumentException(s"Unknown 'routing-logic': [$other]")
    })

  /** The topic this publisher represents: its own actor name. */
  def topic = self.path.name

  /** Handles subscribe/unsubscribe/publish messages plus watched-queue termination. */
  def publisherBehavior: Receive = {
    case x @ Subscribe(topic, group, queue) =>
      insertSubscription(group, queue)
      sender() ! SubscribeAck(x)
      log.info("{} successfully subscribed to topic(me) [{}] under group [{}]", queue, topic, group)

    case x @ Unsubscribe(topic, group, queue) =>
      removeSubscription(group, queue)
      sender() ! UnsubscribeAck(x)
      log.info("{} successfully unsubscribed to topic(me) [{}] under group [{}]", queue, topic, group)

    case Publish(topic, msg, _) => publish(msg)

    case Terminated(ref)        => removeSubscription(ref)
  }

  /** Broadcasts to ungrouped queues; routes to one queue per named group. */
  def publish(x: Any) {
    groupToQueues foreach {
      case (None, queues) => queues foreach (_.ref ! x)
      case (_, queues)    => groupRouter.withRoutees(queues.toVector).route(x, self)
    }
  }

  /** True if the queue is still referenced by at least one group. */
  def existsQueue(queue: ActorRef) = {
    groupToQueues exists { case (group, queues) => queues.contains(ActorRefRoutee(queue)) }
  }

  def insertSubscription(group: Option[String], queue: ActorRef) {
    if (!queues.contains(queue)) {
      context watch queue
      queues += queue
    }
    groupToQueues = groupToQueues.updated(group, groupToQueues(group) + ActorRefRoutee(queue))
  }

  def removeSubscription(group: Option[String], queue: ActorRef) {
    // Bug fix: drop the routee from the group BEFORE checking whether the queue
    // is still used anywhere. The previous order ran existsQueue first (while
    // the queue was still registered), so the last unsubscribe never unwatched
    // the queue nor removed it from `queues`, leaking the watch and the ref.
    groupToQueues = groupToQueues.updated(group, groupToQueues(group) - ActorRefRoutee(queue))
    if (!existsQueue(queue)) {
      context unwatch queue
      queues -= queue
    }
  }

  /** Removes a (terminated) queue from the watch set and from every group. */
  def removeSubscription(queue: ActorRef) {
    context unwatch queue
    queues -= queue
    groupToQueues = for {
      (group, queues) <- groupToQueues
    } yield (group -> (queues - ActorRefRoutee(queue)))
  }
}
| hustnn/chana | src/main/scala/chana/reactor/Publisher.scala | Scala | apache-2.0 | 2,967 |
package pelikomponentit
import peli.Pelitilanne
import siirrot.Siirto
/* Lähteet:
* O1-kurssin tehtävän Chess luokka Piece
*/
/** Base class for all board terrains; the char is the board representation. */
sealed abstract class Maasto(maastonTunnus: Char, nimi_ : String) {
  override def toString = maastonTunnus.toString

  /** Human-readable name of this terrain. */
  def nimi = nimi_

  /** True for goal (maali) squares, false for every other terrain. */
  def onMaali = this match {
    case _: Maali => true
    case _        => false
  }

  /** The terrain itself (the original match bound `this` and returned it unchanged). */
  def tyyppi: Maasto = this
}
// Road (tie) terrains; the char is what appears on the board.
sealed case class Tie(tietyyppi: Char, tyypinNimi: String) extends Maasto(tietyyppi, "Tie, "+tyypinNimi)
// Board edge.
case object Reuna extends Maasto(Maasto.reuna, "Reuna")
// Goal (maali) squares; the char encodes the required finishing direction.
sealed case class Maali(maalityyppi: Char, tyypinNimi: String) extends Maasto(maalityyppi, "Maali ("+tyypinNimi+")")
// Concrete road types: normal, ice, sand, deep sand, oil.
object Normaali extends Tie(Maasto.tie, "normaali")
object Jaa extends Tie(Maasto.jaa, "jaa")
object Hiekka extends Tie(Maasto.hiekka, "hiekka")
object SyvaHiekka extends Tie(Maasto.syvaHiekka, "syvä hiekka")
object Oljy extends Tie(Maasto.oljy, "öljy")
// Goal squares, one per finishing direction: up, down, right, left.
object MaaliYlos extends Maali(Maasto.maaliYlos, "ylos")
object MaaliAlas extends Maali(Maasto.maaliAlas, "alas")
object MaaliOikea extends Maali(Maasto.maaliOikea, "oikealle")
object MaaliVasen extends Maali(Maasto.maaliVasen, "vasemmalle")
object MaaliVasen extends Maali(Maasto.maaliVasen, "vasemmalle")
/** Terrain character constants, terrain-specific movement rules and the char -> terrain factory. */
object Maasto {
  // Board characters for each terrain type.
  val tie = ' '
  val jaa = 'j'
  val hiekka = 'h'
  val syvaHiekka = 'H'
  val oljy = 'o'
  val reuna = '#'
  val maaliYlos = '^'
  val maaliAlas = 'v'
  val maaliOikea = '>'
  val maaliVasen = '<'
  // Sand rule: starting from sand, the gear may not increase.
  def hiekanSaannot(pelilauta: Pelilauta, lapimentavatRuudut: Vector[Ruutu], siirto: Siirto, auto: Auto) = {
    !pelilauta(siirto.lahtoKoordinaatti).on(Hiekka) || // if the move starts from sand
    (auto.edellinenSiirto.forall{edellinen => edellinen.vaihde >= siirto.vaihde}) // the car may not speed up
  }
  // Deep-sand rule: starting from deep sand, the gear must strictly decrease.
  def syvanHiekanSaannot(pelilauta: Pelilauta, lapimentavatRuudut: Vector[Ruutu], siirto: Siirto, auto: Auto) = {
    !pelilauta(siirto.lahtoKoordinaatti).on(SyvaHiekka) || // if the move starts from deep sand
    (auto.edellinenSiirto.forall{edellinen => edellinen.vaihde > siirto.vaihde}) // speed keeps dropping until the car stops (= loss)
  }
  // Ice rule: on ice, keep the same speed and direction, or accelerate in any direction.
  def jaanSaannot(pelilauta: Pelilauta, lapimentavatRuudut: Vector[Ruutu], siirto: Siirto, auto: Auto) = {
    !( pelilauta(siirto.lahtoKoordinaatti).on(Jaa) || lapimentavatRuudut.exists(_.on(Jaa)) ) || // if the car travels on ice
    (auto.edellinenSiirto.forall{edellinen => // same speed in the same direction, or faster in any direction
      edellinen.muutaSuunnaksi == siirto.muutaSuunnaksi || edellinen.vaihde < siirto.vaihde} )
  }
  // Oil rule: travelling through oil constrains the next direction relative to the previous move.
  def oljynSaannot(pelilauta: Pelilauta, lapimentavatRuudut: Vector[Ruutu], siirto: Siirto, auto: Auto) = {
    !(pelilauta(siirto.lahtoKoordinaatti).on(Oljy) || lapimentavatRuudut.exists(_.on(Oljy))) || // if the car travels through oil
    (auto.edellinenSiirto.forall(_.samaSuunta(siirto.vaihde) != siirto.muutaSuunnaksi)) // NOTE(review): exact oil semantics depend on Siirto.samaSuunta — confirm against game rules
  }
  // Every recognized board character.
  val maastoTunnukset = Vector(tie, hiekka, syvaHiekka, jaa, oljy, reuna, maaliYlos, maaliAlas, maaliOikea, maaliVasen)
  /** Factory: maps a board character to its terrain object. */
  def apply(maastonTunnus: Char): Maasto = {
    maastonTunnus match {
      case this.reuna => Reuna
      case this.maaliYlos => MaaliYlos
      case this.maaliAlas => MaaliAlas
      case this.maaliOikea => MaaliOikea
      case this.maaliVasen => MaaliVasen
      case this.jaa => Jaa
      case this.hiekka => Hiekka
      case this.syvaHiekka => SyvaHiekka
      case this.oljy => Oljy
      case _ => Normaali // Besides the intended roads, every unrecognized character is interpreted as normal road.
    }
  }
}
} | MrAurela/Formula | Formula/src/pelikomponentit/Maasto.scala | Scala | mit | 3,518 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.reflect.macros
package contexts
import scala.reflect.macros.runtime.AbortMacroException
import scala.tools.nsc.Reporting.WarningCategory
/** Macro-context front end: reporting helpers that delegate to the compiler's reporter. */
trait FrontEnds {
  self: Context =>

  def echo(pos: Position, msg: String): Unit = universe.reporter.echo(pos, msg)

  @deprecated("Use echo, info messages are always forced", since="2.13.0")
  def info(pos: Position, msg: String, force: Boolean): Unit = universe.reporter.echo(pos, msg)

  // Bug fix: previously delegated to hasErrors, so a macro asking whether any
  // warnings had been issued actually learned whether errors had been issued.
  def hasWarnings: Boolean = universe.reporter.hasWarnings

  def hasErrors: Boolean = universe.reporter.hasErrors

  // TODO: add WarningCategory parameter (not binary compatible)
  def warning(pos: Position, msg: String): Unit = callsiteTyper.context.warning(pos, msg, WarningCategory.Other)

  def error(pos: Position, msg: String): Unit = callsiteTyper.context.error(pos, msg)

  /** Aborts macro expansion by throwing; never returns. */
  def abort(pos: Position, msg: String): Nothing = throw new AbortMacroException(pos, msg)
}
| scala/scala | src/compiler/scala/reflect/macros/contexts/FrontEnds.scala | Scala | apache-2.0 | 1,207 |
package cz.kamenitxan.jakon.core.configuration
import cz.kamenitxan.jakon.core.custom_pages.{CustomPage, CustomPageInitializer, StaticPage}
import cz.kamenitxan.jakon.core.dynamic.{JsonPagelet, JsonPageletInitializer, Pagelet, PageletInitializer}
import cz.kamenitxan.jakon.utils.Utils
import cz.kamenitxan.jakon.webui.controller.objectextension.{ObjectExtension, ObjectExtensionInitializer}
import io.github.classgraph.{ClassGraph, ClassInfoList, ScanResult}
import java.io.File
import java.nio.file.{Files, Path, StandardCopyOption}
import java.util.regex.Pattern
import scala.jdk.CollectionConverters._
/** Scans the classpath once for Jakon annotations and wires up the annotated classes. */
class AnnotationScanner {
  // Eagerly runs the classpath scan; closed in load().
  // NOTE(review): if only loadConfiguration() is called, scanResult is never
  // closed — confirm the intended call order is loadConfiguration() then load().
  private val scanResult = {
    val cg = new ClassGraph().enableAllInfo()
    cg.whitelistPaths("/static")
    Settings.getPackage.foreach(p => cg.whitelistPackages(p))
    val result = Utils.measured(elapsedTime => "Annotations scanned in " + elapsedTime + " ms") {
      cg.scan()
    }
    result
  }

  /** Initializes classes annotated with @Configuration. */
  def loadConfiguration(): Unit = {
    loadConfiguration(scanResult)
  }

  /** Initializes pagelets, custom pages and object extensions, copies static
    * resources, then closes the scan result (even on failure). */
  def load(): Unit = {
    try {
      loadControllers(scanResult)
      loadCustomPages(scanResult)
      loadObjectExtensions(scanResult)
      copyResources()
    } finally {
      scanResult.close()
    }
  }

  /** copy static resources to static folder, so they can be served by nginx */
  private def copyResources(): Unit = {
    val resourceList = scanResult.getResourcesMatchingPattern(Pattern.compile(".*static.*"))
    resourceList.forEach(r => {
      val targetFile = new File(Settings.getStaticDir + "/" + r.getPath.replace("static/", ""))
      targetFile.getParentFile.mkdirs()
      // Bug fix: Files.copy(InputStream, ...) does not close the stream, and the
      // old code never closed it (it also called targetFile.exists() and
      // discarded the result — dead code, removed).
      val in = r.open()
      try {
        Files.copy(in, targetFile.toPath, StandardCopyOption.REPLACE_EXISTING)
      } finally {
        in.close()
      }
    })
  }

  /** Initializes classes annotated with @Pagelet and @JsonPagelet. */
  private def loadControllers(scanResult: ScanResult): Unit = {
    val controllers = scanResult.getClassesWithAnnotation(classOf[Pagelet].getCanonicalName).loadScalaClasses()
    val jsonControllers = scanResult.getClassesWithAnnotation(classOf[JsonPagelet].getCanonicalName).loadScalaClasses()
    PageletInitializer.initControllers(controllers)
    JsonPageletInitializer.initControllers(jsonControllers)
  }

  /** Initializes classes annotated with @CustomPage and @StaticPage. */
  private def loadCustomPages(scanResult: ScanResult): Unit = {
    val customPages = scanResult.getClassesWithAnnotation(classOf[CustomPage].getCanonicalName).loadScalaClasses()
    val staticPages = scanResult.getClassesWithAnnotation(classOf[StaticPage].getCanonicalName).loadScalaClasses()
    CustomPageInitializer.initCustomPages(customPages)
    CustomPageInitializer.initStaticPages(staticPages)
  }

  private def loadConfiguration(scanResult: ScanResult): Unit = {
    val config = scanResult.getClassesWithAnnotation(classOf[Configuration].getCanonicalName).loadScalaClasses()
    ConfigurationInitializer.initConfiguration(config)
  }

  private def loadObjectExtensions(scanResult: ScanResult): Unit = {
    val config = scanResult.getClassesWithAnnotation(classOf[ObjectExtension].getCanonicalName).loadScalaClasses()
    ObjectExtensionInitializer.initObjectExtensions(config)
  }

  // Bridges ClassGraph's java.util.List to an immutable Scala Seq.
  implicit class ClassInfoListExtensions(val cil: ClassInfoList) {
    def loadScalaClasses(): Seq[Class[_]] = {
      cil.loadClasses().asScala.toSeq
    }
  }
}
| kamenitxan/Jakon | modules/backend/src/main/scala/cz/kamenitxan/jakon/core/configuration/AnnotationScanner.scala | Scala | bsd-3-clause | 3,105 |
package vexriscv.demo.smp
import spinal.core
import spinal.core._
import spinal.core.sim.{onSimEnd, simSuccess}
import spinal.lib._
import spinal.lib.bus.bmb.sim.BmbMemoryAgent
import spinal.lib.bus.bmb._
import spinal.lib.bus.misc.{DefaultMapping, SizeMapping}
import spinal.lib.bus.wishbone.{Wishbone, WishboneConfig, WishboneToBmb, WishboneToBmbGenerator}
import spinal.lib.com.jtag.{Jtag, JtagInstructionDebuggerGenerator, JtagTapInstructionCtrl}
import spinal.lib.com.jtag.sim.JtagTcp
import spinal.lib.com.jtag.xilinx.Bscane2BmbMasterGenerator
import spinal.lib.generator._
import spinal.core.fiber._
import spinal.idslplugin.PostInitCallback
import spinal.lib.misc.plic.PlicMapping
import spinal.lib.system.debugger.SystemDebuggerConfig
import vexriscv.ip.{DataCacheAck, DataCacheConfig, DataCacheMemBus, InstructionCache, InstructionCacheConfig}
import vexriscv.plugin._
import vexriscv.{Riscv, VexRiscv, VexRiscvBmbGenerator, VexRiscvConfig, plugin}
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import spinal.lib.generator._
import vexriscv.ip.fpu.FpuParameter
/** Cluster-level configuration: one VexRiscvConfig per CPU plus interconnect options
  * (memory coherency, JTAG header width, peripheral bus width, decoder ordering, FPU). */
case class VexRiscvSmpClusterParameter(cpuConfigs : Seq[VexRiscvConfig],
                                       jtagHeaderIgnoreWidth : Int,
                                       withExclusiveAndInvalidation : Boolean,
                                       forcePeripheralWidth : Boolean = true,
                                       outOfOrderDecoder : Boolean = true,
                                       fpu : Boolean = false)
/** Bare SMP cluster: clock/reset generators, a JTAG debug bridge, coherent and
  * non-coherent d-bus bridges, and one VexRiscv core per entry in p.cpuConfigs. */
class VexRiscvSmpClusterBase(p : VexRiscvSmpClusterParameter) extends Area with PostInitCallback{
  val cpuCount = p.cpuConfigs.size

  // Debug clock domain: externally driven, long reset hold (4095 cycles).
  val debugCd = ClockDomainResetGenerator()
  debugCd.holdDuration.load(4095)
  debugCd.makeExternal()

  // System clock domain derived from the debug one, shorter reset hold.
  val systemCd = ClockDomainResetGenerator()
  systemCd.holdDuration.load(63)
  systemCd.setInput(debugCd)

  // Elaborate everything below under the system clock domain; the pushed
  // context is restored in postInitCallback once construction finishes.
  val ctx = systemCd.outputClockDomain.push()
  override def postInitCallback(): VexRiscvSmpClusterBase.this.type = {
    ctx.restore()
    this
  }

  implicit val interconnect = BmbInterconnectGenerator()

  // JTAG -> BMB debug bridge; the JTAG clock is external and has no reset.
  val debugBridge = debugCd.outputClockDomain on JtagInstructionDebuggerGenerator(p.jtagHeaderIgnoreWidth)
  debugBridge.jtagClockDomain.load(ClockDomain.external("jtag", withReset = false))

  val debugPort = Handle(debugBridge.logic.jtagBridge.io.ctrl.toIo)

  val dBusCoherent = BmbBridgeGenerator()
  val dBusNonCoherent = BmbBridgeGenerator()

  // Coherent variant: exclusive monitor + invalidate monitor sit between the
  // coherent and non-coherent d-bus bridges.
  val smp = p.withExclusiveAndInvalidation generate new Area{
    val exclusiveMonitor = BmbExclusiveMonitorGenerator()
    interconnect.addConnection(dBusCoherent.bmb, exclusiveMonitor.input)

    val invalidationMonitor = BmbInvalidateMonitorGenerator()
    interconnect.addConnection(exclusiveMonitor.output, invalidationMonitor.input)
    interconnect.addConnection(invalidationMonitor.output, dBusNonCoherent.bmb)
    if(p.outOfOrderDecoder) interconnect.masters(invalidationMonitor.output).withOutOfOrderDecoder()
  }

  // Non-coherent variant: direct connection.
  val noSmp = !p.withExclusiveAndInvalidation generate new Area{
    interconnect.addConnection(dBusCoherent.bmb, dBusNonCoherent.bmb)
  }

  // One core per CPU config; each gets a debug BMB window of 0x1000 bytes.
  val cores = for(cpuId <- 0 until cpuCount) yield new Area{
    val cpu = VexRiscvBmbGenerator()
    cpu.config.load(p.cpuConfigs(cpuId))
    interconnect.addConnection(
      cpu.dBus -> List(dBusCoherent.bmb)
    )
    cpu.enableDebugBmb(
      debugCd = debugCd.outputClockDomain,
      resetCd = systemCd,
      mapping = SizeMapping(cpuId*0x1000, 0x1000)
    )
    interconnect.addConnection(debugBridge.bmb, cpu.debugBmb)
  }
}
/** Cluster plus standard peripherals: Wishbone peripheral bridge, PLIC and CLINT
  * (both exposed over Wishbone), and external interrupt wiring to every core. */
class VexRiscvSmpClusterWithPeripherals(p : VexRiscvSmpClusterParameter) extends VexRiscvSmpClusterBase(p) {
  val peripheralBridge = BmbToWishboneGenerator(DefaultMapping)
  val peripheral = Handle(peripheralBridge.logic.io.output.toIo)
  if(p.forcePeripheralWidth) interconnect.slaves(peripheralBridge.bmb).forceAccessSourceDataWidth(32)

  val plic = BmbPlicGenerator()(interconnect = null)
  plic.priorityWidth.load(2)
  plic.mapping.load(PlicMapping.sifive)

  // Wishbone front end for the PLIC control bus.
  val plicWishboneBridge = new Generator{
    dependencies += plic.ctrl

    plic.accessRequirements.load(BmbAccessParameter(
      addressWidth = 22,
      dataWidth = 32
    ).addSources(1, BmbSourceParameter(
      contextWidth = 0,
      lengthWidth = 2,
      alignment = BmbParameter.BurstAlignement.LENGTH
    )))

    val logic = add task new Area{
      val bridge = WishboneToBmb(WishboneConfig(20, 32))
      bridge.io.output >> plic.ctrl
    }
  }
  val plicWishbone = plicWishboneBridge.produceIo(plicWishboneBridge.logic.bridge.io.input)

  val clint = BmbClintGenerator(0)(interconnect = null)
  // Wishbone front end for the CLINT control bus.
  val clintWishboneBridge = new Generator{
    dependencies += clint.ctrl

    clint.accessRequirements.load(BmbAccessParameter(
      addressWidth = 16,
      dataWidth = 32
    ).addSources(1, BmbSourceParameter(
      contextWidth = 0,
      lengthWidth = 2,
      alignment = BmbParameter.BurstAlignement.LENGTH
    )))

    val logic = add task new Area{
      val bridge = WishboneToBmb(WishboneConfig(14, 32))
      bridge.io.output >> clint.ctrl
    }
  }
  val clintWishbone = clintWishboneBridge.produceIo(clintWishboneBridge.logic.bridge.io.input)

  // External interrupt lines; line 0 is reserved (PLIC convention), 1..31 are mapped.
  val interrupts = in Bits(32 bits)
  for(i <- 1 to 31) yield plic.addInterrupt(interrupts(i), i)

  for ((core, cpuId) <- cores.zipWithIndex) {
    core.cpu.setTimerInterrupt(clint.timerInterrupt(cpuId))
    core.cpu.setSoftwareInterrupt(clint.softwareInterrupt(cpuId))
    // NOTE(review): priorityWidth/mapping were already loaded above; repeating
    // the load for every CPU looks redundant — confirm Handle.load semantics
    // allow (or ignore) repeated loads here.
    plic.priorityWidth.load(2)
    plic.mapping.load(PlicMapping.sifive)
    plic.addTarget(core.cpu.externalInterrupt)
    plic.addTarget(core.cpu.externalSupervisorInterrupt)
    List(clint.logic, core.cpu.logic).produce {
      for (plugin <- core.cpu.config.plugins) plugin match {
        case plugin: CsrPlugin if plugin.utime != null => plugin.utime := clint.logic.io.time
        case _ =>
      }
    }
  }

  clint.cpuCount.load(cpuCount)
}
/** Factory for the per-hart VexRiscvConfig used by the SMP cluster generators. */
object VexRiscvSmpClusterGen {
  /** Builds one hart's plugin configuration. Cache sizes/ways are constrained so a
    * single way never exceeds a 4 KiB page (required for virtually-indexed lookup). */
  def vexRiscvConfig(hartId : Int,
                     ioRange : UInt => Bool = (x => x(31 downto 28) === 0xF),
                     resetVector : Long = 0x80000000l,
                     iBusWidth : Int = 128,
                     dBusWidth : Int = 64,
                     loadStoreWidth : Int = 32,
                     coherency : Boolean = true,
                     atomic : Boolean = true,
                     iCacheSize : Int = 8192,
                     dCacheSize : Int = 8192,
                     iCacheWays : Int = 2,
                     dCacheWays : Int = 2,
                     iBusRelax : Boolean = false,
                     injectorStage : Boolean = false,
                     earlyBranch : Boolean = false,
                     earlyShifterInjection : Boolean = true,
                     dBusCmdMasterPipe : Boolean = false,
                     withMmu : Boolean = true,
                     withSupervisor : Boolean = true,
                     withFloat : Boolean = false,
                     withDouble : Boolean = false,
                     externalFpu : Boolean = true,
                     simHalt : Boolean = false,
                     decoderIsolationBench : Boolean = false,
                     decoderStupid : Boolean = false,
                     regfileRead : RegFileReadKind = plugin.ASYNC,
                     rvc : Boolean = false,
                     iTlbSize : Int = 4,
                     dTlbSize : Int = 4,
                     prediction : BranchPrediction = vexriscv.plugin.NONE,
                     withDataCache : Boolean = true,
                     withInstructionCache : Boolean = true
                    ) = {
    assert(iCacheSize/iCacheWays <= 4096, "Instruction cache ways can't be bigger than 4096 bytes")
    assert(dCacheSize/dCacheWays <= 4096, "Data cache ways can't be bigger than 4096 bytes")
    assert(!(withDouble && !withFloat))

    // Supervisor mode implies an OpenSBI-compatible CSR set; otherwise a minimal
    // machine-mode-only configuration is used.
    val csrConfig = if(withSupervisor){
      CsrPluginConfig.openSbi(mhartid = hartId, misa = Riscv.misaToInt(s"ima${if(withFloat) "f" else ""}${if(withDouble) "d" else ""}s")).copy(utimeAccess = CsrAccess.READ_ONLY)
    } else {
      CsrPluginConfig(
        catchIllegalAccess = true,
        mvendorid      = null,
        marchid        = null,
        mimpid         = null,
        mhartid        = 0,
        misaExtensionsInit = 0,
        misaAccess     = CsrAccess.NONE,
        mtvecAccess    = CsrAccess.READ_WRITE,
        mtvecInit      = null,
        mepcAccess     = CsrAccess.READ_WRITE,
        mscratchGen    = false,
        mcauseAccess   = CsrAccess.READ_ONLY,
        mbadaddrAccess = CsrAccess.READ_ONLY,
        mcycleAccess   = CsrAccess.NONE,
        minstretAccess = CsrAccess.NONE,
        ecallGen       = true,
        ebreakGen      = true,
        wfiGenAsWait   = false,
        wfiGenAsNop    = true,
        ucycleAccess   = CsrAccess.NONE
      )
    }
    val config = VexRiscvConfig(
      plugins = List(
        if(withMmu)new MmuPlugin(
          ioRange = ioRange
        )else new StaticMemoryTranslatorPlugin(
          ioRange = ioRange
        ),
        //Uncomment the whole IBusCachedPlugin and comment IBusSimplePlugin if you want cached iBus config
        if(withInstructionCache) new IBusCachedPlugin(
          resetVector = resetVector,
          compressedGen = rvc,
          prediction = prediction,
          historyRamSizeLog2 = 9,
          relaxPredictorAddress = true,
          injectorStage = injectorStage,
          relaxedPcCalculation = iBusRelax,
          config = InstructionCacheConfig(
            cacheSize = iCacheSize,
            bytePerLine = 64,
            wayCount = iCacheWays,
            addressWidth = 32,
            cpuDataWidth = 32,
            memDataWidth = iBusWidth,
            catchIllegalAccess = true,
            catchAccessFault = true,
            asyncTagMemory = false,
            twoCycleRam = false,
            twoCycleCache = true,
            reducedBankWidth = true
          ),
          memoryTranslatorPortConfig = MmuPortConfig(
            portTlbSize = iTlbSize,
            latency = 1,
            earlyRequireMmuLockup = true,
            earlyCacheHits = true
          )
        ) else new IBusSimplePlugin(
          resetVector = resetVector,
          cmdForkOnSecondStage = false,
          cmdForkPersistence = false,
          // NOTE(review): hard-coded NONE — the `prediction` parameter is ignored
          // when the instruction cache is disabled; confirm this is intended.
          prediction = NONE,
          catchAccessFault = false,
          compressedGen = rvc,
          busLatencyMin = 2,
          vecRspBuffer = true
        ),
        if(withDataCache) new DBusCachedPlugin(
          dBusCmdMasterPipe = dBusCmdMasterPipe || dBusWidth == 32,
          dBusCmdSlavePipe = true,
          dBusRspSlavePipe = true,
          relaxedMemoryTranslationRegister = true,
          config = new DataCacheConfig(
            cacheSize = dCacheSize,
            bytePerLine = 64,
            wayCount = dCacheWays,
            addressWidth = 32,
            cpuDataWidth = loadStoreWidth,
            memDataWidth = dBusWidth,
            catchAccessError = true,
            catchIllegal = true,
            catchUnaligned = true,
            withLrSc = atomic,
            withAmo = atomic,
            withExclusive = coherency,
            withInvalidate = coherency,
            withWriteAggregation = dBusWidth > 32
          ),
          memoryTranslatorPortConfig = MmuPortConfig(
            portTlbSize = dTlbSize,
            latency = 1,
            earlyRequireMmuLockup = true,
            earlyCacheHits = true
          )
        ) else new DBusSimplePlugin(
          catchAddressMisaligned = false,
          catchAccessFault = false,
          earlyInjection = false
        ),
        new DecoderSimplePlugin(
          catchIllegalInstruction = true,
          decoderIsolationBench = decoderIsolationBench,
          stupidDecoder = decoderStupid
        ),
        new RegFilePlugin(
          regFileReadyKind = regfileRead,
          zeroBoot = false,
          x0Init = true
        ),
        new IntAluPlugin,
        new SrcPlugin(
          separatedAddSub = false
        ),
        new FullBarrelShifterPlugin(earlyInjection = earlyShifterInjection),
        //        new LightShifterPlugin,
        new HazardSimplePlugin(
          bypassExecute = true,
          bypassMemory = true,
          bypassWriteBack = true,
          bypassWriteBackBuffer = true,
          pessimisticUseSrc = false,
          pessimisticWriteRegFile = false,
          pessimisticAddressMatch = false
        ),
        new MulPlugin,
        new MulDivIterativePlugin(
          genMul = false,
          genDiv = true,
          mulUnrollFactor = 32,
          divUnrollFactor = 1
        ),
        new CsrPlugin(csrConfig),
        new BranchPlugin(
          earlyBranch = earlyBranch,
          catchAddressMisaligned = true,
          fenceiGenAsAJump = false
        ),
        new YamlPlugin(s"cpu$hartId.yaml")
      )
    )
    // FPU is appended last so it can hook the pipeline built above.
    if(withFloat) config.plugins += new FpuPlugin(
      externalFpu = externalFpu,
      simHalt = simHalt,
      p = FpuParameter(withDouble = withDouble)
    )
    config
  }

  //  def vexRiscvCluster(cpuCount : Int, resetVector : Long = 0x80000000l) = VexRiscvSmpCluster(
  //    debugClockDomain = ClockDomain.current.copy(reset = Bool().setName("debugResetIn")),
  //    p = VexRiscvSmpClusterParameter(
  //      cpuConfigs = List.tabulate(cpuCount) {
  //        vexRiscvConfig(_, resetVector = resetVector)
  //      }
  //    )
  //  )
  //  def main(args: Array[String]): Unit = {
  //    SpinalVerilog {
  //      vexRiscvCluster(4)
  //    }
  //  }
}
//
//
//
//object VexRiscvSmpClusterTestInfrastructure{
// val REPORT_OFFSET = 0xF8000000
// val REPORT_THREAD_ID = 0x00
// val REPORT_THREAD_COUNT = 0x04
// val REPORT_END = 0x08
// val REPORT_BARRIER_START = 0x0C
// val REPORT_BARRIER_END = 0x10
// val REPORT_CONSISTENCY_VALUES = 0x14
//
// val PUTC = 0x00
// val GETC = 0x04
// val CLINT_ADDR = 0x10000
// val CLINT_IPI_ADDR = CLINT_ADDR+0x0000
// val CLINT_CMP_ADDR = CLINT_ADDR+0x4000
// val CLINT_TIME_ADDR = CLINT_ADDR+0xBFF8
//
// def ram(dut : VexRiscvSmpCluster, withStall : Boolean) = {
// import spinal.core.sim._
// val cpuCount = dut.cpus.size
// val ram = new BmbMemoryAgent(0x100000000l){
// case class Report(hart : Int, code : Int, data : Int){
// override def toString: String = {
// f"CPU:$hart%2d ${code}%3x -> $data%3d"
// }
// }
// val reports = ArrayBuffer.fill(cpuCount)(ArrayBuffer[Report]())
//
//
// val writeTable = mutable.HashMap[Int, Int => Unit]()
// val readTable = mutable.HashMap[Int, () => Int]()
// def onWrite(address : Int)(body : Int => Unit) = writeTable(address) = body
// def onRead(address : Int)(body : => Int) = readTable(address) = () => body
//
// var writeData = 0
// var readData = 0
// var reportWatchdog = 0
// val cpuEnd = Array.fill(cpuCount)(false)
// val barriers = mutable.HashMap[Int, Int]()
// var consistancyCounter = 0
// var consistancyLast = 0
// var consistancyA = 0
// var consistancyB = 0
// var consistancyAB = 0
// var consistancyNone = 0
//
// onSimEnd{
// for((list, hart) <- reports.zipWithIndex){
// println(f"\n\n**** CPU $hart%2d ****")
// for((report, reportId) <- list.zipWithIndex){
// println(f" $reportId%3d : ${report.code}%3x -> ${report.data}%3d")
// }
// }
//
// println(s"consistancy NONE:$consistancyNone A:$consistancyA B:$consistancyB AB:$consistancyAB")
// }
//
// override def setByte(address: Long, value: Byte): Unit = {
// if((address & 0xF0000000l) != 0xF0000000l) return super.setByte(address, value)
// val byteId = address & 3
// val mask = 0xFF << (byteId*8)
// writeData = (writeData & ~mask) | ((value.toInt << (byteId*8)) & mask)
// if(byteId != 3) return
// val offset = (address & ~0xF0000000l)-3
// // println(s"W[0x${offset.toHexString}] = $writeData @${simTime()}")
// offset match {
// case _ if offset >= 0x8000000 && offset < 0x9000000 => {
// val report = Report(
// hart = ((offset & 0xFF0000) >> 16).toInt,
// code = (offset & 0x00FFFF).toInt,
// data = writeData
// )
//// println(report)
// reports(report.hart) += report
// reportWatchdog += 1
// import report._
// code match {
// case REPORT_THREAD_ID => assert(data == hart)
// case REPORT_THREAD_COUNT => assert(data == cpuCount)
// case REPORT_END => assert(data == 0); assert(cpuEnd(hart) == false); cpuEnd(hart) = true; if(!cpuEnd.exists(_ == false)) simSuccess()
// case REPORT_BARRIER_START => {
// val counter = barriers.getOrElse(data, 0)
// assert(counter < cpuCount)
// barriers(data) = counter + 1
// }
// case REPORT_BARRIER_END => {
// val counter = barriers.getOrElse(data, 0)
// assert(counter == cpuCount)
// }
// case REPORT_CONSISTENCY_VALUES => consistancyCounter match {
// case 0 => {
// consistancyCounter = 1
// consistancyLast = data
// }
// case 1 => {
// consistancyCounter = 0
// (data, consistancyLast) match {
// case (666, 0) => consistancyA += 1
// case (0, 666) => consistancyB += 1
// case (666, 666) => consistancyAB += 1
// case (0,0) => consistancyNone += 1; simFailure("Consistancy issue :(")
// }
// }
// }
// }
// }
// case _ => writeTable.get(offset.toInt) match {
// case Some(x) => x(writeData)
// case _ => simFailure(f"\n\nWrite at ${address-3}%8x with $writeData%8x")
// }
// }
// }
//
// override def getByte(address: Long): Byte = {
// if((address & 0xF0000000l) != 0xF0000000l) return super.getByte(address)
// val byteId = address & 3
// val offset = (address & ~0xF0000000l)
// if(byteId == 0) readData = readTable.get(offset.toInt) match {
// case Some(x) => x()
// case _ => simFailure(f"\n\nRead at $address%8x")
// }
// (readData >> (byteId*8)).toByte
// }
//
// val clint = new {
// val cmp = Array.fill(cpuCount)(0l)
// var time = 0l
// periodicaly(100){
// time += 10
// var timerInterrupts = 0l
// for(i <- 0 until cpuCount){
// if(cmp(i) < time) timerInterrupts |= 1l << i
// }
// dut.io.timerInterrupts #= timerInterrupts
// }
//
//// delayed(200*1000000){
//// dut.io.softwareInterrupts #= 0xE
//// enableSimWave()
//// println("force IPI")
//// }
// }
//
// onWrite(PUTC)(data => print(data.toChar))
// onRead(GETC)( if(System.in.available() != 0) System.in.read() else -1)
//
// dut.io.softwareInterrupts #= 0
// dut.io.timerInterrupts #= 0
// dut.io.externalInterrupts #= 0
// dut.io.externalSupervisorInterrupts #= 0
// onRead(CLINT_TIME_ADDR)(clint.time.toInt)
// onRead(CLINT_TIME_ADDR+4)((clint.time >> 32).toInt)
// for(hartId <- 0 until cpuCount){
// onWrite(CLINT_IPI_ADDR + hartId*4) {data =>
// val mask = 1l << hartId
// val value = (dut.io.softwareInterrupts.toLong & ~mask) | (if(data == 1) mask else 0)
// dut.io.softwareInterrupts #= value
// }
//// onRead(CLINT_CMP_ADDR + hartId*8)(clint.cmp(hartId).toInt)
//// onRead(CLINT_CMP_ADDR + hartId*8+4)((clint.cmp(hartId) >> 32).toInt)
// onWrite(CLINT_CMP_ADDR + hartId*8){data => clint.cmp(hartId) = (clint.cmp(hartId) & 0xFFFFFFFF00000000l) | data}
// onWrite(CLINT_CMP_ADDR + hartId*8+4){data => clint.cmp(hartId) = (clint.cmp(hartId) & 0x00000000FFFFFFFFl) | (data.toLong << 32)}
// }
//
//
//
// }
// dut.io.iMems.foreach(ram.addPort(_,0,dut.clockDomain,true, withStall))
// ram.addPort(dut.io.dMem,0,dut.clockDomain,true, withStall)
// ram
// }
// def init(dut : VexRiscvSmpCluster): Unit ={
// import spinal.core.sim._
// dut.clockDomain.forkStimulus(10)
// dut.debugClockDomain.forkStimulus(10)
// dut.io.debugBus.cmd.valid #= false
// }
//}
//
//object VexRiscvSmpClusterTest extends App{
// import spinal.core.sim._
//
// val simConfig = SimConfig
// simConfig.withWave
// simConfig.allOptimisation
// simConfig.addSimulatorFlag("--threads 1")
//
// val cpuCount = 4
// val withStall = true
//
// simConfig.compile(VexRiscvSmpClusterGen.vexRiscvCluster(cpuCount)).doSimUntilVoid(seed = 42){dut =>
// disableSimWave()
// SimTimeout(100000000l*10*cpuCount)
// dut.clockDomain.forkSimSpeedPrinter(1.0)
// VexRiscvSmpClusterTestInfrastructure.init(dut)
// val ram = VexRiscvSmpClusterTestInfrastructure.ram(dut, withStall)
// ram.memory.loadBin(0x80000000l, "src/test/cpp/raw/smp/build/smp.bin")
// periodicaly(20000*10){
// assert(ram.reportWatchdog != 0)
// ram.reportWatchdog = 0
// }
// }
//}
//
//// echo "echo 10000 | dhrystone >> log" > test
//// time sh test &
//// top -b -n 1
//
//// TODO
//// MultiChannelFifo.toStream arbitration
//// BmbDecoderOutOfOrder arbitration
//// DataCache to bmb invalidation that are more than single line
//object VexRiscvSmpClusterOpenSbi extends App{
// import spinal.core.sim._
//
// val simConfig = SimConfig
// simConfig.withWave
// simConfig.allOptimisation
// simConfig.addSimulatorFlag("--threads 1")
//
// val cpuCount = 2
// val withStall = false
//
// def gen = {
// val dut = VexRiscvSmpClusterGen.vexRiscvCluster(cpuCount, resetVector = 0x80000000l)
// dut.cpus.foreach{cpu =>
// cpu.core.children.foreach{
// case cache : InstructionCache => cache.io.cpu.decode.simPublic()
// case _ =>
// }
// }
// dut
// }
//
// simConfig.workspaceName("rawr_4c").compile(gen).doSimUntilVoid(seed = 42){dut =>
//// dut.clockDomain.forkSimSpeedPrinter(1.0)
// VexRiscvSmpClusterTestInfrastructure.init(dut)
// val ram = VexRiscvSmpClusterTestInfrastructure.ram(dut, withStall)
//// ram.memory.loadBin(0x80000000l, "../opensbi/build/platform/spinal/vexriscv/sim/smp/firmware/fw_payload.bin")
//
//// ram.memory.loadBin(0x40F00000l, "/media/data/open/litex_smp/litex_vexriscv_smp/images/fw_jump.bin")
//// ram.memory.loadBin(0x40000000l, "/media/data/open/litex_smp/litex_vexriscv_smp/images/Image")
//// ram.memory.loadBin(0x40EF0000l, "/media/data/open/litex_smp/litex_vexriscv_smp/images/dtb")
//// ram.memory.loadBin(0x41000000l, "/media/data/open/litex_smp/litex_vexriscv_smp/images/rootfs.cpio")
//
// ram.memory.loadBin(0x80000000l, "../opensbi/build/platform/spinal/vexriscv/sim/smp/firmware/fw_jump.bin")
// ram.memory.loadBin(0xC0000000l, "../buildroot/output/images/Image")
// ram.memory.loadBin(0xC1000000l, "../buildroot/output/images/dtb")
// ram.memory.loadBin(0xC2000000l, "../buildroot/output/images/rootfs.cpio")
//
// import spinal.core.sim._
// var iMemReadBytes, dMemReadBytes, dMemWriteBytes, iMemSequencial,iMemRequests, iMemPrefetchHit = 0l
// var reportTimer = 0
// var reportCycle = 0
// val iMemFetchDelta = mutable.HashMap[Long, Long]()
// var iMemFetchDeltaSorted : Seq[(Long, Long)] = null
// var dMemWrites, dMemWritesCached = 0l
// val dMemWriteCacheCtx = List(4,8,16,32,64).map(bytes => new {
// var counter = 0l
// var address = 0l
// val mask = ~((1 << log2Up(bytes))-1)
// })
//
// import java.io._
// val csv = new PrintWriter(new File("bench.csv" ))
// val iMemCtx = Array.tabulate(cpuCount)(i => new {
// var sequencialPrediction = 0l
// val cache = dut.cpus(i).core.children.find(_.isInstanceOf[InstructionCache]).head.asInstanceOf[InstructionCache].io.cpu.decode
// var lastAddress = 0l
// })
// dut.clockDomain.onSamplings{
// dut.io.time #= simTime()/10
//
//
// for(i <- 0 until cpuCount; iMem = dut.io.iMems(i); ctx = iMemCtx(i)){
//// if(iMem.cmd.valid.toBoolean && iMem.cmd.ready.toBoolean){
//// val length = iMem.cmd.length.toInt + 1
//// val address = iMem.cmd.address.toLong
//// iMemReadBytes += length
//// iMemRequests += 1
//// }
// if(ctx.cache.isValid.toBoolean && !ctx.cache.mmuRefilling.toBoolean && !ctx.cache.mmuException.toBoolean){
// val address = ctx.cache.physicalAddress.toLong
// val length = ctx.cache.p.bytePerLine.toLong
// val mask = ~(length-1)
// if(ctx.cache.cacheMiss.toBoolean) {
// iMemReadBytes += length
// if ((address & mask) == (ctx.sequencialPrediction & mask)) {
// iMemSequencial += 1
// }
// }
// if(!ctx.cache.isStuck.toBoolean) {
// ctx.sequencialPrediction = address + length
// }
// }
//
// if(iMem.cmd.valid.toBoolean && iMem.cmd.ready.toBoolean){
// val address = iMem.cmd.address.toLong
// iMemRequests += 1
// if(iMemCtx(i).lastAddress + ctx.cache.p.bytePerLine == address){
// iMemPrefetchHit += 1
// }
// val delta = address-iMemCtx(i).lastAddress
// iMemFetchDelta(delta) = iMemFetchDelta.getOrElse(delta, 0l) + 1l
// if(iMemRequests % 1000 == 999) iMemFetchDeltaSorted = iMemFetchDelta.toSeq.sortBy(_._1)
// iMemCtx(i).lastAddress = address
// }
// }
// if(dut.io.dMem.cmd.valid.toBoolean && dut.io.dMem.cmd.ready.toBoolean){
// if(dut.io.dMem.cmd.opcode.toInt == Bmb.Cmd.Opcode.WRITE){
// dMemWriteBytes += dut.io.dMem.cmd.length.toInt+1
// val address = dut.io.dMem.cmd.address.toLong
// dMemWrites += 1
// for(ctx <- dMemWriteCacheCtx){
// if((address & ctx.mask) == (ctx.address & ctx.mask)){
// ctx.counter += 1
// } else {
// ctx.address = address
// }
// }
// }else {
// dMemReadBytes += dut.io.dMem.cmd.length.toInt+1
// for(ctx <- dMemWriteCacheCtx) ctx.address = -1
// }
// }
// reportTimer = reportTimer + 1
// reportCycle = reportCycle + 1
// if(reportTimer == 400000){
// reportTimer = 0
//// println(f"\n** c=${reportCycle} ir=${iMemReadBytes*1e-6}%5.2f dr=${dMemReadBytes*1e-6}%5.2f dw=${dMemWriteBytes*1e-6}%5.2f **\n")
//
//
// csv.write(s"$reportCycle,$iMemReadBytes,$dMemReadBytes,$dMemWriteBytes,$iMemRequests,$iMemSequencial,$dMemWrites,${dMemWriteCacheCtx.map(_.counter).mkString(",")},$iMemPrefetchHit\n")
// csv.flush()
// reportCycle = 0
// iMemReadBytes = 0
// dMemReadBytes = 0
// dMemWriteBytes = 0
// iMemRequests = 0
// iMemSequencial = 0
// dMemWrites = 0
// iMemPrefetchHit = 0
// for(ctx <- dMemWriteCacheCtx) ctx.counter = 0
// }
// }
//
//
//// fork{
//// disableSimWave()
//// val atMs = 3790
//// val durationMs = 5
//// sleep(atMs*1000000)
//// enableSimWave()
//// println("** enableSimWave **")
//// sleep(durationMs*1000000)
//// println("** disableSimWave **")
//// while(true) {
//// disableSimWave()
//// sleep(100000 * 10)
//// enableSimWave()
//// sleep( 100 * 10)
//// }
////// simSuccess()
//// }
//
// fork{
// while(true) {
// disableSimWave()
// sleep(100000 * 10)
// enableSimWave()
// sleep( 100 * 10)
// }
// }
// }
//}
| SpinalHDL/VexRiscv | src/main/scala/vexriscv/demo/smp/VexRiscvSmpCluster.scala | Scala | mit | 27,995 |
package org.camunda.feel.impl
import java.time.ZonedDateTime
import org.camunda.feel.FeelEngineClock
/**
 * A [[FeelEngineClock]] for tests that can be frozen at a fixed point in
 * time ("time travel") and later reset back to the system clock.
 */
class TimeTravelClock extends FeelEngineClock {

  // real clock used whenever no fixed time is installed
  private val systemClock = FeelEngineClock.SystemClock

  // supplier of "now"; swapped out when a fixed time is installed
  private var provider: () => ZonedDateTime = () => systemClock.getCurrentTime

  /**
   * Freezes the clock at the given instant until [[reset]] is called.
   *
   * Fix: was declared with deprecated procedure syntax (`def f(...) { }`);
   * now declares an explicit `Unit` result, consistent with [[reset]].
   */
  def currentTime(currentTime: ZonedDateTime): Unit = {
    provider = () => currentTime
  }

  /** Restores delegation to the system clock. */
  def reset(): Unit = {
    provider = () => systemClock.getCurrentTime
  }

  /** Returns the frozen time if one is set, otherwise the current system time. */
  override def getCurrentTime: ZonedDateTime = provider()
}
| camunda/feel-scala | src/test/scala/org/camunda/feel/impl/TimeTravelClock.scala | Scala | apache-2.0 | 513 |
// Copyright 2016 Foursquare Labs Inc. All Rights Reserved.
package io.fsq.rogue.types
// Marker trait with no members; by its name it tags types whose use with
// Mongo is disallowed — the enforcement lives elsewhere, not in this file.
trait MongoDisallowed
| foursquare/fsqio | src/jvm/io/fsq/rogue/types/MongoDisallowed.scala | Scala | apache-2.0 | 111 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.adaptive
import java.io.{PrintWriter, StringWriter}
import org.scalactic.source.Position
import org.scalatest.Tag
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SQLTestUtils
/**
 * Test with this tag will be ignored if the test suite extends `EnableAdaptiveExecutionSuite`.
 * Otherwise, it will be executed with adaptive execution disabled.
 *
 * @param reason human-readable explanation of why AQE must be off for the tagged test
 */
case class DisableAdaptiveExecution(reason: String) extends Tag("DisableAdaptiveExecution")
/**
 * Helper trait that enables AQE for all tests regardless of default config values, except that
 * tests tagged with [[DisableAdaptiveExecution]] will be skipped.
 */
trait EnableAdaptiveExecutionSuite extends SQLTestUtils {
  // whether AQE is force-applied even to plans that would not normally trigger it
  protected val forceApply = true

  override protected def test(testName: String, testTags: Tag*)(testFun: => Any)
    (implicit pos: Position): Unit = {
    if (testTags.exists(_.isInstanceOf[DisableAdaptiveExecution])) {
      // we ignore the test here but assume that another test suite which extends
      // `DisableAdaptiveExecutionSuite` will test it anyway to ensure test coverage
      ignore(testName + " (disabled when AQE is on)", testTags: _*)(testFun)
    } else {
      // run the test body with AQE enabled (and forced, by default)
      super.test(testName, testTags: _*) {
        withSQLConf(
          SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "true",
          SQLConf.ADAPTIVE_EXECUTION_FORCE_APPLY.key -> forceApply.toString) {
          testFun
        }
      }
    }
  }
}
/**
 * Helper trait that disables AQE for all tests regardless of default config values.
 */
trait DisableAdaptiveExecutionSuite extends SQLTestUtils {
  override protected def test(testName: String, testTags: Tag*)(testFun: => Any)
    (implicit pos: Position): Unit = {
    // every test runs with AQE off, including DisableAdaptiveExecution-tagged ones
    super.test(testName, testTags: _*) {
      withSQLConf(SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "false") {
        testFun
      }
    }
  }
}
object AdaptiveTestUtils {

  /** Asserts that `expected` occurs somewhere in the full stack trace of `e`. */
  def assertExceptionMessage(e: Exception, expected: String): Unit = {
    val traceWriter = new StringWriter()
    e.printStackTrace(new PrintWriter(traceWriter))
    val fullTrace = traceWriter.toString
    assert(fullTrace.contains(expected))
  }

  /** Asserts that some (possibly transitive) cause of `t` is an instance of `causeClass`. */
  def assertExceptionCause(t: Throwable, causeClass: Class[_]): Unit = {
    // walk the cause chain lazily, stopping at the first match or at null
    val causeChain = Iterator.iterate(t.getCause)(_.getCause).takeWhile(_ != null)
    val foundCause = causeChain.exists(cause => causeClass.isAssignableFrom(cause.getClass))
    assert(foundCause, s"Can not find cause: $causeClass")
  }
}
| zuotingbing/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveTestUtils.scala | Scala | apache-2.0 | 3,339 |
/*
* Copyright (C) 2012 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.plugin.environment.ssh
import java.io.File
import org.openmole.core.batch.authentication.CypheredPassword
import org.openmole.core.workspace.AuthenticationProvider
object PrivateKey {
  /** Convenience factory mirroring the class constructor. */
  def apply(
    privateKey: File,
    login: String,
    cypheredPassword: String,
    target: String) = new PrivateKey(privateKey, login, cypheredPassword, target)
}
/**
 * SSH authentication using a private key file.
 *
 * @param privateKey       the private key file on the local file system
 * @param login            remote user name
 * @param cypheredPassword encrypted key passphrase (decryption comes from [[CypheredPassword]])
 * @param target           identifier of the remote endpoint this authentication applies to
 */
class PrivateKey(
    val privateKey: File,
    val login: String,
    val cypheredPassword: String,
    val target: String) extends SSHAuthentication with CypheredPassword { a ⇒
  // Builds the gridscale authentication object; `a.password` is provided by
  // CypheredPassword — presumably the decrypted passphrase (TODO confirm).
  override def apply(implicit authenticationProvider: AuthenticationProvider) = new fr.iscpif.gridscale.ssh.SSHPrivateKeyAuthentication {
    val privateKey = a.privateKey
    val password = a.password
    val user = a.login
  }
  // Human-readable description for listings; never includes the passphrase.
  override def toString =
    super.toString +
      ", PrivateKey = " + privateKey +
      ", Login = " + login
}
| ISCPIF/PSEExperiments | openmole-src/openmole/plugins/org.openmole.plugin.environment.ssh/src/main/scala/org/openmole/plugin/environment/ssh/PrivateKey.scala | Scala | agpl-3.0 | 1,600 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v2
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalInteger, Input}
// CT600 (v2) box B39: optional user-entered count of associated companies in the period.
case class B39(value: Option[Int]) extends CtBoxIdentifier("Number of associated companies in this period") with CtOptionalInteger with Input
| scottcutts/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600/v2/B39.scala | Scala | apache-2.0 | 851 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.tail.internal
import cats.effect.Sync
import cats.syntax.all._
import monix.execution.atomic.Atomic
import monix.execution.internal.Platform
import monix.execution.UncaughtExceptionReporter.{default => Logger}
import monix.tail.Iterant
import monix.tail.Iterant.{Concat, Halt, Last, Next, NextBatch, NextCursor, Scope, Suspend}
import monix.tail.batches.{Batch, BatchCursor}
import scala.annotation.tailrec
import scala.collection.mutable.ArrayBuffer
import scala.util.control.NonFatal
private[tail] object IterantAttempt {
  /**
   * Implementation for `Iterant.attempt`.
   */
  def apply[F[_], A](fa: Iterant[F, A])(implicit F: Sync[F]): Iterant[F, Either[Throwable, A]] = {
    // Suspending execution in order to preserve laziness and
    // referential transparency
    Suspend(F.delay(new AttemptVisitor[F, A].apply(fa)))
  }

  // Rewrites every node of the source into a node of `Either[Throwable, A]`;
  // the first error encountered is materialized as a final `Left` element
  // instead of terminating the stream with a failure.
  private final class AttemptVisitor[F[_], A](implicit F: Sync[F])
    extends Iterant.Visitor[F, A, Iterant[F, Either[Throwable, A]]] {
    self =>

    type Attempt = Either[Throwable, A]

    // set once an error has been surfaced as a `Left`; used to drop the
    // right-hand side of a `Concat` after an error was already emitted
    private[this] var wasErrorHandled = false

    private[this] val handleError = (e: Throwable) => {
      self.wasErrorHandled = true
      Left(e): Attempt
    }

    def visit(ref: Next[F, A]): Iterant[F, Either[Throwable, A]] =
      Next(Right(ref.item), continueWith(ref.rest))

    def visit(ref: NextBatch[F, A]): Iterant[F, Either[Throwable, A]] = {
      val NextBatch(batch, rest) = ref
      // only cursor creation is protected by this catch; exceptions thrown
      // later by `handleCursor` must not be swallowed here
      var signalError = true
      try {
        val cursor = batch.cursor()
        signalError = false
        handleCursor(NextCursor(cursor, rest), cursor, rest)
      } catch {
        case e if NonFatal(e) && signalError =>
          Iterant.now(self.handleError(e))
      }
    }

    def visit(ref: NextCursor[F, A]): Iterant[F, Either[Throwable, A]] =
      handleCursor(ref, ref.cursor, ref.rest)

    def visit(ref: Suspend[F, A]): Iterant[F, Either[Throwable, A]] =
      Suspend(continueWith(ref.rest))

    def visit(ref: Concat[F, A]): Iterant[F, Either[Throwable, A]] =
      Concat(ref.lh.map(this), F.suspend {
        // if the left-hand side already surfaced an error, skip the right-hand side
        if (self.wasErrorHandled)
          F.pure(Iterant.empty[F, Attempt])
        else
          ref.rh.map(this)
      })

    def visit[S](ref: Scope[F, S, A]): Iterant[F, Attempt] = {
      val Scope(acquire, use, release) = ref
      Suspend(F.delay {
        // errors from acquire/use/release are accumulated here and emitted
        // (or only logged, if an error was already handled) after the scope
        val errors = Atomic(null: Throwable)
        val lh: Iterant[F, Attempt] =
          Scope[F, Either[Throwable, S], Attempt](
            acquire.attempt,
            es =>
              F.pure(es).flatMap {
                case Left(e) =>
                  pushError(errors, e)
                  F.pure(Iterant.empty)
                case Right(s) =>
                  try {
                    use(s).handleError { e =>
                      pushError(errors, e)
                      Iterant.empty
                    }.map(this)
                  } catch {
                    case NonFatal(e) =>
                      pushError(errors, e)
                      F.pure(Iterant.empty)
                  }
              },
            (es, exit) => {
              es match {
                case Left(_) => F.unit
                case Right(s) =>
                  try F.handleError(release(s, exit)) { e =>
                    pushError(errors, e)
                  } catch {
                    case NonFatal(e) =>
                      F.delay(pushError(errors, e))
                  }
              }
            }
          )
        Concat(
          F.pure(lh),
          F.delay {
            val err = errors.getAndSet(null)
            if (err != null) {
              if (!wasErrorHandled)
                Last(handleError(err))
              else {
                // an error was already emitted downstream; avoid emitting twice
                Logger.reportFailure(err)
                Iterant.empty
              }
            } else {
              Iterant.empty
            }
          }
        )
      })
    }

    def visit(ref: Last[F, A]): Iterant[F, Either[Throwable, A]] =
      Last(Right(ref.item))

    def visit(ref: Halt[F, A]): Iterant[F, Either[Throwable, A]] =
      ref.e match {
        case None => ref.asInstanceOf[Iterant[F, Attempt]]
        case Some(error) => Last(handleError(error))
      }

    def fail(e: Throwable): Iterant[F, Either[Throwable, A]] =
      Iterant.raiseError(e)

    private[this] val continueMapRef: Either[Throwable, Iterant[F, A]] => Iterant[F, Attempt] = {
      case Left(e) =>
        Iterant.now(handleError(e))
      case Right(iter) =>
        self(iter)
    }

    // evaluates the rest of the stream, converting a failed `F` into a `Left` element
    private def continueWith(rest: F[Iterant[F, A]]): F[Iterant[F, Attempt]] =
      rest.attempt.map(continueMapRef)

    // drains up to `recommendedBatchSize` items from the cursor into `Right`
    // values; an exception thrown by the cursor becomes the terminating `Left`
    private def handleCursor(
      node: NextCursor[F, A],
      cursor: BatchCursor[A],
      rest: F[Iterant[F, A]]): Iterant[F, Attempt] = {
      try {
        val array = extractFromCursor(cursor)
        val next =
          if (cursor.hasNext()) F.delay(self(node))
          else continueWith(rest)
        if (array.length != 0)
          NextBatch(Batch.fromArray(array), next)
        else
          Suspend(next)
      } catch {
        case e if NonFatal(e) => Iterant.pure(handleError(e))
      }
    }

    private def extractFromCursor(ref: BatchCursor[A]): Array[Attempt] = {
      var size = ref.recommendedBatchSize
      val buffer = ArrayBuffer.empty[Attempt]
      while (size > 0 && ref.hasNext()) {
        buffer += Right(ref.next())
        size -= 1
      }
      buffer.toArray[Attempt]
    }

    // atomically folds `e` into `ref`, composing with any error already stored
    @tailrec
    private def pushError(ref: Atomic[Throwable], e: Throwable): Unit = {
      val current = ref.get()
      val update = current match {
        case null => e
        case e0 => Platform.composeErrors(e0, e)
      }
      if (!ref.compareAndSet(current, update))
        pushError(ref, e)
    }
  }
}
| monifu/monix | monix-tail/shared/src/main/scala/monix/tail/internal/IterantAttempt.scala | Scala | apache-2.0 | 6,437 |
object Asshole {
  /** Entry point: prints a fixed message to stdout. Explicit `Unit` return type added. */
  def main(args: Array[String]): Unit = println("Asshole!")
}
package com.twitter.algebird
import org.scalacheck.Arbitrary
import org.scalacheck.Gen.choose
import org.scalacheck.Properties
import org.scalacheck.Prop.forAll
// ScalaCheck property suite verifying algebraic laws (semigroup / monoid /
// group / ring) for standard collection instances and `MapAlgebra` utilities.
object CollectionSpecification extends Properties("Collections") {
  import BaseProperties._

  // wrap arbitrary values in Min/Max so their semigroups can be law-checked
  implicit def arbMin[T:Arbitrary] : Arbitrary[Min[T]] =
    Arbitrary { implicitly[Arbitrary[T]].arbitrary.map{ x => Min(x) } }
  implicit def arbMax[T:Arbitrary] : Arbitrary[Max[T]] =
    Arbitrary { implicitly[Arbitrary[T]].arbitrary.map{ x => Max(x) } }
  property("MinSemigroup is a commutative semigroup") = commutativeSemigroupLaws[Min[Int]]
  property("MaxSemigroup is a commutative semigroup") = commutativeSemigroupLaws[Max[Int]]
  property("Min[Int] is a monoid") = monoidLaws[Min[Int]]
  property("Max[String] is a monoid") = monoidLaws[Max[String]]
  property("Max[List[Int]] is a monoid") = monoidLaws[Max[List[Int]]]
  property("Either is a Semigroup") = semigroupLaws[Either[String,Int]]
  property("Either is a Semigroup, with a Right non-monoid semigroup") = semigroupLaws[Either[String,Max[Int]]]
  property("Option Monoid laws") = monoidLaws[Option[Int]] && monoidLaws[Option[String]]
  // plus on sequences must agree with concatenation, and zero with empty
  property("List plus") = forAll { (a : List[Int], b : List[Int]) =>
    val mon = implicitly[Monoid[List[Int]]]
    (a ++ b == mon.plus(a,b)) && (mon.zero == List[Int]())
  }
  property("List Monoid laws") = monoidLaws[List[Int]]
  implicit def arbSeq[T:Arbitrary] : Arbitrary[Seq[T]] =
    Arbitrary { implicitly[Arbitrary[List[T]]].arbitrary.map { _.toSeq } }
  property("Seq plus") = forAll { (a : Seq[Int], b : Seq[Int]) =>
    val mon = implicitly[Monoid[Seq[Int]]]
    (a ++ b == mon.plus(a,b)) && (mon.zero == Seq[Int]())
  }
  property("Seq Monoid laws") = monoidLaws[Seq[Int]]
  property("Set plus") = forAll { (a : Set[Int], b : Set[Int]) =>
    val mon = implicitly[Monoid[Set[Int]]]
    (a ++ b == mon.plus(a,b)) && (mon.zero == Set[Int]())
  }
  property("Set Monoid laws") = monoidLaws[Set[Int]]
  // generated maps never contain zero values, matching the Monoid's invariant
  implicit def mapArb[K : Arbitrary, V : Arbitrary : Monoid] = Arbitrary {
    val mv = implicitly[Monoid[V]]
    implicitly[Arbitrary[Map[K,V]]]
      .arbitrary
      .map { _.filter { kv => mv.isNonZero(kv._2) } }
  }
  property("Map plus/times keys") = forAll { (a : Map[Int,Int], b : Map[Int,Int]) =>
    val rng = implicitly[Ring[Map[Int,Int]]]
    (rng.zero == Map[Int,Int]()) &&
    // Subsets because zeros are removed from the times/plus values
    (rng.times(a,b)).keys.toSet.subsetOf((a.keys.toSet & b.keys.toSet)) &&
    (rng.plus(a,b)).keys.toSet.subsetOf((a.keys.toSet | b.keys.toSet)) &&
    (rng.plus(a,a).keys == (a.filter { kv => (kv._2 + kv._2) != 0 }).keys)
  }
  property("Map[Int,Int] Monoid laws") = isAssociative[Map[Int,Int]] && weakZero[Map[Int,Int]]
  property("Map[Int,Int] has -") = hasAdditiveInverses[Map[Int,Int]]
  property("Map[Int,String] Monoid laws") = isAssociative[Map[Int,String]] && weakZero[Map[Int,String]]
  // We haven't implemented ring.one yet for the Map, so skip the one property
  property("Map is distributive") = isDistributive[Map[Int,Int]]
  implicit def arbIndexedSeq[T:Arbitrary] : Arbitrary[IndexedSeq[T]] =
    Arbitrary { implicitly[Arbitrary[List[T]]].arbitrary.map { _.toIndexedSeq } }
  property("IndexedSeq (of a Semigroup) is a semigroup") = semigroupLaws[IndexedSeq[Max[Int]]]
  // TODO: this test fails sometimes due to the equiv not doing the right thing.
  // Fix by defining and Equiv and having all the properties use an implicit Equiv
  property("IndexedSeq is a pseudoRing") = pseudoRingLaws[IndexedSeq[Int]]
  property("Either is a Monoid") = monoidLaws[Either[String,Int]]
  property("MapAlgebra.removeZeros works") = forAll { (m: Map[Int,Int]) =>
    !MapAlgebra.removeZeros(m).values.toSet.contains(0)
  }
  property("Monoid.sum performs w/ or w/o MapAlgebra.removeZeros") =
    forAll { (m: Map[Int,Int]) =>
      Monoid.sum(m) == Monoid.sum(MapAlgebra.removeZeros(m))
    }
  // sumByKey must agree with a groupBy-then-sum reference implementation
  property("sumByKey works") = forAll { (keys : List[Int], values: List[Int]) =>
    import Operators._
    val tupList = keys.zip(values)
    tupList.sumByKey.filter { _._2 != 0 } ==
      tupList.groupBy { _._1 }
        .mapValues { v => v.map { _._2 }.sum }
        .filter { _._2 != 0 }
  }
  property("MapAlgebra.dot works") = forAll { (m1: Map[Int,Int], m2: Map[Int,Int]) =>
    // .toList below is to make sure we don't remove duplicate values
    MapAlgebra.dot(m1, m2) ==
      (m1.keySet ++ m2.keySet).toList.map { k => m1.getOrElse(k,0) * m2.getOrElse(k,0) }.sum
  }
  property("MapAlgebra.toGraph is correct") = forAll { (l: Set[(Int,Int)]) =>
    MapAlgebra.toGraph(l).toIterable.flatMap { case (k,sv) => sv.map { v => (k,v) } }.toSet == l
  }
  // invert followed by re-aggregation must round-trip back to the original map
  property("MapAlgebra.invert works") = forAll { (m : Map[Int,Int]) =>
    val m2 = MapAlgebra.invert(m)
    val m3 = Monoid.sum( for((v,ks) <- m2.toIterable; k <- ks.toIterable) yield Map(k -> v))
    m3 == m
  }
  property("MapAlgebra.invertExact works") = forAll { (m : Map[Option[Int],Set[Int]]) =>
    MapAlgebra.invertExact(MapAlgebra.invertExact(m)) == m
  }
  // join must preserve both sides and union the key sets
  property("MapAlgebra.join works") = forAll { (m1: Map[Int, Int], m2: Map[Int,Int]) =>
    val m3 = MapAlgebra.join(m1, m2)
    val m1after = m3.mapValues { vw => vw._1 }.filter { _._2.isDefined }.mapValues { _.get }
    val m2after = m3.mapValues { vw => vw._2 }.filter { _._2.isDefined }.mapValues { _.get }
    (m1after == m1) && (m2after == m2) && (m3.keySet == (m1.keySet | m2.keySet))
  }
  implicit def arbAV[T:Arbitrary:Monoid] : Arbitrary[AdaptiveVector[T]] =
    Arbitrary {
      Arbitrary.arbitrary[List[T]]
        .map { l =>
          AdaptiveVector.fromVector(Vector(l :_*), Monoid.zero[T])
        }
    }
  property("AdaptiveVector[Int] has a semigroup") = semigroupLawsEq[AdaptiveVector[Int]](Equiv[AdaptiveVector[Int]].equiv)
  property("AdaptiveVector[Int] has a monoid") = monoidLawsEq[AdaptiveVector[Int]](Equiv[AdaptiveVector[Int]].equiv)
  property("AdaptiveVector[Int] has a group") = groupLawsEq[AdaptiveVector[Int]](Equiv[AdaptiveVector[Int]].equiv)
  property("AdaptiveVector[String] has a monoid") = monoidLawsEq[AdaptiveVector[String]](Equiv[AdaptiveVector[String]].equiv)
}
| snoble/algebird | algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala | Scala | apache-2.0 | 6,174 |
package io.flatmap.ml.util
import breeze.plot._
import breeze.linalg._
import io.flatmap.ml.som.SelfOrganizingMap._
import scala.util.Try
object Plot {

  /**
   * Renders a SOM codebook as a heat-map image and saves it to `path`.
   *
   * @param title            figure title
   * @param cb               codebook whose neuron weight vectors are visualized
   * @param path             output file path (format derived from the extension)
   * @param weightsMappingFn collapses a neuron's weight vector into one scalar for coloring
   * @return the result of the plotting attempt wrapped in `Try`
   */
  def som(title: String, cb: CodeBook, path: String)(weightsMappingFn: List[Double] => Double) = Try {
    val f = Figure(title)
    f.subplot(0) += image(cb.mapValues(x => weightsMappingFn(x.toList)))
    f.saveas(path)
  }

  /**
   * Plots the per-epoch error curve and saves the figure.
   *
   * Fix: the output path was hard-coded to "errors.png", inconsistent with
   * [[som]]; it is now a parameter with the old value as default, so existing
   * callers are unaffected.
   *
   * @param es   error value per epoch, in epoch order
   * @param path output file path (defaults to "errors.png")
   * @return the result of the plotting attempt wrapped in `Try`
   */
  def errors(es: List[Double], path: String = "errors.png") = Try {
    val f = Figure("Errors")
    val p = f.subplot(0)
    val x = linspace(0.0, es.length.toDouble, es.length)
    val errs = DenseVector(es.toArray)
    p += plot(x, errs, colorcode = "red", labels = (i) => f"${errs(i)}%1.4f")
    p.xlabel = "Epoch"
    p.ylabel = "Error"
    f.saveas(path)
  }
}
| ShokuninSan/som | src/main/scala/io/flatmap/ml/util/Plot.scala | Scala | mit | 729 |
/**
* Copyright 2011-2016 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.charts.stats.buffers
import io.gatling.core.stats.{ Percentiles, PercentilesVsTimePlot }
import com.tdunning.math.stats.{ AVLTreeDigest, TDigest }
private[stats] class PercentilesBuffers(buckets: Array[Int]) {

  // one optional t-digest per time bucket, created lazily on first update
  val digests: Array[Option[TDigest]] = Array.fill(buckets.length)(None)

  /** Records `value` into the digest of the given bucket, creating it if needed. */
  def update(bucketNumber: Int, value: Int): Unit = {
    val digest = digests(bucketNumber) match {
      case Some(existing) => existing
      case None =>
        val fresh = new AVLTreeDigest(100.0)
        digests(bucketNumber) = Some(fresh)
        fresh
    }
    digest.add(value)
  }

  /** Lazily maps each bucket to its percentile summary (None for empty buckets). */
  def percentiles: Iterable[PercentilesVsTimePlot] =
    for ((maybeDigest, bucketNumber) <- digests.view.zipWithIndex) yield {
      val time = buckets(bucketNumber)
      val stats = maybeDigest.map { digest =>
        Percentiles(
          digest.quantile(0).toInt,
          digest.quantile(0.25).toInt,
          digest.quantile(0.5).toInt,
          digest.quantile(0.75).toInt,
          digest.quantile(0.80).toInt,
          digest.quantile(0.85).toInt,
          digest.quantile(0.90).toInt,
          digest.quantile(0.95).toInt,
          digest.quantile(0.99).toInt,
          digest.quantile(1.0).toInt
        )
      }
      PercentilesVsTimePlot(time, stats)
    }
}
| GabrielPlassard/gatling | gatling-charts/src/main/scala/io/gatling/charts/stats/buffers/PercentilesBuffers.scala | Scala | apache-2.0 | 1,974 |
package chee
import java.time.Instant
import better.files.File
import org.scalatest._
import org.scalatest.matchers._
trait MoreMatcher {

  // lets ScalaTest's `exist` matcher work on better.files.File values
  implicit val _fileExistence = new enablers.Existence[File] {
    def exists(f: File) = f.exists
  }

  // matches when the checked instant is strictly before `right`
  class BeforeMatcher(right: Instant) extends Matcher[Instant] {
    def apply(left: Instant) = MatchResult(
      left.isBefore(right),
      s"'$left' is not before '$right'",
      s"'$left' is before '$right'"
    )
  }

  // matches when the checked instant is strictly after `right`
  class AfterMatcher(right: Instant) extends Matcher[Instant] {
    def apply(left: Instant) = MatchResult(
      left.isAfter(right),
      s"'$left' is not after '$right'",
      s"'$left' is after '$right'"
    )
  }

  /** Matcher factory: `instant should beAfter(other)`. */
  def beAfter(i: Instant) = new AfterMatcher(i)
  /** Matcher factory: `instant should beBefore(other)`. */
  def beBefore(i: Instant) = new BeforeMatcher(i)
}
// Ready-made instance so tests can `import MoreMatcher._` instead of mixing in the trait.
object MoreMatcher extends MoreMatcher
| eikek/chee | src/test/scala/chee/MoreMatcher.scala | Scala | gpl-3.0 | 830 |
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.krasserm.ases
import akka.testkit.TestKit
import org.scalatest.{BeforeAndAfterAll, Suite}
/** Mixin for TestKit suites: shuts the actor system down once all tests have run. */
trait StopSystemAfterAll extends BeforeAndAfterAll {
  this: TestKit with Suite =>
  override protected def afterAll(): Unit = {
    // terminate the suite's ActorSystem so threads don't leak across suites
    TestKit.shutdownActorSystem(system)
    super.afterAll()
  }
}
| krasserm/akka-stream-eventsourcing | src/test/scala/com/github/krasserm/ases/StopSystemAfterAll.scala | Scala | apache-2.0 | 924 |
package com.eevolution.context.dictionary.domain.model
import ai.x.play.json.Jsonx
import com.eevolution.context.dictionary.api.{ActiveEnabled, DomainModel, Identifiable, Traceable}
import org.joda.time.DateTime
/**
 * AD_ReplicationRoleAccess dictionary entity: associates a replication
 * strategy with a role, carrying the standard audit fields (tenant,
 * organization, created/updated timestamps, active flag).
 */
case class ReplicationRoleAccess(replicationRoleAccessId: Int,
                                 tenantId: Int,
                                 organizationId: Int,
                                 replicationStrategyId: Option[Int],
                                 roleId: Option[Int],
                                 created : DateTime = DateTime.now,
                                 createdBy : Int ,
                                 isActive : Boolean = true,
                                 isReadOnly: Option[Boolean],
                                 updated : DateTime = DateTime.now,
                                 updatedBy : Int,
                                 uuid: String
                                ) extends DomainModel
  with ActiveEnabled
  with Identifiable
  with Traceable {
  override type ActiveEnabled = this.type
  override type Identifiable = this.type
  override type Traceable = this.type

  // primary key and database identifiers of the entity
  override def Id: Int = replicationRoleAccessId
  override val entityName: String = "AD_ReplicationRoleAccess"
  override val identifier: String = "AD_ReplicationRoleAccess_ID"
}
object ReplicationRoleAccess {
  implicit lazy val jsonFormat = Jsonx.formatCaseClass[ReplicationRoleAccess]

  /**
   * Factory building a [[ReplicationRoleAccess]] from plain (non-Option) values.
   *
   * Fix: the previous implementation discarded `replicationStrategyId`,
   * `roleId` and `isReadOnly`, always storing `None`; the supplied values
   * are now wrapped in `Some` and preserved.
   */
  def create(replicationRoleAccessId: Int,
             tenantId: Int,
             organizationId: Int,
             replicationStrategyId: Int,
             roleId: Int,
             created : DateTime,
             createdBy : Int ,
             isActive : Boolean,
             isReadOnly: Boolean,
             updated : DateTime,
             updatedBy : Int,
             uuid: String) = ReplicationRoleAccess(replicationRoleAccessId, tenantId, organizationId,
    Some(replicationStrategyId), Some(roleId), created, createdBy, isActive, Some(isReadOnly),
    updated, updatedBy, uuid)
}
| adempiere/ADReactiveSystem | dictionary-api/src/main/scala/com/eevolution/context/dictionary/domain/model/ReplicationRoleAccess.scala | Scala | gpl-3.0 | 1,987 |
/*
* Copyright 2015 RONDHUIT Co.,LTD.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.nlp4l.lm
object HmmTokenizer {
  /** Factory shortcut: `HmmTokenizer(model)` instead of `new HmmTokenizer(model)`. */
  def apply(model: HmmModel) = new HmmTokenizer(model)
}
// Tokenizes a string against an HMM language model: candidate words found via
// the model's FST are assembled into a lattice, which is then traced back
// (via HmmTracer's backTrace) along the best path.
class HmmTokenizer(model: HmmModel) extends HmmTracer {

  // lattice nodes keyed by their end position in the input string;
  // nodes sharing an end position are chained through `nextSameEnd`
  var nodesTableByEnd = scala.collection.mutable.Map.empty[Int, Node]
  // when true, candidate words and their classes are printed while parsing
  val debug = false

  /** Splits `str` into tokens along the traced-back lattice path. */
  def tokens(str: String): Seq[Token] = {
    nodesTableByEnd.clear()
    val node = parseForward(str)
    // for debug
    //nodesTableByEnd.keys.foreach(k => println("%d = %s".format(k, nodesTableByEnd.getOrElse(k, "XXX"))))
    backTrace(str, node.backLink)
  }

  // seeds the lattice with a BOS node at position 0 and parses up to EOS
  def parseForward(str: String): Node = {
    val BOS = Node(CLASS_BOS, 0, -1, 0, 0)
    val EOS = Node(CLASS_EOS, 0, str.length, -1)
    addNodeToLattice(0, BOS)
    parseForward(str, 0, EOS)
  }

  // Advances one character position at a time. Positions not reachable from
  // any previous node are skipped; at reachable positions every word starting
  // there (per the FST) is added and linked to the nodes ending there.
  // NOTE(review): not tail-recursive in the skip branch — very long inputs
  // could deepen the stack; confirm expected input sizes.
  def parseForward(str: String, pos: Int, EOS: Node): Node = {
    val leftNode = nodesTableByEnd.getOrElse(pos, null)
    if(leftNode == null && pos < str.length){
      parseForward(str, pos + 1, EOS)
    }
    else{
      if(pos == str.length){
        processLeftLink(model, leftNode, EOS)
        EOS
      }
      else{
        val wrds = model.fst.leftMostSubstring(str, pos)
        wrds.foreach{ wrd =>
          val epos = wrd._1
          val classes = model.conditionalClassesCost(wrd._2.toInt)
          debugPrintWord(str, pos, epos, classes)
          classes.foreach{ cls =>
            val node = Node(cls._1, cls._2, pos, epos)
            addNodeToLattice(epos, node)
            processLeftLink(model, leftNode, node)
          }
        }
        parseForward(str, pos + 1, EOS)
      }
    }
  }

  // prints one candidate word with its class/cost candidates (debug only)
  private def debugPrintWord(str: String, spos: Int, epos: Int, classes: List[(Int, Int)]): Unit = {
    if(debug){
      println("%d %d".format(spos, epos))
      println("%s %s".format(str.substring(spos, epos), classes.map(e => (model.className(e._1), e._2))))
    }
  }

  // prepends `node` to the chain of nodes ending at `pos`
  def addNodeToLattice(pos: Int, node: Node): Unit = {
    val topNode = nodesTableByEnd.getOrElse(pos, null)
    if(topNode != null){
      nodesTableByEnd -= pos
      node.nextSameEnd = topNode
    }
    nodesTableByEnd += (pos -> node)
  }

  // materializes a token (surface substring + class name) from a lattice node
  def createToken(str: String, node: AbstractNode): Token = {
    Token(str.substring(node.asInstanceOf[Node].spos, node.asInstanceOf[Node].epos), model.className(node.cls))
  }

  object Node {
    def apply(cls: Int, cost: Int, spos: Int, epos: Int, tcost: Int = Int.MaxValue) = new Node(cls, cost, spos, epos, tcost)
  }

  // lattice node that adds the word span [spos, epos) to the class/cost data
  class Node(cls: Int, cost: Int, val spos: Int, val epos: Int, tcost: Int = Int.MaxValue) extends AbstractNode(cls, cost, tcost){
    override def toString(): String = {
      "cls=%d, cost=%d, spos=%d, epos=%d, tcost=%d".format(cls, cost, spos, epos, tcost)
    }
  }
}
| gazimahmud/nlp4l | src/main/scala/org/nlp4l/lm/HmmTokenizer.scala | Scala | apache-2.0 | 3,252 |
package com.twitter.finagle.client
import com.twitter.finagle.Filter
import com.twitter.finagle.Service
import com.twitter.finagle.SimpleFilter
import com.twitter.util.tunable.Tunable
import com.twitter.util.Duration
import com.twitter.util.Future
import scala.collection.mutable
/**
 * Configures total and per-request timeouts for a method built via [[MethodBuilder]].
 *
 * Each setter returns a new [[MethodBuilder]] with an updated timeout config;
 * nothing here mutates state.
 *
 * @see [[BaseMethodBuilder]]
 */
private[finagle] class MethodBuilderTimeout[Req, Rep] private[client] (
  mb: MethodBuilder[Req, Rep]) {

  /**
   * Sets a static total timeout (includes retries, connection setup, etc).
   *
   * @see [[BaseMethodBuilder.withTimeoutTotal(Duration)]]
   */
  def total(howLong: Duration): MethodBuilder[Req, Rep] = {
    // Only the static duration is replaced; a previously configured Tunable is kept.
    val timeouts = mb.config.timeout.copy(total = mb.config.timeout.total.copy(duration = howLong))
    mb.withConfig(mb.config.copy(timeout = timeouts))
  }

  /**
   * Sets a [[Tunable]] total timeout that can be changed at runtime.
   *
   * @see [[BaseMethodBuilder.withTimeoutTotal(Tunable[Duration])]]
   */
  def total(howLong: Tunable[Duration]): MethodBuilder[Req, Rep] = {
    val timeouts = mb.config.timeout.copy(total = mb.config.timeout.total.copy(tunable = howLong))
    mb.withConfig(mb.config.copy(timeout = timeouts))
  }

  /**
   * Sets a static timeout for a '''single''' request attempt.
   *
   * @see [[BaseMethodBuilder.withTimeoutPerRequest(Duration)]]
   */
  def perRequest(howLong: Duration): MethodBuilder[Req, Rep] = {
    val timeouts =
      mb.config.timeout.copy(perRequest = mb.config.timeout.perRequest.copy(duration = howLong))
    mb.withConfig(mb.config.copy(timeout = timeouts))
  }

  /**
   * Sets a [[Tunable]] per-request timeout that can be changed at runtime.
   *
   * @see [[BaseMethodBuilder.withTimeoutPerRequest(Tunable[Duration])]]
   */
  def perRequest(howLong: Tunable[Duration]): MethodBuilder[Req, Rep] = {
    val timeouts =
      mb.config.timeout.copy(perRequest = mb.config.timeout.perRequest.copy(tunable = howLong))
    mb.withConfig(mb.config.copy(timeout = timeouts))
  }

  /**
   * Builds the filter that enforces the total timeout.
   *
   * Returns the identity filter when no total timeout is configured anywhere
   * (no finite duration, no tunable, and no stack-level default); otherwise
   * installs the configured duration via [[DynamicTimeout]] for each request.
   */
  private[client] def totalFilter: Filter.TypeAgnostic = {
    val config = mb.config.timeout
    if (!config.total.isFinite &&
      !config.total.isTunable &&
      !config.stackTotalTimeoutDefined) {
      Filter.TypeAgnostic.Identity
    } else {
      new Filter.TypeAgnostic {
        def toFilter[Req1, Rep1]: Filter[Req1, Rep1, Req1, Rep1] = {
          // Sets the total timeout into the request-local context, then lets
          // the DynamicTimeout filter actually enforce it.
          val dyn = new SimpleFilter[Req1, Rep1] {
            def apply(req: Req1, service: Service[Req1, Rep1]): Future[Rep1] = {
              DynamicTimeout.letTotalTimeout(config.total.toDuration) {
                service(req)
              }
            }
          }
          dyn.andThen(DynamicTimeout.totalFilter(mb.params).toFilter)
        }
      }
    }
  }

  /**
   * A filter that sets the proper state for per-request timeouts to do the
   * right thing down in the Finagle client stack.
   */
  private[client] def perRequestFilter: Filter.TypeAgnostic = {
    val config = mb.config.timeout
    new Filter.TypeAgnostic {
      def toFilter[Req1, Rep1]: Filter[Req1, Rep1, Req1, Rep1] = {
        new SimpleFilter[Req1, Rep1] {
          def apply(req: Req1, service: Service[Req1, Rep1]): Future[Rep1] = {
            // The per-request timeout is consumed lower in the stack via
            // DynamicTimeout's request-local state.
            DynamicTimeout.letPerRequestTimeout(config.perRequest.toDuration) {
              service(req)
            }
          }
        }
      }
    }
  }

  /**
   * Registry entries describing the configured timeouts, for diagnostics.
   * Only finite (statically configured) timeouts are reported.
   */
  private[client] def registryEntries: Iterable[(Seq[String], String)] = {
    val entries = new mutable.ListBuffer[(Seq[String], String)]()

    val perReq = mb.config.timeout.perRequest
    if (perReq.isFinite)
      entries += ((Seq("timeout", "per_request"), perReq.toString))

    val total = mb.config.timeout.total
    if (total.isFinite)
      entries += ((Seq("timeout", "total"), total.toString))

    entries
  }
}
private[client] object MethodBuilderTimeout {

  /**
   * Timeout configuration for a [[MethodBuilder]].
   *
   * @param stackTotalTimeoutDefined indicates the stack originally had a total
   *                                 timeout module. if `total` does not get
   *                                 overridden by the module, it must still be added
   *                                 back.
   * @param total this includes retries, connection setup, etc
   *
   * @param perRequest how long a '''single''' request is given to complete.
   */
  case class Config(
    stackTotalTimeoutDefined: Boolean,
    total: TunableDuration = TunableDuration("total"),
    perRequest: TunableDuration = TunableDuration("perRequest"))

  /**
   * A timeout that may be given either as a static [[Duration]] or a runtime
   * [[Tunable]]. When both are set, the tunable's current value wins.
   *
   * @param id       label for this timeout (e.g. "total", "perRequest").
   * @param duration static fallback; `Duration.Undefined` means "not configured".
   * @param tunable  runtime-adjustable value; `Tunable.none` means "not configured".
   */
  case class TunableDuration(
    id: String,
    duration: Duration = Duration.Undefined,
    tunable: Tunable[Duration] = Tunable.none) {
    // Note: reflects only the static duration, not the tunable's current value.
    def isFinite: Boolean = duration.isFinite
    def isTunable: Boolean = tunable != Tunable.none
    // The tunable takes precedence over the static duration when it has a value.
    def toDuration: Duration = tunable() match {
      case Some(d) => d
      case None => duration
    }
  }
}
| twitter/finagle | finagle-core/src/main/scala/com/twitter/finagle/client/MethodBuilderTimeout.scala | Scala | apache-2.0 | 4,452 |
package streamz.akka
import akka.actor._
import akka.stream.actor._
import akka.stream.actor.ActorSubscriberMessage._
import akka.stream.scaladsl2._
import org.reactivestreams.Publisher
import scalaz.concurrent.Task
import scalaz.stream.Process.Halt
import scalaz.stream.Sink
import scalaz.stream._
/**
 * Bridges between scalaz-stream [[Process]]es and akka-stream flows, in both
 * directions, using adapter actors for back-pressure mediation.
 */
package object stream { outer =>

  /** Factory producing a [[RequestStrategy]] given the adapter's in-flight count. */
  type RequestStrategyFactory = InFlight => RequestStrategy

  /** Exposes how many elements an adapter currently has in flight. */
  trait InFlight {
    def inFlight: Int
  }

  /** Strategy factory keeping at most `max` elements in flight. */
  def maxInFlightStrategyFactory(max: Int): RequestStrategyFactory = inFlight => new MaxInFlightRequestStrategy(max) {
    override def inFlightInternally = inFlight.inFlight
  }

  /**
   * Creates a process that subscribes to the specified `flow`.
   *
   * @param flow            the flow to consume from.
   * @param strategyFactory back-pressure strategy for the subscriber actor.
   * @param name            optional explicit name for the adapter actor.
   */
  def subscribe[I, O](flow: FlowWithSource[I, O],
                      strategyFactory: RequestStrategyFactory = maxInFlightStrategyFactory(10),
                      name: Option[String] = None)
                     (implicit actorRefFactory: ActorRefFactory, flowMaterializer: FlowMaterializer): Process[Task, O] =
    io.resource
      // acquire: spawn the subscriber adapter and materialize the flow into it
      { Task.delay[ActorRef] {
          val adapterActor = name.fold
            { actorRefFactory.actorOf(AdapterSubscriber.props(strategyFactory)) }
            { actorRefFactory.actorOf(AdapterSubscriber.props(strategyFactory), _) }
          flow.withSink(SubscriberSink(ActorSubscriber(adapterActor))).run()
          adapterActor
        }}
      // release: stop the adapter actor
      { adapterActor => Task.delay(adapterActor ! PoisonPill) }
      // step: pull one element from the adapter
      { adapterActor => Task.async(callback => adapterActor ! AdapterSubscriber.Request[O](callback)) }

  /**
   * Creates a process that publishes to the managed flow which is passed as argument to `f`.
   *
   * @param m callback receiving the materialized flow once it is running.
   */
  def publish[I, O](process: Process[Task, I],
                    strategyFactory: RequestStrategyFactory = maxInFlightStrategyFactory(10),
                    name: Option[String] = None)
                   (f: FlowWithSource[I, I] => RunnableFlow[I, O])
                   (m: MaterializedFlow => Unit = _ => ())
                   (implicit actorRefFactory: ActorRefFactory, materializer: FlowMaterializer): Process[Task, Unit] =
    process.to(adapterSink(AdapterPublisher.props[I](strategyFactory), name, f, m))

  /**
   * Creates a process and a publisher from which un-managed downstream flows
   * can be constructed. Termination of the process (normal, kill, or error) is
   * propagated to the publisher as completion/error signals.
   */
  def publisher[O](process: Process[Task, O],
                   strategyFactory: RequestStrategyFactory = maxInFlightStrategyFactory(10),
                   name: Option[String] = None)
                  (implicit actorRefFactory: ActorRefFactory): (Process[Task, Unit], Publisher[O]) = {
    val adapterProps = AdapterPublisher.props[O](strategyFactory)
    val adapter = name.fold(actorRefFactory.actorOf(adapterProps))(actorRefFactory.actorOf(adapterProps, _))
    (process.onHalt {
      case cause@Cause.End =>
        adapter ! OnComplete
        Halt(cause)
      case cause@Cause.Kill =>
        adapter ! OnError(new Exception("processed killed")) // Test missing
        Halt(cause)
      case cause@Cause.Error(ex) =>
        adapter ! OnError(ex)
        Halt(cause)
    }.to(adapterSink[O](adapter)), ActorPublisher[O](adapter))
  }

  implicit class ProcessSyntax[I](self: Process[Task,I]) {
    def publish[O](strategyFactory: RequestStrategyFactory = maxInFlightStrategyFactory(10), name: Option[String] = None)
                  (f: FlowWithSource[I, I] => RunnableFlow[I,O])
                  (m: MaterializedFlow => Unit = _ => ())
                  (implicit actorRefFactory: ActorRefFactory, materializer: FlowMaterializer): Process[Task, Unit] =
      // Fixed: the `name` argument was previously dropped here, so explicitly
      // named adapter actors were silently created with generated names.
      outer.publish(self, strategyFactory, name)(f)(m)

    def publisher(strategyFactory: RequestStrategyFactory = maxInFlightStrategyFactory(10), name: Option[String] = None)
                 (implicit actorRefFactory: ActorRefFactory): (Process[Task, Unit], Publisher[I]) =
      outer.publisher(self, strategyFactory, name)(actorRefFactory)
  }

  implicit class FlowSyntax[I, O](self: FlowWithSource[I, O]) {
    def toProcess(strategyFactory: RequestStrategyFactory = maxInFlightStrategyFactory(10),
                  name: Option[String] = None)
                 (implicit actorRefFactory: ActorRefFactory, flowMaterializer: FlowMaterializer): Process[Task, O] =
      outer.subscribe(self, strategyFactory, name)
  }

  // Spawns the publisher adapter, materializes the user-supplied flow from it,
  // hands the materialized flow to `m`, then sinks the process into the adapter.
  private def adapterSink[I, O](adapterProps: Props,
                                name: Option[String] = None,
                                f: FlowWithSource[I, I] => RunnableFlow[I, O],
                                m: MaterializedFlow => Unit)
                               (implicit actorRefFactory: ActorRefFactory, materializer: FlowMaterializer): Sink[Task, I] =
    adapterSink {
      val adapter = name.fold(actorRefFactory.actorOf(adapterProps))(actorRefFactory.actorOf(adapterProps, _))
      val publisher = ActorPublisher[I](adapter)
      val materializedFlow = f(FlowFrom(publisher)).run()
      m(materializedFlow)
      adapter
    }

  // Sink that pushes each element to the adapter actor and waits for its ack.
  // The actor is created lazily on first use and is not stopped by the sink
  // itself (termination is signalled via OnComplete/OnError by callers).
  private def adapterSink[I](adapter: => ActorRef): Sink[Task,I] = {
    io.resource[ActorRef, I => Task[Unit]]
      { Task.delay[ActorRef](adapter) }
      { adapterActor => Task.delay(()) }
      { adapterActor => Task.delay(i => Task.async[Unit](callback => adapterActor ! OnNext(i, callback))) }
  }
}
| Astrac/streamz | streamz-akka-stream/src/main/scala/streamz/akka/stream/package.scala | Scala | apache-2.0 | 5,209 |
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.webmvc.freemarker
import freemarker.cache.{MultiTemplateLoader, TemplateLoader}
import freemarker.template.ObjectWrapper
import org.beangle.commons.lang.Strings.{split, substringAfter}
import org.beangle.web.servlet.context.ServletContextHolder
import org.beangle.template.freemarker.Configurer
/**
 * Freemarker [[Configurer]] that resolves its settings from the servlet
 * context, supporting both classpath- and webapp-relative template paths.
 *
 * @author chaostone
 */
class ContextFreemarkerConfigurer extends Configurer {

  /** Builds the object wrapper used to expose model values to templates.
   *  Caching is disabled on the wrapper. */
  override def createObjectWrapper(props: Map[String, String]): ObjectWrapper = {
    val wrapper = new ContextObjectWrapper()
    wrapper.setUseCache(false)
    wrapper
  }

  /**
   * Determines the template path, trying in order: the servlet context's
   * "templatePath" init parameter, the "template_path" property, and finally
   * the "class://" default.
   */
  protected override def detectTemplatePath(props: Map[String, String]): String = {
    if (null == templatePath) {
      templatePath = ServletContextHolder.context.getInitParameter("templatePath")
    }
    if (null == templatePath) {
      templatePath = props.getOrElse("template_path", "class://")
    }
    // Guards against a "template_path" entry explicitly mapped to null.
    if (null == templatePath) templatePath = "class://"
    templatePath
  }

  /**
   * Builds a loader per comma-separated path entry. "webapp://"-prefixed
   * entries load from the servlet context; everything else is delegated to
   * the superclass (e.g. "class://" and filesystem paths).
   */
  override def createTemplateLoader(props: Map[String, String]): TemplateLoader = {
    val paths = split(detectTemplatePath(props), ",")
    val loaders = new collection.mutable.ListBuffer[TemplateLoader]
    for (path <- paths) {
      if (path.startsWith("webapp://")) {
        loaders += new WebappTemplateLoader(ServletContextHolder.context, substringAfter(path, "webapp://"))
      } else {
        loaders += super.buildLoader(path)
      }
    }
    new MultiTemplateLoader(loaders.toArray[TemplateLoader])
  }
}
| beangle/webmvc | freemarker/src/main/scala/org/beangle/webmvc/freemarker/ContextFreemarkerConfigurer.scala | Scala | lgpl-3.0 | 2,207 |
package im.actor.server.session
import akka.actor.{ Stash, ActorRef, ActorLogging, Props }
import akka.stream.actor._
import im.actor.api.rpc.UpdateBox
import im.actor.server.sequence._
import scala.annotation.tailrec
import scala.collection.immutable
private[session] object UpdatesHandler {
  /** Sent once the session is authenticated; switches the handler into its authorized state. */
  final case class Authorize(userId: Int)

  def props(authId: Long): Props =
    Props(classOf[UpdatesHandler], authId)
}
/**
 * Session-side actor bridging a subscription-command stream (as a reactive
 * subscriber) to an update-box stream (as a reactive publisher). Subscription
 * commands are forwarded to an [[UpdatesConsumer]] child; updates produced by
 * that consumer are buffered and published downstream on demand.
 */
private[session] class UpdatesHandler(authId: Long)
  extends ActorSubscriber with ActorPublisher[(UpdateBox, Option[String])] with ActorLogging with Stash {

  import ActorPublisherMessage._
  import ActorSubscriberMessage._

  // Before authorization only the Authorize message is acted upon; everything
  // else is stashed.
  // NOTE(review): stashed messages are never unstashed after the become() —
  // confirm whether an unstashAll() is intended here.
  def receive = {
    case UpdatesHandler.Authorize(userId) ⇒
      val updatesConsumer = context.actorOf(UpdatesConsumer.props(userId, authId, self), "updates-consumer")
      context become authorized(updatesConsumer)
    case msg ⇒ stash()
  }

  def authorized(consumer: ActorRef): Receive = subscriber(consumer) orElse publisher orElse {
    case unmatched ⇒ log.error("Unmatched msg: {}", unmatched)
  }

  // Subscriber-related

  // Translates incoming SubscribeCommands into UpdatesConsumer protocol messages.
  def subscriber(consumer: ActorRef): Receive = {
    case OnNext(cmd: SubscribeCommand) ⇒
      cmd match {
        case SubscribeToOnline(userIds) ⇒
          consumer ! UpdatesConsumerMessage.SubscribeToUserPresences(userIds.toSet)
        case SubscribeFromOnline(userIds) ⇒
          consumer ! UpdatesConsumerMessage.UnsubscribeFromUserPresences(userIds.toSet)
        case SubscribeToGroupOnline(groupIds) ⇒
          consumer ! UpdatesConsumerMessage.SubscribeToGroupPresences(groupIds.toSet)
        case SubscribeFromGroupOnline(groupIds) ⇒
          consumer ! UpdatesConsumerMessage.UnsubscribeFromGroupPresences(groupIds.toSet)
        case SubscribeToSeq(_) ⇒
          consumer ! UpdatesConsumerMessage.SubscribeToSeq
        case SubscribeToWeak(Some(group)) ⇒
          consumer ! UpdatesConsumerMessage.SubscribeToWeak(Some(group))
        case SubscribeToWeak(None) ⇒
          log.error("Subscribe to weak is done implicitly on UpdatesConsumer start")
      }
    case OnComplete ⇒
      context.stop(self)
    case OnError(cause) ⇒
      log.error(cause, "Error in upstream")
  }

  override val requestStrategy = WatermarkRequestStrategy(10) // TODO: configurable

  // Publisher-related

  // Buffer of updates not yet requested by the downstream subscriber.
  private[this] var messageQueue = immutable.Queue.empty[(UpdateBox, Option[String])]

  def publisher: Receive = {
    case NewUpdate(ub, reduceKey) ⇒ enqueueProtoMessage(ub, reduceKey)
    case Request(_) ⇒ deliverBuf()
    case Cancel ⇒ context.stop(self)
  }

  // Fast path: emit directly when nothing is queued and demand exists;
  // otherwise enqueue and drain as much as demand allows.
  private def enqueueProtoMessage(message: UpdateBox, reduceKey: Option[String]): Unit = {
    val el = message → reduceKey
    if (messageQueue.isEmpty && totalDemand > 0) {
      onNext(el)
    } else {
      messageQueue = messageQueue.enqueue(el)
      deliverBuf()
    }
  }

  // Drains the queue while the downstream has outstanding demand.
  @tailrec final def deliverBuf(): Unit = {
    if (isActive && totalDemand > 0)
      messageQueue.dequeueOption match {
        case Some((el, q)) ⇒
          messageQueue = q
          onNext(el)
          deliverBuf()
        case None ⇒
      }
  }
}
| EaglesoftZJ/actor-platform | actor-server/actor-session/src/main/scala/im/actor/server/session/UpdatesHandler.scala | Scala | agpl-3.0 | 3,166 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming.continuous
import java.util.concurrent.TimeUnit
import scala.concurrent.duration.Duration
import org.apache.spark.annotation.Evolving
import org.apache.spark.sql.streaming.Trigger
import org.apache.spark.unsafe.types.CalendarInterval
/**
 * A [[Trigger]] that continuously processes streaming data, asynchronously checkpointing at
 * the specified interval.
 *
 * @param intervalMs checkpoint interval in milliseconds; must be non-negative.
 */
@Evolving
case class ContinuousTrigger(intervalMs: Long) extends Trigger {
  require(intervalMs >= 0, "the interval of trigger should not be negative")
}
private[sql] object ContinuousTrigger {
  /**
   * Parses an interval string (e.g. "10 seconds"). Month/year units are
   * rejected because they have no fixed millisecond length.
   */
  def apply(interval: String): ContinuousTrigger = {
    val cal = CalendarInterval.fromCaseInsensitiveString(interval)
    if (cal.months > 0) {
      throw new IllegalArgumentException(s"Doesn't support month or year interval: $interval")
    }
    new ContinuousTrigger(TimeUnit.MICROSECONDS.toMillis(cal.microseconds))
  }

  def apply(interval: Duration): ContinuousTrigger = {
    ContinuousTrigger(interval.toMillis)
  }

  // Java-friendly factory; same as apply(String).
  def create(interval: String): ContinuousTrigger = {
    apply(interval)
  }

  // Java-friendly factory taking an explicit time unit.
  def create(interval: Long, unit: TimeUnit): ContinuousTrigger = {
    ContinuousTrigger(unit.toMillis(interval))
  }
}
| aosagie/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/continuous/ContinuousTrigger.scala | Scala | apache-2.0 | 2,041 |
package net.resonious.sburb.abstracts
import net.minecraft.client.renderer.tileentity.TileEntitySpecialRenderer
import net.minecraft.tileentity.TileEntity
import org.lwjgl.opengl.GL11
import net.minecraft.client.model.ModelBase
import net.minecraft.client.renderer.Tessellator
import net.minecraft.world.World
import net.minecraft.client.renderer.OpenGlHelper
import net.minecraft.block.Block
import net.minecraft.util.ResourceLocation
import net.minecraft.client.Minecraft
/**
 * Base renderer for [[ActiveTileEntity]] blocks: translates to the tile
 * entity's position, applies lighting, and renders the subclass-provided
 * model with its texture.
 */
abstract class ActiveTileEntityRenderer(
  val tileEntityType: Class[_ <: ActiveTileEntity],
  val block: Block)
    extends TileEntitySpecialRenderer {

  /** Model rendered for this tile entity; supplied by the subclass. */
  def model: ModelBase
  /** Texture bound before rendering the model; supplied by the subclass. */
  def texture: ResourceLocation

  // Override this to have the offset or whatever (before it is rotated..hopefully not confusing)
  def makeAdjustments() = {
    GL11.glTranslatef(0.5F, 1.5F, 0.5F)
  }

  // Translates the GL matrix to the tile entity's render position, then
  // delegates to renderBlock with the entity's world coordinates.
  override def renderTileEntityAt(tileEntity:TileEntity, x:Double,y:Double,z:Double, whatisthis:Float) = {
    GL11.glPushMatrix()
    // Narrowing helper: Double -> Float for the GL call.
    def f(d:Double) = d.asInstanceOf[Float]
    GL11.glTranslatef(f(x), f(y), f(z))
    val t = tileEntity.asInstanceOf[ActiveTileEntity]
    renderBlock(t, t.getWorldObj, t.xCoord,t.yCoord,t.zCoord, block)
    GL11.glPopMatrix()
  }

  def renderBlock(tl: ActiveTileEntity,
                  world: World,
                  x: Int,
                  y: Int,
                  z: Int,
                  block: Block) {
    val tessellator = Tessellator.instance
    // Combine block light with sky/block lightmap coordinates for this position.
    val f = world.getBlockLightValue(x, y, z)
    val l = world.getLightBrightnessForSkyBlocks(x, y, z, 0)
    val l1 = l % 65536
    val l2 = l / 65536
    tessellator.setColorOpaque_F(f, f, f)
    OpenGlHelper.setLightmapTextureCoords(OpenGlHelper.lightmapTexUnit, l1.toFloat, l2.toFloat)
    // NOTE(review): metadata is read but unused below — presumably intended
    // for orientation-dependent rotation; confirm.
    val dir = world.getBlockMetadata(x, y, z)
    GL11.glPushMatrix()
    makeAdjustments()
    // Models are authored upside-down relative to world space; flip them.
    GL11.glRotatef(180F, 0F, 0F, 1F)
    Minecraft.getMinecraft.getTextureManager.bindTexture(texture)
    model.render(null, 0.0F, 0.0F, -0.1F, 0.0F, 0.0F, 0.0625F)
    GL11.glPopMatrix()
  }
} | Resonious/mcsburb | src/main/scala/net/resonious/sburb/abstracts/ActiveTileEntityRenderer.scala | Scala | mit | 1,990 |
/*
* Copyright (C) 2010 Peter Lewerin <peter.lewerin@tele2.se>
*
* The contents of this file are subject to the GNU General Public License
* Version 3 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.gnu.org/copyleft/gpl.html
*
* Software distributed under the License is distributed on an "AS
* IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
* implied. See the License for the specific language governing
* rights and limitations under the License.
*
*/
package net.kogics.kojo
package staging
import edu.umd.cs.piccolo._
import edu.umd.cs.piccolo.nodes._
import edu.umd.cs.piccolo.util._
import edu.umd.cs.piccolo.event._
//import net.kogics.kojo.util.Utils
import javax.swing._
import core._
/** Tracks and configures the logical drawing-surface size for staging mode. */
object Screen {
  // Current logical screen bounds; mutated in place by size().
  val rect = Bounds(0, 0, 0, 0)

  /**
   * Sets the logical screen size by zooming the canvas, and records the new
   * bounds. Returns the (width, height) actually recorded.
   */
  def size(width: Int, height: Int) = {
    // TODO 560 is a value that works on my system, should be less ad-hoc
    val factor = 560
    // A negative width flips the x zoom; both factors divide by height.
    val xfactor = factor / (if (width < 0) -(height.abs) else height.abs) // sic!
    val yfactor = factor / height
    Impl.canvas.zoomXY(xfactor, yfactor, width / 2, height / 2)
    rect.setRect(0, 0, width.abs, height.abs)
    (rect.getWidth.toInt, rect.getHeight.toInt)
  }
}
| dotta/kojo | KojoEnv/src/net/kogics/kojo/staging/screen.scala | Scala | gpl-3.0 | 1,278 |
package uk.gov.gds.location.importer.model
import org.specs2.mutable.Specification
/** Verifies that GSS code prefixes map to the expected UK countries. */
class CountriesTest extends Specification {

  "Countries object" should {
    // GSS codes are prefixed with a letter identifying the country.
    "resolve gsscodes starting with E as England" in {
      Countries.countryForGssCode("E123456789") must beEqualTo("England")
    }

    "resolve gsscodes starting with W as Wales" in {
      Countries.countryForGssCode("W123456789") must beEqualTo("Wales")
    }

    "resolve gsscodes starting with S as Scotland" in {
      Countries.countryForGssCode("S123456789") must beEqualTo("Scotland")
    }
  }
}
| alphagov/location-data-importer | src/test/scala/uk/gov/gds/location/importer/model/CountriesTest.scala | Scala | mit | 571 |
package com.faacets.qalg
import org.scalatest.FunSuite
import spire.math.Rational
import spire.syntax.innerProductSpace._
import algebra._
import math._
import syntax.all._
/** Examples and tests taken from Lipschutz-Lipson 2004.
  *
  * Exercised against any matrix type `M` and vector type `V` for which the
  * required algebra instances are provided by concrete subclasses.
  *
  * Full reference:
  *
  * Lipschutz-Lipson, Schaum's Outline of Theory and Problems of Linear Algebra, 3rd Edition, McGraw-Hill, 2004
  */
trait SchaumSuite[M, V] extends FunSuite {
  // Algebra instances supplied by concrete suites for the types under test.
  implicit def M: MatField[M, Rational]
  implicit def V: VecField[V, Rational]
  implicit def MVP: MatVecProduct[M, V]

  test("Example 1.2") {
    // Vector addition, scalar multiplication, negation and linear combination.
    val u = V.build(2, 4, -5)
    val v = V.build(1, -6, 9)
    assert(u + v === V.build(3, -2, 4))
    assert(u :* 7 === V.build(14, 28, -35))
    assert(-v === V.build(-1, 6, -9))
    assert((u :* 3) - (v :* 5) === V.build(1, 42, -60))
  }

  test("Example 1.3a") {
    // Dot products, including an orthogonal pair (u . w == 0).
    val u = V.build(1, -2, 3)
    val v = V.build(4, 5, -1)
    val w = V.build(2, 7, 4)
    assert(u.dot(v) === -9)
    assert(u.dot(w) === 0)
    assert(v.dot(w) === 39)
  }

  test("Example 1.3b") {
    val u = V.build(2, 3, -4)
    val v = V.build(3, -1, -2)
    assert(u.dot(v) === 11)
  }

  test("Example 2.2") {
    // Matrix addition, scalar multiplication and linear combination.
    val a = M.rowMajor(2, 3)(
      1, -2, 3,
      0, 4, 5)
    val b = M.rowMajor(2, 3)(
      4, 6, 8,
      1, -3, -7)
    assert(a + b === M.rowMajor(2, 3)(
      5, 4, 11,
      1, 1, -2))
    assert(a:* 3 === M.rowMajor(2, 3)(
      3, -6, 9,
      0, 12, 15))
    assert((a:* 2) - (b:* 3) === M.rowMajor(2, 3)(
      -10, -22, -18,
      -3, 17, 31))
  }

  test("Example 2.4") {
    // Row vector times column vector yields a 1x1 matrix.
    assert(M.rowMajor(1,3)(7, -4, 5)*M.rowMajor(3,1)(3,2,-1) === M.rowMajor(1,1)(8))
    assert(M.rowMajor(1,4)(6,-1,8,3)*M.rowMajor(4,1)(4,-9,-2,5) === M.rowMajor(1,1)(32))
  }

  test("Example 2.5") {
    assert(M.rowMajor(2, 2)(1, 3, 2, -1) * M.rowMajor(2, 3)(2, 0, -4, 5, -2, 6) === M.rowMajor(2, 3)(17, -6, 14, -1, 2, -14))
  }

  test("Exercice 2.8a") {
    // Matrix-vector product with the vector acting on the right (::*).
    // (Removed dead locals that recomputed and discarded this same product.)
    assert((M.rowMajor(2, 2)(1, 6, -3, 5) ::* V.build(2, -7)) === V.build(-40, -41))
  }
}
| denisrosset/qalg | tests/src/test/scala/qalg/SchaumSuite.scala | Scala | mit | 2,078 |
package com.twitter.finagle.netty4.param
import com.twitter.finagle.netty4.UnpooledAllocator
import com.twitter.finagle.Stack
import io.netty.buffer.ByteBufAllocator
/** Stack param carrying the Netty [[ByteBufAllocator]] used by the transport. */
private[netty4] case class Allocator(allocator: ByteBufAllocator)

private[netty4] object Allocator {
  // TODO investigate pooled allocator CSL-2089
  // While we already pool receive buffers, this ticket is about end-to-end pooling
  // (everything in the pipeline should be pooled).
  implicit val allocatorParam: Stack.Param[Allocator] =
    Stack.Param(Allocator(UnpooledAllocator))
}
| spockz/finagle | finagle-netty4/src/main/scala/com/twitter/finagle/netty4/param/Allocator.scala | Scala | apache-2.0 | 558 |
package momijikawa.lacquer.KanColleMessage
import spray.http.{ HttpRequest, Uri, HttpResponse }
import scalaz.{ \\/-, -\\/, \\/ }
/**
 * Converts an intercepted KanColle HTTP exchange into a [[KanColleMessage]],
 * when the request URI matches this converter's pattern.
 */
trait KanColleMessageConverter {
  // Regex matched against the request URI path to decide whether this
  // converter handles the message.
  val uriRegex: String

  // Strips the "svdata=" prefix (7 characters) from the response body and
  // returns the remaining JSON text.
  def getJsonString(response: HttpResponse): String = response.entity.data.asString.substring(7)

  def uriContainsString(uri: Uri, regex: String) = regex.r.findFirstIn(uri.path.toString()).isDefined

  /**
   * Returns a left value (a function building the message from the response)
   * when this converter handles the request, or the untouched request on the
   * right so other converters can try it.
   */
  def apply(message: HttpRequest): \/[HttpResponse ⇒ KanColleMessage, HttpRequest] = {
    uriContainsString(message.uri, uriRegex) match {
      case true  ⇒ -\/(response2Message)
      case false ⇒ \/-(message)
    }
  }

  /** Builds the concrete message from the captured response. */
  def response2Message(response: HttpResponse): KanColleMessage
}
| windymelt/lacquer | src/main/scala/momijikawa/lacquer/KanColleMessage/KanColleMessageConverter.scala | Scala | bsd-3-clause | 750 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.util
import java.util.concurrent.TimeUnit
import com.typesafe.scalalogging.LazyLogging
import org.apache.accumulo.core.client.BatchWriterConfig
import org.locationtech.geomesa.accumulo.AccumuloProperties
/**
 * Builds an Accumulo [[BatchWriterConfig]], preferring explicit arguments and
 * falling back to GeoMesa system properties; settings left unset in both keep
 * Accumulo's defaults.
 */
object GeoMesaBatchWriterConfig extends LazyLogging {

  /**
   * @param threads max writer threads
   * @param memory  max buffered memory, in bytes
   * @param latency max buffering latency, in milliseconds
   * @param timeout write timeout, in milliseconds
   */
  def apply(threads: Option[Int] = None,
            memory: Option[Long] = None,
            latency: Option[Long] = None,
            timeout: Option[Long] = None): BatchWriterConfig = {
    import AccumuloProperties.BatchWriterProperties

    val bwc = new BatchWriterConfig

    threads.orElse(BatchWriterProperties.WRITER_THREADS.option.map(_.toInt)).foreach { threads =>
      logger.trace(s"GeoMesaBatchWriter config: maxWriteThreads set to $threads")
      bwc.setMaxWriteThreads(threads)
    }
    memory.orElse(BatchWriterProperties.WRITER_MEMORY_BYTES.toBytes).foreach { memory =>
      logger.trace(s"GeoMesaBatchWriter config: maxMemory set to $memory bytes")
      bwc.setMaxMemory(memory)
    }
    latency.orElse(BatchWriterProperties.WRITER_LATENCY.toDuration.map(_.toMillis)).foreach { latency =>
      logger.trace(s"GeoMesaBatchWriter config: maxLatency set to $latency millis")
      bwc.setMaxLatency(latency, TimeUnit.MILLISECONDS)
    }
    timeout.orElse(BatchWriterProperties.WRITE_TIMEOUT.toDuration.map(_.toMillis)).foreach { timeout =>
      logger.trace(s"GeoMesaBatchWriter config: maxTimeout set to $timeout millis")
      bwc.setTimeout(timeout, TimeUnit.MILLISECONDS)
    }
    bwc
  }
}
| jahhulbert-ccri/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/main/scala/org/locationtech/geomesa/accumulo/util/GeoMesaBatchWriterConfig.scala | Scala | apache-2.0 | 1,995 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.manager.utils
import java.util.Properties
/**
* Copied from kafka 0.8.1.1
* https://git-wip-us.apache.org/repos/asf?p=kafka.git;a=blob;f=core/src/main/scala/kafka/log/LogConfig.scala
*/
/**
 * Topic-level log configuration (mirrors kafka.log.LogConfig from Kafka 0.8.1.1).
 *
 * All sizes are in bytes and all times in milliseconds unless noted; defaults
 * match Kafka's broker defaults.
 */
case class LogConfig(segmentSize: Int = 1024*1024,
                     segmentMs: Long = Long.MaxValue,
                     flushInterval: Long = Long.MaxValue,
                     flushMs: Long = Long.MaxValue,
                     retentionSize: Long = Long.MaxValue,
                     retentionMs: Long = Long.MaxValue,
                     maxMessageSize: Int = Int.MaxValue,
                     maxIndexSize: Int = 1024*1024,
                     indexInterval: Int = 4096,
                     fileDeleteDelayMs: Long = 60*1000,
                     deleteRetentionMs: Long = 24 * 60 * 60 * 1000L,
                     minCleanableRatio: Double = 0.5,
                     compact: Boolean = false) {

  /** Serializes this config into Kafka property form (see [[LogConfig.ConfigNames]]). */
  def toProps: Properties = {
    val props = new Properties()
    import LogConfig._
    props.put(SegmentBytesProp, segmentSize.toString)
    props.put(SegmentMsProp, segmentMs.toString)
    props.put(SegmentIndexBytesProp, maxIndexSize.toString)
    props.put(FlushMessagesProp, flushInterval.toString)
    props.put(FlushMsProp, flushMs.toString)
    props.put(RetentionBytesProp, retentionSize.toString)
    props.put(RententionMsProp, retentionMs.toString)
    props.put(MaxMessageBytesProp, maxMessageSize.toString)
    props.put(IndexIntervalBytesProp, indexInterval.toString)
    props.put(DeleteRetentionMsProp, deleteRetentionMs.toString)
    props.put(FileDeleteDelayMsProp, fileDeleteDelayMs.toString)
    props.put(MinCleanableDirtyRatioProp, minCleanableRatio.toString)
    props.put(CleanupPolicyProp, if (compact) "compact" else "delete")
    props
  }
}

object LogConfig {
  val SegmentBytesProp = "segment.bytes"
  val SegmentMsProp = "segment.ms"
  val SegmentIndexBytesProp = "segment.index.bytes"
  val FlushMessagesProp = "flush.messages"
  val FlushMsProp = "flush.ms"
  val RetentionBytesProp = "retention.bytes"
  val RententionMsProp = "retention.ms"
  val MaxMessageBytesProp = "max.message.bytes"
  val IndexIntervalBytesProp = "index.interval.bytes"
  val DeleteRetentionMsProp = "delete.retention.ms"
  val FileDeleteDelayMsProp = "file.delete.delay.ms"
  val MinCleanableDirtyRatioProp = "min.cleanable.dirty.ratio"
  val CleanupPolicyProp = "cleanup.policy"

  /** The complete set of recognized topic-level property names. */
  val ConfigNames = Set(SegmentBytesProp,
    SegmentMsProp,
    SegmentIndexBytesProp,
    FlushMessagesProp,
    FlushMsProp,
    RetentionBytesProp,
    RententionMsProp,
    MaxMessageBytesProp,
    IndexIntervalBytesProp,
    FileDeleteDelayMsProp,
    DeleteRetentionMsProp,
    MinCleanableDirtyRatioProp,
    CleanupPolicyProp)

  /**
   * Parse the given properties instance into a LogConfig object.
   * Every recognized property must be present (possibly via the Properties
   * defaults chain); missing values raise a NullPointerException/NumberFormatException.
   */
  def fromProps(props: Properties): LogConfig = {
    new LogConfig(segmentSize = props.getProperty(SegmentBytesProp).toInt,
      segmentMs = props.getProperty(SegmentMsProp).toLong,
      maxIndexSize = props.getProperty(SegmentIndexBytesProp).toInt,
      flushInterval = props.getProperty(FlushMessagesProp).toLong,
      flushMs = props.getProperty(FlushMsProp).toLong,
      retentionSize = props.getProperty(RetentionBytesProp).toLong,
      retentionMs = props.getProperty(RententionMsProp).toLong,
      maxMessageSize = props.getProperty(MaxMessageBytesProp).toInt,
      indexInterval = props.getProperty(IndexIntervalBytesProp).toInt,
      // Parsed as Long (the field's type); was previously .toInt, which
      // would throw on legitimate values beyond Int range.
      fileDeleteDelayMs = props.getProperty(FileDeleteDelayMsProp).toLong,
      deleteRetentionMs = props.getProperty(DeleteRetentionMsProp).toLong,
      minCleanableRatio = props.getProperty(MinCleanableDirtyRatioProp).toDouble,
      compact = props.getProperty(CleanupPolicyProp).trim.toLowerCase != "delete")
  }

  /**
   * Create a log config instance using the given properties and defaults.
   * Entries in `overrides` shadow `defaults` via the Properties defaults chain.
   */
  def fromProps(defaults: Properties, overrides: Properties): LogConfig = {
    val props = new Properties(defaults)
    props.putAll(overrides)
    fromProps(props)
  }

  /**
   * Check that property names are valid; throws IllegalArgumentException on
   * the first unknown name. Only directly-set entries are checked (not the
   * defaults chain), matching the original behavior.
   */
  def validateNames(props: Properties): Unit = {
    // Plain Java iteration: avoids the deprecated JavaConversions implicits.
    val names = props.keySet().iterator()
    while (names.hasNext) {
      val name = names.next().toString
      require(LogConfig.ConfigNames.contains(name), "Unknown configuration \"%s\".".format(name))
    }
  }

  /**
   * Check that the given properties contain only valid log config names, and that all values can be parsed.
   */
  def validate(props: Properties): Unit = {
    validateNames(props)
    LogConfig.fromProps(LogConfig().toProps, props) // check that we can parse the values
  }
}
| wking1986/kafka-manager | app/kafka/manager/utils/LogConfig.scala | Scala | apache-2.0 | 5,403 |
/*
* Copyright (C) 2007-2008 Artima, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Example code from:
*
* Programming in Scala (First Edition, Version 6)
* by Martin Odersky, Lex Spoon, Bill Venners
*
* http://booksites.artima.com/programming_in_scala
*/
// Scan the command-line arguments from index `i`, skipping option flags
// (arguments starting with "-"), and return the index of the first argument
// ending in ".scala", or -1 if none exists.
@annotation.tailrec
def searchFrom(i: Int): Int =
  if (i >= args.length) -1
  else args(i) match {
    case arg if arg.startsWith("-")     => searchFrom(i + 1)
    case arg if arg.endsWith(".scala")  => i
    case _                              => searchFrom(i + 1)
  }

val i = searchFrom(0)
println("i = " + i)
| peachyy/scalastu | expressions-and-control-flow/Breakless2.scala | Scala | apache-2.0 | 1,032 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.