code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1
value | license stringclasses 15
values | size int64 5 1M |
|---|---|---|---|---|---|
package castalia.model
import castalia.UnitSpecBase
import castalia.model.Model.LatencyConfig
import scala.concurrent.duration.Duration
/** Unit tests for [[LatencyConfig]]: distribution selection and sampling behaviour. */
class ModelSpec extends UnitSpecBase {

  "A latencyconfig" when {

    "parsing the json" should {

      "find the right distribution" in {
        // A constant distribution must reproduce exactly the configured duration.
        val constant = LatencyConfig("constant", Option("100 ms"), None, None)
        constant.sample() shouldBe Duration("100 ms")

        // Random distributions: only non-negativity of samples can be asserted.
        val gamma = LatencyConfig("gamma", None, Option("50 ms"), Option("500 ms"))
        assert(gamma.sample().toMillis >= 0)

        val weibull = LatencyConfig("weibull", None, Option("6.57803 ms"), Option("7.32456 ms"))
        assert(weibull.sample().toMillis >= 0)
      }

      "give 0 ms when no distribution is found" in {
        // An unrecognized distribution name falls back to a zero latency.
        val unknown = LatencyConfig("", None, None, None)
        unknown.sample() shouldBe Duration("0 ms")
      }
    }

    "sampled" should {

      "return unique latencies when not constant" in {
        // Two consecutive draws from a random distribution should differ.
        val gamma = LatencyConfig("gamma", None, Option("50 ms"), Option("500 ms"))
        val first = gamma.sample()
        val second = gamma.sample()
        assert(first != second)
      }
    }
  }
}
| TimSoethout/stubserver | src/test/scala/castalia/model/ModelSpec.scala | Scala | mit | 1,325 |
package uima.cpe
import us.feliscat.text.{StringNone, StringOption, StringSome}
/**
* <pre>
* Created on 2016/12/18.
* </pre>
*
* @author K.Sakamoto
*/
/**
 * The fixed stages ("intermediate points") of the QA pipeline, in execution
 * order, each identified by a full name and a two-letter code.
 */
object IntermediatePoint {

  /** Pipeline entry point: reads the questions. */
  case object QuestionReader extends IntermediatePoint(0) {
    override val name: String = "QuestionReader"
    override val code: String = "qr"
  }

  /** Analyzes questions; carries UIMA aggregate/primitive descriptor names. */
  case object QuestionAnalyzer extends IntermediatePoint(1) {
    override val name: String = "QuestionAnalyzer"
    override val code: String = "qa"
    override val descriptor = StringOption("questionAnalyzerAAEDescriptor")
    override val primitiveDescriptor = StringOption("questionAnalyzerAEDescriptor")
  }

  /** Retrieves information relevant to the question. */
  case object InformationRetriever extends IntermediatePoint(2) {
    override val name: String = "InformationRetriever"
    override val code: String = "ir"
    override val descriptor = StringOption("informationRetrieverAAEDescriptor")
    override val primitiveDescriptor = StringOption("informationRetrieverAEDescriptor")
  }

  /** Generates candidate answers. */
  case object AnswerGenerator extends IntermediatePoint(3) {
    override val name: String = "AnswerGenerator"
    override val code: String = "ag"
    override val descriptor = StringOption("answerGeneratorAAEDescriptor")
    override val primitiveDescriptor = StringOption("answerGeneratorAEDescriptor")
  }

  /** Writes the answers (CAS-consumer descriptor only). */
  case object AnswerWriter extends IntermediatePoint(4) {
    override val name: String = "AnswerWriter"
    override val code: String = "aw"
    override val descriptor = StringOption("answerWriterCCDescriptor")
  }

  /** Evaluates the written answers (CAS-consumer descriptor only). */
  case object AnswerEvaluator extends IntermediatePoint(5) {
    override val name: String = "AnswerEvaluator"
    override val code: String = "ae"
    override val descriptor = StringOption("answerEvaluatorCCDescriptor")
  }

  /** All stages in pipeline order. */
  val pipeline = Seq(
    QuestionReader,
    QuestionAnalyzer,
    InformationRetriever,
    AnswerGenerator,
    AnswerWriter,
    AnswerEvaluator
  )

  /**
   * Resolves a stage from its name or two-letter code (case-insensitive).
   *
   * @param code    the candidate name or code, possibly absent
   * @param default the stage returned when `code` is absent or matches nothing
   * @return the matching pipeline stage, or `default`
   */
  def get(code: StringOption, default: IntermediatePoint): IntermediatePoint = {
    code match {
      case StringSome(ip) =>
        // Replaces a foreach containing a non-local `return` (which is compiled to
        // throwing NonLocalReturnControl) with an equivalent, idiomatic `find`.
        // `_.equals(ip)` resolves to the String overload defined on IntermediatePoint.
        pipeline.find(_.equals(ip)).getOrElse(default)
      case StringNone =>
        default
    }
  }
}
/**
 * A single stage of the processing pipeline, ordered by its numeric [[id]].
 * Concrete stages live in the companion object; the class is sealed so the
 * set of stages is closed.
 */
sealed abstract class IntermediatePoint(val id: Int) {
  val name: String
  val code: String
  // Optional UIMA descriptor names; empty unless a stage overrides them.
  val descriptor: StringOption = StringOption.empty
  val primitiveDescriptor: StringOption = StringOption.empty

  /**
   * True when the argument equals this stage's name or code, ignoring case.
   * A `null` argument yields false.
   */
  def equals(nameOrCode: String): Boolean =
    Option(nameOrCode).exists { candidate =>
      name.equalsIgnoreCase(candidate) || code.equalsIgnoreCase(candidate)
    }
}
| ktr-skmt/FelisCatusZero-multilingual | src/main/scala/uima/cpe/IntermediatePoint.scala | Scala | apache-2.0 | 2,689 |
package chrome.management.bindings
import chrome.events.bindings.Event
import chrome.management.bindings.ExtensionInfo.LaunchType
import chrome.runtime.bindings.Runtime.AppID
import scala.scalajs.js
import scala.scalajs.js.annotation.JSName
/**
 * Scala.js facade for the `chrome.management` extension API.
 * Every member is implemented natively by the browser (`js.native` bodies);
 * results are always delivered through the callback parameters.
 */
@JSName("chrome.management")
object Management extends js.Object {

  // Events fired when an app/extension is installed, uninstalled, enabled or
  // disabled. Note that onUninstalled delivers only the id as a String.
  val onInstalled: Event[js.Function1[ExtensionInfo, _]] = js.native
  val onUninstalled: Event[js.Function1[String, _]] = js.native
  val onEnabled: Event[js.Function1[ExtensionInfo, _]] = js.native
  val onDisabled: Event[js.Function1[ExtensionInfo, _]] = js.native

  // Queries over installed extensions and apps.
  def getAll(callback: js.Function1[js.Array[ExtensionInfo], _]): Unit = js.native
  def get(id: AppID, callback: js.Function1[ExtensionInfo, _]): Unit = js.native
  def getSelf(callback: js.Function1[ExtensionInfo, _]): Unit = js.native
  def getPermissionWarningsById(id: AppID, callback: js.Function1[js.Array[String], _]): Unit = js.native
  def getPermissionWarningsByManifest(manifestStr: String,
                                      callback: js.Function1[js.Array[String], _]): Unit = js.native

  // Mutating operations; the optional callback (default: js.undefined) is
  // invoked on completion.
  def setEnabled(id: AppID, enabled: Boolean, callback: js.UndefOr[js.Function0[_]] = js.undefined): Unit = js.native
  def uninstall(id: AppID, options: js.UndefOr[js.Object] = js.undefined,
                callback: js.UndefOr[js.Function0[_]] = js.undefined): Unit = js.native
  def uninstallSelf(options: js.UndefOr[js.Object] = js.undefined,
                    callback: js.UndefOr[js.Function0[_]] = js.undefined): Unit = js.native
  def launchApp(id: AppID, callback: js.UndefOr[js.Function0[_]] = js.undefined): Unit = js.native
  def createAppShortcut(id: AppID, callback: js.UndefOr[js.Function0[_]] = js.undefined): Unit = js.native
  def setLaunchType(id: AppID, launchType: LaunchType,
                    callback: js.UndefOr[js.Function0[_]] = js.undefined): Unit = js.native
  def generateAppForLink(url: String, title: String,
                         callback: js.UndefOr[js.Function1[ExtensionInfo, _]] = js.undefined): Unit = js.native
}
| amsayk/scala-js-chrome | bindings/src/main/scala/chrome/management/bindings/Management.scala | Scala | mit | 2,060 |
/*
* Copyright 2006-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package mapper
import net.liftweb.util._
import net.liftweb.common._
import Helpers._
/**
 * Meta-mapper for [[ProtoTag]] subclasses. Provides lookup/creation of tags
 * by name and by primary key, memoized in two bounded LRU caches.
 * All cache access is guarded by `synchronized` on this instance.
 */
trait MetaProtoTag[ModelType <: ProtoTag[ModelType]] extends KeyedMetaMapper[Long, ModelType] {
  self: ModelType =>

  override def dbTableName: String // = "tags"

  // Maximum number of entries kept in each of the two caches below.
  def cacheSize: Int

  // id -> tag and (capified) name -> tag caches.
  private val idCache = new LRU[Long, ModelType](cacheSize)
  private val tagCache = new LRU[String, ModelType](cacheSize)

  /**
   * Looks up the tag by its capified name, creating and persisting a new tag
   * if none exists yet; the result is cached either way.
   */
  def findOrCreate(ntag: String): ModelType = synchronized {
    val tag = capify(ntag)
    if (tagCache.contains(tag)) tagCache(tag)
    else {
      find(By(name, tag)) match {
        case Full(t) => tagCache(tag) = t; t
        case _ => val ret: ModelType = (createInstance).name(tag).saveMe
          tagCache(tag) = ret
          ret
      }
    }
  }

  /** Primary-key lookup, memoized in `idCache`. */
  override def findDbByKey(dbId: ConnectionIdentifier, key: Long): Box[ModelType] = synchronized {
    if (idCache.contains(key)) Full(idCache(key))
    else {
      val ret = super.findDbByKey(dbId,key)
      ret.foreach(v => idCache(key) = v)
      ret
    }
  }

  /**
   * Split the String into tags
   */
  def split(in: String): List[String] = in.roboSplit(",").map(capify)

  /**
   * Split the String into tags and find all the tags
   */
  def splitAndFind(in: String): List[ModelType] = split(in).map(findOrCreate)

  // Normalization applied to every tag name before lookup/storage.
  def capify: String => String = Helpers.capify _
}
/**
 * A persisted tag with a normalized ("capified"), indexed name.
 * Tags are ordered alphabetically by name.
 */
abstract class ProtoTag[MyType <: ProtoTag[MyType]] extends KeyedMapper[Long, MyType] with Ordered[MyType] {
  self: MyType =>

  def getSingleton: MetaProtoTag[MyType]

  // the primary key for the database
  object id extends MappedLongIndex(this)

  def primaryKeyField = id

  // Tag name; the singleton's capify filter is applied on every set, and the
  // column is indexed for fast name lookups.
  object name extends MappedPoliteString(this, 256) {
    override def setFilter = getSingleton.capify :: super.setFilter
    override def dbIndexed_? = true
  }

  /** Alphabetical ordering on the tag name. */
  def compare(other: MyType): Int = name.is.compare(other.name.is)
}
| pbrant/framework | persistence/mapper/src/main/scala/net/liftweb/mapper/ProtoTag.scala | Scala | apache-2.0 | 2,483 |
package com.github.pheymann.rrt
import cats.{Id, ~>}
import TestAction._
import akka.actor.ActorSystem
import com.github.pheymann.rrt.io.DbService
import com.github.pheymann.rrt.io.DbService.UndefinedDatabase
import com.github.pheymann.rrt.util.RandomUtil
import com.github.pheymann.rrt.util.ResponseComparator.BodyComparison
/**
 * Interprets the [[TestAction]] free algebra into immediate (`Id`) values:
 * random-data generators, HTTP test-case runs, and database-backed sampling.
 */
object TestActionInterpreter {

  import RandomUtil._

  /**
   * Builds the natural transformation that executes every [[TestAction]]
   * eagerly. Data actions yield thunks producing random values; test-case
   * actions run the refactoring test sequentially.
   */
  def interpreter(comparison: BodyComparison, config: TestConfig)
                 (implicit system: ActorSystem): TestAction ~> Id = new (TestAction ~> Id) {

    // Eagerly register the JDBC driver when a database is configured.
    config.dbConfigOpt.foreach(DbService.newDriver)

    override def apply[R](action: TestAction[R]): Id[R] = action match {
      case StaticData(data) => () => nextFromSeq(data)
      case IntData(maxOpt) => () => maxOpt.fold(rand.nextInt())(rand.nextInt)
      case PositiveIntData(maxOpt) => () => maxOpt.fold(nextPositiveInt())(nextPositiveInt)
      case LongData(maxOpt) => () => maxOpt.fold(rand.nextLong())(nextLong)
      case DoubleData(maxOpt) => () => maxOpt.fold(rand.nextDouble())(nextDouble)
      case GetTestCase(test) => TestRunner.runGetSequential(test, comparison, config, RandomUtil)
      case PostTestCase(test) => TestRunner.runPostSequential(test, comparison, config, RandomUtil)
      case PutTestCase(test) => TestRunner.runPutSequential(test, comparison, config, RandomUtil)
      case DeleteTestCase(test) => TestRunner.runDeleteSequential(test, comparison, config, RandomUtil)
      case FromDatabase(table, selectCol, resultCol, _action) =>
        // Database-backed generators are only available with a configured DB.
        config.dbConfigOpt.fold(throw UndefinedDatabase)(dbInterpreter(table, selectCol, resultCol, _action, _))
    }
  }

  import DbService._

  /**
   * Executes a database-backed sub-action: pre-loads `size` random values of
   * the requested type from `table` and returns a thunk sampling from them.
   */
  private def dbInterpreter[R](table: String,
                               pkCol: String,
                               resultCol: String,
                               action: DbReadyTestAction[R],
                               config: DatabaseConfig): Id[R] = action match {
    case RetrieveInts(size) =>
      val intValues = selectRandomInts(table, pkCol, resultCol, size, config.dbType)
      () => nextFromSeq(intValues)
    case RetrieveLongs(size) =>
      val longValues = selectRandomLongs(table, pkCol, resultCol, size, config.dbType)
      () => nextFromSeq(longValues)
    case RetrieveDoubles(size) =>
      val doubleValues = selectRandomDoubles(table, pkCol, resultCol, size, config.dbType)
      () => nextFromSeq(doubleValues)
    case RetrieveStrings(size) =>
      val stringValues = selectRandomStrings(table, pkCol, resultCol, size, config.dbType)
      () => nextFromSeq(stringValues)
  }
}
| pheymann/rest-refactoring-test | core/src/main/scala/com/github/pheymann/rrt/TestActionInterpreter.scala | Scala | mit | 2,598 |
package controllers
import models.Formats.AlarmFormat
import models._
import play.api.Play.current
import play.api.libs.concurrent.Akka
import play.api.libs.json.Json
import play.api.mvc._
import scala.concurrent.ExecutionContext.Implicits.global
/**
* Created by evalery on 06/02/15.
*/
/** REST endpoints for creating, fetching and listing alarms. */
object AlarmController extends Controller with AlarmsRepository with SettingsAware with MongoAware with ActorSystemProvider {

  lazy val actorSystem = Akka.system

  /** POST: persists the alarm from the JSON body; 201 on success, 400 on duplicate id. */
  def createAlarm = Action.async(parse.json) { request =>
    val alarm = request.body.as[Alarm]
    insertAlarm(alarm)
      .map { lastError =>
        if (!lastError.ok) InternalServerError(lastError.errMsg.getOrElse("Unknown"))
        else Created
      }
      .recover {
        case DuplicatedAlarmIdException(id) => BadRequest(s"Duplicated alarm id <$id>")
      }
  }

  /** GET: the alarm with the given id as JSON, or 404 when absent. */
  def getAlarm(id: Long) = Action.async {
    findAlarm(id).map {
      case Some(alarm) => Ok(Json.toJson(alarm))
      case None        => NotFound(s"Alarm id <$id> not found")
    }
  }

  /** GET: all alarms as a JSON array. */
  def listAllAlarms = Action.async {
    listAlarms().map(alarms => Ok(Json.toJson(alarms)))
  }
}
package jgo.tools.compiler
import lexer._
import scala.util.parsing.input.Reader
package object parser {
  // Shorthand for the lexer-backed reader type consumed by all parsers.
  type Input = Base#Input
}
| thomasmodeneis/jgo | src/src/main/scala/jgo/tools/compiler/parser/package.scala | Scala | gpl-3.0 | 136 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler
import scala.collection.mutable
import org.apache.spark._
import org.apache.spark.internal.Logging
import org.apache.spark.rpc.{RpcCallContext, RpcEndpoint, RpcEndpointRef, RpcEnv}
import org.apache.spark.util.{RpcUtils, ThreadUtils}
private sealed trait OutputCommitCoordinationMessage extends Serializable
private case object StopCoordinator extends OutputCommitCoordinationMessage
private case class AskPermissionToCommitOutput(stage: Int, partition: Int, attemptNumber: Int)
/**
* Authority that decides whether tasks can commit output to HDFS. Uses a "first committer wins"
* policy.
*
* OutputCommitCoordinator is instantiated in both the drivers and executors. On executors, it is
* configured with a reference to the driver's OutputCommitCoordinatorEndpoint, so requests to
* commit output will be forwarded to the driver's OutputCommitCoordinator.
*
* This class was introduced in SPARK-4879; see that JIRA issue (and the associated pull requests)
* for an extensive design discussion.
*/
private[spark] class OutputCommitCoordinator(conf: SparkConf, isDriver: Boolean) extends Logging {

  // Initialized by SparkEnv
  var coordinatorRef: Option[RpcEndpointRef] = None

  private type StageId = Int
  private type PartitionId = Int
  private type TaskAttemptNumber = Int

  // Sentinel: no task attempt currently holds the commit lock for a partition.
  private val NO_AUTHORIZED_COMMITTER: TaskAttemptNumber = -1

  // Per-stage bookkeeping: which attempt (if any) may commit each partition,
  // plus all attempts known to have failed, per partition.
  private case class StageState(numPartitions: Int) {
    val authorizedCommitters = Array.fill[TaskAttemptNumber](numPartitions)(NO_AUTHORIZED_COMMITTER)
    val failures = mutable.Map[PartitionId, mutable.Set[TaskAttemptNumber]]()
  }

  /**
   * Map from active stages's id => authorized task attempts for each partition id, which hold an
   * exclusive lock on committing task output for that partition, as well as any known failed
   * attempts in the stage.
   *
   * Entries are added to the top-level map when stages start and are removed they finish
   * (either successfully or unsuccessfully).
   *
   * Access to this map should be guarded by synchronizing on the OutputCommitCoordinator instance.
   */
  private val stageStates = mutable.Map[StageId, StageState]()

  /**
   * Returns whether the OutputCommitCoordinator's internal data structures are all empty.
   */
  def isEmpty: Boolean = {
    stageStates.isEmpty
  }

  /**
   * Called by tasks to ask whether they can commit their output to HDFS.
   *
   * If a task attempt has been authorized to commit, then all other attempts to commit the same
   * task will be denied.  If the authorized task attempt fails (e.g. due to its executor being
   * lost), then a subsequent task attempt may be authorized to commit its output.
   *
   * @param stage the stage number
   * @param partition the partition number
   * @param attemptNumber how many times this task has been attempted
   *                      (see [[TaskContext.attemptNumber()]])
   * @return true if this task is authorized to commit, false otherwise
   */
  def canCommit(
      stage: StageId,
      partition: PartitionId,
      attemptNumber: TaskAttemptNumber): Boolean = {
    val msg = AskPermissionToCommitOutput(stage, partition, attemptNumber)
    coordinatorRef match {
      case Some(endpointRef) =>
        // On executors this forwards the request to the driver and blocks on
        // the decision for up to the configured RPC ask timeout.
        ThreadUtils.awaitResult(endpointRef.ask[Boolean](msg),
          RpcUtils.askRpcTimeout(conf).duration)
      case None =>
        logError(
          "canCommit called after coordinator was stopped (is SparkEnv shutdown in progress)?")
        false
    }
  }

  /**
   * Called by the DAGScheduler when a stage starts.
   *
   * @param stage the stage id.
   * @param maxPartitionId the maximum partition id that could appear in this stage's tasks (i.e.
   *                       the maximum possible value of `context.partitionId`).
   */
  private[scheduler] def stageStart(stage: StageId, maxPartitionId: Int): Unit = synchronized {
    stageStates(stage) = new StageState(maxPartitionId + 1)
  }

  // Called by DAGScheduler
  private[scheduler] def stageEnd(stage: StageId): Unit = synchronized {
    stageStates.remove(stage)
  }

  // Called by DAGScheduler
  private[scheduler] def taskCompleted(
      stage: StageId,
      partition: PartitionId,
      attemptNumber: TaskAttemptNumber,
      reason: TaskEndReason): Unit = synchronized {
    val stageState = stageStates.getOrElse(stage, {
      logDebug(s"Ignoring task completion for completed stage")
      return
    })
    reason match {
      case Success =>
        // The task output has been committed successfully
      case denied: TaskCommitDenied =>
        logInfo(s"Task was denied committing, stage: $stage, partition: $partition, " +
          s"attempt: $attemptNumber")
      case otherReason =>
        // Mark the attempt as failed to blacklist from future commit protocol
        stageState.failures.getOrElseUpdate(partition, mutable.Set()) += attemptNumber
        // If the failed attempt held the commit lock, release it so a later
        // attempt of the same task can be authorized.
        if (stageState.authorizedCommitters(partition) == attemptNumber) {
          logDebug(s"Authorized committer (attemptNumber=$attemptNumber, stage=$stage, " +
            s"partition=$partition) failed; clearing lock")
          stageState.authorizedCommitters(partition) = NO_AUTHORIZED_COMMITTER
        }
    }
  }

  def stop(): Unit = synchronized {
    // Only the driver owns the endpoint; executors just drop their reference.
    if (isDriver) {
      coordinatorRef.foreach(_ send StopCoordinator)
      coordinatorRef = None
      stageStates.clear()
    }
  }

  // Marked private[scheduler] instead of private so this can be mocked in tests
  private[scheduler] def handleAskPermissionToCommit(
      stage: StageId,
      partition: PartitionId,
      attemptNumber: TaskAttemptNumber): Boolean = synchronized {
    stageStates.get(stage) match {
      case Some(state) if attemptFailed(state, partition, attemptNumber) =>
        logInfo(s"Denying attemptNumber=$attemptNumber to commit for stage=$stage," +
          s" partition=$partition as task attempt $attemptNumber has already failed.")
        false
      case Some(state) =>
        state.authorizedCommitters(partition) match {
          case NO_AUTHORIZED_COMMITTER =>
            // First committer wins: grant the lock to this attempt.
            logDebug(s"Authorizing attemptNumber=$attemptNumber to commit for stage=$stage, " +
              s"partition=$partition")
            state.authorizedCommitters(partition) = attemptNumber
            true
          case existingCommitter =>
            // Coordinator should be idempotent when receiving AskPermissionToCommit.
            if (existingCommitter == attemptNumber) {
              logWarning(s"Authorizing duplicate request to commit for " +
                s"attemptNumber=$attemptNumber to commit for stage=$stage," +
                s" partition=$partition; existingCommitter = $existingCommitter." +
                s" This can indicate dropped network traffic.")
              true
            } else {
              logDebug(s"Denying attemptNumber=$attemptNumber to commit for stage=$stage, " +
                s"partition=$partition; existingCommitter = $existingCommitter")
              false
            }
        }
      case None =>
        logDebug(s"Stage $stage has completed, so not allowing" +
          s" attempt number $attemptNumber of partition $partition to commit")
        false
    }
  }

  // True when this attempt has previously been reported failed for the partition.
  private def attemptFailed(
      stageState: StageState,
      partition: PartitionId,
      attempt: TaskAttemptNumber): Boolean = synchronized {
    stageState.failures.get(partition).exists(_.contains(attempt))
  }
}
private[spark] object OutputCommitCoordinator {

  // This endpoint is used only for RPC
  private[spark] class OutputCommitCoordinatorEndpoint(
      override val rpcEnv: RpcEnv, outputCommitCoordinator: OutputCommitCoordinator)
    extends RpcEndpoint with Logging {

    logDebug("init")  // force eager creation of logger

    // Fire-and-forget messages: only the shutdown signal.
    override def receive: PartialFunction[Any, Unit] = {
      case StopCoordinator =>
        logInfo("OutputCommitCoordinator stopped!")
        stop()
    }

    // Request/response messages: commit-permission checks from executors.
    override def receiveAndReply(context: RpcCallContext): PartialFunction[Any, Unit] = {
      case AskPermissionToCommitOutput(stage, partition, attemptNumber) =>
        context.reply(
          outputCommitCoordinator.handleAskPermissionToCommit(stage, partition, attemptNumber))
    }
  }
}
| akopich/spark | core/src/main/scala/org/apache/spark/scheduler/OutputCommitCoordinator.scala | Scala | apache-2.0 | 9,019 |
package sample.blog
import java.lang.{ Long ⇒ JLong }
import java.util.concurrent.Executor
import akka.event.LoggingAdapter
import akka.persistence.cassandra._
import akka.stream._
import akka.stream.scaladsl.{ Keep, Source }
import akka.stream.stage._
import com.datastax.driver.core._
import com.google.common.util.concurrent.{ FutureCallback, ListenableFuture }
import sample.blog.PsJournal.{ ListenableFutureConverter, ResultSetFutureConverter }
import scala.annotation.tailrec
import scala.concurrent.{ ExecutionContext, Future, Promise }
import scala.reflect.ClassTag
import scala.util.{ Failure, Success, Try }
/*
Links:
http://akka.io/blog/2016/10/21/emit-and-friends
http://akka.io/blog/2016/07/30/mastering-graph-stage-part-1
http://doc.akka.io/docs/akka/2.4/scala/stream/stream-customize.html?_ga=2.15138358.1258512146.1495883588-1678957595.1434051367#Using_asynchronous_side-channels
http://doc.akka.io/docs/akka/2.4/scala/stream/stream-customize.html?_ga=2.73266707.1258512146.1495883588-1678957595.1434051367#Using_timers
http://doc.akka.io/docs/akka/2.5.2/scala/stream/stream-customize.html
http://akka.io/blog/2016/08/29/connecting-existing-apis
https://github.com/mkubala/akka-stream-contrib/blob/feature/101-mkubala-interval-based-rate-limiter/contrib/src/main/scala/akka/stream/contrib/IntervalBasedRateLimiter.scala
*/
/*
http://doc.akka.io/docs/akka/2.5.4/scala/stream/stream-customize.html
Thread safety of custom processing stages.
The callbacks are never called concurrently.
The state encapsulated can be safely modified from the provided callbacks, without any further synchronization.
*/
/**
* A Source that has one output and no inputs, it models a source of cassandra rows
* associated with a persistenceId starting with offset.
*
* The impl is based on this
* https://github.com/akka/alpakka/blob/master/cassandra/src/main/scala/akka/stream/alpakka/cassandra/impl/CassandraSourceStage.scala
* and adapted with respect to akka-cassandra-persistence schema
*/
final class PsJournal(client: Cluster, keySpace: String, journal: String, persistenceId: String,
                      offset: Long, partitionSize: Long, pageSize: Int) extends GraphStage[SourceShape[Row]] {

  val out: Outlet[Row] = Outlet[Row](akka.event.Logging.simpleName(this) + ".out")

  // Milliseconds slept between failed connection attempts.
  private val retryTimeout = 3000

  override val shape: SourceShape[Row] = SourceShape(out)

  // Rows for a persistenceId are read one journal partition at a time, in
  // ascending sequence-number order.
  private val queryByPersistenceId =
    s"""
       |SELECT persistence_id, partition_nr, sequence_nr, timestamp, timebucket, event FROM $journal WHERE
       | persistence_id = ? AND
       | partition_nr = ? AND
       | sequence_nr >= ?
    """.stripMargin

  /*
   Selecting a separate dispatcher in Akka Streams is done by returning it from the initialAttributes of the GraphStage.
   */
  override protected def initialAttributes: Attributes =
    Attributes.name(persistenceId)
  //.and(ActorAttributes.dispatcher("cassandra-dispatcher"))

  // akka-cassandra-persistence spreads a persistenceId over fixed-size
  // partitions: the partition number is sequenceNr / partitionSize.
  private def navigatePartition(sequenceNr: Long, partitionSize: Long): Long = sequenceNr / partitionSize

  private def statement(preparedStmt: PreparedStatement, persistenceId: String,
                        partition: JLong, sequenceNr: JLong, pageSize: Int) =
    new BoundStatement(preparedStmt).bind(persistenceId, partition, sequenceNr).setFetchSize(pageSize)

  /** Evaluates `f` up to `n` times, sleeping `retryTimeout` ms between failures. */
  @tailrec private def conAttempt[T](n: Int)(log: LoggingAdapter, f: ⇒ T): T = {
    log.info("Getting cassandra connection")
    Try(f) match {
      case Success(x) ⇒
        x
      case Failure(e) if n > 1 ⇒
        log.error(e.getMessage)
        Thread.sleep(retryTimeout)
        conAttempt(n - 1)(log, f)
      case Failure(e) ⇒
        throw e
    }
  }

  override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
    new GraphStageLogic(shape) with StageLogging {
      /*
      It is not safe to access the state of any custom stage outside of the callbacks that it provides,
      just like it is unsafe to access the state of an actor from the outside.
      This means that Future callbacks should not close over internal state of custom stages because such access can be
      concurrent with the provided callbacks, leading to undefined behavior.
      All mutable state MUST be inside the GraphStageLogic
      */
      // Removed the unused `var requireMore` that was never read or written
      // after initialization.
      var sequenceNr = offset
      var partitionIter = Option.empty[ResultSet]
      // Bridges Future completions back onto the stage's single-threaded callbacks.
      var onMessageCallback: AsyncCallback[Try[ResultSet]] = _

      lazy val session = conAttempt(Int.MaxValue)(log, client.connect(keySpace))
      lazy val preparedStmt = session.prepare(queryByPersistenceId)
      implicit lazy val ec = materializer.executionContext

      override def preStart(): Unit = {
        onMessageCallback = getAsyncCallback[Try[ResultSet]](onFetchCompleted)
        // Kick off the first query for the partition containing `offset`.
        val partition = navigatePartition(sequenceNr, partitionSize): JLong
        val stmt = statement(preparedStmt, persistenceId, partition, sequenceNr, pageSize)
        session.executeAsync(stmt).asFuture.onComplete(onMessageCallback.invoke)
      }

      setHandler(out, new OutHandler {
        override def onPull(): Unit = {
          partitionIter match {
            case Some(iter) if iter.getAvailableWithoutFetching > 0 ⇒
              sequenceNr += 1
              push(out, iter.one)
            case Some(iter) ⇒
              if (iter.isExhausted) {
                //a current partition is exhausted, let's try to read from the next partition
                val nextPartition = navigatePartition(sequenceNr, partitionSize): JLong
                val stmt = statement(preparedStmt, persistenceId, nextPartition, sequenceNr, pageSize)
                session.executeAsync(stmt).asFuture.onComplete(onMessageCallback.invoke)
              } else {
                //Your page size less than akka-cassandra-persistence partition size(cassandra-journal.target-partition-size)
                //so you hit the end of page but still have something to read
                log.info("Still have something to read in current partition seqNum: {}", sequenceNr)
                iter.fetchMoreResults.asFuture.onComplete(onMessageCallback.invoke)
              }
            case None ⇒
              //log.info("A request from a downstream had arrived before we read the first row")
              ()
          }
        }
      })

      /*
       * reached the end of page or the end of the journal
       */
      private def onFetchCompleted(rsOrFailure: Try[ResultSet]): Unit = {
        rsOrFailure match {
          case Success(iter) ⇒
            partitionIter = Some(iter)
            if (iter.getAvailableWithoutFetching > 0) {
              if (isAvailable(out)) {
                sequenceNr += 1
                push(out, iter.one)
              }
            } else {
              // BUGFIX: the format string previously contained three {}
              // placeholders but only two arguments were supplied, producing a
              // corrupted log line. Placeholder count now matches the arguments.
              log.info("{} CompleteSource seqNum: {}", persistenceId, sequenceNr)
              completeStage()
            }
          case Failure(failure) ⇒ failStage(failure)
        }
      }

      override def postStop(): Unit = {
        //cleaning up resources should be done here
        //session.closeAsync
      }
    }
}
object PsJournal {

  /** Adapts a Guava ListenableFuture to a scala.concurrent.Future. */
  implicit class ListenableFutureConverter[A](val future: ListenableFuture[A]) extends AnyVal {
    def asFuture(implicit ec: ExecutionContext): Future[A] = {
      val promise = Promise[A]
      future.addListener(() ⇒ {
        //BusySpinWait
        // NOTE(review): the listener normally only runs after completion, so
        // this loop should rarely spin -- confirm against Guava's contract.
        while (!future.isDone) {
          java.lang.Thread.onSpinWait()
        }
        promise.tryComplete(Try(future.get()))
      }, ec.asInstanceOf[Executor])
      //lf.addListener(() => promise.complete(Try(lf.get())), ec.asInstanceOf[Executor])
      /*com.google.common.util.concurrent.Futures.addCallback(lf, new FutureCallback[A] {
      def onFailure(error: Throwable): Unit = {
        promise.failure(error)
        ()
      }
      def onSuccess(result: A): Unit = {
        promise.success(result)
        ()
      }
      }, ec.asInstanceOf[Executor])*/
      promise.future
    }
  }

  /** Adapts a Cassandra driver ResultSetFuture to a scala.concurrent.Future. */
  implicit class ResultSetFutureConverter(val lf: ResultSetFuture) extends AnyVal {
    def asFuture(implicit ec: ExecutionContext): Future[ResultSet] = {
      val promise = Promise[ResultSet]()
      com.google.common.util.concurrent.Futures.addCallback(lf, new FutureCallback[ResultSet] {
        def onFailure(error: Throwable): Unit = {
          promise.failure(error)
          ()
        }
        def onSuccess(result: ResultSet): Unit = {
          promise.success(result)
          ()
        }
      }, ec.asInstanceOf[Executor])
      promise.future
    }
  }

  /**
   * Source of journal events decoded via the Codec[T]; materializes a Future
   * with the last element seen when the stream terminates.
   */
  def apply[T: Codec: ClassTag](client: Cluster, keySpace: String, journal: String, persistenceId: String,
                                offset: Long, partitionSize: Long, pageSize: Int = 32) = {
    Source.fromGraph(new PsJournal(client, keySpace, journal, persistenceId, offset, partitionSize, pageSize))
      .map(_.as[T])
      .viaMat(new LastSeen)(Keep.right)
  }

  /** Same as [[apply]] but maps each row with `asTypedRow` instead of a Codec. */
  def typedRow(client: Cluster, keySpace: String, journal: String, persistenceId: String,
               offset: Long, partitionSize: Long, pageSize: Int = 32) = {
    Source.fromGraph(new PsJournal(client, keySpace, journal, persistenceId, offset, partitionSize, pageSize))
      .map(_.asTypedRow)
      .viaMat(new LastSeen)(Keep.right)
  }

  /**
   * Pass-through flow that materializes a Future completed with the last
   * element seen (or None) when the upstream finishes or fails. Upstream
   * failures are deliberately converted into normal completion.
   */
  final class LastSeen[T] extends GraphStageWithMaterializedValue[FlowShape[T, T], Future[Option[T]]] {
    override val shape = FlowShape(Inlet[T]("in"), Outlet[T]("out"))

    override def createLogicAndMaterializedValue(inheritedAttributes: Attributes): (GraphStageLogic, Future[Option[T]]) = {
      val matVal = Promise[Option[T]]
      val logic = new GraphStageLogic(shape) with StageLogging {
        import shape._

        // Most recently forwarded element; published into matVal on termination.
        private var current = Option.empty[T]

        setHandler(in, new InHandler {
          override def onPush(): Unit = {
            val element = grab(in)
            current = Some(element)
            push(out, element)
          }

          override def onUpstreamFinish(): Unit = {
            log.info("upstream finish")
            matVal.success(current)
            super.onUpstreamFinish()
          }

          override def onUpstreamFailure(ex: Throwable): Unit = {
            log.info("upstream failure")
            matVal.success(current)
            //don't fail here intentionally
            //super.onUpstreamFailure(LastSeenException(ex, current))
            super.onUpstreamFinish()
          }
        })

        setHandler(out, new OutHandler {
          override def onPull(): Unit = pull(in)
        })
      }
      (logic, matVal.future)
    }
  }
}
// Copyright (c) 2016 PSForever.net to present
package net.psforever.packet.game
import net.psforever.packet.{GamePacketOpcode, Marshallable, PlanetSideGamePacket}
import scodec.Codec
import scodec.codecs._
/**
* Promotes a warpgate's "broadcast" functionality.<br>
* <br>
* Change the map name of a warpgate into "Broadcast" when the proper state is set.
* If a proper warpgate is not designated, nothing happens.
* If not set, the map name of the warpgate will default to whatever is normally written on the map.
* The map designation of geowarps is not affected by this packet.<br>
* <br>
* Exploration:<br>
* I believe these `Boolean` values actually indicate some measure of warpgate operation.
* Geowarps, for example, though their appearance does not change, receive this packet.
* Moreover, they can operate as a receiving-end broadcast gate.
* @param continent_guid identifies the zone (continent)
* @param building_guid identifies the warpgate (see `BuildingInfoUpdateMessage`)
* @param unk1 na
* @param unk2 na
* @param is_broadcast if true, the gate replaces its destination text with "Broadcast"
*/
final case class BroadcastWarpgateUpdateMessage(continent_guid : PlanetSideGUID,
                                                building_guid : PlanetSideGUID,
                                                unk1 : Boolean,
                                                unk2 : Boolean,
                                                is_broadcast : Boolean)
  extends PlanetSideGamePacket {
  type Packet = BroadcastWarpgateUpdateMessage
  // Opcode identifying this packet on the wire; encoding delegates to the
  // companion's scodec codec.
  def opcode = GamePacketOpcode.BroadcastWarpgateUpdateMessage
  def encode = BroadcastWarpgateUpdateMessage.encode(this)
}
object BroadcastWarpgateUpdateMessage extends Marshallable[BroadcastWarpgateUpdateMessage] {
  // Wire format: two GUIDs followed by three single-bit flags, in field order.
  implicit val codec : Codec[BroadcastWarpgateUpdateMessage] = (
    ("continent_guid" | PlanetSideGUID.codec) ::
      ("building_guid" | PlanetSideGUID.codec) ::
      ("unk1" | bool) ::
      ("unk2" | bool) ::
      ("is_broadcast" | bool)
    ).as[BroadcastWarpgateUpdateMessage]
}
| Fate-JH/PSF-Server | common/src/main/scala/net/psforever/packet/game/BroadcastWarpgateUpdateMessage.scala | Scala | gpl-3.0 | 2,094 |
/**
* Copyright 2015 Peter Nerg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package javascalautils.converters.j2s
import org.scalatest.funsuite.AnyFunSuite
import javascalautils.{ Either => JEither, Left => JLeft, Right => JRight }
import javascalautils.converters.j2s.Converters._
/**
* Test suite for Converters Either/Left/Right conversions.
* @author Peter Nerg
*/
class ConvertersEitherSuite extends AnyFunSuite {
  // Payload used for every conversion round-trip in this suite.
  val expected = "Left is not Right"

  // Java Left converts to a Scala Left carrying the same value.
  test("Test asScalaLeft") {
    val converted = asScalaLeft(new JLeft[String, String](expected))
    assert(converted.isLeft)
    assertResult(expected)(converted.left.get)
  }

  // Java Right converts to a Scala Right carrying the same value.
  test("Test asScalaRight") {
    val converted = asScalaRight(new JRight[String, String](expected))
    assert(converted.isRight)
    assertResult(expected)(converted.right.get)
  }

  // The generic Either converter preserves the Right side.
  test("Test asScalaEither with Right") {
    val converted: Either[String, String] = asScalaEither(new JRight[String, String](expected))
    assert(converted.isRight)
    assertResult(expected)(converted.right.get)
  }

  // The generic Either converter preserves the Left side.
  test("Test asScalaEither with Left") {
    val converted: Either[String, String] = asScalaEither(new JLeft[String, String](expected))
    assert(converted.isLeft)
    assertResult(expected)(converted.left.get)
  }
} | pnerg/java-scala-util-converter | src/test/scala/javascalautils/converters/j2s/ConvertersEitherSuite.scala | Scala | apache-2.0 | 1,732 |
import scala.tools.partest.ReplTest
// Partest REPL regression test: the snippet below is replayed in a fresh
// REPL and the transcript compared against the expected check file.
// Three loops each add 1..10 (= 55), so the snippet prints 165.
object Test extends ReplTest {
  // REPL input; the string content (including surrounding newlines) is
  // part of the expected transcript and must not be reformatted.
  def code = """
var s = 0
for (i <- 1 to 10) {s += i}
for (i <- 1 to 10) {s += i}
for (i <- 1 to 10) {s += i}
println(s)
  """
}
| yusuke2255/dotty | tests/pending/run/t5583.scala | Scala | bsd-3-clause | 198 |
package com.outr.arango
// Declarative description of an ArangoDB index on a collection.
// The trailing fields are presumably type-specific (geoJson for geo
// indexes, minLength for fulltext, expireAfterSeconds for TTL with -1
// meaning disabled) -- TODO confirm against the ArangoDB index API.
case class Index(`type`: IndexType,
                 fields: List[String],
                 sparse: Boolean = false,
                 unique: Boolean = false,
                 deduplicate: Boolean = true,
                 geoJson: Boolean = true,
                 minLength: Long = 3L,
                 expireAfterSeconds: Int = -1) {
  // True when the server-reported index has the same (lower-cased) type
  // and its `fields` contains this exact field list.
  // NOTE(review): `contains(fields)` checks for the whole list as an
  // element -- verify IndexInfo.fields is a collection of field lists.
  def typeAndFields(info: IndexInfo): Boolean = info.`type` == `type`.toString.toLowerCase &&
    info.fields.contains(fields)
  // True when `info` describes this index; absent unique/sparse flags on
  // the server side are treated as matching.
  def matches(info: IndexInfo): Boolean = typeAndFields(info) &&
    (info.unique.isEmpty || info.unique.contains(unique)) &&
    (info.sparse.isEmpty || info.sparse.contains(sparse))
} | outr/arangodb-scala | core/src/main/scala/com/outr/arango/Index.scala | Scala | mit | 672 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package repo
import play.api.libs.json._
import play.modules.reactivemongo.ReactiveMongoComponent
import reactivemongo.api.commands.{UpdateWriteResult, WriteResult}
import uk.gov.hmrc.http.HeaderCarrier
import uk.gov.hmrc.mongo.ReactiveRepository
import scala.concurrent.{ExecutionContext, Future}
/**
 * Base class for ReactiveMongo repositories storing documents of type `A`
 * keyed by `ID`. Subclasses supply only the collection name and the Mongo
 * component; JSON formats and the ExecutionContext come in implicitly.
 */
abstract class Repo[A, ID](
  collectionName: String,
  reactiveMongoComponent: ReactiveMongoComponent)(implicit manifest: Manifest[A],
                                                  mid: Manifest[ID],
                                                  domainFormat: OFormat[A],
                                                  idFormat: Format[ID],
                                                  executionContext: ExecutionContext)
  extends ReactiveRepository[A, ID](
    collectionName,
    reactiveMongoComponent.mongoConnector.db,
    domainFormat,
    idFormat) {

  // Identity OWrites so raw JsObject selectors/updates can be passed to
  // the driver where an OWrites instance is required.
  implicit val f: OWrites[JsObject] = new OWrites[JsObject] {
    override def writes(o: JsObject): JsObject = o
  }

  // Replaces the whole document with the given id (no upsert).
  // NOTE(review): the HeaderCarrier parameter is not used in the body.
  def update(id: ID, a: A)(implicit hc: HeaderCarrier): Future[UpdateWriteResult] = collection.update(ordered = false).one(_id(id), a)

  /**
   * Update or Insert (UpSert)
   */
  def upsert(id: ID, a: A)(implicit hc: HeaderCarrier): Future[UpdateWriteResult] = collection.update(ordered = false).one(
    _id(id),
    a,
    upsert = true
  )

  // Enriches Future[WriteResult] with `checkResult`, which fails the
  // future (RuntimeException) whenever the write reported any problem.
  protected implicit class WriteResultChecker(future: Future[WriteResult])(implicit hc: HeaderCarrier) {
    def checkResult: Future[Unit] = future.map { writeResult =>
      if (hasAnyConcerns(writeResult)) throw new RuntimeException(writeResult.toString)
      else ()
    }
  }

  // A result is concerning when it is not ok, carries write errors, or
  // carries a write-concern error.
  private def hasAnyConcerns(writeResult: WriteResult): Boolean = !writeResult.ok || writeResult.writeErrors.nonEmpty || writeResult.writeConcernError.isDefined
}
| hmrc/self-service-time-to-pay-frontend | app/repo/Repo.scala | Scala | apache-2.0 | 2,439 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.dataload
import org.apache.spark.sql.Row
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll
/**
* Test Class for data loading with hive syntax and old syntax
*
*/
/**
 * Verifies data loading into Carbon tables that declare only dimension
 * columns (String / complex types) and no measure columns.
 */
class TestLoadDataWithNoMeasure extends QueryTest with BeforeAndAfterAll {

  // Creates the shared two-column table and loads the sample CSV once.
  override def beforeAll {
    sql("DROP TABLE IF EXISTS nomeasureTest")
    sql(
      "CREATE TABLE nomeasureTest (empno String, doj String) STORED BY 'org.apache.carbondata" +
      ".format'"
    )
    val testData = s"$resourcesPath/datasample.csv"
    sql("LOAD DATA LOCAL INPATH '" + testData + "' into table nomeasureTest")
  }

  test("test data loading and validate query output") {
    checkAnswer(
      sql("select empno from nomeasureTest"),
      Seq(Row("11"), Row("12"), Row("13"))
    )
  }

  // Single dictionary-encoded String column: load plus result check.
  test("test data loading with single dictionary column") {
    sql("DROP TABLE IF EXISTS nomeasureTest_sd")
    sql("CREATE TABLE nomeasureTest_sd (city String) STORED BY 'org.apache.carbondata.format'")
    val testData = s"$resourcesPath/datasingleCol.csv"
    sql("LOAD DATA LOCAL INPATH '" + testData + "' into table nomeasureTest_sd options " +
      "('FILEHEADER'='city')"
    )
    checkAnswer(
      sql("select city from nomeasureTest_sd"),
      Seq(Row("CA"), Row("LA"), Row("AD"))
    )
  }

  // Same column but excluded from dictionary encoding.
  test("test data loading with single no dictionary column") {
    sql("DROP TABLE IF EXISTS nomeasureTest_sd")
    sql(
      "CREATE TABLE nomeasureTest_sd (city String) STORED BY 'org.apache.carbondata.format' " +
      "TBLPROPERTIES ('DICTIONARY_EXCLUDE'='city')"
    )
    val testData = s"$resourcesPath/datasingleCol.csv"
    sql("LOAD DATA LOCAL INPATH '" + testData + "' into table nomeasureTest_sd options " +
      "('FILEHEADER'='city')"
    )
    checkAnswer(
      sql("select city from nomeasureTest_sd"),
      Seq(Row("CA"), Row("LA"), Row("AD"))
    )
  }

  // Complex struct column: load success only, no result verification.
  test("test data loading with single complex struct type column") {
    //only data load check
    sql("DROP TABLE IF EXISTS nomeasureTest_scd")
    sql(
      "CREATE TABLE nomeasureTest_scd (cityDetail struct<cityName:string,cityCode:string>) STORED" +
      " " +
      "BY 'org.apache.carbondata.format'"
    )
    val testData = s"$resourcesPath/datasingleComplexCol.csv"
    sql("LOAD DATA LOCAL INPATH '" + testData + "' into table nomeasureTest_scd options " +
      "('DELIMITER'=',','QUOTECHAR'='\\"','FILEHEADER'='cityDetail','COMPLEX_DELIMITER_LEVEL_1'=':')"
    )
  }

  // Complex array column: load success only.
  test("test data loading with single complex array type column") {
    //only data load check
    sql("DROP TABLE IF EXISTS nomeasureTest_scd")
    sql(
      "CREATE TABLE nomeasureTest_scd (cityDetail array<string>) STORED" +
      " " +
      "BY 'org.apache.carbondata.format'"
    )
    val testData = s"$resourcesPath/datasingleComplexCol.csv"
    sql("LOAD DATA LOCAL INPATH '" + testData + "' into table nomeasureTest_scd options " +
      "('DELIMITER'=',','QUOTECHAR'='\\"','FILEHEADER'='cityDetail'," +
      "'COMPLEX_DELIMITER_LEVEL_1'=':')"
    )
  }

  // Drops every table this suite created.
  override def afterAll {
    sql("drop table if exists nomeasureTest")
    sql("drop table if exists nomeasureTest_sd")
    sql("drop table if exists nomeasureTest_scd")
  }
}
| HuaweiBigData/carbondata | integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithNoMeasure.scala | Scala | apache-2.0 | 4,063 |
package org.example1_1.usage
import org.example1_1.declaration._
import org.example1_1.declaration.data.{X => X_Renamed, Y => Y_Renamed, Z => Z_Renamed}
// NOTE(review): per its path this file is IntelliJ move-refactoring test
// data (the "after" fixture). The imports, renamed aliases and `???`
// stubs are the behaviour under test -- do not "clean them up".
trait Usage2 {
  val x1: X_Renamed = ???
  val y1: Y_Renamed = ???
  val y2: Y_Renamed = ???
  val z1: Z_Renamed = ???
  val z2: Z_Renamed = ???
  val xx: X4 = ???
  def myScope1(): Unit = {
    import org.example1_1.declaration.data.X
    val xxx: X = ???
    import org.example1_1.declaration.data.Y
    val yyy: Y = ???
    import org.example1_1.declaration.data.Z
    val zzz: Z = ???
  }
  def myScope2(): Unit = {
    import org.example1_1.declaration.data.{Z => Z_Renamed_New}
    val zzz: Z_Renamed_New = ???
    import org.example1_1.declaration.data.{Y => Y_Renamed_New}
    val yyy: Y_Renamed_New = ???
    import org.example1_1.declaration.data.{X => X_Renamed_New}
    val xxx: X_Renamed_New = ???
  }
  def myScope3(): Unit = {
    import org.example1_1.declaration.data.{Z => Z_Renamed_New}
    val zzz: Z_Renamed_New = ???
    import org.example1_1.declaration.data.{Y => Y_Renamed_New}
    val yyy: Y_Renamed_New = ???
    import org.example1_1.declaration.data.{X => X_Renamed_New}
    val xxx: X_Renamed_New = ???
  }
  def myScope4(): Unit = {
    import org.example1_1.declaration.X4
    import org.example1_1.declaration.data.{Z => Z_Renamed_New}
    val zzz: Z_Renamed_New = ???
    import org.example1_1.declaration.X5
    import org.example1_1.declaration.data.{Y => Y_Renamed_New}
    val yyy: Y_Renamed_New = ???
    import org.example1_1.declaration.X6
    import org.example1_1.declaration.data.{X => X_Renamed_New}
    val xxx: X_Renamed_New = ???
  }
} | JetBrains/intellij-scala | scala/scala-impl/testdata/move/allInOne_1_MoveXYZ/after/org/example1_1/usage/Usage2.scala | Scala | apache-2.0 | 1,670 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.metastore
import slamdata.Predef._
import quasar.fs.mount._, MountConfig._, MountType._
import quasar.sql
import scalaz._, Scalaz._
final case class Mount(`type`: MountType, uri: ConnectionUri)
object Mount {
  // Flattens a MountConfig into its storable (type, uri) form; views and
  // modules are serialised into the URI itself.
  val fromMountConfig: MountConfig => Mount = {
    case ViewConfig(query, vars) =>
      Mount(ViewMount, viewCfgAsUri(query, vars))

    case ModuleConfig(statements) =>
      Mount(ModuleMount, stmtsAsSqlUri(statements))

    case FileSystemConfig(typ, uri) =>
      Mount(FileSystemMount(typ), uri)
  }

  // Re-parses a stored Mount back into a MountConfig; any parse failure
  // is surfaced on the left as MountingError.invalidMount.
  val toMountConfig: Mount => MountingError \\/ MountConfig = {
    case Mount(ViewMount, uri) =>
      viewCfgFromUri(uri).fold(
        e => MountingError.invalidMount(
          ViewMount,
          s"error while obtaining a view config for ${uri.value}, $e").left,
        viewConfig(_).right)

    case Mount(ModuleMount, uri) =>
      sql.fixParser.parseModule(uri.value).fold(
        e => MountingError.invalidMount(
          ModuleMount,
          s"error while obtaining a mount config for ${uri.value}, ${e.message}").left,
        moduleConfig(_).right)

    case Mount(FileSystemMount(fsType), uri) =>
      fileSystemConfig(fsType, uri).right
  }
}
| jedesah/Quasar | core/src/main/scala/quasar/metastore/Mount.scala | Scala | apache-2.0 | 1,800 |
package eu.ace_design.island.viewer.svg
import java.awt.{Color, BasicStroke, Graphics2D}
import java.awt.geom.Line2D
import eu.ace_design.island.map.resources.Biome
import eu.ace_design.island.map.{RiverFlow, HasForBiome, IslandMap}
import eu.ace_design.island.stdlib.{Colors, Biomes}
import Colors._
/**
* the BiomeViewer display a map by painting faces according to their biomes, and also rivers (in addition to
* oceans).
*/
/**
 * the BiomeViewer display a map by painting faces according to their biomes, and also rivers (in addition to
 * oceans).
 */
trait BiomeViewer extends SVGViewer {

  // Paint all faces, then river edges on top, then (if available) the
  // map seed as a caption in the bottom-left corner.
  protected def draw(m: IslandMap, g: Graphics2D) {
    m.faceRefs foreach { drawABiome(_, m, g) }
    m.edgeRefs foreach { drawAnEdge(_, m, g) }
    if (m.uuid.isDefined) {
      g.setColor(BLACK)
      g.drawString(s"seed: 0x${m.uuid.get.toHexString.toUpperCase}L", 5, m.size - 5)
    }
  }

  // Fill one face polygon with its biome colour; faces lacking a
  // HasForBiome property fall back to white.
  protected def drawABiome(idx: Int, map: IslandMap, g: Graphics2D) {
    val path = buildPath(idx, map)
    try {
      val biome = map.faceProps.getValue(idx, HasForBiome())
      g.setColor(biome.color)
    } catch { case e: IllegalArgumentException => g.setColor(WHITE) }
    g.setStroke(new BasicStroke(1f))
    g.draw(path)
    g.fill(path)
  }

  // Draw one edge as a blue line whose width scales with the river flow;
  // edges without a RiverFlow property are silently skipped.
  private def drawAnEdge(idx: Int, map: IslandMap, g: Graphics2D) {
    try {
      val flow = map.edgeProps.getValue(idx, RiverFlow()) // throw an exception if no river flows through this edge
      trace(s"edge #$idx with flow $flow")
      val edge = map.edge(idx)
      val p1 = map.vertex(edge.p1)
      val p2 = map.vertex(edge.p2)
      g.setStroke(new BasicStroke(2f * flow))
      g.setColor(MEDIUM_BLUE)
      //g.setColor(BLACK)
      g.draw(new Line2D.Double(p1.x, p1.y, p2.x, p2.y))
    } catch { case e: IllegalArgumentException => } // do nothing if not a river
  }
}
object BiomeViewer extends BiomeViewer
// BiomeViewer variant that registers the given FogOfWar as an SVG
// enhancer (applied on top of the rendered map by the SVGViewer base).
case class FogOfWarViewer(fog: FogOfWar) extends BiomeViewer {
  override val enhancers = Seq(fog)
} | ace-design/island | engine/src/main/scala/eu/ace_design/island/viewer/svg/BiomeViewer.scala | Scala | lgpl-3.0 | 1,843 |
package com.textteaser.summarizer
import org.scalatest.{BeforeAndAfter, FunSuite}
import net.codingwell.scalaguice.InjectorExtensions.ScalaInjector
import com.google.inject.Guice
/**
 * Exercises the Parser's sentence-length, word-splitting, title-score and
 * keyword-extraction behaviour against two fixture texts.
 *
 * Fix: `before` previously appended the fixture text to the two shared
 * StringBuilders without resetting them, so every test after the first
 * ran against an ever-growing, duplicated fixture. The builders are now
 * cleared at the start of `before`, making each test independent. All
 * assertions are per-word properties, so their outcomes are unchanged.
 */
class ParserSuite extends FunSuite with BeforeAndAfter {
  val guice = new ScalaInjector(Guice.createInjector(new GuiceModule(new Config)))
  val parser = guice.instance[Parser]
  // Fixtures for sentenceLength (ideal length appears to be 20 words).
  val sentenceWithFiveWords: Array[String] = Array("1", "2", "3", "4", "5")
  val emptySentence: Array[String] = Array()
  val sentenceWithTwentyWords: Array[String] = (1 to 20).map(_.toString).toArray
  // Fixture texts are (re)built before each test; see `before`.
  val textBuilder = StringBuilder.newBuilder
  val longTextBuilder = StringBuilder.newBuilder
  val stopWordsSentence = Array("hereafter", "hereby", "herein")
  val noStopWordsSentence = Array("Accommodation", "globalization", "emancipation")
  val title = Array("Accommodation", "globalization", "emancipation")
  // Word frequencies: oneone x1, twotwo x2, threethree x3 (6 words total).
  val textForKeywords = "oneone twotwo twotwo threethree threethree threethree"
  before {
    // Reset before appending so repeated runs don't duplicate the text.
    longTextBuilder.clear()
    textBuilder.clear()
    longTextBuilder ++= "1914 translation by H. Rackham\\n\\n"
    longTextBuilder ++= "On the other hand, we denounce with righteous indignation and dislike men "
    longTextBuilder ++= "who are so beguiled and demoralized by the charms "
    longTextBuilder ++= "of pleasure of the moment, so blinded by desire, that they cannot foresee the pain and trouble "
    longTextBuilder ++= "that are bound to ensue; and equal blame belongs to those who fail in their duty "
    longTextBuilder ++= "through weakness of will, which is the same as saying through shrinking from toil and pain. "
    longTextBuilder ++= "These cases are perfectly simple and easy to distinguish. In a free hour, "
    longTextBuilder ++= "when our power of choice is untrammelled and when nothing prevents our being able to do "
    longTextBuilder ++= "what we like best, every pleasure is to be welcomed and every pain avoided. "
    longTextBuilder ++= "But in certain circumstances and owing to the claims of duty or the obligations of business "
    longTextBuilder ++= "it will frequently occur that pleasures have to be repudiated and annoyances accepted. "
    longTextBuilder ++= "The wise man therefore always holds in these matters to this principle of selection: "
    longTextBuilder ++= "he rejects pleasures to secure other greater pleasures, or else he endures pains to avoid worse pains."
    textBuilder ++= "Now that conventional thinking has been turned on its head in a paper by "
    textBuilder ++= "Prof Christof Wetterich at the University of Heidelberg in Germany. "
    textBuilder ++= "He points out that the tell-tale light emitted by atoms is also governed by the masses "
    textBuilder ++= "of their constituent particles, notably their electrons. The way these absorb and emit "
    textBuilder ++= "light would shift towards the blue part of the spectrum if atoms were to grow in mass, "
    textBuilder ++= "and to the red if they lost it. Because the frequency or ÒpitchÓ of light increases with mass, "
    textBuilder ++= "Prof Wetterich argues that masses could have been lower long ago. "
    textBuilder ++= "If they had been constantly increasing, the colours of old galaxies would look red-shifted Ð"
    textBuilder ++= "and the degree of red shift would depend on how far away they were from Earth. "
    textBuilder ++= "ÒNone of my colleagues has so far found any fault [with this],Ó he says. "
    textBuilder ++= "Although his research has yet to be published in a peer-reviewed publication, Nature reports "
    textBuilder ++= "that the idea that the universe is not expanding at all Ð or even contracting Ð is being taken "
    textBuilder ++= "seriously by some experts, such as Dr HongSheng Zhao, a cosmologist at the University of "
    textBuilder ++= "St Andrews who has worked on an alternative theory of gravity. ÒI see no fault in [Prof WetterichÕs] "
    textBuilder ++= "mathematical treatment,Ó he says. ÒThere were rudimentary versions of this idea two decades ago, and "
    textBuilder ++= "I think it is fascinating to explore this alternative representation of the cosmic expansion, where the evolution"
    textBuilder ++= "of the universe is like a piano keyboard played out from low to high pitch.Ó Prof Wetterich takes the detached,"
    textBuilder ++= " even playful, view that his work marks a change in perspective, with two different views of reality: "
    textBuilder ++= "either the distances between galaxies grow, as in the traditional balloon picture, or the size of atoms "
    textBuilder ++= "shrinks, increasing their mass. Or itÕs a complex blend of the two. One benefit of this idea"
    textBuilder ++= "is that he is able to rid physics of the singularity at the start of time, a nasty infinity where "
    textBuilder ++= "the laws of physics break down. Instead, the Big Bang is smeared over the distant past : "
    textBuilder ++= "the first note of the ''cosmic pianoÕÕ was long and low-pitched. Harry Cliff, a physicist working at CERN"
    textBuilder ++= "who is the Science MuseumÕs fellow of modern science, thinks it striking that a universe where particles are "
    textBuilder ++= "getting heavier could look identical to one where space/time is expanding. ÒFinding two different "
    textBuilder ++= "ways of thinking about the same problem often leads to new insights,Ó he says. ÒString theory, "
    textBuilder ++= " for instance, is full of 'dualitiesÕ like this, which allow theorists to pick whichever view "
    textBuilder ++= "makes their calculations simpler.Ó If this idea turns out to be right Ð and that is a very big "
    textBuilder ++= "if Ð it could pave the way for new ways to think about our universe. If we are lucky, they might "
    textBuilder ++= "even be as revolutionary as Edwin HubbleÕs, almost a century ago. Roger Highfield is director "
    textBuilder ++= "of external affairs at the Science Museum"
  }
  test("Sentence length on empty sentence returns 0") {
    assert(parser.sentenceLength(emptySentence) === 0.0)
  }
  test("Sentence length on non-empty sentence returns it's length according to formula") {
    assert(parser.sentenceLength(sentenceWithFiveWords) === 0.25)
  }
  test("When `ideal` is equal to `sentence` array length, sentence length should be 1") {
    assert(parser.sentenceLength(sentenceWithTwentyWords) === 1.0)
  }
  test("Splitting string into words should return no empty strings") {
    assert(!parser.splitWords(longTextBuilder.toString()).contains(""))
    assert(!parser.splitWords(textBuilder.toString()).contains(""))
  }
  test("Splitting string into words should not produce whitespaces in output") {
    assert(parser.splitWords(longTextBuilder.toString()).forall(s => """\\s+""".r.findFirstIn(s) == None))
    assert(parser.splitWords(textBuilder.toString()).forall(s => """\\s+""".r.findFirstIn(s) == None))
  }
  test("Splitting string into words should not produce newlines in output") {
    assert(parser.splitWords(longTextBuilder.toString()).forall(s => """\\r?\\n+""".r.findFirstIn(s) == None))
    assert(parser.splitWords(textBuilder.toString()).forall(s => """\\r?\\n+""".r.findFirstIn(s) == None))
  }
  test("Splitting string into words should let digits and letters pass") {
    assert(parser.splitWords(longTextBuilder.toString()).forall(s => s matches """\\w+"""))
    assert(parser.splitWords(textBuilder.toString()).forall(s => s matches """\\w+"""))
  }
  test("Title score of sentence consisting solely of stop words should be 0") {
    assert(parser.titleScore(title, stopWordsSentence) === 0.0)
  }
  test("Title score of sentence that hasn't stop words should be 1") {
    assert(parser.titleScore(title, noStopWordsSentence) === 1.0)
  }
  test("Keywords are sorted in descending order") {
    assert(parser.getKeywords(textForKeywords) ===
      KeywordList(List(ArticleKeyword("threethree", 3), ArticleKeyword("twotwo", 2), ArticleKeyword("oneone", 1)), 6))
  }
  test("Keywords are unique") {
    assert(parser.getKeywords(textForKeywords).keywords.toSet ===
      Set(ArticleKeyword("threethree", 3), ArticleKeyword("twotwo", 2), ArticleKeyword("oneone", 1)))
  }
  test("Any keyword isn't present in stopWords list") {
    assert(parser.getKeywords(textForKeywords).keywords.forall(aw => !parser.stopWords.contains(aw.word)))
  }
}
| ahmadassaf/Text-Teaser | src/test/scala/com/textteaser/summarizer/ParserSuite.scala | Scala | mit | 8,363 |
/*
* Copyright 2010-2011 Vilius Normantas <code@norma.lt>
*
* This file is part of Crossbow library.
*
* Crossbow is free software: you can redistribute it and/or modify it under the terms of the GNU
* General Public License as published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* Crossbow is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with Crossbow. If not,
* see <http://www.gnu.org/licenses/>.
*/
package lt.norma.crossbow.indicators
import lt.norma.crossbow.core.Direction._
import org.scalatest.FunSuite
/**
 * Checks Signal's direction predicates (isLong / isShort / isFlat) and
 * the textual fallback for an unset value. Each test builds a minimal
 * anonymous Signal whose optionalValue is Long, Short or None.
 * NOTE(review): `Long`/`Short` here are Direction members (imported via
 * Direction._), shadowing the scala types of the same name.
 */
class SignalTest extends FunSuite {
  // isLong is true only for an explicit Long direction.
  test("isLong - long") {
    val signal = new Signal {
      def name = ""
      def dependencies = Set[Indicator[_]]()
      def optionalValue = Some(Long)
    }
    assert(signal.isLong)
  }
  test("isLong - short") {
    val signal = new Signal {
      def name = ""
      def dependencies = Set[Indicator[_]]()
      def optionalValue = Some(Short)
    }
    assert(!signal.isLong)
  }
  test("isLong - undefined") {
    val signal = new Signal {
      def name = ""
      def dependencies = Set[Indicator[_]]()
      def optionalValue = None
    }
    assert(!signal.isLong)
  }
  // isShort is true only for an explicit Short direction.
  test("isShort - long") {
    val signal = new Signal {
      def name = ""
      def dependencies = Set[Indicator[_]]()
      def optionalValue = Some(Long)
    }
    assert(!signal.isShort)
  }
  test("isShort - short") {
    val signal = new Signal {
      def name = ""
      def dependencies = Set[Indicator[_]]()
      def optionalValue = Some(Short)
    }
    assert(signal.isShort)
  }
  test("isShort- undefined") {
    val signal = new Signal {
      def name = ""
      def dependencies = Set[Indicator[_]]()
      def optionalValue = None
    }
    assert(!signal.isShort)
  }
  // isFlat is true only when no direction is set.
  test("isFlat - long") {
    val signal = new Signal {
      def name = ""
      def dependencies = Set[Indicator[_]]()
      def optionalValue = Some(Long)
    }
    assert(!signal.isFlat)
  }
  test("isFlat - short") {
    val signal = new Signal {
      def name = ""
      def dependencies = Set[Indicator[_]]()
      def optionalValue = Some(Short)
    }
    assert(!signal.isFlat)
  }
  test("isFlat- undefined") {
    val signal = new Signal {
      def name = ""
      def dependencies = Set[Indicator[_]]()
      def optionalValue = None
    }
    assert(signal.isFlat)
  }
  // An unset signal renders as "Flat".
  // NOTE(review): `expect` was deprecated in later ScalaTest releases in
  // favour of assertResult -- confirm against the pinned version.
  test("valueNotSetString") {
    val signal = new Signal {
      def name = ""
      def dependencies = Set[Indicator[_]]()
      def optionalValue = None
    }
    expect("Flat") {
      signal.valueNotSetString
    }
  }
}
| ViliusN/Crossbow | crossbow-core/test/lt/norma/crossbow/indicators/SignalTest.scala | Scala | gpl-3.0 | 2,901 |
package se.lu.nateko.cp.meta.core.data
import spray.json._
import scala.util.Try
import scala.util.Failure
import scala.util.Success
import scala.util.control.NoStackTrace
/**
 * Conversion between the internal GeoFeature model and GeoJSON
 * (spray-json AST). Plain geometries are emitted as GeoJSON Geometry
 * objects; labels and Circles -- which GeoJSON cannot express as bare
 * geometries -- force wrapping into Feature / FeatureCollection objects.
 */
object GeoJson {

  // Raised on any structural problem in the incoming GeoJSON.
  class FormatException(msg: String) extends IllegalArgumentException(msg) with NoStackTrace

  // Serialise, keeping labels (wrapping into Features where needed).
  def fromFeatureWithLabels(f: GeoFeature): JsObject = toGeometryOrFeatureWithLabels(f).fold(identity, identity)

  // Serialise, dropping labels except where a Feature wrapper is forced.
  def fromFeature(f: GeoFeature): JsObject = toGeometryOrFeature(f).fold(identity, identity)

  //Right means Geometry or GeometryCollection was enough (i.e. no labels or Circles)
  //Left means Feature or FeatureCollection was necessary (Circle or label present)
  private def toGeometryOrFeatureWithLabels(f: GeoFeature): Either[JsObject, JsObject] =
    toGeometryOrFeature(f).fold(Left(_),
      geo => f.label match{
        case Some(_) => Left(wrapGeoInFeature(geo, f.label))
        case None => Right(geo)
      }
    )

  //Right means Geometry or GeometryCollection was enough (i.e. no Circles and no labels inside colls)
  //Left means Feature or FeatureCollection was necessary (Circle, or coll with Circle, or coll with label inside)
  private def toGeometryOrFeature(f: GeoFeature): Either[JsObject, JsObject] = f match{

    case GeoTrack(points, _) => Right(JsObject(
      "type" -> JsString("LineString"),
      "coordinates" -> JsArray(points.map(coordinates).toVector)
    ))

    case p: Position => Right(JsObject(
      "type" -> JsString("Point"),
      "coordinates" -> coordinates(p)
    ))

    // Boxes are serialised via their polygon representation.
    case box: LatLonBox => toGeometryOrFeature(box.asPolygon)

    // The ring is closed by re-appending the first vertex, per GeoJSON.
    case Polygon(vertices, _) => Right(JsObject(
      "type" -> JsString("Polygon"),
      "coordinates" -> JsArray(
        JsArray((vertices ++ vertices.headOption).map(coordinates).toVector)
      )
    ))

    // No Circle geometry exists in GeoJSON: emit a Feature whose point
    // geometry carries a "radius" property (rounded to 2 decimals).
    case Circle(center, radius, labelOpt) => Left(JsObject(
      "type" -> JsString("Feature"),
      "geometry" -> fromFeature(center),
      "properties" -> JsObject(
        Map("radius" -> JsNumber(Math.round(radius*100).toFloat / 100)) ++ labelOpt.map(
          lbl => "label" -> JsString(lbl)
        )
      )
    ))

    case FeatureCollection(features, _) =>
      val geomsOrFeats = features.map(toGeometryOrFeatureWithLabels).toVector
      val geomsOnly = geomsOrFeats.flatMap(_.toOption)
      // If every member stayed a bare geometry, a GeometryCollection
      // suffices; otherwise promote all members to Features.
      if(geomsOnly.size == geomsOrFeats.size)
        Right(JsObject(
          "type" -> JsString("GeometryCollection"),
          "geometries" -> JsArray(geomsOnly)
        ))
      else{
        val featuresJs: Vector[JsObject] = geomsOrFeats.zip(features).map{
          case (geoOrFeat, feat) => geoOrFeat.fold(
            identity,
            geom => wrapGeoInFeature(geom, feat.label)
          )
        }
        Left(JsObject(
          "type" -> JsString("FeatureCollection"),
          "features" -> JsArray(featuresJs)
        ))
      }
  }

  // Wraps a geometry in a Feature; a missing label becomes JsNull props.
  private def wrapGeoInFeature(geo: JsObject, labelOpt: Option[String]) = JsObject(
    "type" -> JsString("Feature"),
    "geometry" -> geo,
    "properties" -> labelOpt.fold[JsValue](JsNull)(lbl => JsObject("label" -> JsString(lbl)))
  )

  // Parse from a raw GeoJSON string.
  def toFeature(geoJs: String): Try[GeoFeature] =
    Try(geoJs.parseJson.asJsObject).flatMap(toFeature)

  // Parse from a spray-json object; inverse of fromFeature(WithLabels).
  def toFeature(json: JsObject): Try[GeoFeature] = {

    def field(name: String): Try[JsValue] = json.fields.get(name).fold[Try[JsValue]](
      fail(s"'$name' not found in ${json.compactPrint}")
    )(Success.apply)

    def coords = field("coordinates")

    def featuresColl(fieldName: String): Try[FeatureCollection] = field(fieldName).collect{
      case JsArray(elements) => FeatureCollection(
        elements.map{
          case o: JsObject => toFeature(o).get
          case other =>
            throw new FormatException(s"Expected JsObject, got ${other.compactPrint}")
        },
        None
      )
      case other =>
        throw new FormatException(s"Expected '$fieldName' to be a JsArray, got ${other.compactPrint}")
    }

    field("type").collect{ case JsString(geoType) => geoType }.flatMap{
      case "Point" => coords.flatMap(parsePosition)
      case "LineString" => coords.flatMap(parsePointsArray).map(GeoTrack(_, None))
      // NOTE(review): only a single linear ring is accepted (no holes),
      // and the last point is assumed to repeat the first (dropRight(1)).
      // RFC 7946 requires >= 4 positions per ring; the `< 2` check below
      // is more lenient -- confirm that is intended.
      case "Polygon" => coords.map{
        case JsArray(Vector(pntArr)) => {
          val points = parsePointsArray(pntArr).get
          if(points.size < 2) throw new FormatException(s"Expected polygon, got ${points.size} points: ${pntArr.compactPrint}")
          else Polygon(points.dropRight(1), None)
        }
        case other =>
          throw new FormatException(s"Expected polygon coordinates to be a single-element JsArray, got ${other.compactPrint}")
      }
      case "GeometryCollection" => featuresColl("geometries")
      case "FeatureCollection" => featuresColl("features")
      // A Feature is unwrapped; a numeric "radius" property turns a Point
      // back into a Circle, and a "label" property is re-attached.
      case "Feature" => for(
        geoJs <- field("geometry");
        geo <- toFeature(geoJs.asJsObject);
        props <- field("properties")
      ) yield {
        val lblOpt = props match{
          case o: JsObject => o.fields.get("label").collect{case JsString(lbl) => lbl}
          case _ => None
        }
        (geo, props) match{
          case (p: Position, prop: JsObject) if prop.fields.contains("radius") =>
            val radius = prop.fields.get("radius").collect{case JsNumber(value) => value.floatValue}.getOrElse{
              throw new FormatException("Expected numeric 'radius' propert in " + json.prettyPrint)
            }
            Circle(p, radius, lblOpt)
          case _ =>
            geo.withOptLabel(lblOpt)
        }
      }
      case other => fail(s"Unsupported GeoJSON object type: $other")
    }
  }

  private def parsePointsArray(geoJson: JsValue): Try[Vector[Position]] = geoJson match{
    case JsArray(elements) => Try{
      elements.map(p => parsePosition(p).get)
    }
    case _ =>
      fail(s"Expected JSON array, got ${geoJson.compactPrint}")
  }

  // GeoJSON positions are [lon, lat] or [lon, lat, elevation].
  private def parsePosition(geoJson: JsValue): Try[Position] = geoJson match {
    case JsArray(Vector(JsNumber(lon), JsNumber(lat))) =>
      Success(Position(lat.doubleValue, lon.doubleValue, None, None))
    case JsArray(Vector(JsNumber(lon), JsNumber(lat), JsNumber(elev))) =>
      Success(Position(lat.doubleValue, lon.doubleValue, Some(elev.floatValue), None))
    case _ =>
      fail(s"Not a valid JSON for GeoJSON for a position: ${geoJson.compactPrint}")
  }

  // Emit [lon, lat(, alt)] using the 6-decimal rounded accessors.
  private def coordinates(p: Position) = {
    val latLon = Vector(JsNumber(p.lon6.toDouble), JsNumber(p.lat6.toDouble))
    val coords = p.alt.fold(latLon){alt => latLon :+ JsNumber(alt)}
    JsArray(coords)
  }

  private def fail(msg: String) = Failure(new FormatException(msg))
}
| ICOS-Carbon-Portal/meta | core/src/main/scala/se/lu/nateko/cp/meta/core/data/GeoJson.scala | Scala | gpl-3.0 | 6,227 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.visor.commands.cache
import java.util.{Collection => JavaCollection, List => JavaList, Collections, UUID}
import org.apache.ignite._
import org.apache.ignite.cluster.ClusterNode
import org.apache.ignite.internal.util.lang.{GridFunc => F}
import org.apache.ignite.internal.util.typedef.X
import org.apache.ignite.internal.visor.cache._
import org.apache.ignite.internal.visor.util.VisorTaskUtils._
import org.apache.ignite.visor.VisorTag
import org.apache.ignite.visor.commands.cache.VisorCacheCommand._
import org.apache.ignite.visor.commands.common.VisorTextTable
import org.apache.ignite.visor.visor._
import org.jetbrains.annotations._
import scala.collection.JavaConversions._
import scala.language.{implicitConversions, reflectiveCalls}
/**
* ==Overview==
* Visor 'cache' command implementation.
*
* ==Help==
* {{{
* +-----------------------------------------------------------------------------------------+
* | cache | Prints statistics about caches from specified node on the entire grid. |
* | | Output sorting can be specified in arguments. |
* | | |
* | | Output abbreviations: |
* | | # Number of nodes. |
* | | H/h Number of cache hits. |
* | | M/m Number of cache misses. |
* | | R/r Number of cache reads. |
* | | W/w Number of cache writes. |
* +-----------------------------------------------------------------------------------------+
* | cache -clear | Clears all entries from cache on all nodes. |
* +-----------------------------------------------------------------------------------------+
* | cache -scan | List all entries in cache with specified name. |
* +-----------------------------------------------------------------------------------------+
* | cache -stop | Stop cache with specified name. |
* +-----------------------------------------------------------------------------------------+
* | cache -reset | Reset metrics for cache with specified name. |
* +-----------------------------------------------------------------------------------------+
*
* }}}
*
* ====Specification====
* {{{
* cache {-system}
* cache -i {-system}
* cache {-c=<cache-name>} {-id=<node-id>|id8=<node-id8>} {-s=hi|mi|rd|wr|cn} {-a} {-r} {-system}
* cache -clear {-c=<cache-name>}
* cache -scan -c=<cache-name> {-id=<node-id>|id8=<node-id8>} {-p=<page size>} {-system}
* cache -stop -c=<cache-name>
* cache -reset -c=<cache-name>
* }}}
*
* ====Arguments====
* {{{
* -id8=<node-id>
* ID8 of the node to get cache statistics from.
* Note that either '-id8' or '-id' should be specified.
* You can also use '@n0' ... '@nn' variables as a shortcut for <node-id8>.
* To specify oldest node on the same host as visor use variable '@nl'.
* To specify oldest node on other hosts that are not running visor use variable '@nr'.
* If neither is specified statistics will be gathered from all nodes.
* -id=<node-id>
* Full ID of the node to get cache statistics from.
* Either '-id8' or '-id' can be specified.
* If neither is specified statistics will be gathered from all nodes.
* -c=<cache-name>
* Name of the cache.
* -s=hi|mi|rd|wr|cn
* Defines sorting type. Sorted by:
* hi Hits.
* mi Misses.
* rd Reads.
* wr Writes.
* cn Cache name.
* If not specified - default sorting is 'cn'.
* -i
* Interactive mode.
* User can interactively select node for cache statistics.
* -r
* Defines if sorting should be reversed.
* Can be specified only with '-s' argument.
* -a
* Prints details statistics about each cache.
* By default only aggregated summary is printed.
* -system
* Enable showing of information about system caches.
* -clear
* Clears cache.
* -scan
* Prints list of all entries from cache.
* -stop
* Stop cache with specified name.
* -reset
* Reset metrics for cache with specified name.
* -p=<page size>
* Number of object to fetch from cache at once.
* Valid range from 1 to 100.
* By default page size is 25.
* }}}
*
* ====Examples====
* {{{
* cache
* Prints summary statistics about all no system caches.
* cache -system
* Prints summary statistics about all caches.
* cache -id8=12345678 -s=hi -r
* Prints summary statistics about caches from node with specified id8
* sorted by number of hits in reverse order.
* cache -i
* Prints cache statistics for interactively selected node.
* cache -s=hi -r -a
* Prints detailed statistics about all caches sorted by number of hits in reverse order.
* cache -clear
* Clears interactively selected cache.
* cache -clear -c=cache
* Clears cache with name 'cache'.
* cache -scan
* Prints list entries from interactively selected cache.
* cache -scan -c=cache
* Prints list entries from cache with name 'cache' from all nodes with this cache.
* cache -scan -c=@c0 -p=50
* Prints list entries from cache with name taken from 'c0' memory variable
* with page of 50 items from all nodes with this cache.
* cache -scan -c=cache -id8=12345678
* Prints list entries from cache with name 'cache' and node '12345678' ID8.
* cache -stop -c=cache
* Stops cache with name 'cache'.
* cache -reset -c=cache
* Reset metrics for cache with name 'cache'.
*
* }}}
*/
class VisorCacheCommand {
    /**
     * Prints error message and advise.
     *
     * @param errMsgs Error messages.
     */
    private def scold(errMsgs: Any*) {
        assert(errMsgs != null)
        // Print each message, then point the user at the built-in help for this command.
        warn(errMsgs: _*)
        warn("Type 'help cache' to see how to use this command.")
    }
    /**
     * ===Command===
     * Prints statistics about caches from nodes that pass mnemonic predicate.
     * Sorting can be specified in arguments.
     *
     * ===Examples===
     * <ex>cache -id8=12345678 -s=hi -r</ex>
     * Prints statistics about caches from node with specified id8 sorted by number of hits in reverse order.
     * <br>
     * <ex>cache -s=hi -r</ex>
     * Prints statistics about all caches sorted by number of hits in reverse order.
     * <br>
     * <ex>cache -clear</ex>
     * Clears interactively selected cache.
     * <br>
     * <ex>cache -clear -c=cache</ex>
     * Clears cache with name 'cache'.
     * <br>
     * <ex>cache -scan</ex>
     * Prints list entries from interactively selected cache.
     * <br>
     * <ex>cache -scan -c=cache</ex>
     * Prints list entries from cache with name 'cache' from all nodes with this cache.
     * <br>
     * <ex>cache -scan -c=@c0 -p=50</ex>
     * Prints list entries from cache with name taken from 'c0' memory variable with page of 50 items
     * from all nodes with this cache.
     * <br>
     * <ex>cache -scan -c=cache -id8=12345678</ex>
     * Prints list entries from cache with name 'cache' and node '12345678' ID8.
     * <br>
     * <ex>cache -stop -c=@c0</ex>
     * Stop cache with name taken from 'c0' memory variable.
     * <br>
     * <ex>cache -reset -c=@c0</ex>
     * Reset metrics for cache with name taken from 'c0' memory variable.
     *
     * @param args Command arguments.
     */
    def cache(args: String) {
        if (!isConnected)
            adviseToConnect()
        else {
            // Mutable: interactive selections below append synthetic "-c=..." arguments.
            var argLst = parseArgs(args)
            // Interactive mode: pick a node first, then recurse with a concrete "-id=..." argument.
            if (hasArgFlag("i", argLst)) {
                askForNode("Select node from:") match {
                    case Some(nid) => ask("Detailed statistics (y/n) [n]: ", "n") match {
                        case "n" | "N" => nl(); cache("-id=" + nid); return;
                        case "y" | "Y" => nl(); cache("-a -id=" + nid); return;
                        case x => nl(); warn("Invalid answer: " + x); return;
                    }
                    case None => return
                }
                return
            }
            // Resolve "-id"/"-id8" into a concrete node; None means "all nodes".
            val node = parseNode(argLst) match {
                case Left(msg) =>
                    scold(msg)
                    return
                case Right(n) => n
            }
            val showSystem = hasArgFlag("system", argLst)
            // Normalize the default-cache placeholder to a null cache name for downstream tasks.
            var cacheName = argValue("c", argLst) match {
                case Some(dfltName) if dfltName == DFLT_CACHE_KEY || dfltName == DFLT_CACHE_NAME =>
                    argLst = argLst.filter(_._1 != "c") ++ Seq("c" -> null)
                    Some(null)
                case cn => cn
            }
            /** Check that argument list has flag from list. */
            def hasArgFlagIn(flags: String *) = {
                flags.exists(hasArgFlag(_, argLst))
            }
            // Get cache stats data from all nodes.
            val aggrData = cacheData(node, cacheName, showSystem)
            // Mutating subcommands: ask for a cache when none was given, then dispatch.
            if (hasArgFlagIn("clear", "scan", "stop", "reset")) {
                if (cacheName.isEmpty)
                    askForCache("Select cache from:", node, showSystem && !hasArgFlagIn("clear", "stop", "reset"), aggrData) match {
                        case Some(name) =>
                            argLst = argLst ++ Seq("c" -> name)
                            cacheName = Some(name)
                        case None => return
                    }
                cacheName.foreach(name => {
                    if (hasArgFlag("scan", argLst))
                        VisorCacheScanCommand().scan(argLst, node)
                    else {
                        // Destructive operations (clear/stop/reset) are refused for system caches.
                        if (aggrData.nonEmpty && !aggrData.exists(cache => F.eq(cache.getName, name) && cache.isSystem)) {
                            if (hasArgFlag("clear", argLst))
                                VisorCacheClearCommand().clear(argLst, node)
                            else if (hasArgFlag("stop", argLst))
                                VisorCacheStopCommand().stop(argLst, node)
                            else if (hasArgFlag("reset", argLst))
                                VisorCacheResetCommand().reset(argLst, node)
                        }
                        else {
                            if (hasArgFlag("clear", argLst))
                                warn("Clearing of system cache is not allowed: " + name)
                            else if (hasArgFlag("stop", argLst))
                                warn("Stopping of system cache is not allowed: " + name)
                            else if (hasArgFlag("reset", argLst))
                                warn("Reset metrics of system cache is not allowed: " + name)
                        }
                    }
                })
                return
            }
            val all = hasArgFlag("a", argLst)
            val sortType = argValue("s", argLst)
            // NOTE(review): "-r" is checked with hasArgName while other flags use hasArgFlag -
            // presumably both accept a bare flag; confirm the distinction is intentional.
            val reversed = hasArgName("r", argLst)
            if (sortType.isDefined && !isValidSortType(sortType.get)) {
                scold("Invalid '-s' argument in: " + args)
                return
            }
            if (aggrData.isEmpty) {
                scold("No caches found.")
                return
            }
            node match {
                case Some(n) =>
                    println("ID8=" + nid8(n) + ", time of the snapshot: " + formatDateTime(System.currentTimeMillis))
                case None =>
                    println("Time of the snapshot: " + formatDateTime(System.currentTimeMillis))
            }
            // Summary table: one row per cache, aggregated across all reporting nodes.
            val sumT = VisorTextTable()
            sumT #= ("Name(@)", "Mode", "Nodes", "Entries (Heap / Off-heap)", "Hits", "Misses", "Reads", "Writes")
            sortAggregatedData(aggrData, sortType.getOrElse("cn"), reversed).foreach(
                ad => {
                    // Add cache host as visor variable.
                    registerCacheName(ad.getName)
                    sumT += (
                        mkCacheName(ad.getName),
                        ad.getMode,
                        ad.getNodes.size(),
                        (
                            "min: " + (ad.getMinimumHeapSize + ad.getMinimumOffHeapSize) +
                                " (" + ad.getMinimumHeapSize + " / " + ad.getMinimumOffHeapSize + ")",
                            "avg: " + formatDouble(ad.getAverageHeapSize + ad.getAverageOffHeapSize) +
                                " (" + formatDouble(ad.getAverageHeapSize) + " / " + formatDouble(ad.getAverageOffHeapSize) + ")",
                            "max: " + (ad.getMaximumHeapSize + ad.getMaximumOffHeapSize) +
                                " (" + ad.getMaximumHeapSize + " / " + ad.getMaximumOffHeapSize + ")"
                        ),
                        (
                            "min: " + ad.getMinimumHits,
                            "avg: " + formatDouble(ad.getAverageHits),
                            "max: " + ad.getMaximumHits
                        ),
                        (
                            "min: " + ad.getMinimumMisses,
                            "avg: " + formatDouble(ad.getAverageMisses),
                            "max: " + ad.getMaximumMisses
                        ),
                        (
                            "min: " + ad.getMinimumReads,
                            "avg: " + formatDouble(ad.getAverageReads),
                            "max: " + ad.getMaximumReads
                        ),
                        (
                            "min: " + ad.getMinimumWrites,
                            "avg: " + formatDouble(ad.getAverageWrites),
                            "max: " + ad.getMaximumWrites
                        )
                    )
                }
            )
            sumT.render()
            // "-a": per-cache detail blocks with per-node tables, query metrics and configuration.
            if (all) {
                val sorted = aggrData.sortWith((k1, k2) => {
                    if (k1.getName == null)
                        true
                    else if (k2.getName == null)
                        false
                    else k1.getName.compareTo(k2.getName) < 0
                })
                // Cache configurations are only collected when a concrete node was specified.
                val gCfg = node.map(config).collect {
                    case cfg if cfg != null => cfg
                }
                sorted.foreach(ad => {
                    val cacheNameVar = mkCacheName(ad.getName)
                    println("\\nCache '" + cacheNameVar + "':")
                    val m = ad.getMetrics
                    val csT = VisorTextTable()
                    csT += ("Name(@)", cacheNameVar)
                    csT += ("Nodes", m.size())
                    csT += ("Total size Min/Avg/Max", (ad.getMinimumHeapSize + ad.getMinimumOffHeapSize) + " / " +
                        formatDouble(ad.getAverageHeapSize + ad.getAverageOffHeapSize) + " / " +
                        (ad.getMaximumHeapSize + ad.getMaximumOffHeapSize))
                    csT += ("  Heap size Min/Avg/Max", ad.getMinimumHeapSize + " / " +
                        formatDouble(ad.getAverageHeapSize) + " / " + ad.getMaximumHeapSize)
                    csT += ("  Off-heap size Min/Avg/Max", ad.getMinimumOffHeapSize + " / " +
                        formatDouble(ad.getAverageOffHeapSize) + " / " + ad.getMaximumOffHeapSize)
                    val ciT = VisorTextTable()
                    ciT #= ("Node ID8(@), IP", "CPUs", "Heap Used", "CPU Load", "Up Time", "Size", "Hi/Mi/Rd/Wr")
                    sortData(m.toMap, sortType.getOrElse("hi"), reversed).foreach { case (nid, cm) =>
                        val nm = ignite.cluster.node(nid).metrics()
                        ciT += (
                            nodeId8Addr(nid),
                            nm.getTotalCpus,
                            formatDouble(100d * nm.getHeapMemoryUsed / nm.getHeapMemoryMaximum) + " %",
                            formatDouble(nm.getCurrentCpuLoad * 100d) + " %",
                            X.timeSpan2HMSM(nm.getUpTime),
                            (
                                "Total: " + (cm.getKeySize + cm.offHeapEntriesCount()),
                                "  Heap: " + cm.getKeySize,
                                "  Off-Heap: " + cm.offHeapEntriesCount(),
                                "  Off-Heap Memory: " + formatMemory(cm.getOffHeapAllocatedSize)
                            ),
                            (
                                "Hi: " + cm.getHits,
                                "Mi: " + cm.getMisses,
                                "Rd: " + cm.getReads,
                                "Wr: " + cm.getWrites
                            )
                        )
                    }
                    csT.render()
                    nl()
                    println("Nodes for: " + cacheNameVar)
                    ciT.render()
                    // Print footnote.
                    println("'Hi' - Number of cache hits.")
                    println("'Mi' - Number of cache misses.")
                    println("'Rd' - number of cache reads.")
                    println("'Wr' - Number of cache writes.")
                    // Print metrics.
                    nl()
                    println("Aggregated queries metrics:")
                    println("  Minimum execution time: " + X.timeSpan2HMSM(ad.getMinimumQueryTime))
                    println("  Maximum execution time: " + X.timeSpan2HMSM(ad.getMaximumQueryTime))
                    println("  Average execution time: " + X.timeSpan2HMSM(ad.getAverageQueryTime.toLong))
                    println("  Total number of executions: " + ad.getQueryExecutions)
                    println("  Total number of failures:   " + ad.getQueryFailures)
                    gCfg.foreach(ccfgs => ccfgs.find(ccfg => F.eq(ccfg.getName, ad.getName))
                        .foreach(ccfg => {
                            nl()
                            printCacheConfiguration("Cache configuration:", ccfg)
                        }))
                })
            }
            else
                println("\\nUse \\"-a\\" flag to see detailed statistics.")
        }
    }
/**
* Makes extended cache host attaching optional visor variable host
* associated with it.
*
* @param s Cache host.
*/
private def mkCacheName(@Nullable s: String): String = {
if (s == null) {
val v = mfindHead(DFLT_CACHE_KEY)
DFLT_CACHE_NAME + (if (v.isDefined) "(@" + v.get._1 + ')' else "")
}
else {
val v = mfindHead(s)
s + (if (v.isDefined) "(@" + v.get._1 + ')' else "")
}
}
    /**
     * Registers cache host as a visor variable if one wasn't already registered.
     *
     * @param s Cache host, or `null` for the default (unnamed) cache, which is
     *          tracked under the synthetic `DFLT_CACHE_KEY`.
     */
    private def registerCacheName(@Nullable s: String) = setVarIfAbsent(if (s != null) s else DFLT_CACHE_KEY, "c")
    /**
     * ===Command===
     * Prints unsorted statistics about all caches.
     *
     * ===Examples===
     * <ex>cache</ex>
     * Prints unsorted statistics about all caches.
     */
    def cache() {
        // No-argument form delegates to the full parser with an empty argument string.
        this.cache("")
    }
    /**
     * Get metrics data for all caches from all node or from specified node.
     *
     * @param node Option of node for cache names extracting. All nodes if `None`.
     * @param name Name of a single cache to collect metrics for, or `None` for all caches.
     * @param systemCaches Allow selection of system caches.
     * @return Caches metrics data.
     */
    private def cacheData(node: Option[ClusterNode], name: Option[String], systemCaches: Boolean = false):
        List[VisorCacheAggregatedMetrics] = {
        assert(node != null)
        try {
            val caches: JavaList[String] = name.fold(Collections.emptyList[String]())(Collections.singletonList)
            val arg = new VisorCacheMetricsCollectorTaskArg(systemCaches, caches)
            node match {
                case Some(n) => executeOne(n.id(), classOf[VisorCacheMetricsCollectorTask], arg).toList
                case None => executeMulti(classOf[VisorCacheMetricsCollectorTask], arg).toList
            }
        }
        catch {
            // NOTE(review): collection failures are silently treated as "no caches" -
            // callers then report "No caches found." Confirm the swallow is intended.
            case e: IgniteException => Nil
        }
    }
    /**
     * Gets configuration of grid from specified node for collecting of node cache's configuration.
     *
     * @param node Specified node.
     * @return Cache configurations for specified node, or `null` when collection fails
     *         (the caller in `cache(args)` filters out `null` results).
     */
    private def config(node: ClusterNode): JavaCollection[VisorCacheConfiguration] = {
        try {
            cacheConfigurations(node.id())
        }
        catch {
            case e: IgniteException =>
                scold(e)
                null
        }
    }
/**
* Tests whether passed in parameter is a valid sorting type.
*
* @param arg Sorting type to test.
*/
private def isValidSortType(arg: String): Boolean = {
assert(arg != null)
Set("hi", "mi", "rd", "wr", "cn").contains(arg.trim)
}
    /**
     * Sort metrics data.
     *
     * @param data Per-node metrics for a single cache, keyed by node ID.
     * @param arg Sorting command argument.
     * @param reverse Whether to reverse sorting or not.
     * @return Sorted data.
     */
    private def sortData(data: Map[UUID, VisorCacheMetrics], arg: String, reverse: Boolean) = {
        assert(data != null)
        assert(arg != null)
        val sorted = arg.trim match {
            case "hi" => data.toSeq.sortBy(_._2.getHits)
            case "mi" => data.toSeq.sortBy(_._2.getMisses)
            case "rd" => data.toSeq.sortBy(_._2.getReads)
            case "wr" => data.toSeq.sortBy(_._2.getWrites)
            // "cn" orders per-node rows by node UUID (the map key): all rows here belong
            // to one cache, so the node id is the only remaining stable sort key.
            case "cn" => data.toSeq.sortBy(_._1)
            case _ =>
                // Unreachable for validated input; guards against future sort-type additions.
                assert(false, "Unknown sorting type: " + arg)
                Nil
        }
        if (reverse) sorted.reverse else sorted
    }
    /**
     * Sort aggregated metrics data.
     *
     * @param data Unsorted list.
     * @param arg Command argument.
     * @param reverse Whether to reverse sorting or not.
     * @return Sorted data.
     */
    private def sortAggregatedData(data: Iterable[VisorCacheAggregatedMetrics], arg: String, reverse: Boolean):
        List[VisorCacheAggregatedMetrics] = {
        val sorted = arg.trim match {
            case "hi" => data.toList.sortBy(_.getAverageHits)
            case "mi" => data.toList.sortBy(_.getAverageMisses)
            case "rd" => data.toList.sortBy(_.getAverageReads)
            case "wr" => data.toList.sortBy(_.getAverageWrites)
            // Cache-name ordering: a null name (default cache) sorts first,
            // comparison is case-insensitive.
            case "cn" => data.toList.sortWith((x, y) =>
                x.getName == null || (y.getName != null && x.getName.toLowerCase < y.getName.toLowerCase))
            case _ =>
                // Unreachable for validated input; guards against future sort-type additions.
                assert(false, "Unknown sorting type: " + arg)
                Nil
        }
        if (reverse) sorted.reverse else sorted
    }
/**
* Asks user to select a cache from the list.
*
* @param title Title displayed before the list of caches.
* @param node Option of node for cache names extracting. All nodes if `None`.
* @param showSystem Allow selection of system caches.
* @return `Option` for ID of selected cache.
*/
def askForCache(title: String, node: Option[ClusterNode], showSystem: Boolean = false,
aggrData: Seq[VisorCacheAggregatedMetrics]): Option[String] = {
assert(title != null)
assert(visor.visor.isConnected)
if (aggrData.isEmpty) {
scold("No caches found.")
return None
}
val sortedAggrData = sortAggregatedData(aggrData, "cn", false)
println("Time of the snapshot: " + formatDateTime(System.currentTimeMillis))
val sumT = VisorTextTable()
sumT #= ("#", "Name(@)", "Mode", "Size (Heap / Off-heap)")
sortedAggrData.indices.foreach(i => {
val ad = sortedAggrData(i)
// Add cache host as visor variable.
registerCacheName(ad.getName)
sumT += (
i,
mkCacheName(ad.getName),
ad.getMode,
(
"min: " + (ad.getMinimumHeapSize + ad.getMinimumOffHeapSize) +
" (" + ad.getMinimumHeapSize + " / " + ad.getMinimumOffHeapSize + ")",
"avg: " + formatDouble(ad.getAverageHeapSize + ad.getAverageOffHeapSize) +
" (" + formatDouble(ad.getAverageHeapSize) + " / " + formatDouble(ad.getAverageOffHeapSize) + ")",
"max: " + (ad.getMaximumHeapSize + ad.getMaximumOffHeapSize) +
" (" + ad.getMaximumHeapSize + " / " + ad.getMaximumOffHeapSize + ")"
))
})
sumT.render()
val a = ask("\\nChoose cache number ('c' to cancel) [c]: ", "0")
if (a.toLowerCase == "c")
None
else {
try
Some(sortedAggrData(a.toInt).getName)
catch {
case e: Throwable =>
warn("Invalid selection: " + a)
None
}
}
}
}
/**
* Companion object that does initialization of the command.
*/
object VisorCacheCommand {
    /** Singleton command instance shared by `apply()` and the implicit visor converter. */
    private val cmd = new VisorCacheCommand
addHelp(
name = "cache",
shortInfo = "Prints cache statistics, clears cache, prints list of all entries from cache.",
longInfo = Seq(
"Prints statistics about caches from specified node on the entire grid.",
"Output sorting can be specified in arguments.",
" ",
"Output abbreviations:",
" # Number of nodes.",
" H/h Number of cache hits.",
" M/m Number of cache misses.",
" R/r Number of cache reads.",
" W/w Number of cache writes.",
" ",
"Clears cache.",
" ",
"Prints list of all entries from cache."
),
spec = Seq(
"cache",
"cache -i",
"cache {-c=<cache-name>} {-id=<node-id>|id8=<node-id8>} {-s=hi|mi|rd|wr} {-a} {-r}",
"cache -clear {-c=<cache-name>} {-id=<node-id>|id8=<node-id8>}",
"cache -scan -c=<cache-name> {-id=<node-id>|id8=<node-id8>} {-p=<page size>}",
"cache -stop -c=<cache-name>",
"cache -reset -c=<cache-name>"
),
args = Seq(
"-id8=<node-id>" -> Seq(
"ID8 of the node to get cache statistics from.",
"Note that either '-id8' or '-id' should be specified.",
"You can also use '@n0' ... '@nn' variables as a shortcut for <node-id8>.",
"To specify oldest node on the same host as visor use variable '@nl'.",
"To specify oldest node on other hosts that are not running visor use variable '@nr'.",
"If neither is specified statistics will be gathered from all nodes."
),
"-id=<node-id>" -> Seq(
"Full ID of the node to get cache statistics from.",
"Either '-id8' or '-id' can be specified.",
"If neither is specified statistics will be gathered from all nodes."
),
"-c=<cache-name>" -> Seq(
"Name of the cache.",
"Note you can also use '@c0' ... '@cn' variables as shortcut to <cache-name>."
),
"-clear" -> Seq(
"Clears cache."
),
"-system" -> Seq(
"Enable showing of information about system caches."
),
"-scan" -> Seq(
"Prints list of all entries from cache."
),
"-stop" -> Seq(
"Stop cache with specified name."
),
"-reset" -> Seq(
"Reset metrics of cache with specified name."
),
"-s=hi|mi|rd|wr|cn" -> Seq(
"Defines sorting type. Sorted by:",
" hi Hits.",
" mi Misses.",
" rd Reads.",
" wr Writes.",
" cn Cache name.",
"If not specified - default sorting is 'cn'."
),
"-i" -> Seq(
"Interactive mode.",
"User can interactively select node for cache statistics."
),
"-r" -> Seq(
"Defines if sorting should be reversed.",
"Can be specified only with '-s' argument."
),
"-a" -> Seq(
"Prints details statistics about each cache.",
"By default only aggregated summary is printed."
),
"-p=<page size>" -> Seq(
"Number of object to fetch from cache at once.",
"Valid range from 1 to 100.",
"By default page size is 25."
)
),
examples = Seq(
"cache" ->
"Prints summary statistics about all non-system caches.",
"cache -system" ->
"Prints summary statistics about all caches including system cache.",
"cache -i" ->
"Prints cache statistics for interactively selected node.",
"cache -id8=12345678 -s=hi -r" -> Seq(
"Prints summary statistics about caches from node with specified id8",
"sorted by number of hits in reverse order."
),
"cache -id8=@n0 -s=hi -r" -> Seq(
"Prints summary statistics about caches from node with id8 taken from 'n0' memory variable.",
"sorted by number of hits in reverse order."
),
"cache -c=@c0 -a" -> Seq(
"Prints detailed statistics about cache with name taken from 'c0' memory variable."
),
"cache -s=hi -r -a" ->
"Prints detailed statistics about all caches sorted by number of hits in reverse order.",
"cache -clear" -> "Clears interactively selected cache.",
"cache -clear -c=cache" -> "Clears cache with name 'cache'.",
"cache -clear -c=@c0" -> "Clears cache with name taken from 'c0' memory variable.",
"cache -scan" -> "Prints list entries from interactively selected cache.",
"cache -scan -c=cache" -> "List entries from cache with name 'cache' from all nodes with this cache.",
"cache -scan -c=@c0 -p=50" -> ("Prints list entries from cache with name taken from 'c0' memory variable" +
" with page of 50 items from all nodes with this cache."),
"cache -scan -c=cache -id8=12345678" -> "Prints list entries from cache with name 'cache' and node '12345678' ID8.",
"cache -stop -c=@c0" -> "Stop cache with name taken from 'c0' memory variable.",
"cache -reset -c=@c0" -> "Reset metrics for cache with name taken from 'c0' memory variable."
),
emptyArgs = cmd.cache,
withArgs = cmd.cache
)
    /** Default cache name to show on screen (escaped placeholder for the unnamed cache). */
    private final val DFLT_CACHE_NAME = escapeName(null)
    /** Default cache key: randomized so a real cache name can never collide with the placeholder. */
    protected val DFLT_CACHE_KEY = DFLT_CACHE_NAME + "-" + UUID.randomUUID().toString
    /**
     * Singleton.
     *
     * @return The shared `VisorCacheCommand` instance.
     */
    def apply() = cmd
    /**
     * Implicit converter from visor to commands "pimp".
     *
     * @param vs Visor tagging trait.
     * @return Shared command instance, so cache commands resolve on the visor shell.
     */
    implicit def fromCinfo2Visor(vs: VisorTag): VisorCacheCommand = cmd
    /**
     * Show table of cache configuration information.
     *
     * @param title Specified title for table.
     * @param cfg Config to show information.
     */
    private[commands] def printCacheConfiguration(title: String, cfg: VisorCacheConfiguration) {
        val affinityCfg = cfg.getAffinityConfiguration
        val nearCfg = cfg.getNearConfiguration
        val rebalanceCfg = cfg.getRebalanceConfiguration
        val evictCfg = cfg.getEvictionConfiguration
        val storeCfg = cfg.getStoreConfiguration
        val queryCfg = cfg.getQueryConfiguration
        // One two-column row per configuration property.
        val cacheT = VisorTextTable()
        cacheT #= ("Name", "Value")
        cacheT += ("Mode", cfg.getMode)
        cacheT += ("Atomicity Mode", safe(cfg.getAtomicityMode))
        cacheT += ("Statistic Enabled", bool2Str(cfg.isStatisticsEnabled))
        cacheT += ("Management Enabled", bool2Str(cfg.isManagementEnabled))
        cacheT += ("Time To Live Eager Flag", cfg.isEagerTtl)
        cacheT += ("Write Synchronization Mode", safe(cfg.getWriteSynchronizationMode))
        cacheT += ("Invalidate", bool2Str(cfg.isInvalidate))
        cacheT += ("Start Size", cfg.getStartSize)
        cacheT += ("Affinity Function", safe(affinityCfg.getFunction))
        cacheT += ("Affinity Backups", affinityCfg.getPartitionedBackups)
        cacheT += ("Affinity Partitions", safe(affinityCfg.getPartitions))
        cacheT += ("Affinity Exclude Neighbors", safe(affinityCfg.isExcludeNeighbors))
        cacheT += ("Affinity Mapper", safe(affinityCfg.getMapper))
        cacheT += ("Rebalance Mode", rebalanceCfg.getMode)
        cacheT += ("Rebalance Batch Size", rebalanceCfg.getBatchSize)
        cacheT += ("Rebalance Timeout", rebalanceCfg.getTimeout)
        cacheT += ("Rebalance Delay", rebalanceCfg.getPartitionedDelay)
        cacheT += ("Time Between Rebalance Messages", rebalanceCfg.getThrottle)
        cacheT += ("Eviction Policy Enabled", bool2Str(evictCfg.getPolicy != null))
        cacheT += ("Eviction Policy", safe(evictCfg.getPolicy))
        cacheT += ("Eviction Policy Max Size", safe(evictCfg.getPolicyMaxSize))
        cacheT += ("Eviction Filter", safe(evictCfg.getFilter))
        cacheT += ("Near Cache Enabled", bool2Str(nearCfg.isNearEnabled))
        cacheT += ("Near Start Size", nearCfg.getNearStartSize)
        cacheT += ("Near Eviction Policy", safe(nearCfg.getNearEvictPolicy))
        cacheT += ("Near Eviction Policy Max Size", safe(nearCfg.getNearEvictMaxSize))
        cacheT += ("Default Lock Timeout", cfg.getDefaultLockTimeout)
        cacheT += ("Metadata type count", cfg.getJdbcTypes.size())
        cacheT += ("Cache Interceptor", safe(cfg.getInterceptor))
        cacheT += ("Store Enabled", bool2Str(storeCfg.isEnabled))
        cacheT += ("Store Class", safe(storeCfg.getStore))
        cacheT += ("Store Factory Class", storeCfg.getStoreFactory)
        cacheT += ("Store Keep Binary", storeCfg.isStoreKeepBinary)
        cacheT += ("Store Read Through", bool2Str(storeCfg.isReadThrough))
        cacheT += ("Store Write Through", bool2Str(storeCfg.isWriteThrough))
        // NOTE(review): reuses storeCfg.isEnabled (same as "Store Enabled" above) -
        // looks like it should be a write-behind-specific flag; confirm against
        // VisorCacheStoreConfiguration's API.
        cacheT += ("Write-Behind Enabled", bool2Str(storeCfg.isEnabled))
        cacheT += ("Write-Behind Flush Size", storeCfg.getFlushSize)
        cacheT += ("Write-Behind Frequency", storeCfg.getFlushFrequency)
        cacheT += ("Write-Behind Flush Threads Count", storeCfg.getFlushThreadCount)
        cacheT += ("Write-Behind Batch Size", storeCfg.getBatchSize)
        cacheT += ("Concurrent Asynchronous Operations Number", cfg.getMaxConcurrentAsyncOperations)
        cacheT += ("Loader Factory Class Name", safe(cfg.getLoaderFactory))
        cacheT += ("Writer Factory Class Name", safe(cfg.getWriterFactory))
        cacheT += ("Expiry Policy Factory Class Name", safe(cfg.getExpiryPolicyFactory))
        cacheT +=("Query Execution Time Threshold", queryCfg.getLongQueryWarningTimeout)
        cacheT +=("Query Schema Name", queryCfg.getSqlSchema)
        cacheT +=("Query Escaped Names", bool2Str(queryCfg.isSqlEscapeAll))
        cacheT +=("Query Onheap Cache Size", queryCfg.getSqlOnheapRowCacheSize)
        val sqlFxs = queryCfg.getSqlFunctionClasses
        val hasSqlFxs = sqlFxs != null && sqlFxs.nonEmpty
        if (!hasSqlFxs)
            cacheT +=("Query SQL functions", NA)
        val indexedTypes = queryCfg.getIndexedTypes
        val hasIndexedTypes = indexedTypes != null && indexedTypes.nonEmpty
        if (!hasIndexedTypes)
            cacheT +=("Query Indexed Types", NA)
        println(title)
        cacheT.render()
        // SQL functions and indexed types get their own tables when present.
        if (hasSqlFxs) {
            println("\\nQuery SQL functions:")
            val sqlFxsT = VisorTextTable()
            sqlFxsT #= "Function Class Name"
            sqlFxs.foreach(s => sqlFxsT += s)
            sqlFxsT.render()
        }
        if (hasIndexedTypes) {
            println("\\nQuery Indexed Types:")
            val indexedTypesT = VisorTextTable()
            indexedTypesT #= ("Key Class Name", "Value Class Name")
            // Indexed types arrive as a flat key/value interleaved array.
            indexedTypes.grouped(2).foreach(types => indexedTypesT += (types(0), types(1)))
            indexedTypesT.render()
        }
    }
}
| pperalta/ignite | modules/visor-console/src/main/scala/org/apache/ignite/visor/commands/cache/VisorCacheCommand.scala | Scala | apache-2.0 | 38,148 |
package rpgboss.editor.resourceselector
import scala.swing._
import rpgboss.editor._
import rpgboss.model.resource._
import rpgboss.model._
import scala.swing.event.MouseClicked
import scala.swing.event.EditDone
import com.typesafe.scalalogging.slf4j.LazyLogging
import rpgboss.editor.misc.MapLocPanel
import rpgboss.editor.uibase._
import rpgboss.editor.imageset.selector.AnimationImageFrameSelector
import rpgboss.lib.Utils
/**
 * Resource selection dialog for resources whose spec is simply the resource
 * name, i.e. there is no extra configuration beyond picking the file.
 */
class StringSpecSelectDialog[M, MT](
  owner: Window,
  sm: StateMaster,
  initialSelectionOpt: Option[String],
  allowNone: Boolean,
  metaResource: MetaResource[M, MT],
  onSuccessF: (Option[String]) => Unit)
  extends ResourceSelectDialog(
    owner,
    sm,
    initialSelectionOpt,
    allowNone,
    metaResource) {
  // The spec *is* the resource name, so both conversions are the identity.
  override def specToResourceName(spec: String): String = spec
  override def newRcNameToSpec(name: String, prevSpec: Option[String]): String =
    name
  override def onSuccess(result: Option[String]) = onSuccessF(result)
}
/**
 * Selection dialog for animation images. The spec is an AnimationVisual, which
 * bundles the image name with start/end frame selections.
 */
class AnimationImageSelectDialog(
  owner: Window,
  sm: StateMaster,
  initialSelectionOpt: Option[AnimationVisual],
  onSuccessF: (Option[AnimationVisual]) => Unit)
  extends ResourceSelectDialog(
    owner,
    sm,
    initialSelectionOpt,
    false,
    AnimationImage) {
  override def specToResourceName(spec: AnimationVisual): String =
    spec.animationImage
  override def newRcNameToSpec(
    name: String,
    prevSpecOpt: Option[AnimationVisual]): AnimationVisual = {
    // Start from a copy of the previous spec (or a default one) so frame settings
    // survive switching images, then clamp frame indices to the new image's range.
    val newSpec = Utils.deepCopy(prevSpecOpt.getOrElse(AnimationVisual()))
    val newAnimationImage = AnimationImage.readFromDisk(sm.getProj, name)
    val maxFrameIndex = newAnimationImage.xTiles * newAnimationImage.yTiles - 1
    newSpec.animationImage = name
    newSpec.start.frameIndex =
      math.min(newSpec.start.frameIndex, maxFrameIndex)
    newSpec.end.frameIndex =
      math.min(newSpec.end.frameIndex, maxFrameIndex)
    newSpec
  }
  // Right pane lets the user pick start/end frames within the chosen image.
  override def rightPaneFor(
    selection: AnimationVisual,
    updateF: AnimationVisual => Unit) = {
    val animationImage =
      AnimationImage.readFromDisk(sm.getProj, selection.animationImage)
    new AnimationImageFrameSelector(animationImage, selection, updateF)
  }
  override def onSuccess(result: Option[AnimationVisual]) = onSuccessF(result)
}
/** Selection dialog for battle backgrounds; previews the chosen image on the right. */
class BattleBackgroundSelectDialog(
  owner: Window,
  sm: StateMaster,
  initialSelectionOpt: Option[String],
  onSuccessF: (Option[String]) => Unit,
  allowNone: Boolean = false)
  extends StringSpecSelectDialog(
    owner,
    sm,
    initialSelectionOpt,
    allowNone,
    BattleBackground,
    onSuccessF) {
  override def rightPaneFor(selection: String, unused: String => Unit) = {
    val img = BattleBackground.readFromDisk(sm.getProj, selection)
    new ImagePanel(img.img) with DisposableComponent
  }
}
/** Selection dialog for pictures; previews the chosen image on the right. */
class PictureSelectDialog(
  owner: Window,
  sm: StateMaster,
  initialSelectionOpt: Option[String],
  onSuccessF: (Option[String]) => Unit)
  extends StringSpecSelectDialog(
    owner,
    sm,
    initialSelectionOpt,
    false,
    Picture,
    onSuccessF) {
  override def rightPaneFor(selection: String, unused: String => Unit) = {
    val img = Picture.readFromDisk(sm.getProj, selection)
    new ImagePanel(img.img) with DisposableComponent
  }
}
/** Selection dialog for windowskins; previews the chosen skin on the right. */
class WindowskinSelectDialog(
  owner: Window,
  sm: StateMaster,
  initialSelectionOpt: Option[String],
  onSuccessF: (Option[String]) => Unit)
  extends StringSpecSelectDialog(
    owner,
    sm,
    initialSelectionOpt,
    false,
    Windowskin,
    onSuccessF) {
  override def rightPaneFor(selection: String, unused: String => Unit) = {
    val img = Windowskin.readFromDisk(sm.getProj, selection)
    new ImagePanel(img.img) with DisposableComponent
  }
}
/**
 * Dialog for choosing a map location (or only a map when `selectMapOnly`).
 * Reports the chosen location via `onSuccessF` when the user confirms.
 */
class MapLocSelectDialog(
  owner: Window,
  sm: StateMaster,
  selectMapOnly: Boolean,
  initialLoc: MapLoc,
  onSuccessF: MapLoc => Unit)
  extends StdDialog(owner, "Select Map")
  with LazyLogging {
  // Edit a copy so cancelling leaves the caller's location untouched.
  val model = initialLoc.copy()
  val locPanel = new MapLocPanel(this, sm, model, selectMapOnly)
  def okFunc(): Unit = {
    onSuccessF(model)
    close()
  }
  contents = new DesignGridPanel {
    row().grid().add(locPanel)
    addButtons(okBtn, cancelBtn)
  }
}
/**
 * Base widget for a "value + browse button" field: a read-only text field that
 * shows the current selection next to a "..." button opening a chooser dialog.
 *
 * @param initial  Initially selected spec, if any.
 * @param onUpdate Invoked with the (possibly unchanged) model after every browse.
 */
abstract class BrowseField[SpecType](
  owner: Window,
  sm: StateMaster,
  initial: Option[SpecType],
  onUpdate: Option[SpecType] => Unit)
  extends BoxPanel(Orientation.Horizontal) with LazyLogging {
  // Keep child widgets' enabled state in sync with the panel's own.
  override def enabled_=(enabled: Boolean) = {
    super.enabled_=(enabled)
    textField.enabled = enabled
    browseBtn.enabled = enabled
  }
  // Current selection; subclasses' doBrowse() implementations mutate this.
  var model = initial
  val textField = new TextField {
    preferredSize = new Dimension(100, preferredSize.height)
    editable = false
    enabled = true
  }
  // How the model is rendered in the text field; subclasses may override.
  def modelToString(m: SpecType): String = m.toString
  def updateWidgets() =
    textField.text = model.map(modelToString).getOrElse("<None>")
  updateWidgets()
  /** Opens the subclass-specific chooser dialog and updates `model`. */
  def doBrowse()
  val browseBtn = new Button(Action("...") {
    doBrowse()
    logger.debug("Post-browse button onUpdate")
    updateWidgets()
    onUpdate(model)
  })
  // Clicking the (read-only) text field behaves like pressing the browse button.
  listenTo(textField.mouse.clicks)
  reactions += {
    case MouseClicked(`textField`, _, _, _, _) =>
      browseBtn.action.apply()
  }
  contents += textField
  contents += browseBtn
}
/**
 * BrowseField specialization for plain-string specs. An empty initial string
 * means "nothing selected", and a cleared selection is reported back as "".
 */
abstract class StringBrowseField(
  owner: Window,
  sm: StateMaster,
  initial: String,
  onUpdate: String => Unit)
  extends BrowseField[String](
    owner,
    sm,
    Some(initial).filter(_.nonEmpty),
    resultOpt => onUpdate(resultOpt.getOrElse("")))
/** Browse field for choosing a windowskin resource. */
class WindowskinField(
    owner: Window,
    sm: StateMaster,
    initial: String,
    onUpdate: String => Unit)
  extends StringBrowseField(owner, sm, initial, onUpdate) {
  override def doBrowse() = {
    // `model = _` is the success callback: store the dialog's selection.
    val diag = new WindowskinSelectDialog(
      owner, sm, model, model = _)
    diag.open()
  }
}
/**
 * Browse field for choosing a battle background resource.
 *
 * @param allowNone when true, the dialog offers an explicit "no background"
 *                  choice (forwarded to BattleBackgroundSelectDialog).
 */
class BattleBackgroundField(
    owner: Window,
    sm: StateMaster,
    initial: String,
    onUpdate: String => Unit,
    allowNone: Boolean = false)
  extends StringBrowseField(owner, sm, initial, onUpdate) {
  override def doBrowse() = {
    // `model = _` is the success callback: store the dialog's selection.
    val diag =
      new BattleBackgroundSelectDialog(owner, sm, model, model = _, allowNone)
    diag.open()
  }
}
/** Browse field for choosing a picture resource. */
class PictureField(
    owner: Window,
    sm: StateMaster,
    initial: String,
    onUpdate: String => Unit)
  extends StringBrowseField(owner, sm, initial, onUpdate) {
  override def doBrowse() = {
    // `model = _` is the success callback: store the dialog's selection.
    val diag = new PictureSelectDialog(owner, sm, model, model = _)
    diag.open()
  }
}
/**
 * Browse field for choosing the message-font resource.
 *
 * Opens a generic StringSpecSelectDialog over the Msgfont metadata and
 * writes the chosen name back into `model`.
 */
class MsgfontField(
    owner: Window,
    sm: StateMaster,
    initial: String,
    onUpdate: String => Unit)
  extends StringBrowseField(owner, sm, initial, onUpdate) {
  // `override` added for consistency with the sibling fields
  // (WindowskinField, BattleBackgroundField, PictureField). doBrowse is
  // abstract in BrowseField, so this is declarative only — no behavior
  // change.
  override def doBrowse() = {
    // `model = _` is the success callback: store the dialog's selection.
    val diag = new StringSpecSelectDialog(
      owner, sm, model,
      false, Msgfont,
      model = _)
    diag.open()
  }
}
/**
 * Browse field for choosing a map. The model holds the map's internal
 * name, but the text field renders the map's display id.
 */
class MapField(
    owner: Window,
    sm: StateMaster,
    initial: String,
    onUpdate: String => Unit)
  extends StringBrowseField(owner, sm, initial, onUpdate) {
  // Render the human-readable display id rather than the raw map key.
  override def modelToString(m: String) =
    sm.getMap(m).map(_.displayId).getOrElse("[None]")
  // `override` added for consistency with the sibling fields.
  override def doBrowse() = {
    // BUG FIX: this previously did `model.map(...).get`, which threw
    // NoSuchElementException the first time the user browsed with no map
    // selected yet (StringBrowseField maps an empty `initial` to None).
    // Fall back to an empty map name so the dialog opens without an
    // initial selection. TODO(review): confirm MapLocSelectDialog/
    // MapLocPanel tolerate an empty map name.
    val initialLoc =
      model.fold(MapLoc("", -1, -1))(mapName => MapLoc(mapName, -1, -1))
    val diag = new MapLocSelectDialog(
      owner,
      sm,
      true /* selectMapOnly */,
      initialLoc,
      loc => model = Some(loc.map))
    diag.open()
  }
}
/*
* Copyright 2016 Nikolay Donets
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.nikdon.telepooz.model.methods
import java.time.Duration
import com.github.nikdon.telepooz.model.{Message, ReplyMarkup, Response}
/**
 * Use this method to send audio files, if you want Telegram clients to display the file as a playable voice message.
 * For this to work, your audio must be in an .ogg file encoded with OPUS (other formats may be sent as Audio or
 * Document). On success, the sent Message is returned. Bots can currently send voice messages of up to 50 MB in size,
 * this limit may be changed in the future.
 *
 * @param chat_id             Unique identifier for the target chat or username of the target channel
 *                            (in the format @channelusername)
 * @param voice               Audio file to send. You can either pass a file_id as String to resend an audio that is
 *                            already on the Telegram servers, or upload a new audio file using multipart/form-data.
 * @param caption             Audio caption, 0-200 characters
 * @param duration            Duration of the audio in seconds
 * @param disable_notification Sends the message silently. iOS users will not receive a notification, Android users
 *                            will receive a notification with no sound.
 * @param reply_to_message_id If the message is a reply, ID of the original message
 * @param reply_markup        Additional interface options. A JSON-serialized object for an inline keyboard, custom
 *                            reply keyboard, instructions to hide reply keyboard or to force a reply from the user.
 */
case class SendVoice(
    chat_id: String,
    voice: String, // TODO Add file upload
    caption: Option[String] = None,
    duration: Option[Duration] = None, // NOTE(review): Telegram expects whole seconds — confirm how java.time.Duration is serialized.
    disable_notification: Option[Boolean] = None,
    reply_to_message_id: Option[Long] = None,
    reply_markup: Option[ReplyMarkup] = None
) extends Method[Response[Message]]
| nikdon/telepooz | src/main/scala/com/github/nikdon/telepooz/model/methods/SendVoice.scala | Scala | apache-2.0 | 2,550 |
package vexriscv.plugin
import spinal.core._
import spinal.lib._
import vexriscv._
/**
 * Disables pipeline overlap: decode is stalled while any later stage still
 * holds a valid instruction, so at most one instruction is in flight past
 * decode at a time.
 */
class NoPipeliningPlugin() extends Plugin[VexRiscv] {
  override def setup(pipeline: VexRiscv): Unit = {
    import pipeline.config._
    val decoderService = pipeline.service(classOf[DecoderService])
    // With no overlap, no instruction can observe a predecessor's pending
    // side effect, so the default can safely be "no side effect".
    decoderService.addDefault(HAS_SIDE_EFFECT, False)
  }
  override def build(pipeline: VexRiscv): Unit = {
    import pipeline._
    import pipeline.config._
    // NOTE(review): `writesInPipeline` is never read below — it looks like
    // a leftover from an earlier hazard computation. Confirm before
    // removing (unused signals are normally pruned at elaboration, so it
    // should be cost-free either way).
    val writesInPipeline = stages.dropWhile(_ != execute).map(s => s.arbitration.isValid && s.input(REGFILE_WRITE_VALID)) :+ RegNext(stages.last.arbitration.isValid && stages.last.input(REGFILE_WRITE_VALID))
    // Stall decode while any stage from execute onward is still valid.
    decode.arbitration.haltByOther.setWhen(stagesFromExecute.map(_.arbitration.isValid).orR)
  }
}
| SpinalHDL/VexRiscv | src/main/scala/vexriscv/plugin/NoPipeliningPlugin.scala | Scala | mit | 757 |
package org.gc.scala.learningscala.db.postgres.jdbc
import java.sql.{Connection, DriverManager, ResultSet};
/**
* Created by gchand on 1/11/2016.
*/
/**
 * Minimal JDBC example against a local PostgreSQL database.
 *
 * Demonstrates a read-only query and a parameterized insert. Connection
 * credentials are hard-coded for demo purposes only — never do this in
 * production code.
 */
object Sample {
  def main(args: Array[String]): Unit = {
    // Database config (demo credentials embedded in the URL).
    val connectionUrl = "jdbc:postgresql://localhost:5432/scala?user=postgres&password=gw"

    // Force-load the PostgreSQL driver so it registers with DriverManager.
    classOf[org.postgresql.Driver]

    // Setup the connection
    val conn = DriverManager.getConnection(connectionUrl)

    insert()

    /** Read-only query: prints the first five user ids. */
    def query(): Unit = {
      try {
        // Configure to be read-only.
        val statement = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)
        try {
          val rs = statement.executeQuery("SELECT id FROM users LIMIT 5")
          while (rs.next) {
            println(rs.getString("id"))
          }
        } finally {
          // Closing the Statement also closes its ResultSet (previously
          // neither was closed — a resource leak on pooled connections).
          statement.close()
        }
      } finally {
        conn.close()
      }
    }

    /** Parameterized insert of a single demo user row. */
    def insert(): Unit = {
      // Removed: an unused CONCUR_UPDATABLE Statement was created here and
      // never used or closed (resource leak + dead code).
      try {
        val prep = conn.prepareStatement("INSERT INTO users (id, username) VALUES (?, ?) ")
        try {
          prep.setInt(1, 3) // column 1
          prep.setString(2, "Ganesh") // column 2
          prep.executeUpdate
        } finally {
          prep.close()
        }
      } finally {
        conn.close()
      }
    }
  }
}
| ganeshchand/learning-scala | src/main/scala-2.11/org/gc/scala/learningscala/db/postgres/jdbc/Sample.scala | Scala | mit | 1,385 |
package nl.gideondk.raiku
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent._
import scala.concurrent.duration._
/**
 * Integration spec for RaikuBucket against a live Riak node (via `client`
 * from RaikuSpec). Exercises store/fetch/delete, batch variants, and
 * sibling-conflict behavior.
 */
class BucketSpec extends RaikuSpec {
  import nl.gideondk.raiku.TestModels._

  val bucket = RaikuBucket.default[Z]("raiku_test_z_bucket", client)
  // allow_mult = true so the sibling-conflict test below can create siblings.
  Await.result(bucket.setBucketProperties(RaikuBucketProperties(None, Some(true))), 5 seconds)

  "A bucket" should {
    "be able to store objects" in {
      val newId = java.util.UUID.randomUUID.toString
      val obj = Z(newId, "Should also be stored")
      val v = bucket.store(obj, true)
      v.futureValue should equal(Some(obj))
    }
  }

  // NOTE(review): the closing brace above ends the `"A bucket" should`
  // block, so every test from here on is registered at the top level of
  // the spec (their names lose the "A bucket should" prefix). This looks
  // like a misplaced brace — confirm whether these were meant to nest.
  "be able to fetch stored objects" in {
    val newId = java.util.UUID.randomUUID.toString
    val obj = Z(newId, "Should also be stored")
    val retObj = for {
      v ← bucket << obj
      retObj ← bucket ? obj.id
    } yield {
      retObj
    }
    retObj.futureValue should equal(Some(obj))
  }

  // Three unsafe (no-vclock) writes of the same key create siblings, so a
  // subsequent fetch must fail with UnresolvedSiblingsConflict.
  "create siblings (and fail) when unsafely updating objects" in {
    val newId = java.util.UUID.randomUUID.toString
    val obj = Z(newId, "Should also be stored")
    val retObj = for {
      v ← client unsafeStoreNew zConverter.write(bucket.bucketName, bucket.bucketType, obj)
      v ← client unsafeStoreNew zConverter.write(bucket.bucketName, bucket.bucketType, obj)
      v ← client unsafeStoreNew zConverter.write(bucket.bucketName, bucket.bucketType, obj)
      retObj ← bucket ? obj.id
    } yield {
      ()
    }
    an[UnresolvedSiblingsConflict] should be thrownBy Await.result(retObj, 5 seconds)
  }

  // Safe (vclock-aware) repeated stores must not create siblings.
  "shouldn't create siblings when updating safely" in {
    val newId = java.util.UUID.randomUUID.toString
    val obj = Z(newId, "Should also be stored")
    val retObj = for {
      v ← bucket << obj
      v ← bucket << obj
      v ← bucket << obj
      retObj ← bucket ? obj.id
    } yield {
      retObj
    }
    retObj.futureValue should equal(Some(obj))
  }

  "be able to persist multiple objects" in {
    val vec = List.fill(50)(Z(java.util.UUID.randomUUID.toString, "Should also be persisted"))
    val retObj = for {
      vs ← bucket <<* vec
      retObj ← bucket ?* vec.map(_.id)
    } yield retObj
    retObj.futureValue should contain theSameElementsAs (vec)
  }

  "be able to delete objects correctly" in {
    val newId = java.util.UUID.randomUUID.toString
    val obj = Z(newId, "Should also be stored")
    val retObj = for {
      v ← bucket << obj
      firstRet ← bucket ? obj.id
      _ ← bucket - obj
      secRet ← bucket ? obj.id
    } yield {
      firstRet.isDefined && !secRet.isDefined
    }
    retObj.futureValue should be(true)
  }

  "be able to delete objects correctly by key" in {
    val newId = java.util.UUID.randomUUID.toString
    val obj = Z(newId, "Should also be stored")
    val retObj = for {
      v ← bucket << obj
      firstRet ← bucket ? obj.id
      _ ← bucket deleteByKey obj.id
      secRet ← bucket ? obj.id
    } yield {
      firstRet.isDefined && !secRet.isDefined
    }
    retObj.futureValue should be(true)
  }

  "shouldn't be able to fetch multiple deleted objects" in {
    val vec = List.fill(50)(Z(java.util.UUID.randomUUID.toString, "Should also be persisted"))
    val retObj = for {
      vs ← bucket <<* vec
      bef ← bucket ?* vec.map(_.id)
      _ ← bucket -* vec
      aft ← bucket ?* vec.map(_.id)
    } yield {
      bef.length == 50 && aft.length == 0
    }
    retObj.futureValue should be(true)
  }
}
package dokutoku.golden_thumb.mod
import scala.collection.mutable.ListBuffer
import dokutoku.golden_thumb.seed.traits.GoldenSeed
import forestry.api.recipes.RecipeManagers
import net.minecraft.item.ItemStack
import net.minecraftforge.liquids.LiquidStack
import thermalexpansion.api.crafting.CraftingManagers
import dokutoku.golden_thumb.crop.GoldenCrop
import powercrystals.minefactoryreloaded.api.FarmingRegistry
import dokutoku.golden_thumb.mod.java.HarvestablePlant
import dokutoku.golden_thumb.mod.java.PlantableSeed
import net.minecraft.block.BlockCrops
import dokutoku.golden_thumb.mod.java.FertilizablePlant
/**
 * Cross-mod integration hooks for Golden Thumb.
 *
 * The registries below are filled elsewhere during mod init; each
 * do*Integration method then pushes the registered content into the
 * corresponding third-party API (Forestry, Thermal Expansion, MineFactory
 * Reloaded).
 */
object Integration {
  // Seed -> liquid pairs for Forestry's squeezer.
  var SqueezerRegistry = new ListBuffer[(GoldenSeed, LiquidStack)]
  // Seed -> liquid pairs for Thermal Expansion's magma crucible.
  var CrucibleRegistry = new ListBuffer[(GoldenSeed, LiquidStack)]
  // Crop/seed pairs for MineFactory Reloaded's farming machines.
  var MFRCropRegistry = new ListBuffer[(GoldenCrop, GoldenSeed)]
  var LiquidSeeds = new ListBuffer[GoldenSeed]

  /** Registers each squeezer entry as a Forestry squeezer recipe. */
  def doForestryIntegration(): Unit = {
    for ((seed, liquid) <- SqueezerRegistry) {
      val inputs = Array[ItemStack](new ItemStack(seed))
      RecipeManagers.squeezerManager.addRecipe(10, inputs, liquid)
    }
  }

  /** Registers each crucible entry as a Thermal Expansion crucible recipe. */
  def doThermalExpansionIntegration(): Unit = {
    for ((seed, liquid) <- CrucibleRegistry) {
      CraftingManagers.crucibleManager.addRecipe(1000, new ItemStack(seed), liquid)
    }
  }

  /** Registers crops and seeds with MineFactory Reloaded's machines. */
  def doMineFactoryIntegration(): Unit = {
    for ((crop, seed) <- MFRCropRegistry) {
      FarmingRegistry.registerHarvestable(new HarvestablePlant(crop.blockID, seed.itemID))
      FarmingRegistry.registerFertilizable(new FertilizablePlant(crop.blockID))
      FarmingRegistry.registerPlantable(new PlantableSeed(seed.itemID, seed.getPlantID(null, 0, 0, 0)))
    }
  }
}
package org.saddle.vec
import org.saddle.stats._
import org.saddle.Vec
/**
 * Enrich Vec with stats.
 *
 * Mixing in this trait provides implicit conversions from Vec[Int],
 * Vec[Long] and Vec[Double] to their basic, expanding (cumulative) and
 * rolling statistics wrappers, so e.g. statistical methods resolve on a
 * plain Vec via these conversions.
 */
trait VecStatsImplicits {
  // Alias for a conversion from a Vec to its stats view.
  type Vec2Stats[A] = Vec[A] => VecStats[A]

  // Basic (scalar-result) statistics.
  implicit def vecToIntStats(s: Vec[Int]): VecStats[Int] = new IntStats(s)
  implicit def vecToLongStats(s: Vec[Long]): VecStats[Long] = new LongStats(s)
  implicit def vecToDoubleStats(s: Vec[Double]): VecStats[Double] = new DoubleStats(s)

  // Expanding (cumulative) statistics.
  implicit def vecToIntExpStats(s: Vec[Int]): VecExpandingStats[Int] = new IntExpandingStats(s)
  implicit def vecToLongExpStats(s: Vec[Long]): VecExpandingStats[Long] = new LongExpandingStats(s)
  implicit def vecToDoubleExpStats(s: Vec[Double]): VecExpandingStats[Double] = new DoubleExpandingStats(s)

  // Rolling-window statistics.
  implicit def vecToIntRollingStats(s: Vec[Int]): VecRollingStats[Int] = new VecRollingStats[Int](s)
  implicit def vecToLongRollingStats(s: Vec[Long]): VecRollingStats[Long] = new VecRollingStats[Long](s)
  implicit def vecToDoubleRollingStats(s: Vec[Double]): VecRollingStats[Double] = new VecRollingStats[Double](s)
}
| saddle/saddle | saddle-core/src/main/scala/org/saddle/vec/VecStatsImplicits.scala | Scala | apache-2.0 | 1,045 |
package org.jetbrains.plugins.scala
package annotator
import org.intellij.lang.annotations.Language
import org.jetbrains.plugins.scala.base.SimpleTestCase
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunctionDefinition
/**
 * Tests for FunctionAnnotator: each test feeds a single method definition
 * (prefixed with `Header`, which declares classes A and B) through the
 * annotator and pattern-matches on the produced error/warning messages.
 *
 * Naming convention: test names encode declared-result-type x body shape,
 * e.g. testTypeReturnWrongUnit = "explicit result type, `return ()` in body".
 *
 * Pavel.Fatin, 18.05.2010
 */
class FunctionAnnotatorTest extends SimpleTestCase {
  // Prepended to every snippet so `new A` / `new B` resolve.
  final val Header = "class A; class B\\n"

  // --- Unit (procedure) syntax: def f { ... } ---
  def testUnitEmpty() {
    assertMatches(messages("def f { }")) {
      case Nil =>
    }
  }
  def testUnitExpression() {
    assertMatches(messages("def f { new A }")) {
      case Nil =>
    }
  }
  def testUnitExpressionUnit() {
    assertMatches(messages("def f { () }")) {
      case Nil =>
    }
  }
  def testUnitReturn() {
    assertMatches(messages("def f { return }")) {
      case Nil =>
    }
  }
  def testUnitReturnType() {
    assertMatches(messages("def f { return new A }")) {
      case Warning("new A", RedundantReturnData()) :: Nil =>
    }
  }
  def testUnitReturnUnit() {
    assertMatches(messages("def f { return () }")) {
      case Warning("()", RedundantReturnData()) :: Nil =>
    }
  }

  // --- Inferred result type: def f = ... ---
  def testAssignNull() {
    assertMatches(messages("def f = null")) {
      case Nil =>
    }
  }
  def testAssignEmpty() {
    assertMatches(messages("def f = { }")) {
      case Nil =>
    }
  }
  def testAssignExpression() {
    assertMatches(messages("def f = { new A }")) {
      case Nil =>
    }
  }
  def testAssignReturn() {
    assertMatches(messages("def f = { return }")) {
      case Error("return", NeedsResultType()) :: Nil =>
    }
  }
  def testAssignReturnExpression() {
    assertMatches(messages("def f = { return new A }")) {
      case Error("return", NeedsResultType()) :: Nil =>
    }
  }

  // --- Explicit result type: def f: A = ... ---
  def testTypeNull() {
    assertMatches(messages("def f: A = null")) {
      case Nil =>
    }
  }
  def testAnyValNull() {
    assertMatches(messages("def f: AnyVal = null")) {
      case Error("null", TypeMismatch()) :: Nil =>
    }
  }
  def testTypeEmpty() {
    assertMatches(messages("def f: A = { }")) {
      case Error("}", TypeMismatch()) :: Nil =>
    }
  }
  def testTypeAbsolutelyEmpty() {
    assertMatches(messages("def f: A = {}")) {
      case Error("}", TypeMismatch()) :: Nil =>
    }
  }
  def testTypeExpression() {
    assertMatches(messages("def f: A = { new A }")) {
      case Nil =>
    }
  }
  def testTypeWrongExpression() {
    assertMatches(messages("def f: A = { new B }")) {
      case Error("new B", TypeMismatch()) :: Nil =>
    }
  }
  def testTypeWrongExpressionUnit() {
    assertMatches(messages("def f: A = { () }")) {
      case Error("()", TypeMismatch()) :: Nil =>
    }
  }
  def testTypeWrongExpressionMultiple() {
    // Both branches of the `if` are result expressions, so both are flagged.
    assertMatches(messages("def f: A = { if(1 > 2) new B else new B }")) {
      case Error("new B", TypeMismatch()) :: Error("new B", TypeMismatch()) :: Nil =>
    }
  }
  def testTypeReturn() {
    assertMatches(messages("def f: A = { return }")) {
      case Error("return", TypeMismatch()) :: Nil =>
    }
  }

  // --- Explicit Unit result type ---
  def testTypeUnitEmpty() {
    assertMatches(messages("def f: Unit = { }")) {
      case Nil =>
    }
  }
  def testTypeUnitExpression() {
    assertMatches(messages("def f: Unit = { new A }")) {
      case Nil =>
    }
  }
  def testTypeUnitExpressionUnit() {
    assertMatches(messages("def f: Unit = { () }")) {
      case Nil =>
    }
  }
  def testTypeUnitReturn() {
    assertMatches(messages("def f: Unit = { return }")) {
      case Nil =>
    }
  }
  def testTypeUnitReturnType() {
    assertMatches(messages("def f: Unit = { return new A }")) {
      case Warning("new A", RedundantReturnData()) :: Nil =>
    }
  }
  def testTypeUnitReturnUnit() {
    assertMatches(messages("def f: Unit = { return () }")) {
      case Warning("()", RedundantReturnData()) :: Nil =>
    }
  }

  // --- return with data ---
  def testTypeReturnType() {
    assertMatches(messages("def f: A = { return new A }")) {
      case Nil =>
    }
  }
  def testInheritedTypeReturnType() {
    // The inherited type is NOT taken as the declared result type: a
    // `return` in an inferred-type override still needs an explicit type.
    assertMatches(messages("trait T { def f: T }; new T { def f = { return new T }}")) {
      case Error("return", NeedsResultType()) :: Nil =>
    }
  }
  def testTypeReturnWrongType() {
    assertMatches(messages("def f: A = { return new B }")) {
      case Error("new B", TypeMismatch()) :: Nil =>
    }
  }
  def testTypeReturnWrongUnit() {
    assertMatches(messages("def f: A = { return () }")) {
      case Error("()", TypeMismatch()) :: Nil =>
    }
  }
  def testTypeReturnWrongTypeMultiple() {
    assertMatches(messages("def f: A = { if(1 > 2) return new B else return new B }")) {
      case Error("new B", TypeMismatch()) :: Error("new B", TypeMismatch()) :: Nil =>
    }
  }
  def testTypeReturnAndExpressionWrongType(){
    assertMatches(messages("def f: A = { if(1 > 2) return new B; new B }")) {
      case Error("new B", TypeMismatch()) :: Error("new B", TypeMismatch()) :: Nil =>
    }
  }

  //todo: requires Function1 trait in scope
  /*def testTypeExpressionImplicit() {
    assertMatches(messages("implicit def toA(b: B) = new A; def f: A = { new B }")) {
      case Nil =>
    }
  }*/

  //todo: requires Function1 trait in scope
  /*def testTypeReturnImplicit() {
    assertMatches(messages("implicit def toA(b: B) = new A; def f: A = { return new B }")) {
      case Nil =>
    }
  }*/

  // --- Unresolved types/expressions must not produce annotator errors
  //     (the resolver reports those separately) ---
  def testUnresolvedTypeEmpty() {
    assertMatches(messages("def f: C = { }")) {
      case Nil =>
    }
  }
  def testUnresolvedTypeExpression() {
    assertMatches(messages("def f: C = { new A }")) {
      case Nil =>
    }
  }
  def testUnresolvedTypeReturn() {
    assertMatches(messages("def f: C = { return }")) {
      case Nil =>
    }
  }
  def testUnresolvedTypeReturnExpression() {
    assertMatches(messages("def f: C = { return new A }")) {
      case Nil =>
    }
  }
  def testUnresolvedExpression() {
    assertMatches(messages("def f: A = { new C }")) {
      case Nil =>
    }
  }
  def testReturnUnresolvedExpression() {
    assertMatches(messages("def f: A = { return new C }")) {
      case Nil =>
    }
  }
  def testUnresolvedBoth() {
    assertMatches(messages("def f: C = { new D }")) {
      case Nil =>
    }
  }
  def testUnresolvedBothReturn() {
    assertMatches(messages("def f: C = { return new D }")) {
      case Nil =>
    }
  }
  def testUnresolvedReference() {
    assertMatches(messages("def f: A = { foo }")) {
      case Nil =>
    }
  }
  def testUnitUnresolvedExpression() {
    assertMatches(messages("def f { new C }")) {
      case Nil =>
    }
  }
  def testUnitReturnUnresolvedExpression() {
    assertMatches(messages("def f { return new A }")) {
      case Warning("new A", RedundantReturnData()) :: Nil =>
    }
  }
  def testTypeUnitUnresolvedExpression() {
    assertMatches(messages("def f: Unit = { new C }")) {
      case Nil =>
    }
  }
  def testTypeUnitReturnUnresolvedExpression() {
    assertMatches(messages("def f: Unit = { return new A }")) {
      case Warning("new A", RedundantReturnData()) :: Nil =>
    }
  }
  def testAnyTypeUnresolvedExpression() {
    assertMatches(messages("def f: Any = { new C }")) {
      case Nil =>
    }
  }
  def testAnyTypeUnresolvedReturnExpression() {
    assertMatches(messages("def f: Any = { return new C }")) {
      case Nil =>
    }
  }
  def testNestedFunction() {
    val code = """
        def f1 = {
          def f2 { return }
          new A
        }"""
    assertMatches(messages(code)) {
      case Nil =>
    }
  }

  // --- Recursion ---
  def testRecursiveUnit() {
    assertMatches(messages("def f { f }")) {
      case Nil =>
    }
  }
  def testRecursiveType() {
    assertMatches(messages("def f: A = { f }")) {
      case Nil =>
    }
  }
  def testRecursiveUnresolvedType() {
    assertMatches(messages("def f: C = { f }")) {
      case Nil =>
    }
  }
  def testRecursiveUnapplicable() {
    assertMatches(messages("def f = { f( new A ) }")) {
      case Error("f", Recursive()) :: Nil =>
    }
  }
  def testRecursive() {
    assertMatches(messages("def f = { f }")) {
      case Error("f", Recursive()) :: Nil =>
    }
  }
  def testRecursiveMultiple() {
    assertMatches(messages("def f = { f; f }")) {
      case Error("f", Recursive()) :: Error("f", Recursive()) :: Nil =>
    }
  }
  def testRecursiveParameter() {
    assertMatches(messages("def f(a: A) = { f(new A) }")) {
      case Error("f", Recursive()) :: Nil =>
    }
  }
  def testRecursiveWithInheritedResultType() {
    assertMatches(messages("trait T { def f: T }; new T { def f = { f }}")) {
      case Nil =>
    }
  }
  def testRecursiveAndNeedsResultType() {
    assertMatches(messages("def f = { f; return new A }")) {
      case Error("f", Recursive()) :: Error("return", NeedsResultType()) :: Nil =>
    }
  }
  def testRecursiveAndTypeMismatch() {
    assertMatches(messages("def f: A = { f; new B }")) {
      case Error("new B", TypeMismatch()) :: Nil =>
    }
  }
  def testRecursiveAndRedundantReturnData() {
    assertMatches(messages("def f { f; return new A }")) {
      case Warning("new A", RedundantReturnData()) :: Nil =>
    }
  }

  /**
   * Runs the annotator (type-aware) over every function definition in the
   * given snippet and collects the produced messages.
   */
  def messages(@Language(value = "Scala", prefix = Header) code: String): List[Message] = {
    val annotator = new FunctionAnnotator() {}
    val mock = new AnnotatorHolderMock
    val parse: ScalaFile = (Header + code).parse
    parse.depthFirst.filterByType(classOf[ScFunctionDefinition]).foreach {
      annotator.annotateFunction(_, mock, typeAware = true)
    }
    mock.annotations
  }

  // Extractors that match a message by a characteristic substring.
  val TypeMismatch = ContainsPattern("Type mismatch")
  val RedundantReturnData = ContainsPattern("Unit result type")
  val NeedsResultType = ContainsPattern("has return statement")
  val Recursive = ContainsPattern("Recursive method")
}
package org.bizzle.plugin.ghosthost
import
java.{ io, net },
io.IOException,
net.URL
import
sbt.{ Classpaths, Compile, ModuleID, UpdateReport }
/**
 * Pairs an sbt ModuleID with the resource paths to extract from its
 * resolved jar.
 */
class ModuleSpec(val moduleID: ModuleID, paths: Seq[String]) {

  /**
   * Maps each requested path inside the jar to the path it should be
   * exposed under (prefixed with the library name).
   *
   * WebJars artifacts store files under
   * META-INF/resources/webjars/<lib>/<version>/..., so for org.webjars
   * modules that prefix is prepended — unless the caller already supplied
   * an absolute path or a fully-prefixed one.
   */
  private[ghosthost] def getPathMappings: Map[String, String] = {
    import moduleID.{ name => libName, organization => org, revision => versionStr }
    val webJarsPrefix = "META-INF/resources/webjars"
    val mappings =
      if (org == "org.webjars")
        paths map {
          case path if path.startsWith("/") || path.startsWith(webJarsPrefix) =>
            // NOTE(review): a leading "/" yields "<lib>//<path>" here —
            // confirm whether that double slash is intended downstream.
            path -> s"$libName/$path"
          case path =>
            s"$webJarsPrefix/$libName/$versionStr/$path" -> s"$libName/$path"
        }
      else
        paths map (path => path -> s"$libName/$path")
    mappings.toMap
  }

  /**
   * Locates the resolved jar for this module on the managed compile
   * classpath. Matching is heuristic: the file path must contain every
   * dot-separated segment of the organization and the module name.
   *
   * Throws IOException if no matching jar is found in the update report.
   */
  def findURL()(implicit update: UpdateReport): URL =
    Classpaths.managedJars(Compile, Set("jar"), update) map (_.data) find {
      file =>
        val str = file.toString
        (moduleID.organization split '.' forall str.contains) && (str contains moduleID.name)
    } map {
      _.toURI.toURL
    } getOrElse (
      throw new IOException(s"Required '.jar' missing for the following `ModuleSpec`: $this")
    )
}
/**
 * Syntax helpers: lets callers write
 * `"org" % "lib" % "1.0" usingFilesAt ("a.js", "b.css")` and use the
 * resulting ModuleSpec wherever a ModuleID is expected.
 */
object ModuleSpecs {
  implicit class EnhancedModuleID(val moduleID: ModuleID) {
    def usingFilesAt(paths: String*) = new ModuleSpec(moduleID, paths)
  }
  // Allows a ModuleSpec to be passed to APIs that take a plain ModuleID.
  implicit def moduleSpec2ModuleID(spec: ModuleSpec): ModuleID = spec.moduleID
}
| TheBizzle/Ghost-Host | src/main/scala/org/bizzle/plugin/ghosthost/ModuleSpec.scala | Scala | bsd-3-clause | 1,481 |
package im.actor.server.api.rpc.service
import akka.contrib.pattern.DistributedPubSubMediator
import akka.testkit.TestProbe
import com.google.protobuf.CodedInputStream
import im.actor.api.rpc.Implicits._
import im.actor.api.rpc._
import im.actor.api.rpc.counters.UpdateCountersChanged
import im.actor.api.rpc.files.FileLocation
import im.actor.api.rpc.messaging._
import im.actor.api.rpc.misc.ResponseSeqDate
import im.actor.api.rpc.peers.{ Peer, PeerType, UserOutPeer }
import im.actor.server._
import im.actor.server.acl.ACLUtils
import im.actor.server.api.rpc.service.groups.{ GroupInviteConfig, GroupsServiceImpl }
import im.actor.server.api.rpc.service.messaging.Events
import im.actor.server.oauth.{ GoogleProvider, OAuth2GoogleConfig }
import im.actor.server.presences.{ GroupPresenceManager, PresenceManager }
import scala.concurrent.Future
import scala.util.Random
/**
 * Integration spec for the messaging RPC service (private and group
 * peers), run against the in-process actor cluster from BaseAppSuite.
 *
 * All fixtures (users, auth ids, groups, services) live in the nested
 * object `s` and are shared across tests — test bodies are methods on
 * `s.privat`/`s.group`/`s.pubsub`, registered below by name.
 */
class MessagingServiceSpec
  extends BaseAppSuite
  with GroupsServiceHelpers
  with ImplicitGroupRegions
  with ImplicitSequenceService
  with ImplicitSessionRegionProxy
  with ImplicitAuthService
  with SequenceMatchers {
  behavior of "MessagingService"

  "Private Messaging" should "send messages" in s.privat.sendMessage

  it should "not repeat message sending with same authId and RandomId" in s.privat.cached

  "Group Messaging" should "send messages" in s.group.sendMessage

  it should "not send messages when user is not in group" in s.group.restrictAlienUser

  it should "publish messages in PubSub" in s.pubsub.publish

  it should "not repeat message sending with same authId and RandomId" in s.group.cached

  // Shared fixture container. NOTE(review): fixture state (groups, seq
  // numbers) is shared between tests, so test bodies are order-sensitive.
  object s {
    implicit val ec = system.dispatcher

    implicit val sessionRegion = buildSessionRegionProxy()

    implicit val presenceManagerRegion = PresenceManager.startRegion()
    implicit val groupPresenceManagerRegion = GroupPresenceManager.startRegion()

    val groupInviteConfig = GroupInviteConfig("http://actor.im")

    implicit val service = messaging.MessagingServiceImpl(mediator)
    implicit val groupsService = new GroupsServiceImpl(groupInviteConfig)

    object privat {
      // Happy path: user1 sends to user2; the sender's second device gets
      // UpdateMessage, the receiver gets UpdateMessage + counter update.
      def sendMessage() = {
        val (user1, user1AuthId1, _) = createUser()
        val user1AuthId2 = createAuthId(user1.id)

        val (user2, user2AuthId, _) = createUser()

        val user2Model = getUserModel(user2.id)
        val user2AccessHash = ACLUtils.userAccessHash(user1AuthId1, user2.id, user2Model.accessSalt)
        val user2Peer = peers.OutPeer(PeerType.Private, user2.id, user2AccessHash)

        val sessionId = createSessionId()
        val clientData11 = ClientData(user1AuthId1, sessionId, Some(user1.id))
        val clientData12 = ClientData(user1AuthId2, sessionId, Some(user1.id))
        val clientData2 = ClientData(user2AuthId, sessionId, Some(user2.id))

        val randomId = Random.nextLong()

        {
          implicit val clienData = clientData11

          whenReady(service.handleSendMessage(user2Peer, randomId, TextMessage("Hi Shiva", Vector.empty, None))) { resp ⇒
            resp should matchPattern {
              case Ok(ResponseSeqDate(1000, _, _)) ⇒
            }
          }

          expectUpdate[UpdateMessageSent](0, Array.empty, UpdateMessageSent.header, Some(1)) { update ⇒
            update.peer shouldEqual Peer(PeerType.Private, user2.id)
            update.randomId shouldEqual randomId
          }
        }

        {
          implicit val clientData = clientData12

          expectUpdate[UpdateMessage](0, Array.empty, UpdateMessage.header, Some(1)) { update ⇒
            update.peer shouldEqual Peer(PeerType.Private, user2.id)
            update.randomId shouldEqual randomId
            update.senderUserId shouldEqual user1.id
          }
        }

        {
          implicit val clientData = clientData2

          expectUpdatesOrdered(failUnmatched)(0, Array.empty, List(UpdateMessage.header, UpdateCountersChanged.header)) {
            case (UpdateMessage.header, u) ⇒
              val update = parseUpdate[UpdateMessage](u)
              update.peer shouldEqual Peer(PeerType.Private, user1.id)
              update.randomId shouldEqual randomId
              update.senderUserId shouldEqual user1.id
            case (UpdateCountersChanged.header, update) ⇒ parseUpdate[UpdateCountersChanged](update)
          }
        }
      }

      // Sending the same randomId five times must deliver exactly one
      // message (server-side dedup on (authId, randomId)).
      def cached(): Unit = {
        val (user1, user1AuthId1, _) = createUser()
        val (user2, user2AuthId, _) = createUser()

        val clientData1 = ClientData(user1AuthId1, createSessionId(), Some(user1.id))
        val clientData2 = ClientData(user2AuthId, createSessionId(), Some(user2.id))

        val user2Model = getUserModel(user2.id)
        val user2AccessHash = ACLUtils.userAccessHash(user1AuthId1, user2.id, user2Model.accessSalt)
        val user2Peer = peers.OutPeer(PeerType.Private, user2.id, user2AccessHash)

        {
          implicit val clientData = clientData1

          val randomId = Random.nextLong()
          val text = "Hi Shiva"

          val actions = Future.sequence(List(
            service.handleSendMessage(user2Peer, randomId, TextMessage(text, Vector.empty, None)),
            service.handleSendMessage(user2Peer, randomId, TextMessage(text, Vector.empty, None)),
            service.handleSendMessage(user2Peer, randomId, TextMessage(text, Vector.empty, None)),
            service.handleSendMessage(user2Peer, randomId, TextMessage(text, Vector.empty, None)),
            service.handleSendMessage(user2Peer, randomId, TextMessage(text, Vector.empty, None))
          ))

          whenReady(actions) { resps ⇒
            resps foreach (_ should matchPattern { case Ok(ResponseSeqDate(1000, _, _)) ⇒ })
          }

          expectUpdate[UpdateMessageSent](0, Array.empty, UpdateMessageSent.header, Some(1))(identity)
        }

        {
          implicit val clientData = clientData2

          expectUpdatesUnordered(failUnmatched)(0, Array.empty, Set(UpdateMessage.header, UpdateCountersChanged.header)) {
            case (UpdateMessage.header, update) ⇒ parseUpdate[UpdateMessage](update)
            case (UpdateCountersChanged.header, update) ⇒
              val counters = parseUpdate[UpdateCountersChanged](update)
              // Exactly one unread message despite five identical sends.
              counters.counters.globalCounter shouldEqual Some(1)
          }
        }
      }
    }

    object group {
      val (user1, user1AuthId1, _) = createUser()
      val user1AuthId2 = createAuthId(user1.id)

      val (user2, user2AuthId, _) = createUser()

      val sessionId = createSessionId()
      val clientData11 = ClientData(user1AuthId1, sessionId, Some(user1.id))
      val clientData12 = ClientData(user1AuthId2, sessionId, Some(user1.id))
      val clientData2 = ClientData(user2AuthId, sessionId, Some(user2.id))

      // Group created once at fixture init; subsequent tests reuse it.
      val groupResponse = {
        implicit val clientData = clientData11
        createGroup("Fun group", Set(user2.id))
      }

      val groupSeq = groupResponse.seq
      val groupState = groupResponse.state
      val groupOutPeer = groupResponse.groupPeer

      def sendMessage() = {
        val randomId = Random.nextLong()

        {
          implicit val clientData = clientData11

          // seq 1002: group creation already consumed earlier seq numbers.
          whenReady(service.handleSendMessage(groupOutPeer.asOutPeer, randomId, TextMessage("Hi again", Vector.empty, None))) { resp ⇒
            resp should matchPattern {
              case Ok(ResponseSeqDate(1002, _, _)) ⇒
            }
          }

          expectUpdate[UpdateMessageSent](groupSeq, groupState, UpdateMessageSent.header) { update ⇒
            update.peer shouldEqual Peer(PeerType.Group, groupOutPeer.groupId)
            update.randomId shouldEqual randomId
          }
        }

        {
          implicit val clientData = clientData12

          expectUpdate[UpdateMessage](0, Array.empty, UpdateMessage.header) { update ⇒
            update.peer shouldEqual Peer(PeerType.Group, groupOutPeer.groupId)
            update.randomId shouldEqual randomId
            update.senderUserId shouldEqual user1.id
          }
        }

        {
          implicit val clientData = clientData2

          expectUpdate[UpdateMessage](0, Array.empty, UpdateMessage.header) { update ⇒
            update.peer shouldEqual Peer(PeerType.Group, groupOutPeer.groupId)
            update.randomId shouldEqual randomId
            update.senderUserId shouldEqual user1.id
          }
        }
      }

      // A user outside the group must be rejected from every group RPC.
      // NOTE(review): alienClientData reuses user1AuthId1 with alien's
      // user id — confirm this is intentional (vs. authIdAlien).
      def restrictAlienUser() = {
        val (alien, authIdAlien, _) = createUser()

        val alienClientData = ClientData(user1AuthId1, sessionId, Some(alien.id))

        whenReady(service.handleSendMessage(groupOutPeer.asOutPeer, Random.nextLong(), TextMessage("Hi again", Vector.empty, None))(alienClientData)) { resp ⇒
          resp should matchNotAuthorized
        }

        whenReady(groupsService.handleEditGroupTitle(groupOutPeer, 4L, "Loosers")(alienClientData)) { resp ⇒
          resp should matchNotAuthorized
        }

        val (user3, authId3, _) = createUser()
        val user3OutPeer = UserOutPeer(user3.id, 11)

        whenReady(groupsService.handleInviteUser(groupOutPeer, 4L, user3OutPeer)(alienClientData)) { resp ⇒
          resp should matchNotAuthorized
        }

        val fileLocation = FileLocation(1L, 1L)
        whenReady(groupsService.handleEditGroupAvatar(groupOutPeer, 5L, fileLocation)(alienClientData)) { resp ⇒
          resp should matchNotAuthorized
        }

        whenReady(groupsService.handleRemoveGroupAvatar(groupOutPeer, 5L)(alienClientData)) { resp ⇒
          resp should matchNotAuthorized
        }

        whenReady(groupsService.handleLeaveGroup(groupOutPeer, 5L)(alienClientData)) { resp ⇒
          resp should matchNotAuthorized
        }
      }

      // Same dedup property as privat.cached, but against a group peer.
      def cached(): Unit = {
        val (user1, user1AuthId, _) = createUser()
        val (user2, user2AuthId, _) = createUser()

        val sessionId = createSessionId()
        val clientData1 = ClientData(user1AuthId, sessionId, Some(user1.id))
        val clientData2 = ClientData(user2AuthId, sessionId, Some(user2.id))

        val group2OutPeer = {
          implicit val clientData = clientData1
          createGroup("Fun group 2", Set(user2.id)).groupPeer
        }

        {
          implicit val clientData = clientData1

          val randomId = Random.nextLong()
          val text = "Hi Shiva"

          val actions = Future.sequence(List(
            service.handleSendMessage(group2OutPeer.asOutPeer, randomId, TextMessage(text, Vector.empty, None)),
            service.handleSendMessage(group2OutPeer.asOutPeer, randomId, TextMessage(text, Vector.empty, None)),
            service.handleSendMessage(group2OutPeer.asOutPeer, randomId, TextMessage(text, Vector.empty, None)),
            service.handleSendMessage(group2OutPeer.asOutPeer, randomId, TextMessage(text, Vector.empty, None)),
            service.handleSendMessage(group2OutPeer.asOutPeer, randomId, TextMessage(text, Vector.empty, None))
          ))

          whenReady(actions) { resps ⇒
            resps foreach (_ should matchPattern { case Ok(ResponseSeqDate(1002, _, _)) ⇒ })
          }

          expectUpdate[UpdateMessageSent](0, Array.empty, UpdateMessageSent.header)(identity)
        }

        {
          implicit val clientData = clientData2

          expectUpdatesUnordered(ignoreUnmatched)(0, Array.empty, Set(UpdateMessage.header, UpdateCountersChanged.header)) {
            case (UpdateMessage.header, update) ⇒ parseUpdate[UpdateMessage](update)
            case (UpdateCountersChanged.header, update) ⇒ parseUpdate[UpdateCountersChanged](update)
          }
        }
      }
    }

    object pubsub {
      import DistributedPubSubMediator._

      val (user, authId, _) = createUser()
      val sessionId = createSessionId()

      implicit val clientData = ClientData(authId, sessionId, Some(user.id))

      val (user2, _, _) = createUser()

      val user2Model = getUserModel(user2.id)
      val user2AccessHash = ACLUtils.userAccessHash(authId, user2.id, user2Model.accessSalt)
      val user2Peer = peers.OutPeer(PeerType.Private, user2.id, user2AccessHash)

      // A private message must be published to both peers' pubsub topics.
      def publish() = {
        val probe = TestProbe()

        val topics = Seq(
          s"messaging.messages.private.${user.id}",
          s"messaging.messages.private.${user2.id}"
        )

        topics foreach { topic ⇒
          mediator.tell(Subscribe(topic, Some("testProbe"), probe.ref), probe.ref)
          probe.expectMsg(SubscribeAck(Subscribe(topic, Some("testProbe"), probe.ref)))
        }

        whenReady(service.handleSendMessage(user2Peer, Random.nextLong(), TextMessage("Hi PubSub", Vector.empty, None))) { resp ⇒
          probe.expectMsgClass(classOf[Events.PeerMessage])
          probe.expectMsgClass(classOf[Events.PeerMessage])
        }
      }
    }
  }
}
| chenbk85/actor-platform | actor-server/actor-tests/src/test/scala/im/actor/server/api/rpc/service/MessagingServiceSpec.scala | Scala | mit | 12,808 |
package rpn
import scala.annotation.tailrec
/**
 * Tiny reverse-polish-notation (RPN) calculator.
 *
 * Consecutive digits are accumulated into multi-digit numbers; any
 * non-digit, non-operator character terminates the current number and is
 * otherwise skipped, so free text may appear in the input (note that digits
 * inside such text are still parsed as numbers). Supported operators:
 * `+`, `-`/`−` (Unicode minus), `x`/`*`, `/`, `%`.
 */
object Rpn {

  def main(args: Array[String]): Unit = {
    println(rpn("35 1 2 + 4 x+13 −random comment2*5%")) // prints 3
  }

  /** Parses and evaluates the RPN expression in `s`. */
  def rpn(s: String): Int = {
    val parse = rpnParse(s)
    rpnSolve(parse)
  }

  /**
   * Tokenises `s` into a sequence of `Num` and `Opr` elements.
   *
   * `nbNum` tracks the net operand-stack depth (numbers started minus
   * operator reductions); a well-formed expression ends with depth 1.
   *
   * @throws IllegalArgumentException if the expression is malformed
   */
  def rpnParse(s: String): Seq[Elt] = {
    @tailrec
    def rpnParse_rec(s: List[Char], curNum: Option[Int], nbNum: Int, acc: Seq[Option[Elt]]): Seq[Elt] = {
      s match {
        case List() =>
          // Flush a trailing number before checking the final depth.
          // (Bug fix: a lone trailing number such as "5" used to be
          // silently dropped, yielding an empty token list and a crash
          // in rpnSolve.)
          if (nbNum == 1) (acc :+ (curNum map (Num(_)))).flatten
          else throw new IllegalArgumentException
        case x :: xs => {
          val elt = eltParse(x)
          elt match {
            case Some(Num(n)) => curNum match {
              // Start a new number, or extend the one being accumulated.
              case None => rpnParse_rec(xs, Some(n), nbNum + 1, acc)
              case Some(m) => rpnParse_rec(xs, Some(m * 10 + n), nbNum, acc)
            }
            case Some(Opr(_)) => {
              // An operator consumes two operands and produces one.
              if (nbNum >= 2)
                rpnParse_rec(xs, None, nbNum - 1, acc :+ (curNum map (Num(_))) :+ elt)
              else throw new IllegalArgumentException
            }
            // Any other character ends the current number and is skipped.
            case _ => rpnParse_rec(xs, None, nbNum, acc :+ (curNum map (Num(_))))
          }
        }
      }
    }
    rpnParse_rec(s.toList, None, 0, List())
  }

  /** Maps a single character to a token: operator, digit, or None (ignored). */
  def eltParse(arg: Char): Option[Elt] = arg match {
    case '+' => Some(Opr(_ + _))
    case '-' | '−' => Some(Opr(_ - _)) // ASCII hyphen or Unicode minus U+2212
    case 'x' | '*' => Some(Opr(_ * _))
    case '/' => Some(Opr(_ / _))
    case '%' => Some(Opr(_ % _))
    case n if n >= '0' && n <= '9' => Some(Num(n - '0')) // ASCII digits only, as before
    case _ => None
  }

  /**
   * Evaluates a token sequence with an explicit operand stack.
   * Assumes the sequence came from [[rpnParse]] and is therefore well formed.
   */
  def rpnSolve(args: Seq[Elt]): Int = {
    @tailrec
    def rpnSolve_rec(args: Seq[Elt], acc: List[Int]): Int = args match {
      case List() => acc.head
      case x :: xs => {
        x match {
          case Num(n) => rpnSolve_rec(xs, n :: acc)
          case Opr(op) => {
            // Top of stack is the RIGHT operand; the one below is the left.
            val (a :: b :: as) = acc
            rpnSolve_rec(xs, op(b, a) :: as)
          }
        }
      }
    }
    rpnSolve_rec(args, List())
  }
}
| HiinoFW/stuff | scala/rpn/Rpn.scala | Scala | mit | 2,143 |
package vectors4s.base
/**
* vectors4s
* Created by oruebenacker on 5/11/17.
*/
/**
 * Minimal algebra for an immutable vector with components indexed by `I`.
 *
 * `V` is the concrete self type (F-bounded), so arithmetic operations return
 * the implementing class rather than the abstract trait. Implementors supply
 * `indices`, `apply`, `*`, `+` and `zero`; everything else is derived.
 */
trait AbstractVector[I, V <: AbstractVector[I, V]] {
  /** All valid component indices of this vector. */
  def indices: Iterable[I]
  /** The component at `index`. */
  def apply(index: I): Double
  /** This vector scaled by `a`. */
  def *(a: Double): V
  /** This vector divided by `a`, expressed as scaling by the reciprocal. */
  def /(a: Double): V = this * (1.0 / a)
  /** Negation, expressed as scaling by -1. */
  def unary_- : V = this * (-1.0)
  /** Component-wise sum. */
  def +(o: V): V
  /** Component-wise difference, expressed as addition of the negation. */
  def -(o: V): V = this + (-o)
  /** Sum of squared components. */
  def lenSquared: Double = indices.map { index =>
    val component = apply(index)
    component * component
  }.sum
  /** Euclidean (L2) length. */
  def len: Double = Math.sqrt(lenSquared)
  /** This vector scaled to unit length. */
  def normalized: V = this / len
  /** The zero vector of the concrete type. */
  def zero: V
}
| curoli/vectors4s | src/main/scala/vectors4s/base/AbstractVector.scala | Scala | mit | 509 |
package pl.touk.nussknacker.engine.management.sample.transformer
import pl.touk.nussknacker.engine.api.{CustomStreamTransformer, LazyParameter, MethodToInvoke, ParamName}
/**
 * Sample custom stream transformer whose declared return type is `Void`,
 * i.e. it produces no value for downstream expressions. The `execute`
 * method body is intentionally empty; only the `expression` parameter's
 * lazy boolean type is of interest to the surrounding dev model.
 */
case object NoneReturnTypeTransformer extends CustomStreamTransformer {
  @MethodToInvoke(returnType = classOf[Void])
  def execute(@ParamName("expression") expression: LazyParameter[java.lang.Boolean]): Unit = {}
}
| TouK/nussknacker | engine/flink/management/dev-model/src/main/scala/pl/touk/nussknacker/engine/management/sample/transformer/NoneReturnTypeTransformer.scala | Scala | apache-2.0 | 389 |
// Databricks notebook source
// MAGIC %md
// MAGIC # [SDS-2.2-360-in-525-01: Intro to Apache Spark for data Scientists](https://lamastex.github.io/scalable-data-science/360-in-525/2018/01/)
// MAGIC ### [SDS-2.2, Scalable Data Science](https://lamastex.github.io/scalable-data-science/sds/2/2/)
// COMMAND ----------
// MAGIC %md
// MAGIC # Why Apache Spark?
// MAGIC
// MAGIC * [Apache Spark: A Unified Engine for Big Data Processing](https://cacm.acm.org/magazines/2016/11/209116-apache-spark/fulltext) By Matei Zaharia, Reynold S. Xin, Patrick Wendell, Tathagata Das, Michael Armbrust, Ankur Dave, Xiangrui Meng, Josh Rosen, Shivaram Venkataraman, Michael J. Franklin, Ali Ghodsi, Joseph Gonzalez, Scott Shenker, Ion Stoica
// MAGIC Communications of the ACM, Vol. 59 No. 11, Pages 56-65
// MAGIC 10.1145/2934664
// MAGIC
// MAGIC [](https://player.vimeo.com/video/185645796)
// MAGIC
// MAGIC Right-click the above image-link, open in a new tab and watch the video (4 minutes) or read about it in the Communications of the ACM in the frame below or from the link above.
// COMMAND ----------
//This allows easy embedding of publicly available information into any other notebook
//Example usage:
// displayHTML(frameIt("https://en.wikipedia.org/wiki/Latent_Dirichlet_allocation#Topics_in_LDA",250))
/**
 * Builds an HTML iframe snippet embedding the page at `u`, intended to be
 * passed to `displayHTML` so external content renders inline in the notebook.
 * The iframe is 95% wide and `h` pixels tall, with a plain fallback link for
 * browsers without frame support.
 *
 * @param u URL of the (publicly reachable) page to embed
 * @param h iframe height in pixels
 */
def frameIt( u:String, h:Int ) : String = {
      """<iframe
 src=""""+ u+""""
 width="95%" height="""" + h + """">
        <p>
          <a href="http://spark.apache.org/docs/latest/index.html">
            Fallback link for browsers that, unlikely, don't support frames
          </a>
        </p>
   </iframe>"""
  }
// Embed the CACM article "Apache Spark: A Unified Engine for Big Data Processing".
displayHTML(frameIt("https://cacm.acm.org/magazines/2016/11/209116-apache-spark/fulltext",600))
// COMMAND ----------
// MAGIC %md
// MAGIC # Some BDAS History behind Apache Spark
// MAGIC ## The Berkeley Data Analytics Stack is BDAS
// MAGIC ### Spark is a sub-stack of BDAS
// MAGIC
// MAGIC **Source:**
// MAGIC
// MAGIC * [Ion Stoica's State of Spark Union AmpCamp 6, Nov 2015](https://www.slideshare.net/secret/9ON8EEAlVKP3Sl)
// MAGIC * [Machine learning: Trends, perspectives, and prospects, M. I. Jordan, T. M. Mitchell, Science 17 Jul 2015: Vol. 349, Issue 6245, pp. 255-260, DOI: 10.1126/science.aaa8415](http://science.sciencemag.org/content/349/6245/255.full-text.pdf+html)
// MAGIC
// MAGIC ### BDAS State of The Union Talk by Ion Stoica, AMP Camp 6, Nov 2015
// MAGIC The followign talk outlines the motivation and insights behind BDAS' research approach and how they address the cross-disciplinary nature of Big Data challenges and current work.
// MAGIC * **watch later (5 mins.):**
// MAGIC
// MAGIC [](https://www.youtube.com/watch?v=s7kj9XzRBQk&start=91&end=386)
// MAGIC
// MAGIC ## key points
// MAGIC * started in 2011 with strong public-private funding
// MAGIC * Defense Advanced Research Projects Agency
// MAGIC   * Lawrence Berkeley Laboratory
// MAGIC * National Science Foundation
// MAGIC * Amazon Web Services
// MAGIC * Google
// MAGIC * SAP
// MAGIC * The Berkeley AMPLab is creating a new approach to data analytics to seamlessly integrate the three main resources available for making sense of data at scale:
// MAGIC * Algorithms (machine learning and statistical techniques),
// MAGIC * Machines (in the form of scalable clusters and elastic cloud computing), and
// MAGIC * People (both individually as analysts and in crowds).
// MAGIC * The lab is realizing its ideas through the development of a freely-available Open Source software stack called BDAS: the Berkeley Data Analytics Stack.
// MAGIC * Several components of BDAS have gained significant traction in industry and elsewhere, including:
// MAGIC * the Mesos cluster resource manager,
// MAGIC * the Spark in-memory computation framework, a sub-stack of the BDAS stack,
// MAGIC * and more...
// COMMAND ----------
// MAGIC %md
// MAGIC ### The big data problem, Hardware, distributing work, handling failed and slow machines
// MAGIC #### by Anthony Joseph in BerkeleyX/CS100.1x
// MAGIC
// MAGIC * **(watch now 1:48)**: The Big Data Problem
// MAGIC * [](https://www.youtube.com/watch?v=0JdJe5iehhw&modestbranding=1&start=1)
// MAGIC * **(watch now 1:43)**: Hardware for Big Data
// MAGIC * [](https://www.youtube.com/watch?v=KmIIMdsXGzc&rel=0&autoplay=1&modestbranding=1&start=1)
// MAGIC * **(watch now 1:17)**: How to distribute work across a cluster of commodity machines?
// MAGIC * [](https://www.youtube.com/watch?v=Euk1v3VtNcM&rel=0&autoplay=1&modestbranding=1&start=1)
// MAGIC * **(watch now 0:36)**: How to deal with failures or slow machines?
// MAGIC * [](https://www.youtube.com/watch?v=NaHNsPEK3KA&rel=0&autoplay=1&modestbranding=1&start=1)
// MAGIC
// COMMAND ----------
// MAGIC %md
// MAGIC ## MapReduce and Apache Spark.
// MAGIC #### by Anthony Joseph in BerkeleyX/CS100.1x
// MAGIC
// MAGIC * **(watch now 1:48)**: Map Reduce (is bounded by Disk I/O)
// MAGIC * [](https://www.youtube.com/watch?v=NqG_hYAKjYk&rel=0&autoplay=1&modestbranding=1&start=1)
// MAGIC * **(watch now 2:49)**: Apache Spark (uses Memory instead of Disk)
// MAGIC * [](https://www.youtube.com/watch?v=vat5Jki1lbI&rel=0&autoplay=1&modestbranding=1&start=1)
// MAGIC * **(watch now 3:00)**: Spark Versus MapReduce
// MAGIC * [](https://www.youtube.com/watch?v=Ddq3Gua2QFg&rel=0&autoplay=1&modestbranding=1&start=1)
// MAGIC * SUMMARY
// MAGIC   * uses memory instead of disk alone and is thus faster than Hadoop MapReduce
// MAGIC * resilience abstraction is by RDD (resilient distributed dataset)
// MAGIC * RDDs can be recovered upon failures from their *lineage graphs*, the recipes to make them starting from raw data
// MAGIC * Spark supports a lot more than MapReduce, including streaming, interactive in-memory querying, etc.
// MAGIC * Spark demonstrated an unprecedented sort of 1 petabyte (1,000 terabytes) worth of data in 234 minutes running on 190 Amazon EC2 instances (in 2015).
// MAGIC * Spark expertise corresponds to the highest Median Salary in the US (~ 150K)
// COMMAND ----------
// MAGIC %md
// MAGIC ## Key Papers
// MAGIC
// MAGIC * Key Historical Milestones
// MAGIC * 1956-1979: [Stanford, MIT, CMU, and other universities develop set/list operations in LISP, Prolog, and other languages for parallel processing](http://www-formal.stanford.edu/jmc/history/lisp/lisp.html)
// MAGIC * 2004: **READ**: [Google's MapReduce: Simplified Data Processing on Large Clusters, by Jeffrey Dean and Sanjay Ghemawat](http://research.google.com/archive/mapreduce.html)
// MAGIC * 2006: [Yahoo!'s Apache Hadoop, originating from the Yahoo!’s Nutch Project, Doug Cutting](http://developer.yahoo.com/hadoop/)
// MAGIC * 2009: [Cloud computing with Amazon Web Services Elastic MapReduce](http://aws.amazon.com/elasticmapreduce/), a Hadoop version modified for Amazon Elastic Cloud Computing (EC2) and Amazon Simple Storage System (S3), including support for Apache Hive and Pig.
// MAGIC * 2010: **READ**: [The Hadoop Distributed File System, by Konstantin Shvachko, Hairong Kuang, Sanjay Radia, and Robert Chansler. IEEE MSST](http://dx.doi.org/10.1109/MSST.2010.5496972)
// MAGIC * Apache Spark Core Papers
// MAGIC * 2010: [Spark: Cluster Computing with Working Sets, Matei Zaharia, Mosharaf Chowdhury, Michael J. Franklin, Scott Shenker, Ion Stoica. USENIX HotCloud](http://people.csail.mit.edu/matei/papers/2010/hotcloud_spark.pdf).
// MAGIC * 2012: **READ**: [Resilient Distributed Datasets: A Fault-Tolerant Abstraction for In-Memory Cluster Computing, Matei Zaharia, Mosharaf Chowdhury, Tathagata Das, Ankur Dave, Justin Ma, Murphy McCauley, Michael J. Franklin, Scott Shenker and Ion Stoica. NSDI](http://usenix.org/system/files/conference/nsdi12/nsdi12-final138.pdf)
// MAGIC * 2016: [Apache Spark: A Unified Engine for Big Data Processing](https://cacm.acm.org/magazines/2016/11/209116-apache-spark/fulltext) By Matei Zaharia, Reynold S. Xin, Patrick Wendell, Tathagata Das, Michael Armbrust, Ankur Dave, Xiangrui Meng, Josh Rosen, Shivaram Venkataraman, Michael J. Franklin, Ali Ghodsi, Joseph Gonzalez, Scott Shenker, Ion Stoica , Communications of the ACM, Vol. 59 No. 11, Pages 56-65, 10.1145/2934664
// MAGIC
// MAGIC 
// MAGIC
// MAGIC * Here are some directions the creators of Apache Spark at Berkeley and Stanford are currently (2018) taking:
// MAGIC * [Stanford's Dawn Lab](http://dawn.cs.stanford.edu/)
// MAGIC * [Berkeley's RISE lab](https://rise.cs.berkeley.edu/)
// MAGIC
// MAGIC * Listen to [The state of machine learning in Apache Spark, The O’Reilly Data Show Podcast: Ion Stoica and Matei Zaharia explore the rich ecosystem of analytic tools around Apache Spark. By Ben Lorica September 14, 2017](https://www.oreilly.com/ideas/the-state-of-machine-learning-in-apache-spark) for staying up to date.
// COMMAND ----------
// MAGIC %md
// MAGIC ***
// MAGIC ***
// MAGIC 55 minutes
// MAGIC 55 out of 90+10 minutes.
// MAGIC
// MAGIC We have come to the end of this section.
// MAGIC
// MAGIC **Next let us get everyone to login to databricks** to get our hands dirty with some Spark code!
// MAGIC
// MAGIC 10-15 minutes.
// MAGIC Then break for 5.
// MAGIC ***
// MAGIC ***
// COMMAND ----------
// MAGIC %md
// MAGIC
// MAGIC # To Stay Connected to Changes in Spark
// MAGIC
// MAGIC Subscribe to YouTube Channels:
// MAGIC
// MAGIC * [https://www.youtube.com/user/TheApacheSpark](https://www.youtube.com/user/TheApacheSpark)
// MAGIC * [databricks product-focused channel](https://www.youtube.com/channel/UC3q8O3Bh2Le8Rj1-Q-_UUbA)
// MAGIC
// MAGIC ## EXTRA: For a historical insight see excerpts from an interview with Ion Stoica
// MAGIC
// MAGIC #### Beginnings of Apache Spark and Databricks (academia-industry roots)
// MAGIC [](https://www.youtube.com/watch?v=bHH8sG-F9tg&rel=0&autoplay=1&modestbranding=1&start=231&end=365)
// MAGIC
// MAGIC #### Advantages of Apache Spark: A Unified System for Batch, Stream, Interactive / Ad Hoc or Graph Processing
// MAGIC [](https://www.youtube.com/watch?v=bHH8sG-F9tg&rel=0&autoplay=1&modestbranding=1&start=458&end=726)
// MAGIC
// MAGIC #### Main Goal of Databricks Cloud: To Make Big Data Easy
// MAGIC [](https://www.youtube.com/watch?v=bHH8sG-F9tg&rel=0&autoplay=1&modestbranding=1&start=890&end=985)
// MAGIC
// MAGIC ***
// MAGIC *** | lamastex/scalable-data-science | db/2/2/360-in-525-01/001_whySpark.scala | Scala | unlicense | 11,913 |
package java.nio
import scala.scalajs.js.typedarray._
/**
 * Direct `IntBuffer` backed by a JavaScript `Int32Array`.
 *
 * Capacity is fixed to the typed array's length. Most operations delegate to
 * the shared `GenBuffer` / `GenTypedArrayBuffer` implementations, which build
 * new instances of this class through the implicit factory below.
 */
private[nio] final class TypedArrayIntBuffer private (
    override private[nio] val _typedArray: Int32Array,
    _initialPosition: Int, _initialLimit: Int, _readOnly: Boolean)
    extends IntBuffer(_typedArray.length, null, -1) {
  position(_initialPosition)
  limit(_initialLimit)
  // Factory handed (implicitly) to the generic implementations so they can
  // create buffers of this concrete type.
  private[this] implicit def newTypedArrayIntBuffer =
    TypedArrayIntBuffer.NewTypedArrayIntBuffer
  def isReadOnly(): Boolean = _readOnly
  // Typed-array-backed buffers are always "direct".
  def isDirect(): Boolean = true
  // Views over the same underlying storage, delegated to the generic code.
  @noinline
  def slice(): IntBuffer =
    GenTypedArrayBuffer(this).generic_slice()
  @noinline
  def duplicate(): IntBuffer =
    GenTypedArrayBuffer(this).generic_duplicate()
  @noinline
  def asReadOnlyBuffer(): IntBuffer =
    GenTypedArrayBuffer(this).generic_asReadOnlyBuffer()
  // Relative and absolute single-element accessors.
  @noinline
  def get(): Int =
    GenBuffer(this).generic_get()
  @noinline
  def put(c: Int): IntBuffer =
    GenBuffer(this).generic_put(c)
  @noinline
  def get(index: Int): Int =
    GenBuffer(this).generic_get(index)
  @noinline
  def put(index: Int, c: Int): IntBuffer =
    GenBuffer(this).generic_put(index, c)
  // Bulk accessors.
  @noinline
  override def get(dst: Array[Int], offset: Int, length: Int): IntBuffer =
    GenBuffer(this).generic_get(dst, offset, length)
  @noinline
  override def put(src: Array[Int], offset: Int, length: Int): IntBuffer =
    GenBuffer(this).generic_put(src, offset, length)
  @noinline
  def compact(): IntBuffer =
    GenTypedArrayBuffer(this).generic_compact()
  // A typed array always uses the platform's native byte order.
  def order(): ByteOrder =
    ByteOrder.nativeOrder()
  // Internal API
  @inline
  override private[nio] def _arrayBuffer: ArrayBuffer =
    GenTypedArrayBuffer(this).generic_arrayBuffer
  @inline
  override private[nio] def _arrayBufferOffset: Int =
    GenTypedArrayBuffer(this).generic_arrayBufferOffset
  @inline
  override private[nio] def _dataView: DataView =
    GenTypedArrayBuffer(this).generic_dataView
  // Raw element load/store used by the generic implementations.
  @inline
  private[nio] def load(index: Int): Int =
    _typedArray(index)
  @inline
  private[nio] def store(index: Int, elem: Int): Unit =
    _typedArray(index) = elem
  @inline
  override private[nio] def load(startIndex: Int,
      dst: Array[Int], offset: Int, length: Int): Unit =
    GenBuffer(this).generic_load(startIndex, dst, offset, length)
  @inline
  override private[nio] def store(startIndex: Int,
      src: Array[Int], offset: Int, length: Int): Unit =
    GenBuffer(this).generic_store(startIndex, src, offset, length)
}
private[nio] object TypedArrayIntBuffer {
  // Implicit factory consumed by GenTypedArrayBuffer: knows the element
  // width (4 bytes per Int) and how to wrap an Int32Array in a buffer.
  private[nio] implicit object NewTypedArrayIntBuffer
      extends GenTypedArrayBuffer.NewTypedArrayBuffer[IntBuffer] {
    def bytesPerElem: Int = 4
    def apply(typedArray: Int32Array,
        initialPosition: Int, initialLimit: Int,
        readOnly: Boolean): TypedArrayIntBuffer = {
      new TypedArrayIntBuffer(typedArray,
          initialPosition, initialLimit, readOnly)
    }
    @inline
    def newTypedArray(buffer: ArrayBuffer,
        byteOffset: Int, length: Int): Int32Array = {
      new Int32Array(buffer, byteOffset, length)
    }
  }
  // Views an existing byte buffer's storage as ints (native byte order).
  @inline
  def fromTypedArrayByteBuffer(byteBuffer: TypedArrayByteBuffer): IntBuffer =
    GenTypedArrayBuffer.generic_fromTypedArrayByteBuffer(byteBuffer)
  // Wraps a whole Int32Array: position 0, limit = length, writable.
  def wrap(array: Int32Array): IntBuffer =
    new TypedArrayIntBuffer(array, 0, array.length, false)
}
| jmnarloch/scala-js | javalib/src/main/scala/java/nio/TypedArrayIntBuffer.scala | Scala | bsd-3-clause | 3,333 |
object Test extends App {
  // Flattening a view of nested Vectors and forcing it must preserve the
  // specific collection type (Vector[Int]) — checked by the type ascription.
  val forced: Vector[Int] = Vector(Vector(1, 2), Vector(3, 4)).view.flatten.force
  // Flattening a view of nested Seqs must itself stay lazy, i.e. yield a
  // SeqView rather than a strict collection; prints "true" if so.
  val flattened = Seq(Seq(1, 2), Seq(3, 4)).view.flatten
  Console.println(flattened.isInstanceOf[collection.SeqView[_, _]])
}
| felixmulder/scala | test/files/run/t5201.scala | Scala | bsd-3-clause | 308 |
package au.com.onegeek.respite
import org.scalatest._
// Base class for acceptance tests: WordSpec style with ScalaTest matcher
// syntax and before/after hooks. Concrete suites extend this.
abstract class AcceptanceSpec extends WordSpec with Matchers with BeforeAndAfter
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import java.sql.Timestamp
import org.apache.spark.sql.catalyst.analysis.TypeCoercion._
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.{Rule, RuleExecutor}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.CalendarInterval
class TypeCoercionSuite extends PlanTest {
// scalastyle:off line.size.limit
// The following table shows all implicit data type conversions that are not visible to the user.
// +----------------------+----------+-----------+-------------+----------+------------+-----------+------------+------------+-------------+------------+----------+---------------+------------+----------+-------------+----------+----------------------+---------------------+-------------+--------------+
// | Source Type\\CAST TO | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | BinaryType | BooleanType | StringType | DateType | TimestampType | ArrayType | MapType | StructType | NullType | CalendarIntervalType | DecimalType | NumericType | IntegralType |
// +----------------------+----------+-----------+-------------+----------+------------+-----------+------------+------------+-------------+------------+----------+---------------+------------+----------+-------------+----------+----------------------+---------------------+-------------+--------------+
// | ByteType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(3, 0) | ByteType | ByteType |
// | ShortType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(5, 0) | ShortType | ShortType |
// | IntegerType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(10, 0) | IntegerType | IntegerType |
// | LongType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(20, 0) | LongType | LongType |
// | DoubleType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(30, 15) | DoubleType | IntegerType |
// | FloatType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(14, 7) | FloatType | IntegerType |
// | Dec(10, 2) | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(10, 2) | Dec(10, 2) | IntegerType |
// | BinaryType | X | X | X | X | X | X | X | BinaryType | X | StringType | X | X | X | X | X | X | X | X | X | X |
// | BooleanType | X | X | X | X | X | X | X | X | BooleanType | StringType | X | X | X | X | X | X | X | X | X | X |
// | StringType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | BinaryType | X | StringType | DateType | TimestampType | X | X | X | X | X | DecimalType(38, 18) | DoubleType | X |
// | DateType | X | X | X | X | X | X | X | X | X | StringType | DateType | TimestampType | X | X | X | X | X | X | X | X |
// | TimestampType | X | X | X | X | X | X | X | X | X | StringType | DateType | TimestampType | X | X | X | X | X | X | X | X |
// | ArrayType | X | X | X | X | X | X | X | X | X | X | X | X | ArrayType* | X | X | X | X | X | X | X |
// | MapType | X | X | X | X | X | X | X | X | X | X | X | X | X | MapType* | X | X | X | X | X | X |
// | StructType | X | X | X | X | X | X | X | X | X | X | X | X | X | X | StructType* | X | X | X | X | X |
// | NullType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | BinaryType | BooleanType | StringType | DateType | TimestampType | ArrayType | MapType | StructType | NullType | CalendarIntervalType | DecimalType(38, 18) | DoubleType | IntegerType |
// | CalendarIntervalType | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | CalendarIntervalType | X | X | X |
// +----------------------+----------+-----------+-------------+----------+------------+-----------+------------+------------+-------------+------------+----------+---------------+------------+----------+-------------+----------+----------------------+---------------------+-------------+--------------+
// Note: MapType*, StructType* are castable only when the internal child types also match; otherwise, not castable.
// Note: ArrayType* is castable when the element type is castable according to the table.
// scalastyle:on line.size.limit
  // Asserts that `from` is implicitly castable to `to`, with result type
  // `expected`, for both a non-null default value and a null literal of the
  // source type. NOTE(review): the two checks use different equality helpers
  // (equalsIgnoreCompatibleNullability vs equalsIgnoreCaseAndNullability) —
  // presumably intentional, since the null case cannot be non-nullable.
  private def shouldCast(from: DataType, to: AbstractDataType, expected: DataType): Unit = {
    // Check default value
    val castDefault = TypeCoercion.ImplicitTypeCasts.implicitCast(default(from), to)
    assert(DataType.equalsIgnoreCompatibleNullability(
      castDefault.map(_.dataType).getOrElse(null), expected),
      s"Failed to cast $from to $to")
    // Check null value
    val castNull = TypeCoercion.ImplicitTypeCasts.implicitCast(createNull(from), to)
    assert(DataType.equalsIgnoreCaseAndNullability(
      castNull.map(_.dataType).getOrElse(null), expected),
      s"Failed to cast $from to $to")
  }
  // Asserts that no implicit cast exists from `from` to `to`, for both a
  // default value and a null literal of the source type.
  private def shouldNotCast(from: DataType, to: AbstractDataType): Unit = {
    // Check default value
    val castDefault = TypeCoercion.ImplicitTypeCasts.implicitCast(default(from), to)
    assert(castDefault.isEmpty, s"Should not be able to cast $from to $to, but got $castDefault")
    // Check null value
    val castNull = TypeCoercion.ImplicitTypeCasts.implicitCast(createNull(from), to)
    assert(castNull.isEmpty, s"Should not be able to cast $from to $to, but got $castNull")
  }
  // Builds a non-null literal expression of the given type. Arrays and maps
  // are constructed via CreateArray/CreateMap so their children carry the
  // element/key/value types.
  private def default(dataType: DataType): Expression = dataType match {
    case ArrayType(internalType: DataType, _) =>
      CreateArray(Seq(Literal.default(internalType)))
    case MapType(keyDataType: DataType, valueDataType: DataType, _) =>
      CreateMap(Seq(Literal.default(keyDataType), Literal.default(valueDataType)))
    case _ => Literal.default(dataType)
  }
  // Like `default`, but produces null literal children, so the resulting
  // expression is typed yet null-valued.
  private def createNull(dataType: DataType): Expression = dataType match {
    case ArrayType(internalType: DataType, _) =>
      CreateArray(Seq(Literal.create(null, internalType)))
    case MapType(keyDataType: DataType, valueDataType: DataType, _) =>
      CreateMap(Seq(Literal.create(null, keyDataType), Literal.create(null, valueDataType)))
    case _ => Literal.create(null, dataType)
  }
  // Type groups used to enumerate cast sources/targets in the tests below.
  val integralTypes: Seq[DataType] =
    Seq(ByteType, ShortType, IntegerType, LongType)
  val fractionalTypes: Seq[DataType] =
    Seq(DoubleType, FloatType, DecimalType.SYSTEM_DEFAULT, DecimalType(10, 2))
  val numericTypes: Seq[DataType] = integralTypes ++ fractionalTypes
  val atomicTypes: Seq[DataType] =
    numericTypes ++ Seq(BinaryType, BooleanType, StringType, DateType, TimestampType)
  val complexTypes: Seq[DataType] =
    Seq(ArrayType(IntegerType),
      ArrayType(StringType),
      MapType(StringType, StringType),
      new StructType().add("a1", StringType),
      new StructType().add("a1", StringType).add("a2", IntegerType))
  // Every type exercised by checkTypeCasting below.
  val allTypes: Seq[DataType] =
    atomicTypes ++ complexTypes ++ Seq(NullType, CalendarIntervalType)
  // Check whether the type `checkedType` can be cast to all the types in `castableTypes`,
  // but cannot be cast to the other types in `allTypes`. This gives each test
  // positive AND negative coverage from one list of expected targets.
  private def checkTypeCasting(checkedType: DataType, castableTypes: Seq[DataType]): Unit = {
    val nonCastableTypes = allTypes.filterNot(castableTypes.contains)
    castableTypes.foreach { tpe =>
      shouldCast(checkedType, tpe, tpe)
    }
    nonCastableTypes.foreach { tpe =>
      shouldNotCast(checkedType, tpe)
    }
  }
private def checkWidenType(
widenFunc: (DataType, DataType) => Option[DataType],
t1: DataType,
t2: DataType,
expected: Option[DataType]): Unit = {
var found = widenFunc(t1, t2)
assert(found == expected,
s"Expected $expected as wider common type for $t1 and $t2, found $found")
// Test both directions to make sure the widening is symmetric.
found = widenFunc(t2, t1)
assert(found == expected,
s"Expected $expected as wider common type for $t2 and $t1, found $found")
}
  // Each integral source must cast to every numeric type and to StringType,
  // and pick the documented decimal / abstract-type targets.
  test("implicit type cast - ByteType") {
    val checkedType = ByteType
    checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
    shouldCast(checkedType, DecimalType, DecimalType.ByteDecimal)
    shouldCast(checkedType, NumericType, checkedType)
    shouldCast(checkedType, IntegralType, checkedType)
  }

  test("implicit type cast - ShortType") {
    val checkedType = ShortType
    checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
    shouldCast(checkedType, DecimalType, DecimalType.ShortDecimal)
    shouldCast(checkedType, NumericType, checkedType)
    shouldCast(checkedType, IntegralType, checkedType)
  }
test("implicit type cast - IntegerType") {
val checkedType = IntegerType
checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
shouldCast(IntegerType, DecimalType, DecimalType.IntDecimal)
shouldCast(checkedType, NumericType, checkedType)
shouldCast(checkedType, IntegralType, checkedType)
}
  test("implicit type cast - LongType") {
    val checkedType = LongType
    checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
    shouldCast(checkedType, DecimalType, DecimalType.LongDecimal)
    shouldCast(checkedType, NumericType, checkedType)
    shouldCast(checkedType, IntegralType, checkedType)
  }

  // Fractional sources widen like the integral ones, but must NOT be accepted
  // where the abstract IntegralType is required.
  test("implicit type cast - FloatType") {
    val checkedType = FloatType
    checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
    shouldCast(checkedType, DecimalType, DecimalType.FloatDecimal)
    shouldCast(checkedType, NumericType, checkedType)
    shouldNotCast(checkedType, IntegralType)
  }

  test("implicit type cast - DoubleType") {
    val checkedType = DoubleType
    checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
    shouldCast(checkedType, DecimalType, DecimalType.DoubleDecimal)
    shouldCast(checkedType, NumericType, checkedType)
    shouldNotCast(checkedType, IntegralType)
  }

  // A concrete decimal keeps its own precision/scale as the DecimalType target.
  test("implicit type cast - DecimalType(10, 2)") {
    val checkedType = DecimalType(10, 2)
    checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
    shouldCast(checkedType, DecimalType, checkedType)
    shouldCast(checkedType, NumericType, checkedType)
    shouldNotCast(checkedType, IntegralType)
  }
  // Binary and boolean cast only to themselves and to StringType.
  test("implicit type cast - BinaryType") {
    val checkedType = BinaryType
    checkTypeCasting(checkedType, castableTypes = Seq(checkedType, StringType))
    shouldNotCast(checkedType, DecimalType)
    shouldNotCast(checkedType, NumericType)
    shouldNotCast(checkedType, IntegralType)
  }

  test("implicit type cast - BooleanType") {
    val checkedType = BooleanType
    checkTypeCasting(checkedType, castableTypes = Seq(checkedType, StringType))
    shouldNotCast(checkedType, DecimalType)
    shouldNotCast(checkedType, NumericType)
    shouldNotCast(checkedType, IntegralType)
  }

  // Strings cast to every atomic type EXCEPT boolean, and never to complex,
  // null or interval types; the abstract NumericType target resolves to the
  // default concrete numeric type.
  test("implicit type cast - StringType") {
    val checkedType = StringType
    val nonCastableTypes =
      complexTypes ++ Seq(BooleanType, NullType, CalendarIntervalType)
    checkTypeCasting(checkedType, castableTypes = allTypes.filterNot(nonCastableTypes.contains))
    shouldCast(checkedType, DecimalType, DecimalType.SYSTEM_DEFAULT)
    shouldCast(checkedType, NumericType, NumericType.defaultConcreteType)
    shouldNotCast(checkedType, IntegralType)
  }

  // Date and timestamp cast to each other and to StringType only.
  test("implicit type cast - DateType") {
    val checkedType = DateType
    checkTypeCasting(checkedType, castableTypes = Seq(checkedType, StringType, TimestampType))
    shouldNotCast(checkedType, DecimalType)
    shouldNotCast(checkedType, NumericType)
    shouldNotCast(checkedType, IntegralType)
  }

  test("implicit type cast - TimestampType") {
    val checkedType = TimestampType
    checkTypeCasting(checkedType, castableTypes = Seq(checkedType, StringType, DateType))
    shouldNotCast(checkedType, DecimalType)
    shouldNotCast(checkedType, NumericType)
    shouldNotCast(checkedType, IntegralType)
  }
  // Arrays are castable element-wise (per the table at the top of the file),
  // but a non-nullable array must not cast to a different non-nullable
  // element type.
  test("implicit type cast - ArrayType(StringType)") {
    val checkedType = ArrayType(StringType)
    val nonCastableTypes =
      complexTypes ++ Seq(BooleanType, NullType, CalendarIntervalType)
    checkTypeCasting(checkedType,
      castableTypes = allTypes.filterNot(nonCastableTypes.contains).map(ArrayType(_)))
    nonCastableTypes.map(ArrayType(_)).foreach(shouldNotCast(checkedType, _))
    shouldNotCast(ArrayType(DoubleType, containsNull = false),
      ArrayType(LongType, containsNull = false))
    shouldNotCast(checkedType, DecimalType)
    shouldNotCast(checkedType, NumericType)
    shouldNotCast(checkedType, IntegralType)
  }

  // Maps and structs only cast to themselves (internal types must match).
  test("implicit type cast - MapType(StringType, StringType)") {
    val checkedType = MapType(StringType, StringType)
    checkTypeCasting(checkedType, castableTypes = Seq(checkedType))
    shouldNotCast(checkedType, DecimalType)
    shouldNotCast(checkedType, NumericType)
    shouldNotCast(checkedType, IntegralType)
  }

  test("implicit type cast - StructType().add(\\"a1\\", StringType)") {
    val checkedType = new StructType().add("a1", StringType)
    checkTypeCasting(checkedType, castableTypes = Seq(checkedType))
    shouldNotCast(checkedType, DecimalType)
    shouldNotCast(checkedType, NumericType)
    shouldNotCast(checkedType, IntegralType)
  }

  // NullType casts to everything; abstract targets resolve to their defaults.
  test("implicit type cast - NullType") {
    val checkedType = NullType
    checkTypeCasting(checkedType, castableTypes = allTypes)
    shouldCast(checkedType, DecimalType, DecimalType.SYSTEM_DEFAULT)
    shouldCast(checkedType, NumericType, NumericType.defaultConcreteType)
    shouldCast(checkedType, IntegralType, IntegralType.defaultConcreteType)
  }

  // Intervals cast only to themselves.
  test("implicit type cast - CalendarIntervalType") {
    val checkedType = CalendarIntervalType
    checkTypeCasting(checkedType, castableTypes = Seq(checkedType))
    shouldNotCast(checkedType, DecimalType)
    shouldNotCast(checkedType, NumericType)
    shouldNotCast(checkedType, IntegralType)
  }
  // TypeCollection: an exact member match wins regardless of order; otherwise
  // the first member the source can be cast to is chosen.
  test("eligible implicit type cast - TypeCollection") {
    shouldCast(NullType, TypeCollection(StringType, BinaryType), StringType)

    shouldCast(StringType, TypeCollection(StringType, BinaryType), StringType)
    shouldCast(BinaryType, TypeCollection(StringType, BinaryType), BinaryType)
    shouldCast(StringType, TypeCollection(BinaryType, StringType), StringType)

    shouldCast(IntegerType, TypeCollection(IntegerType, BinaryType), IntegerType)
    shouldCast(IntegerType, TypeCollection(BinaryType, IntegerType), IntegerType)
    shouldCast(BinaryType, TypeCollection(BinaryType, IntegerType), BinaryType)
    shouldCast(BinaryType, TypeCollection(IntegerType, BinaryType), BinaryType)

    // No exact match: IntegerType falls back to the castable member.
    shouldCast(IntegerType, TypeCollection(StringType, BinaryType), StringType)
    shouldCast(IntegerType, TypeCollection(BinaryType, StringType), StringType)

    shouldCast(DecimalType.SYSTEM_DEFAULT,
      TypeCollection(IntegerType, DecimalType), DecimalType.SYSTEM_DEFAULT)
    shouldCast(DecimalType(10, 2), TypeCollection(IntegerType, DecimalType), DecimalType(10, 2))
    shouldCast(DecimalType(10, 2), TypeCollection(DecimalType, IntegerType), DecimalType(10, 2))
    shouldCast(IntegerType, TypeCollection(DecimalType(10, 2), StringType), DecimalType(10, 2))

    shouldCast(StringType, TypeCollection(NumericType, BinaryType), DoubleType)

    // Array nullability (containsNull) must be preserved through the cast.
    shouldCast(
      ArrayType(StringType, false),
      TypeCollection(ArrayType(StringType), StringType),
      ArrayType(StringType, false))

    shouldCast(
      ArrayType(StringType, true),
      TypeCollection(ArrayType(StringType), StringType),
      ArrayType(StringType, true))
  }

  test("ineligible implicit type cast - TypeCollection") {
    shouldNotCast(IntegerType, TypeCollection(DateType, TimestampType))
  }
  test("tightest common bound for types") {
    // Exercises TypeCoercion.findTightestCommonType: `expected = None` means the
    // pair has no lossless common type (string promotion and decimal widening
    // are intentionally out of scope for this finder).
    def widenTest(t1: DataType, t2: DataType, expected: Option[DataType]): Unit =
      checkWidenType(TypeCoercion.findTightestCommonType, t1, t2, expected)

    // Null
    widenTest(NullType, NullType, Some(NullType))

    // Boolean
    widenTest(NullType, BooleanType, Some(BooleanType))
    widenTest(BooleanType, BooleanType, Some(BooleanType))
    widenTest(IntegerType, BooleanType, None)
    widenTest(LongType, BooleanType, None)

    // Integral
    widenTest(NullType, ByteType, Some(ByteType))
    widenTest(NullType, IntegerType, Some(IntegerType))
    widenTest(NullType, LongType, Some(LongType))
    widenTest(ShortType, IntegerType, Some(IntegerType))
    widenTest(ShortType, LongType, Some(LongType))
    widenTest(IntegerType, LongType, Some(LongType))
    widenTest(LongType, LongType, Some(LongType))

    // Floating point
    widenTest(NullType, FloatType, Some(FloatType))
    widenTest(NullType, DoubleType, Some(DoubleType))
    widenTest(FloatType, DoubleType, Some(DoubleType))
    widenTest(FloatType, FloatType, Some(FloatType))
    widenTest(DoubleType, DoubleType, Some(DoubleType))

    // Integral mixed with floating point.
    widenTest(IntegerType, FloatType, Some(FloatType))
    widenTest(IntegerType, DoubleType, Some(DoubleType))
    widenTest(IntegerType, DoubleType, Some(DoubleType))
    widenTest(LongType, FloatType, Some(FloatType))
    widenTest(LongType, DoubleType, Some(DoubleType))

    // No up-casting for fixed-precision decimal (this is handled by arithmetic rules)
    widenTest(DecimalType(2, 1), DecimalType(3, 2), None)
    widenTest(DecimalType(2, 1), DoubleType, None)
    widenTest(DecimalType(2, 1), IntegerType, None)
    widenTest(DoubleType, DecimalType(2, 1), None)

    // StringType
    widenTest(NullType, StringType, Some(StringType))
    widenTest(StringType, StringType, Some(StringType))
    widenTest(IntegerType, StringType, None)
    widenTest(LongType, StringType, None)

    // TimestampType
    widenTest(NullType, TimestampType, Some(TimestampType))
    widenTest(TimestampType, TimestampType, Some(TimestampType))
    widenTest(DateType, TimestampType, Some(TimestampType))
    widenTest(IntegerType, TimestampType, None)
    widenTest(StringType, TimestampType, None)

    // ComplexType: NullType widens to any complex type, but complex types do
    // not widen to each other or to atomic types.
    widenTest(NullType,
      MapType(IntegerType, StringType, false),
      Some(MapType(IntegerType, StringType, false)))
    widenTest(NullType, StructType(Seq()), Some(StructType(Seq())))
    widenTest(StringType, MapType(IntegerType, StringType, true), None)
    widenTest(ArrayType(IntegerType), StructType(Seq()), None)
  }
  test("wider common type for decimal and array") {
    // findWiderTypeForTwo additionally allows (possibly lossy) decimal widening
    // and string promotion; the *WithoutStringPromotion variant allows the
    // former but not the latter.
    def widenTestWithStringPromotion(
        t1: DataType,
        t2: DataType,
        expected: Option[DataType]): Unit = {
      checkWidenType(TypeCoercion.findWiderTypeForTwo, t1, t2, expected)
    }

    def widenTestWithoutStringPromotion(
        t1: DataType,
        t2: DataType,
        expected: Option[DataType]): Unit = {
      checkWidenType(TypeCoercion.findWiderTypeWithoutStringPromotionForTwo, t1, t2, expected)
    }

    // Decimal
    widenTestWithStringPromotion(
      DecimalType(2, 1), DecimalType(3, 2), Some(DecimalType(3, 2)))
    widenTestWithStringPromotion(
      DecimalType(2, 1), DoubleType, Some(DoubleType))
    widenTestWithStringPromotion(
      DecimalType(2, 1), IntegerType, Some(DecimalType(11, 1)))
    widenTestWithStringPromotion(
      DecimalType(2, 1), LongType, Some(DecimalType(21, 1)))

    // ArrayType: element types widen recursively, and the result is nullable if
    // either input's elements are.
    widenTestWithStringPromotion(
      ArrayType(ShortType, containsNull = true),
      ArrayType(DoubleType, containsNull = false),
      Some(ArrayType(DoubleType, containsNull = true)))
    widenTestWithStringPromotion(
      ArrayType(TimestampType, containsNull = false),
      ArrayType(StringType, containsNull = true),
      Some(ArrayType(StringType, containsNull = true)))
    widenTestWithStringPromotion(
      ArrayType(ArrayType(IntegerType), containsNull = false),
      ArrayType(ArrayType(LongType), containsNull = false),
      Some(ArrayType(ArrayType(LongType), containsNull = false)))

    // Without string promotion
    widenTestWithoutStringPromotion(IntegerType, StringType, None)
    widenTestWithoutStringPromotion(StringType, TimestampType, None)
    widenTestWithoutStringPromotion(ArrayType(LongType), ArrayType(StringType), None)
    widenTestWithoutStringPromotion(ArrayType(StringType), ArrayType(TimestampType), None)

    // String promotion
    widenTestWithStringPromotion(IntegerType, StringType, Some(StringType))
    widenTestWithStringPromotion(StringType, TimestampType, Some(StringType))
    widenTestWithStringPromotion(
      ArrayType(LongType), ArrayType(StringType), Some(ArrayType(StringType)))
    widenTestWithStringPromotion(
      ArrayType(StringType), ArrayType(TimestampType), Some(ArrayType(StringType)))
  }
private def ruleTest(rule: Rule[LogicalPlan], initial: Expression, transformed: Expression) {
ruleTest(Seq(rule), initial, transformed)
}
private def ruleTest(
rules: Seq[Rule[LogicalPlan]],
initial: Expression,
transformed: Expression): Unit = {
val testRelation = LocalRelation(AttributeReference("a", IntegerType)())
val analyzer = new RuleExecutor[LogicalPlan] {
override val batches = Seq(Batch("Resolution", FixedPoint(3), rules: _*))
}
comparePlans(
analyzer.execute(Project(Seq(Alias(initial, "a")()), testRelation)),
Project(Seq(Alias(transformed, "a")()), testRelation))
}
  test("cast NullType for expressions that implement ExpectsInputTypes") {
    import TypeCoercionSuite._

    // An AnyDataType slot accepts NullType as-is: no cast inserted.
    ruleTest(TypeCoercion.ImplicitTypeCasts,
      AnyTypeUnaryExpression(Literal.create(null, NullType)),
      AnyTypeUnaryExpression(Literal.create(null, NullType)))

    // A NumericType slot coerces a null literal to the default numeric type
    // (double).
    ruleTest(TypeCoercion.ImplicitTypeCasts,
      NumericTypeUnaryExpression(Literal.create(null, NullType)),
      NumericTypeUnaryExpression(Literal.create(null, DoubleType)))
  }
  test("cast NullType for binary operators") {
    import TypeCoercionSuite._

    // AnyDataType operands: null literals are left untouched.
    ruleTest(TypeCoercion.ImplicitTypeCasts,
      AnyTypeBinaryOperator(Literal.create(null, NullType), Literal.create(null, NullType)),
      AnyTypeBinaryOperator(Literal.create(null, NullType), Literal.create(null, NullType)))

    // NumericType operands: both null literals become double.
    ruleTest(TypeCoercion.ImplicitTypeCasts,
      NumericTypeBinaryOperator(Literal.create(null, NullType), Literal.create(null, NullType)),
      NumericTypeBinaryOperator(Literal.create(null, DoubleType), Literal.create(null, DoubleType)))
  }
  test("coalesce casts") {
    // FunctionArgumentConversion must cast every Coalesce argument to one
    // common wider type.
    val rule = TypeCoercion.FunctionArgumentConversion

    val intLit = Literal(1)
    val longLit = Literal.create(1L)
    val doubleLit = Literal(1.0)
    val stringLit = Literal.create("c", StringType)
    val nullLit = Literal.create(null, NullType)
    val floatNullLit = Literal.create(null, FloatType)
    val floatLit = Literal.create(1.0f, FloatType)
    val timestampLit = Literal.create("2017-04-12", TimestampType)
    // 22 digits: wider than what a long can hold, forcing DecimalType(22, 0).
    val decimalLit = Literal(new java.math.BigDecimal("1000000000000000000000"))

    ruleTest(rule,
      Coalesce(Seq(doubleLit, intLit, floatLit)),
      Coalesce(Seq(Cast(doubleLit, DoubleType),
        Cast(intLit, DoubleType), Cast(floatLit, DoubleType))))

    ruleTest(rule,
      Coalesce(Seq(longLit, intLit, decimalLit)),
      Coalesce(Seq(Cast(longLit, DecimalType(22, 0)),
        Cast(intLit, DecimalType(22, 0)), Cast(decimalLit, DecimalType(22, 0)))))

    ruleTest(rule,
      Coalesce(Seq(nullLit, intLit)),
      Coalesce(Seq(Cast(nullLit, IntegerType), Cast(intLit, IntegerType))))

    ruleTest(rule,
      Coalesce(Seq(timestampLit, stringLit)),
      Coalesce(Seq(Cast(timestampLit, StringType), Cast(stringLit, StringType))))

    ruleTest(rule,
      Coalesce(Seq(nullLit, floatNullLit, intLit)),
      Coalesce(Seq(Cast(nullLit, FloatType), Cast(floatNullLit, FloatType),
        Cast(intLit, FloatType))))

    ruleTest(rule,
      Coalesce(Seq(nullLit, intLit, decimalLit, doubleLit)),
      Coalesce(Seq(Cast(nullLit, DoubleType), Cast(intLit, DoubleType),
        Cast(decimalLit, DoubleType), Cast(doubleLit, DoubleType))))

    ruleTest(rule,
      Coalesce(Seq(nullLit, floatNullLit, doubleLit, stringLit)),
      Coalesce(Seq(Cast(nullLit, StringType), Cast(floatNullLit, StringType),
        Cast(doubleLit, StringType), Cast(stringLit, StringType))))
  }
  test("CreateArray casts") {
    // All array elements must be cast to a single common element type.
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      CreateArray(Literal(1.0)
        :: Literal(1)
        :: Literal.create(1.0, FloatType)
        :: Nil),
      CreateArray(Cast(Literal(1.0), DoubleType)
        :: Cast(Literal(1), DoubleType)
        :: Cast(Literal.create(1.0, FloatType), DoubleType)
        :: Nil))

    // Mixing numerics with a string promotes everything to string.
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      CreateArray(Literal(1.0)
        :: Literal(1)
        :: Literal("a")
        :: Nil),
      CreateArray(Cast(Literal(1.0), StringType)
        :: Cast(Literal(1), StringType)
        :: Cast(Literal("a"), StringType)
        :: Nil))

    // Decimal + integer widens to a decimal that can hold both.
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      CreateArray(Literal.create(null, DecimalType(5, 3))
        :: Literal(1)
        :: Nil),
      CreateArray(Literal.create(null, DecimalType(5, 3)).cast(DecimalType(13, 3))
        :: Literal(1).cast(DecimalType(13, 3))
        :: Nil))

    // Several decimals widen to the maximum precision/scale among them.
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      CreateArray(Literal.create(null, DecimalType(5, 3))
        :: Literal.create(null, DecimalType(22, 10))
        :: Literal.create(null, DecimalType(38, 38))
        :: Nil),
      CreateArray(Literal.create(null, DecimalType(5, 3)).cast(DecimalType(38, 38))
        :: Literal.create(null, DecimalType(22, 10)).cast(DecimalType(38, 38))
        :: Literal.create(null, DecimalType(38, 38)).cast(DecimalType(38, 38))
        :: Nil))
  }
  test("CreateMap casts") {
    // CreateMap arguments alternate key, value, key, value, ...; keys and
    // values are each coerced to their own common type independently.

    // type coercion for map keys
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      CreateMap(Literal(1)
        :: Literal("a")
        :: Literal.create(2.0, FloatType)
        :: Literal("b")
        :: Nil),
      CreateMap(Cast(Literal(1), FloatType)
        :: Literal("a")
        :: Cast(Literal.create(2.0, FloatType), FloatType)
        :: Literal("b")
        :: Nil))
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      CreateMap(Literal.create(null, DecimalType(5, 3))
        :: Literal("a")
        :: Literal.create(2.0, FloatType)
        :: Literal("b")
        :: Nil),
      CreateMap(Literal.create(null, DecimalType(5, 3)).cast(DoubleType)
        :: Literal("a")
        :: Literal.create(2.0, FloatType).cast(DoubleType)
        :: Literal("b")
        :: Nil))

    // type coercion for map values
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      CreateMap(Literal(1)
        :: Literal("a")
        :: Literal(2)
        :: Literal(3.0)
        :: Nil),
      CreateMap(Literal(1)
        :: Cast(Literal("a"), StringType)
        :: Literal(2)
        :: Cast(Literal(3.0), StringType)
        :: Nil))
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      CreateMap(Literal(1)
        :: Literal.create(null, DecimalType(38, 0))
        :: Literal(2)
        :: Literal.create(null, DecimalType(38, 38))
        :: Nil),
      CreateMap(Literal(1)
        :: Literal.create(null, DecimalType(38, 0)).cast(DecimalType(38, 38))
        :: Literal(2)
        :: Literal.create(null, DecimalType(38, 38)).cast(DecimalType(38, 38))
        :: Nil))

    // type coercion for both map keys and values
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      CreateMap(Literal(1)
        :: Literal("a")
        :: Literal(2.0)
        :: Literal(3.0)
        :: Nil),
      CreateMap(Cast(Literal(1), DoubleType)
        :: Cast(Literal("a"), StringType)
        :: Cast(Literal(2.0), DoubleType)
        :: Cast(Literal(3.0), StringType)
        :: Nil))
  }
  test("greatest/least cast") {
    // Greatest and Least share the same argument-coercion behavior, so the
    // same expectations are run against both constructors.
    for (operator <- Seq[(Seq[Expression] => Expression)](Greatest, Least)) {
      ruleTest(TypeCoercion.FunctionArgumentConversion,
        operator(Literal(1.0)
          :: Literal(1)
          :: Literal.create(1.0, FloatType)
          :: Nil),
        operator(Cast(Literal(1.0), DoubleType)
          :: Cast(Literal(1), DoubleType)
          :: Cast(Literal.create(1.0, FloatType), DoubleType)
          :: Nil))
      // A 22-digit decimal forces widening to DecimalType(22, 0).
      ruleTest(TypeCoercion.FunctionArgumentConversion,
        operator(Literal(1L)
          :: Literal(1)
          :: Literal(new java.math.BigDecimal("1000000000000000000000"))
          :: Nil),
        operator(Cast(Literal(1L), DecimalType(22, 0))
          :: Cast(Literal(1), DecimalType(22, 0))
          :: Cast(Literal(new java.math.BigDecimal("1000000000000000000000")), DecimalType(22, 0))
          :: Nil))
      // Double + decimal widens to double.
      ruleTest(TypeCoercion.FunctionArgumentConversion,
        operator(Literal(1.0)
          :: Literal.create(null, DecimalType(10, 5))
          :: Literal(1)
          :: Nil),
        operator(Literal(1.0).cast(DoubleType)
          :: Literal.create(null, DecimalType(10, 5)).cast(DoubleType)
          :: Literal(1).cast(DoubleType)
          :: Nil))
      // Mixed decimals widen to the max precision/scale that fits all inputs.
      ruleTest(TypeCoercion.FunctionArgumentConversion,
        operator(Literal.create(null, DecimalType(15, 0))
          :: Literal.create(null, DecimalType(10, 5))
          :: Literal(1)
          :: Nil),
        operator(Literal.create(null, DecimalType(15, 0)).cast(DecimalType(20, 5))
          :: Literal.create(null, DecimalType(10, 5)).cast(DecimalType(20, 5))
          :: Literal(1).cast(DecimalType(20, 5))
          :: Nil))
      ruleTest(TypeCoercion.FunctionArgumentConversion,
        operator(Literal.create(2L, LongType)
          :: Literal(1)
          :: Literal.create(null, DecimalType(10, 5))
          :: Nil),
        operator(Literal.create(2L, LongType).cast(DecimalType(25, 5))
          :: Literal(1).cast(DecimalType(25, 5))
          :: Literal.create(null, DecimalType(10, 5)).cast(DecimalType(25, 5))
          :: Nil))
    }
  }
  test("nanvl casts") {
    // NaNvl arguments are unified to double when either side is double; null
    // literals are cast to the other side's type.
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      NaNvl(Literal.create(1.0f, FloatType), Literal.create(1.0, DoubleType)),
      NaNvl(Cast(Literal.create(1.0f, FloatType), DoubleType), Literal.create(1.0, DoubleType)))
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      NaNvl(Literal.create(1.0, DoubleType), Literal.create(1.0f, FloatType)),
      NaNvl(Literal.create(1.0, DoubleType), Cast(Literal.create(1.0f, FloatType), DoubleType)))
    // Already aligned: no cast inserted.
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      NaNvl(Literal.create(1.0, DoubleType), Literal.create(1.0, DoubleType)),
      NaNvl(Literal.create(1.0, DoubleType), Literal.create(1.0, DoubleType)))
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      NaNvl(Literal.create(1.0f, FloatType), Literal.create(null, NullType)),
      NaNvl(Literal.create(1.0f, FloatType), Cast(Literal.create(null, NullType), FloatType)))
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      NaNvl(Literal.create(1.0, DoubleType), Literal.create(null, NullType)),
      NaNvl(Literal.create(1.0, DoubleType), Cast(Literal.create(null, NullType), DoubleType)))
  }
  test("type coercion for If") {
    // IfCoercion must (a) unify the two value branches to a common type and
    // (b) coerce the predicate to boolean when possible.
    val rule = TypeCoercion.IfCoercion
    val intLit = Literal(1)
    val doubleLit = Literal(1.0)
    val trueLit = Literal.create(true, BooleanType)
    val falseLit = Literal.create(false, BooleanType)
    val stringLit = Literal.create("c", StringType)
    val floatLit = Literal.create(1.0f, FloatType)
    val timestampLit = Literal.create("2017-04-12", TimestampType)
    val decimalLit = Literal(new java.math.BigDecimal("1000000000000000000000"))

    ruleTest(rule,
      If(Literal(true), Literal(1), Literal(1L)),
      If(Literal(true), Cast(Literal(1), LongType), Literal(1L)))

    // A null predicate is retyped to boolean.
    ruleTest(rule,
      If(Literal.create(null, NullType), Literal(1), Literal(1)),
      If(Literal.create(null, BooleanType), Literal(1), Literal(1)))

    // AssertTrue returns null type; the predicate gets an explicit boolean cast.
    ruleTest(rule,
      If(AssertTrue(trueLit), Literal(1), Literal(2)),
      If(Cast(AssertTrue(trueLit), BooleanType), Literal(1), Literal(2)))

    ruleTest(rule,
      If(AssertTrue(falseLit), Literal(1), Literal(2)),
      If(Cast(AssertTrue(falseLit), BooleanType), Literal(1), Literal(2)))

    ruleTest(rule,
      If(trueLit, intLit, doubleLit),
      If(trueLit, Cast(intLit, DoubleType), doubleLit))

    ruleTest(rule,
      If(trueLit, floatLit, doubleLit),
      If(trueLit, Cast(floatLit, DoubleType), doubleLit))

    ruleTest(rule,
      If(trueLit, floatLit, decimalLit),
      If(trueLit, Cast(floatLit, DoubleType), Cast(decimalLit, DoubleType)))

    ruleTest(rule,
      If(falseLit, stringLit, doubleLit),
      If(falseLit, stringLit, Cast(doubleLit, StringType)))

    ruleTest(rule,
      If(trueLit, timestampLit, stringLit),
      If(trueLit, Cast(timestampLit, StringType), stringLit))
  }
  test("type coercion for CaseKeyWhen") {
    // The key expression is widened to match the branch keys.
    ruleTest(TypeCoercion.ImplicitTypeCasts,
      CaseKeyWhen(Literal(1.toShort), Seq(Literal(1), Literal("a"))),
      CaseKeyWhen(Cast(Literal(1.toShort), IntegerType), Seq(Literal(1), Literal("a")))
    )
    // A boolean key against an integer branch key is left alone by
    // CaseWhenCoercion (no common type is forced here).
    ruleTest(TypeCoercion.CaseWhenCoercion,
      CaseKeyWhen(Literal(true), Seq(Literal(1), Literal("a"))),
      CaseKeyWhen(Literal(true), Seq(Literal(1), Literal("a")))
    )
    // Branch values and the else value are unified: double + decimal -> double.
    ruleTest(TypeCoercion.CaseWhenCoercion,
      CaseWhen(Seq((Literal(true), Literal(1.2))), Literal.create(1, DecimalType(7, 2))),
      CaseWhen(Seq((Literal(true), Literal(1.2))),
        Cast(Literal.create(1, DecimalType(7, 2)), DoubleType))
    )
    // long + decimal(7, 2) -> decimal(22, 2), wide enough for both.
    ruleTest(TypeCoercion.CaseWhenCoercion,
      CaseWhen(Seq((Literal(true), Literal(100L))), Literal.create(1, DecimalType(7, 2))),
      CaseWhen(Seq((Literal(true), Cast(Literal(100L), DecimalType(22, 2)))),
        Cast(Literal.create(1, DecimalType(7, 2)), DecimalType(22, 2)))
    )
  }
  test("BooleanEquality type cast") {
    val be = TypeCoercion.BooleanEquality
    // Use something more than a literal to avoid triggering the simplification rules.
    val one = Add(Literal(Decimal(1)), Literal(Decimal(0)))

    // When a boolean is compared with a non-literal numeric, the boolean side
    // is cast to the numeric side's type (for both = and <=>, either order).
    ruleTest(be,
      EqualTo(Literal(true), one),
      EqualTo(Cast(Literal(true), one.dataType), one)
    )

    ruleTest(be,
      EqualTo(one, Literal(true)),
      EqualTo(one, Cast(Literal(true), one.dataType))
    )

    ruleTest(be,
      EqualNullSafe(Literal(true), one),
      EqualNullSafe(Cast(Literal(true), one.dataType), one)
    )

    ruleTest(be,
      EqualNullSafe(one, Literal(true)),
      EqualNullSafe(one, Cast(Literal(true), one.dataType))
    )
  }
  test("BooleanEquality simplification") {
    // When the numeric side is the literal 1 or 0, the comparison collapses to
    // the boolean expression itself (or its negation) instead of a cast.
    val be = TypeCoercion.BooleanEquality

    ruleTest(be,
      EqualTo(Literal(true), Literal(1)),
      Literal(true)
    )
    ruleTest(be,
      EqualTo(Literal(true), Literal(0)),
      Not(Literal(true))
    )
    // Null-safe equality additionally guards against a null boolean side.
    ruleTest(be,
      EqualNullSafe(Literal(true), Literal(1)),
      And(IsNotNull(Literal(true)), Literal(true))
    )
    ruleTest(be,
      EqualNullSafe(Literal(true), Literal(0)),
      And(IsNotNull(Literal(true)), Not(Literal(true)))
    )

    ruleTest(be,
      EqualTo(Literal(true), Literal(1L)),
      Literal(true)
    )
    // The same simplification applies to decimal literals of 1 and 0.
    ruleTest(be,
      EqualTo(Literal(new java.math.BigDecimal(1)), Literal(true)),
      Literal(true)
    )
    ruleTest(be,
      EqualTo(Literal(BigDecimal(0)), Literal(true)),
      Not(Literal(true))
    )
    ruleTest(be,
      EqualTo(Literal(Decimal(1)), Literal(true)),
      Literal(true)
    )
    ruleTest(be,
      EqualTo(Literal.create(Decimal(1), DecimalType(8, 0)), Literal(true)),
      Literal(true)
    )
  }
private def checkOutput(logical: LogicalPlan, expectTypes: Seq[DataType]): Unit = {
logical.output.zip(expectTypes).foreach { case (attr, dt) =>
assert(attr.dataType === dt)
}
}
  // Resolves time-zone-aware expressions introduced by the coercion rules,
  // using a default SQLConf.
  private val timeZoneResolver = ResolveTimeZone(new SQLConf)
  // Applies the WidenSetOperationTypes rule, then resolves time zones for any
  // casts the rule inserted.
  private def widenSetOperationTypes(plan: LogicalPlan): LogicalPlan = {
    timeZoneResolver(TypeCoercion.WidenSetOperationTypes(plan))
  }
  test("WidenSetOperationTypes for except and intersect") {
    // Columns are widened position by position across the two sides; a Project
    // with casts must be inserted on each side.
    val firstTable = LocalRelation(
      AttributeReference("i", IntegerType)(),
      AttributeReference("u", DecimalType.SYSTEM_DEFAULT)(),
      AttributeReference("b", ByteType)(),
      AttributeReference("d", DoubleType)())
    val secondTable = LocalRelation(
      AttributeReference("s", StringType)(),
      AttributeReference("d", DecimalType(2, 1))(),
      AttributeReference("f", FloatType)(),
      AttributeReference("l", LongType)())

    val expectedTypes = Seq(StringType, DecimalType.SYSTEM_DEFAULT, FloatType, DoubleType)

    val r1 = widenSetOperationTypes(Except(firstTable, secondTable)).asInstanceOf[Except]
    val r2 = widenSetOperationTypes(Intersect(firstTable, secondTable)).asInstanceOf[Intersect]
    checkOutput(r1.left, expectedTypes)
    checkOutput(r1.right, expectedTypes)
    checkOutput(r2.left, expectedTypes)
    checkOutput(r2.right, expectedTypes)

    // Check if a Project is added
    assert(r1.left.isInstanceOf[Project])
    assert(r1.right.isInstanceOf[Project])
    assert(r2.left.isInstanceOf[Project])
    assert(r2.right.isInstanceOf[Project])
  }
  test("WidenSetOperationTypes for union") {
    // Same widening as except/intersect, but across all four union children.
    val firstTable = LocalRelation(
      AttributeReference("i", IntegerType)(),
      AttributeReference("u", DecimalType.SYSTEM_DEFAULT)(),
      AttributeReference("b", ByteType)(),
      AttributeReference("d", DoubleType)())
    val secondTable = LocalRelation(
      AttributeReference("s", StringType)(),
      AttributeReference("d", DecimalType(2, 1))(),
      AttributeReference("f", FloatType)(),
      AttributeReference("l", LongType)())
    val thirdTable = LocalRelation(
      AttributeReference("m", StringType)(),
      AttributeReference("n", DecimalType.SYSTEM_DEFAULT)(),
      AttributeReference("p", FloatType)(),
      AttributeReference("q", DoubleType)())
    val forthTable = LocalRelation(
      AttributeReference("m", StringType)(),
      AttributeReference("n", DecimalType.SYSTEM_DEFAULT)(),
      AttributeReference("p", ByteType)(),
      AttributeReference("q", DoubleType)())

    val expectedTypes = Seq(StringType, DecimalType.SYSTEM_DEFAULT, FloatType, DoubleType)

    val unionRelation = widenSetOperationTypes(
      Union(firstTable :: secondTable :: thirdTable :: forthTable :: Nil)).asInstanceOf[Union]
    assert(unionRelation.children.length == 4)
    checkOutput(unionRelation.children.head, expectedTypes)
    checkOutput(unionRelation.children(1), expectedTypes)
    checkOutput(unionRelation.children(2), expectedTypes)
    checkOutput(unionRelation.children(3), expectedTypes)

    // Every child must have been wrapped in a casting Project.
    assert(unionRelation.children.head.isInstanceOf[Project])
    assert(unionRelation.children(1).isInstanceOf[Project])
    assert(unionRelation.children(2).isInstanceOf[Project])
    assert(unionRelation.children(3).isInstanceOf[Project])
  }
  test("Transform Decimal precision/scale for union except and intersect") {
    // Local shadow of the class-level helper; kept to document the assertion
    // explicitly within this test.
    def checkOutput(logical: LogicalPlan, expectTypes: Seq[DataType]): Unit = {
      logical.output.zip(expectTypes).foreach { case (attr, dt) =>
        assert(attr.dataType === dt)
      }
    }

    // Two decimals: widened to a precision/scale that fits both sides.
    val left1 = LocalRelation(
      AttributeReference("l", DecimalType(10, 8))())
    val right1 = LocalRelation(
      AttributeReference("r", DecimalType(5, 5))())
    val expectedType1 = Seq(DecimalType(10, 8))

    val r1 = widenSetOperationTypes(Union(left1, right1)).asInstanceOf[Union]
    val r2 = widenSetOperationTypes(Except(left1, right1)).asInstanceOf[Except]
    val r3 = widenSetOperationTypes(Intersect(left1, right1)).asInstanceOf[Intersect]

    checkOutput(r1.children.head, expectedType1)
    checkOutput(r1.children.last, expectedType1)
    checkOutput(r2.left, expectedType1)
    checkOutput(r2.right, expectedType1)
    checkOutput(r3.left, expectedType1)
    checkOutput(r3.right, expectedType1)

    // Decimal vs each integral/floating type: integrals widen the decimal's
    // precision, floats/doubles win outright. Checked in both operand orders.
    val plan1 = LocalRelation(AttributeReference("l", DecimalType(10, 5))())

    val rightTypes = Seq(ByteType, ShortType, IntegerType, LongType, FloatType, DoubleType)
    val expectedTypes = Seq(DecimalType(10, 5), DecimalType(10, 5), DecimalType(15, 5),
      DecimalType(25, 5), DoubleType, DoubleType)

    rightTypes.zip(expectedTypes).foreach { case (rType, expectedType) =>
      val plan2 = LocalRelation(
        AttributeReference("r", rType)())

      val r1 = widenSetOperationTypes(Union(plan1, plan2)).asInstanceOf[Union]
      val r2 = widenSetOperationTypes(Except(plan1, plan2)).asInstanceOf[Except]
      val r3 = widenSetOperationTypes(Intersect(plan1, plan2)).asInstanceOf[Intersect]

      checkOutput(r1.children.last, Seq(expectedType))
      checkOutput(r2.right, Seq(expectedType))
      checkOutput(r3.right, Seq(expectedType))

      val r4 = widenSetOperationTypes(Union(plan2, plan1)).asInstanceOf[Union]
      val r5 = widenSetOperationTypes(Except(plan2, plan1)).asInstanceOf[Except]
      val r6 = widenSetOperationTypes(Intersect(plan2, plan1)).asInstanceOf[Intersect]

      checkOutput(r4.children.last, Seq(expectedType))
      checkOutput(r5.left, Seq(expectedType))
      checkOutput(r6.left, Seq(expectedType))
    }
  }
  test("rule for date/timestamp operations") {
    // Add/Subtract with a CalendarInterval is rewritten to TimeAdd/TimeSub,
    // then cast back to the non-interval operand's type. Interval addition is
    // commutative, so Add(interval, x) normalizes to TimeAdd(x, interval).
    val dateTimeOperations = TypeCoercion.DateTimeOperations
    val date = Literal(new java.sql.Date(0L))
    val timestamp = Literal(new Timestamp(0L))
    val interval = Literal(new CalendarInterval(0, 0))
    val str = Literal("2015-01-01")

    ruleTest(dateTimeOperations, Add(date, interval), Cast(TimeAdd(date, interval), DateType))
    ruleTest(dateTimeOperations, Add(interval, date), Cast(TimeAdd(date, interval), DateType))
    ruleTest(dateTimeOperations, Add(timestamp, interval),
      Cast(TimeAdd(timestamp, interval), TimestampType))
    ruleTest(dateTimeOperations, Add(interval, timestamp),
      Cast(TimeAdd(timestamp, interval), TimestampType))
    ruleTest(dateTimeOperations, Add(str, interval), Cast(TimeAdd(str, interval), StringType))
    ruleTest(dateTimeOperations, Add(interval, str), Cast(TimeAdd(str, interval), StringType))

    ruleTest(dateTimeOperations, Subtract(date, interval), Cast(TimeSub(date, interval), DateType))
    ruleTest(dateTimeOperations, Subtract(timestamp, interval),
      Cast(TimeSub(timestamp, interval), TimestampType))
    ruleTest(dateTimeOperations, Subtract(str, interval), Cast(TimeSub(str, interval), StringType))

    // interval operations should not be affected
    ruleTest(dateTimeOperations, Add(interval, interval), Add(interval, interval))
    ruleTest(dateTimeOperations, Subtract(interval, interval), Subtract(interval, interval))
  }
  /**
   * There are rules that need to not fire before child expressions get resolved.
   * We use this test to make sure those rules do not fire early.
   */
  test("make sure rules do not fire early") {
    // InConversion
    val inConversion = TypeCoercion.InConversion

    // With an unresolved attribute anywhere in the In expression, the rule must
    // leave the tree untouched.
    ruleTest(inConversion,
      In(UnresolvedAttribute("a"), Seq(Literal(1))),
      In(UnresolvedAttribute("a"), Seq(Literal(1)))
    )
    ruleTest(inConversion,
      In(Literal("test"), Seq(UnresolvedAttribute("a"), Literal(1))),
      In(Literal("test"), Seq(UnresolvedAttribute("a"), Literal(1)))
    )
    // Fully resolved: value and all list members are cast to the common type.
    ruleTest(inConversion,
      In(Literal("a"), Seq(Literal(1), Literal("b"))),
      In(Cast(Literal("a"), StringType),
        Seq(Cast(Literal(1), StringType), Cast(Literal("b"), StringType)))
    )
  }
  test("SPARK-15776 Divide expression's dataType should be casted to Double or Decimal " +
    "in aggregation function like sum") {
    // Division must always produce a fractional type; integral operands are
    // cast to double before the divide, even inside an aggregate.
    val rules = Seq(FunctionArgumentConversion, Division)
    // Casts Integer to Double
    ruleTest(rules, sum(Divide(4, 3)), sum(Divide(Cast(4, DoubleType), Cast(3, DoubleType))))
    // Left expression is Double, right expression is Int. Another rule ImplicitTypeCasts will
    // cast the right expression to Double.
    ruleTest(rules, sum(Divide(4.0, 3)), sum(Divide(4.0, 3)))
    // Left expression is Int, right expression is Double
    ruleTest(rules, sum(Divide(4, 3.0)), sum(Divide(Cast(4, DoubleType), Cast(3.0, DoubleType))))
    // Casts Float to Double
    ruleTest(
      rules,
      sum(Divide(4.0f, 3)),
      sum(Divide(Cast(4.0f, DoubleType), Cast(3, DoubleType))))
    // Left expression is Decimal, right expression is Int. Another rule DecimalPrecision will cast
    // the right expression to Decimal.
    ruleTest(rules, sum(Divide(Decimal(4.0), 3)), sum(Divide(Decimal(4.0), 3)))
  }
  test("SPARK-17117 null type coercion in divide") {
    // A NullType operand of Divide must be cast to double (not left as null
    // type), regardless of which side it is on.
    val rules = Seq(FunctionArgumentConversion, Division, ImplicitTypeCasts)
    val nullLit = Literal.create(null, NullType)
    ruleTest(rules, Divide(1L, nullLit), Divide(Cast(1L, DoubleType), Cast(nullLit, DoubleType)))
    ruleTest(rules, Divide(nullLit, 1L), Divide(Cast(nullLit, DoubleType), Cast(1L, DoubleType)))
  }
  test("binary comparison with string promotion") {
    // String vs numeric: the string side is cast to the other operand's type.
    ruleTest(PromoteStrings,
      GreaterThan(Literal("123"), Literal(1)),
      GreaterThan(Cast(Literal("123"), IntegerType), Literal(1)))
    ruleTest(PromoteStrings,
      LessThan(Literal(true), Literal("123")),
      LessThan(Literal(true), Cast(Literal("123"), BooleanType)))
    // String vs array: no promotion; expression left unchanged.
    ruleTest(PromoteStrings,
      EqualTo(Literal(Array(1, 2)), Literal("123")),
      EqualTo(Literal(Array(1, 2)), Literal("123")))
  }
}
object TypeCoercionSuite {

  // Test-only unary expression that accepts any input type; used to verify
  // that ImplicitTypeCasts leaves AnyDataType slots untouched.
  case class AnyTypeUnaryExpression(child: Expression)
    extends UnaryExpression with ExpectsInputTypes with Unevaluable {
    override def inputTypes: Seq[AbstractDataType] = Seq(AnyDataType)
    override def dataType: DataType = NullType
  }

  // Test-only unary expression that requires a numeric input; used to verify
  // that NullType children are coerced to the default numeric type.
  case class NumericTypeUnaryExpression(child: Expression)
    extends UnaryExpression with ExpectsInputTypes with Unevaluable {
    override def inputTypes: Seq[AbstractDataType] = Seq(NumericType)
    override def dataType: DataType = NullType
  }

  // Test-only binary operator accepting any operand type.
  case class AnyTypeBinaryOperator(left: Expression, right: Expression)
    extends BinaryOperator with Unevaluable {
    override def dataType: DataType = NullType
    override def inputType: AbstractDataType = AnyDataType
    override def symbol: String = "anytype"
  }

  // Test-only binary operator requiring numeric operands.
  case class NumericTypeBinaryOperator(left: Expression, right: Expression)
    extends BinaryOperator with Unevaluable {
    override def dataType: DataType = NullType
    override def inputType: AbstractDataType = NumericType
    override def symbol: String = "numerictype"
  }
}
| map222/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala | Scala | apache-2.0 | 50,053 |
package Tutorial
import Chisel._
import Node._
import Literal._
import scala.collection.mutable.HashMap
import scala.collection.mutable.ArrayBuffer
// Five-state handshake FSM: latch an input, request the "constOff" offload,
// wait for its reply, burn `counterResetValue` extra cycles, then emit
// input + offload-reply on io.out.
class IncThroughOffloadgComponent extends gComponentLeaf (() => UFix(width = 32)) (() => UFix(width = 32)) (ArrayBuffer(("constOff", () => UFix(width = 32), () => UFix(width = 32)))) {
  println("This is the main component")
  val waitForInput :: requesting :: waitForOffload :: waitForCounter :: waitForOutput :: Nil = Enum(5){UFix()}
  val counterResetValue = UFix(2, 32)
  val inputReg = Reg(resetVal = UFix(0, 32))   // latched input operand
  val offReg = Reg(resetVal = UFix(0, 32))     // latched offload reply
  val counter = Reg(resetVal = counterResetValue)
  val state = Reg(resetVal = waitForInput)

  // Locate the "constOff" offload port inside the generated io bundle by name.
  // NOTE(review): relies on reflection-style lookup plus asInstanceOf casts
  // into the gComponentLeaf-generated bundle — presumably a framework
  // limitation; confirm there is no typed accessor.
  def myOff = io.elements.find(_._1 == "off").getOrElse(elseV)._2//.asInstanceOf[gOffBundle[UFix, UFix]]
  def mymyOff = myOff.asInstanceOf[Bundle].elements.find(_._1 == "constOff").getOrElse(elseV)._2
  val offWire = new gOffBundleND(() => UFix(width = 32), () => UFix(width = 32))
  offWire <> mymyOff

  // State transitions, one handshake event per clause.
  when (state === waitForInput && io.in.valid) {
    inputReg := io.in.bits
    state := requesting
  } .elsewhen (state === requesting && offWire.req.ready) {
    state := waitForOffload
  } .elsewhen (state === waitForOffload && offWire.rep.valid) {
    state := waitForCounter
    offReg := offWire.rep.bits
  } .elsewhen (state === waitForCounter) {
    // Artificial delay of counterResetValue cycles before presenting output.
    when (counter === UFix(0)) {
      state := waitForOutput
      counter := counterResetValue
    } .otherwise {
      counter := counter - UFix(1)
    }
  } .elsewhen (state === waitForOutput && io.out.ready) {
    state := waitForInput
  } .otherwise {
  }

  // Handshake outputs derived combinationally from the state register.
  io.in.ready := state === waitForInput
  io.out.valid := state === waitForOutput
  io.out.bits := inputReg + offReg
  offWire.req.valid := state === requesting
  offWire.rep.ready := state != waitForOutput
}
//class SendConstantgComponent extends gComponentLeaf (() => UFix(width = 32)) (() => UFix(width = 32)) (ArrayBuffer(("test", () => UFix(width = 32), ()=> UFix(width = 32)))) {
// One-cycle stage that answers every valid input with the constant 2 (32-bit).
// Ready is forwarded straight from the consumer; valid is the registered
// input-valid, additionally gated on the consumer being ready.
class SendConstantgComponent extends gComponentLeaf (() => UFix(width = 32)) (() => UFix(width = 32)) (ArrayBuffer()) {
  val outputData = Reg(UFix(width = 32))
  val outputValid = Reg(Bool(false))
  outputValid := io.in.valid
  outputData := UFix(2, 32)
  io.in.ready := io.out.ready
  io.out.valid := outputValid && io.out.ready
  // Fix: the original assigned io.out.bits := outputData twice with the same
  // value; under Chisel's last-connect semantics the first assignment was
  // dead, so the duplicate is removed.
  io.out.bits := outputData
}
// Top-level design: two replicas of IncThroughOffloadgComponent share one
// SendConstantgComponent through the "constOff" offload channel.
class Offloaded extends Component with GorillaUtil {
  //def report() = {bfs { n => println(n.name) }}
  val io = new gInOutBundle (() => UFix(width = 32), () => UFix(width = 32))

  // Metadata describing the two module interfaces (in/out element types plus
  // the offload port list).
  val a = new gComponentMD (() => UFix(width = 32), () => UFix(width = 32),
    ArrayBuffer(("constOff", () => UFix(width = 32), () => UFix(width = 32))))
  val b = new gComponentMD (() => UFix(width = 32), () => UFix(width = 32),
    ArrayBuffer())

  // Pair each metadata descriptor with its component factory.
  val c = (a, () => new IncThroughOffloadgComponent)
  val d = (b, () => new SendConstantgComponent)

  //val e = Offload(c, d, "constOff")
  ///////////////////////////////////////////////////////////
  // Replicate the offloading client twice, then attach the constant provider
  // as the shared "constOff" server.
  val e = Offload(Replicate(c, 2), d, "constOff")
  //////////////////////////////////////////////////////////

  // Instantiate the composed design and expose its io as this module's io.
  val f = e._2()
  println("final object name is " + f.getClass.getName)
  io <> f.io
  //io <> d._2().io
}
// Simulation test bench: resets performance counters, streams 200 cyclic
// inputs from `inputs_data` (one every iDelay cycles), and checks each output
// equals input + 2 (the offloaded constant). Finally reads back one
// performance counter.
class OffloadedTests(o: Offloaded) extends Tester(o, Array(o.io)) {
  defTests {
    val inputs_data = List(1, 2, 4, 8)
    val svars = new HashMap[Node, Node]()   // stimulus (poked) values
    val ovars = new HashMap[Node, Node]()   // observed (peeked) values
    // NOTE(review): `test` is never used below — instantiating a second
    // Offloaded here looks unintentional; confirm and consider removing.
    val test = new Offloaded
    val iDelay = 4

    // Hold everything idle for a few cycles after reset.
    for (time <- 0 until 5) {
      svars(o.io.in.valid) = Bool(false)
      svars(o.io.in.bits) = UFix(0)
      svars(o.io.pcIn.valid) = Bool(false)
      svars(o.io.pcIn.bits.pcType) = Pcounters.pcReset
      svars(o.io.pcIn.bits.moduleId) = UFix(0)
      svars(o.io.pcIn.bits.portId) = UFix(0)
      step(svars, ovars, false)
    }

    // Issue a performance-counter reset and wait for its acknowledgement.
    svars(o.io.pcIn.valid) = Bool(true)
    svars(o.io.pcIn.bits.request) = Bool(true)
    svars(o.io.pcIn.bits.pcType) = Pcounters.pcReset
    step(svars, ovars)
    svars(o.io.pcIn.valid) = Bool(false)
    while(ovars(o.io.pcOut.valid).litValue().intValue == 0) {
      step(svars, ovars)
    }
    println("PCREPORT: Performance counter reset received")

    var sourced = 0        // inputs accepted so far
    var sourcedIndex = 0   // index into inputs_data for the next input
    var sinked = 0         // outputs observed so far
    var sinkedIndex = 0    // index into inputs_data for the expected output
    var time = 0
    //var sinkStarted = false
    var allPassed = true
    var cycles = 0

    // Drive 200 inputs (one every iDelay cycles) and drain 200 outputs.
    while(sourced < 200 || sinked < 200) {
      if ((sourced < 200) && (cycles % iDelay == 0)) {
        svars(o.io.in.bits) = Bits(inputs_data(sourcedIndex))
        svars(o.io.in.valid) = Bool(true)
        svars(o.io.out.ready) = Bool(true)
      } else {
        svars(o.io.in.bits) = UFix(0)
        svars(o.io.in.valid) = Bool(false)
        svars(o.io.out.ready) = Bool(true)
      }

      // this advances the clock
      step(svars, ovars)
      cycles += 1

      // bump counters and check outputs after advancing clock
      if (ovars(o.io.in.ready).litValue() == 1 && (cycles % iDelay == 0)) {
        sourced += 1
        sourcedIndex = sourced % 4
        println("sourced and sourcedIndex are " + sourced + " " + sourcedIndex)
      }
      if (ovars(o.io.out.valid).litValue() == 1) {
        // Expected: input value plus the offloaded constant 2.
        allPassed = allPassed && (ovars(o.io.out.bits).litValue() == (inputs_data(sinkedIndex) + 2))
        if (allPassed == false) {
          println("Test failed because output is " + ovars(o.io.out.bits).litValue() +
            " expected " + (inputs_data(sinkedIndex) + 2))
          println("Sinked is " + sinked)
        }
        println("At " + time + " outpout " + ovars(o.io.out.bits).litValue() + " sinked");
        sinked += 1
        sinkedIndex = sinked % 4
      }
      time += 1
    }
    step(svars, ovars)

    // Query a back-pressure counter from the replicated inc-through module
    // (moduleId 3, input port) and wait for the reply.
    svars(o.io.pcIn.valid) = Bool(true)
    svars(o.io.pcIn.bits.request) = Bool(true)
    svars(o.io.pcIn.bits.pcType) = Pcounters.backPressure
    svars(o.io.pcIn.bits.moduleId) = UFix(3) //incthrough module Replicated
    //svars(o.io.pcIn.bits.moduleId) = UFix(2) //incthroug module Offloaded
    svars(o.io.pcIn.bits.portId) = UFix(1) //Input port
    step(svars, ovars)
    svars(o.io.pcIn.valid) = Bool(false)
    while(ovars(o.io.pcOut.valid).litValue().intValue() == 0) {
      step(svars, ovars)
    }
    println("PCREPORT: Performance counter received " + ovars(o.io.pcOut.bits.pcValue))
    step(svars, ovars)
    allPassed
  }
}
| seyedmaysamlavasani/GorillaPP | chisel/Gorilla++/backup/Offloaded.scala | Scala | bsd-3-clause | 6,380 |
package gitbucket.core.service
import gitbucket.core.model.WebHook
import org.scalatest.FunSuite
import gitbucket.core.model.WebHookContentType
// Tests for WebHookPullRequestService: resolving which pull requests / web hooks should
// fire for a push to a given branch, plus hook registration (add/get/update/delete).
class WebHookServiceSpec extends FunSuite with ServiceSpecBase {
  // Service under test, assembled from the trait stack it depends on.
  lazy val service = new WebHookPullRequestService with AccountService with RepositoryService with PullRequestService with IssuesService with CommitsService
  test("WebHookPullRequestService.getPullRequestsByRequestForWebhook") { withTestDB { implicit session =>
    val user1 = generateNewUserWithDBRepository("user1","repo1")
    val user2 = generateNewUserWithDBRepository("user2","repo2")
    val user3 = generateNewUserWithDBRepository("user3","repo3")
    val issueUser = user("root")
    // Three open pull requests whose request branch is user2/repo2/master2, and one unrelated PR.
    val (issue1, pullreq1) = generateNewPullRequest("user1/repo1/master1", "user2/repo2/master2", loginUser="root")
    val (issue3, pullreq3) = generateNewPullRequest("user3/repo3/master3", "user2/repo2/master2", loginUser="root")
    val (issue32, pullreq32) = generateNewPullRequest("user3/repo3/master32", "user2/repo2/master2", loginUser="root")
    generateNewPullRequest("user2/repo2/master2", "user1/repo1/master2")
    // Two PullRequest hooks per repository.
    service.addWebHook("user1", "repo1", "webhook1-1", Set(WebHook.PullRequest), WebHookContentType.FORM, Some("key"))
    service.addWebHook("user1", "repo1", "webhook1-2", Set(WebHook.PullRequest), WebHookContentType.FORM, Some("key"))
    service.addWebHook("user2", "repo2", "webhook2-1", Set(WebHook.PullRequest), WebHookContentType.FORM, Some("key"))
    service.addWebHook("user2", "repo2", "webhook2-2", Set(WebHook.PullRequest), WebHookContentType.FORM, Some("key"))
    service.addWebHook("user3", "repo3", "webhook3-1", Set(WebHook.PullRequest), WebHookContentType.FORM, Some("key"))
    service.addWebHook("user3", "repo3", "webhook3-2", Set(WebHook.PullRequest), WebHookContentType.FORM, Some("key"))
    // No pull request targets user1/repo1/master1, so no hooks resolve for it.
    assert(service.getPullRequestsByRequestForWebhook("user1","repo1","master1") == Map.empty)
    // The three PRs against user2/repo2/master2 map to the hooks of their *source* repositories.
    val r = service.getPullRequestsByRequestForWebhook("user2","repo2","master2").mapValues(_.map(_.url).toSet)
    assert(r.size == 3)
    assert(r((issue1, issueUser, pullreq1, user1, user2)) == Set("webhook1-1","webhook1-2"))
    assert(r((issue3, issueUser, pullreq3, user3, user2)) == Set("webhook3-1","webhook3-2"))
    assert(r((issue32, issueUser, pullreq32, user3, user2)) == Set("webhook3-1","webhook3-2"))
    // when closed, it not founds.
    service.updateClosed("user1","repo1",issue1.issueId, true)
    val r2 = service.getPullRequestsByRequestForWebhook("user2","repo2","master2").mapValues(_.map(_.url).toSet)
    assert(r2.size == 2)
    assert(r2((issue3, issueUser, pullreq3, user3, user2)) == Set("webhook3-1","webhook3-2"))
    assert(r2((issue32, issueUser, pullreq32, user3, user2)) == Set("webhook3-1","webhook3-2"))
  } }
  // Full CRUD round-trip for a single web hook, including event-set and content-type updates.
  test("add and get and update and delete") { withTestDB { implicit session =>
    val user1 = generateNewUserWithDBRepository("user1","repo1")
    val formType = WebHookContentType.FORM
    val jsonType = WebHookContentType.JSON
    service.addWebHook("user1", "repo1", "http://example.com", Set(WebHook.PullRequest), formType, Some("key"))
    assert(service.getWebHooks("user1", "repo1") == List((WebHook("user1","repo1","http://example.com", formType, Some("key")),Set(WebHook.PullRequest))))
    assert(service.getWebHook("user1", "repo1", "http://example.com") == Some((WebHook("user1","repo1","http://example.com", formType, Some("key")),Set(WebHook.PullRequest))))
    assert(service.getWebHooksByEvent("user1", "repo1", WebHook.PullRequest) == List((WebHook("user1","repo1","http://example.com", formType, Some("key")))))
    assert(service.getWebHooksByEvent("user1", "repo1", WebHook.Push) == Nil)
    // Lookups with a wrong url, owner or repository must all miss.
    assert(service.getWebHook("user1", "repo1", "http://example.com2") == None)
    assert(service.getWebHook("user2", "repo1", "http://example.com") == None)
    assert(service.getWebHook("user1", "repo2", "http://example.com") == None)
    service.updateWebHook("user1", "repo1", "http://example.com", Set(WebHook.Push, WebHook.Issues), jsonType, Some("key"))
    assert(service.getWebHook("user1", "repo1", "http://example.com") == Some((WebHook("user1","repo1","http://example.com", jsonType, Some("key")),Set(WebHook.Push, WebHook.Issues))))
    assert(service.getWebHooksByEvent("user1", "repo1", WebHook.PullRequest) == Nil)
    assert(service.getWebHooksByEvent("user1", "repo1", WebHook.Push) == List((WebHook("user1","repo1","http://example.com", jsonType, Some("key")))))
    service.deleteWebHook("user1", "repo1", "http://example.com")
    assert(service.getWebHook("user1", "repo1", "http://example.com") == None)
  } }
  // Listing hooks and filtering them by subscribed event.
  test("getWebHooks, getWebHooksByEvent") { withTestDB { implicit session =>
    val user1 = generateNewUserWithDBRepository("user1","repo1")
    val ctype = WebHookContentType.FORM
    service.addWebHook("user1", "repo1", "http://example.com/1", Set(WebHook.PullRequest), ctype, Some("key"))
    service.addWebHook("user1", "repo1", "http://example.com/2", Set(WebHook.Push), ctype, Some("key"))
    service.addWebHook("user1", "repo1", "http://example.com/3", Set(WebHook.PullRequest,WebHook.Push), ctype, Some("key"))
    assert(service.getWebHooks("user1", "repo1") == List(
      WebHook("user1","repo1","http://example.com/1", ctype, Some("key"))->Set(WebHook.PullRequest),
      WebHook("user1","repo1","http://example.com/2", ctype, Some("key"))->Set(WebHook.Push),
      WebHook("user1","repo1","http://example.com/3", ctype, Some("key"))->Set(WebHook.PullRequest,WebHook.Push)))
    assert(service.getWebHooksByEvent("user1", "repo1", WebHook.PullRequest) == List(
      WebHook("user1","repo1","http://example.com/1", ctype, Some("key")),
      WebHook("user1","repo1","http://example.com/3", ctype, Some("key"))))
  } }
} | nobusugi246/gitbucket | src/test/scala/gitbucket/core/service/WebHookServiceSpec.scala | Scala | apache-2.0 | 5,769 |
package org.oxygen.redio.runtime
import net.minecraft.util.BlockPos
import net.minecraft.world.IBlockAccess
import scala.collection.immutable.HashSet
/** A block that can take part in packet routing (see the companion's `dispatch`). */
trait PacketNode
{
	// True when this node (at `pos`) will take a packet addressed to `target`.
	def isAcceptable(world: IBlockAccess, pos: BlockPos, target: String): Boolean
	// Hands `packet` (sent by `source`) to this node; the returned value is propagated back to the dispatcher.
	def acceptPacket(world: IBlockAccess, pos: BlockPos, source: String, packet: PacketType): Any
}
object PacketNode
{
	/** Public entry point: normalises both addresses and starts the search with an empty visited set. */
	def dispatch(world: IBlockAccess, pos: BlockPos, source: String, target: String, packet: PacketType): Option[Any] =
		dispatch(world, pos, source.trim, target.trim, packet, HashSet.empty[BlockPos])

	/**
	 * Depth-first search over the six blocks adjacent to `pos`, skipping positions already in `path`.
	 * The first neighbouring PacketNode that reports `target` as acceptable receives the packet and
	 * its reply is returned; non-accepting nodes are searched through recursively. Returns None when
	 * the target address is empty or no reachable node accepts the packet.
	 */
	def dispatch(world: IBlockAccess, pos: BlockPos, source: String,
		target: String, packet: PacketType, path: HashSet[BlockPos]): Option[Any] =
	{
		if (target.isEmpty)
			None
		else
		{
			val visited = path + pos
			val positions = Array(pos.up(), pos.down(), pos.east(), pos.west(), pos.north(), pos.south())
			// Resolve all neighbouring blocks up front (same order as the positions array).
			val blocks = positions.map(world.getBlockState(_).getBlock)

			// Lazily walk the neighbours; evaluation stops at the first Some produced.
			val replies = positions.iterator.zip(blocks.iterator).flatMap
			{
				case (neighbor, node: PacketNode) if !path.contains(neighbor) =>
					if (node.isAcceptable(world, neighbor, target))
						Some(node.acceptPacket(world, neighbor, source, packet))
					else
						dispatch(world, neighbor, source, target, packet, visited)

				case _ => None
			}

			if (replies.hasNext) Some(replies.next()) else None
		}
	}
}
| chenzhuoyu/RedIO | src/main/scala/org/oxygen/redio/runtime/PacketNode.scala | Scala | lgpl-2.1 | 1,391 |
package rere.ql.options
import rere.ql.queries.Func
import rere.ql.typeclasses.Transmuter
import rere.ql.types.{ReqlArray, ReqlDatum}
trait EmitOptions {

  /** Marker for the `emit` / `final_emit` option group. */
  sealed trait EmitOptions[R <: ReqlDatum, Base <: ReqlDatum] extends ComposableOptions

  // usually compiler can't infer this types and they should be specified explicitly
  case class Emit[
    R <: ReqlDatum : Transmuter,
    Base <: ReqlDatum : Transmuter,
    Out <: ReqlDatum
  ](
    emitFunction: (R, Base, R) => ReqlArray[Out]
  ) extends EmitOptions[R, Base] with NonDefaultOption {
    /** Combines this `emit` option with a `final_emit` function. */
    def withFinalEmit(finalEmit: R => ReqlArray[Out]): EmitWithFinalEmit[R, Base, Out] =
      EmitWithFinalEmit(emitFunction, finalEmit)
    def view = List("emit" -> Func.wrap3(emitFunction))
  }

  // usually compiler can't infer this types and they should be specified explicitly
  case class EmitWithFinalEmit[
    R <: ReqlDatum : Transmuter,
    Base <: ReqlDatum : Transmuter,
    Out <: ReqlDatum
  ](
    emitFunction: (R, Base, R) => ReqlArray[Out],
    finalEmit: R => ReqlArray[Out]
  ) extends EmitOptions[R, Base] with NonDefaultOption {
    def view = List("emit" -> Func.wrap3(emitFunction), "final_emit" -> Func.wrap1(finalEmit))
  }
}
| pbaun/rere | modules/ql/src/main/scala/rere/ql/options/EmitOptions.scala | Scala | apache-2.0 | 1,207 |
package be.studiocredo.account
import java.io.File
import java.security.MessageDigest
import java.text.{DecimalFormat, DecimalFormatSymbols}
import be.studiocredo.util.Money
import models.entities.{PaymentEdit, PaymentType}
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
import scala.collection.mutable.ListBuffer
object AXATransactionImporter {
  // Marker for a CSV column we know how to interpret; `value` is the (Dutch) header text.
  trait Key {
    val value: String
  }
  //afschrift, datum verrichting, datum valuta, datum boeking, bedrag, saldo rekening , omschrijving aard verrichting, rekening begunstigde, tegenpartij, naam terminal, plaats terminal, kaartnummer, mededeling, vervolg mededeling, detail verrichting
  case object Transcript extends Key { val value = "afschrift" }
  case object TransactionDate extends Key { val value = "datum verrichting" }
  case object Amount extends Key { val value = "bedrag" }
  case object Debtor extends Key { val value = "tegenpartij" }
  case object Message extends Key { val value = "mededeling" }
  case object Message2 extends Key { val value = "vervolg mededeling" }
  case object Detail extends Key { val value = "detail verrichting" }
  // All recognised columns; used to build the header-text lookup below.
  val keys: List[Key] = List(Transcript, TransactionDate, Amount, Debtor, Message, Message2, Detail)
  // Lookup from header text to its Key, used while interpreting the header row.
  val mapper = keys.map( key => (key.value, key)).toMap
}
class AXATransactionImporter extends TransactionImporter {
  val id = "AXA"

  import AXATransactionImporter._

  /**
   * Parses an AXA CSV statement export into payment records.
   *
   * Observed layout: 9 preamble lines with line 9 (index 8) holding the `;`-separated column
   * names, then one record per line. A line containing no `;` is a continuation of the previous
   * record's last column. Records with a negative amount are dropped at the end.
   * Note: splitting is naive -> no escaping from the ; delimiter.
   */
  override def importFile(file: File): List[PaymentEdit] = {
    import scala.io.Source
    // Read the file once and always release the handle; the previous version opened the
    // file twice and never closed either Source (file-handle leak).
    val source = Source.fromFile(file, "iso-8859-1")
    val allLines = try source.getLines().toList finally source.close()
    val headers = allLines.iterator.slice(8, 9).next().split(';').toList
    val rawPaymentValues = new ListBuffer[List[String]]
    var lastProcessedLine: Option[List[String]] = None
    for (line <- allLines.drop(9)) {
      val values = line.split(';').toList
      if (values.length == 1) {
        // Continuation line: append it (with a newline) to the last column of the pending record.
        val actualLastLine = lastProcessedLine.get
        val newLine = actualLastLine.take(actualLastLine.length - 1).toList ++ List(actualLastLine.last + "\n" + values.head)
        lastProcessedLine = Some(newLine)
      } else {
        // New record: flush the pending one (if any) and start buffering this line.
        if (lastProcessedLine.isDefined) {
          rawPaymentValues += lastProcessedLine.get.map(stripQuotes)
        }
        lastProcessedLine = Some(values)
      }
    }
    // Flush the final pending record.
    if (lastProcessedLine.isDefined) {
      rawPaymentValues += lastProcessedLine.get.map(stripQuotes)
    }
    // Keep only columns with a known header, keyed by their Key object.
    val rawPaymentValueMap = rawPaymentValues.toList.map { value =>
      value.zipWithIndex.collect {
        case (v, index) if mapper.contains(headers(index)) => (mapper(headers(index)), v)
      }.toMap
    }
    rawPaymentValueMap.map { map =>
      PaymentEdit(PaymentType.WireTransfer, Some(getImportId(map)), None, map(Debtor), getAmount(map), Some(getMessage(map)), Some(map(Detail)), getDate(map), false)
    }.filter(pe => pe.amount.amount >= 0)
  }

  // Removes one pair of surrounding double quotes, if present.
  private def stripQuotes(s: String): String = {
    if (s.startsWith("\"") && s.endsWith("\"")) s.dropRight(1).drop(1)
    else s
  }

  // Joins the two message columns, skipping empty ones.
  private def getMessage(map: Map[Key, String]): String = {
    List(map(Message), map(Message2)).filterNot(_.isEmpty).mkString("\n")
  }

  // Stable id for deduplication on re-import: MD5 over the importer id plus all identifying fields.
  private def getImportId(map: Map[Key, String]): String = {
    val message = id + "@" + List(map(Transcript), map(TransactionDate), map(Amount), map(Debtor), map(Message), map(Message2), map(Detail)).mkString(";")
    MessageDigest.getInstance("MD5").digest(message.getBytes).map("%02X".format(_)).mkString
  }

  // Parses a Belgian-format decimal (',' decimal separator, '.' grouping separator).
  private def getAmount(map: Map[Key, String]): Money = {
    val otherSymbols = new DecimalFormatSymbols()
    otherSymbols.setDecimalSeparator(',')
    otherSymbols.setGroupingSeparator('.')
    val decimalFormat = new DecimalFormat("#,###.##", otherSymbols)
    Money(decimalFormat.parse(map(Amount)).floatValue())
  }

  // Transaction dates arrive as yyyy-MM-dd.
  private def getDate(map: Map[Key, String]): DateTime = {
    DateTimeFormat.forPattern("yyyy-MM-dd").parseDateTime(map(TransactionDate))
  }
}
| studiocredo/ticket-reservation | app/be/studiocredo/account/AXATransactionImporter.scala | Scala | apache-2.0 | 3,950 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import uk.gov.hmrc.ct.accounts.{MockFrs102AccountsRetriever, AccountsMoneyValidationFixture}
import uk.gov.hmrc.ct.accounts.frs102.retriever.Frs102AccountsBoxRetriever
// Runs the shared accounts-money validation fixture against box AC59 with a minimum value of 0.
class AC59Spec extends AccountsMoneyValidationFixture[Frs102AccountsBoxRetriever] with MockFrs102AccountsRetriever {
  testAccountsMoneyValidationWithMin("AC59", 0, AC59.apply)
}
| pncampbell/ct-calculations | src/test/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/AC59Spec.scala | Scala | apache-2.0 | 1,001 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v3.retriever
import uk.gov.hmrc.ct.accounts.retriever.AccountsBoxRetriever
import uk.gov.hmrc.ct.box.retriever.BoxRetriever
import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever
import uk.gov.hmrc.ct.ct600.v3._
import uk.gov.hmrc.ct.ct600a.v3.retriever.CT600ABoxRetriever
import uk.gov.hmrc.ct.ct600e.v3.B115
import uk.gov.hmrc.ct.ct600e.v3.retriever.CT600EBoxRetriever
import uk.gov.hmrc.ct.ct600j.v3.B140
abstract class AboutThisReturnBoxRetriever(val accountsBoxRetriever: AccountsBoxRetriever,
                                           val computationsBoxRetriever: Option[ComputationsBoxRetriever],
                                           val ct600EBoxRetriever: Option[CT600EBoxRetriever],
                                           val ct600ABoxRetriever: Option[CT600ABoxRetriever]) extends BoxRetriever {

  /** AP start date: from computations (CP1) when present, otherwise from CT600E (E3). */
  def b30(): B30 = (computationsBoxRetriever, ct600EBoxRetriever) match {
    case (Some(computations), _) => B30(computations.cp1())
    case (_, Some(ct600e)) => B30(ct600e.e3())
    case _ => throw new IllegalStateException(s"This box retriever [$this] does not have an AP start date.")
  }

  /** AP end date: from computations (CP2) when present, otherwise from CT600E (E4). */
  def b35(): B35 = (computationsBoxRetriever, ct600EBoxRetriever) match {
    case (Some(computations), _) => B35(computations.cp2())
    case (_, Some(ct600e)) => B35(ct600e.e4())
    case _ => throw new IllegalStateException(s"This box retriever [$this] does not have an AP end date.")
  }

  def b40(): B40

  def b45(): B45 = B45.calculate(this)

  def b45Input(): B45Input

  def b50(): B50 = B50.calculate(accountsBoxRetriever)

  def b55(): B55

  def b65(): B65

  def b80A(): B80A

  def b85A(): B85A

  def b90A(): B90A

  /** B95 mirrors CT600A's LPQ01 when a CT600A is attached; false otherwise. */
  def b95(): B95 = ct600ABoxRetriever match {
    case Some(ct600a) => B95(ct600a.lpq01())
    case None => B95(false)
  }

  /** B115 records whether a CT600E retriever is attached. */
  def b115(): B115 = B115(ct600EBoxRetriever.isDefined)

  def b140(): B140 = B140(b65())
}
| liquidarmour/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600/v3/retriever/AboutThisReturnBoxRetriever.scala | Scala | apache-2.0 | 2,436 |
package lifted
import scala.virtualization.lms.common._
import scala.virtualization.lms.internal.ScalaCompile
import scala.virtualization.lms.util.OverloadHack
import ch.epfl.yinyang.api._
import scala.tools.nsc._
import scala.tools.nsc.util._
import scala.tools.nsc.reporters._
import scala.tools.nsc.io._
import scala.tools.nsc.interpreter.AbstractFileClassLoader
import java.io._
// Glue between Yin-Yang's lifting interface and LMS expression trees: captured constants
// are lifted via `unit`, and captured free variables become Hole IR nodes.
trait LMSYinYang extends BaseYinYang with BaseExp { self =>
  // IR node standing in for a value supplied only when the staged program runs.
  case class Hole[+T: Manifest](symId: Long) extends Def[T]
  // Lift evidence for any T with a Manifest.
  implicit def liftAny[T: Manifest]: LiftEvidence[T, Rep[T]] =
    new LiftEvidence[T, Rep[T]] {
      def lift(v: T): Rep[T] = unit(v)
      def hole(m: Manifest[Any], symId: Int): Rep[T] = toAtom(Hole(symId))
    }
  def requiredHoles = Nil
}
// Scala-backend DSL: generates Scala source for the staged program, compiles it in-memory
// with the embedded scalac, and loads/instantiates the resulting class.
trait ScalaDSL extends ScalaOpsPkg with ScalaOpsPkgExp with LMSYinYang with CodeGenerator
  with ScalaCompile { self =>
  def main(): Any
  // Code generator that skips Hole nodes (they become parameters of the generated class).
  val codegen = new ScalaCodeGenPkg {
    val IR: self.type = self
    override def emitNode(sym: Sym[Any], rhs: Def[Any]): Unit = rhs match {
      case Hole(x) =>
      case _ => super.emitNode(sym, rhs)
    }
    // Emits source for `f`, treating all Hole symbols in the block as free inputs.
    def emitSourceYinYang[T: Manifest](f: Exp[T], className: String, stream: PrintWriter): List[(Sym[Any], Any)] = {
      val body = reifyBlock(f)
      // Collect the Hole symbols so they can be passed as arguments to the generated code.
      val syms: List[Sym[_]] = focusBlock(body) {
        innerScope flatMap {
          case TP(sym, rhs) =>
            rhs match {
              case Hole(x) => scala.List(sym)
              case _ => Nil
            }
          case _ => Nil
        }
      }
      emitSource(syms, body, className, stream)
    }
  }
  /*
   * Ret must be Nothing* => T. If I was only smarter to make this work without a convention :/
   */
  def compile[T: TypeTag, Ret] = {
    if (this.compiler eq null)
      setupCompiler()
    // Fresh class name per compilation to avoid clashes in the shared class loader.
    val className = "staged$" + compileCount
    compileCount += 1
    val source = new StringWriter()
    codegen.emitSourceYinYang(main.asInstanceOf[Exp[T]], className, new PrintWriter(source))
    val compiler = this.compiler
    val run = new compiler.Run
    // Compile into an in-memory virtual directory rather than the file system.
    val fileSystem = new VirtualDirectory("<vfs>", None)
    compiler.settings.outputDirs.setSingleOutput(fileSystem)
    Predef.println(source)
    run.compileSources(scala.List(new util.BatchSourceFile("<stdin>", source.toString)))
    reporter.printSummary()
    reporter.reset
    // NOTE(review): `parent` is computed but unused; the loader below re-derives the class loader.
    val parent = this.getClass.getClassLoader
    val loader = new AbstractFileClassLoader(fileSystem, this.getClass.getClassLoader)
    val cls: Class[_] = loader.loadClass(className)
    cls.getConstructor().newInstance().asInstanceOf[Ret]
  }
  // Compiles and immediately invokes the staged program with the given hole values.
  // NOTE(review): only 0-2 parameters are handled; 3+ parameters raise a MatchError.
  def interpret[T: TypeTag](params: Nothing*): T = {
    params.length match {
      case 0 =>
        compile[T, () => T].apply
      case 1 =>
        compile[T, Any => T].apply(params(0))
      case 2 =>
        compile[T, (Any, Any) => T].apply(params(0), params(1))
    }
  }
}
import ppl.dsl.optiml._
// OptiML front end: stubbed interpretation (returns 0) with primitive type aliases
// re-pointed at the Scala types so staged code can refer to them by simple name.
trait OptiML extends OptiMLApplicationRunner with LMSYinYang with Interpreted {
  def mainDelite(): Any
  // Delite drives execution through mainDelite(); the plain main() is intentionally unimplemented.
  override def main(): Unit = ???
  // Stub: real execution happens through the Delite runtime, not this interpreter.
  def interpret[T: Manifest](params: Any*) = 0.asInstanceOf[T]
  type Boolean = scala.Boolean
  type Int = scala.Int
  type Unit = scala.Unit
  type Nothing = scala.Nothing
  type Any = scala.Any
}
import ppl.dsl.optigraph._
// OptiGraph front end with the type aliases and implicit shims needed to splice
// shallow-embedded code against the deep OptiGraph DSL types.
abstract class OptiGraph extends OptiGraphApplicationRunner with LMSYinYang with Interpreted {
  def mainDelite(): Any
  // Delite drives execution through mainDelite(); the plain main() is intentionally unimplemented.
  override def main(): Unit = ???
  // Stub: real execution happens through the Delite runtime, not this interpreter.
  def interpret[T: Manifest](params: Any*) = 0.asInstanceOf[T]
  /* This rewiring is special for OptiGraph since it keeps the
   types locked in the jar. */
  type Long = scala.Long
  type Int = scala.Int
  type Float = scala.Float
  type Double = scala.Double
  type Boolean = scala.Boolean
  type String = Predef.String
  type Unit = scala.Unit
  type Nothing = scala.Nothing
  type Any = scala.Any
  type Array[T] = scala.Array[T]
  type Graph = ppl.dsl.optigraph.Graph
  type Node = ppl.dsl.optigraph.Node
  type Edge = ppl.dsl.optigraph.Edge
  type GSet[T] = ppl.dsl.optigraph.GSet[T]
  type GSeq[T] = ppl.dsl.optigraph.GSeq[T]
  type GOrder[T] = ppl.dsl.optigraph.GOrder[T]
  type GIterable[T] = ppl.dsl.optigraph.GIterable[T]
  type Deferrable[T] = ppl.dsl.optigraph.Deferrable[T]
  type Reduceable[T] = ppl.dsl.optigraph.Reduceable[T]
  type NodeProperty[T] = ppl.dsl.optigraph.NodeProperty[T]
  type EdgeProperty[T] = ppl.dsl.optigraph.EdgeProperty[T]
  // Casts closures written against the shallow Node type to the deep DSL Node type.
  implicit def fixClosureContravariance[T](v: Rep[shallow.optigraph.Node] => Rep[T]) =
    v.asInstanceOf[Rep[ppl.dsl.optigraph.Node] => Rep[T]]
  implicit def fixOverloaded(x: Rep[Overloaded1]) = null.asInstanceOf[Overloaded1]
  override implicit def repNodeToNodeOps(n: Rep[Node]) = new NodeOpsCls(n)
  // Re-expose standard implicits that the rewired aliases would otherwise hide.
  implicit val ManifestFactory = scala.reflect.ManifestFactory
  implicit val IntIsIntegral = scala.math.Numeric.IntIsIntegral
  object Numeric {
    val IntIsIntegral = scala.math.Numeric.IntIsIntegral
    val DoubleIsFractional = scala.math.Numeric.DoubleIsFractional
  }
  val DoubleIsFractional = scala.math.Numeric.DoubleIsFractional
  object Overloaded1 extends Overloaded1
}
| vjovanov/scala-yinyang | components/delite-test/src/lifted/ScalaPkgTest.scala | Scala | bsd-3-clause | 5,066 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import scala.collection.mutable.ArrayBuffer
import org.apache.commons.lang3.StringUtils
import org.apache.hadoop.fs.{BlockLocation, FileStatus, LocatedFileStatus, Path}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.{InternalRow, TableIdentifier}
import org.apache.spark.sql.catalyst.catalog.BucketSpec
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.physical.{HashPartitioning, Partitioning, UnknownPartitioning}
import org.apache.spark.sql.execution.datasources._
import org.apache.spark.sql.execution.datasources.parquet.{ParquetFileFormat => ParquetSource}
import org.apache.spark.sql.execution.metric.SQLMetrics
import org.apache.spark.sql.sources.{BaseRelation, Filter}
import org.apache.spark.sql.types.StructType
import org.apache.spark.util.Utils
// Common behaviour for data-source scan nodes: node naming and (redacted) string rendering
// of the scan's metadata for explain output.
trait DataSourceScanExec extends LeafExecNode with CodegenSupport {
  val relation: BaseRelation
  val tableIdentifier: Option[TableIdentifier]
  // Prefix prepended to nodeName in simpleString (e.g. "File" for file scans).
  protected val nodeNamePrefix: String = ""
  override val nodeName: String = {
    s"Scan $relation ${tableIdentifier.map(_.unquotedString).getOrElse("")}"
  }
  // Metadata that describes more details of this scan.
  protected def metadata: Map[String, String]
  override def simpleString: String = {
    // Each metadata value is redacted and capped at 100 characters for display.
    val metadataEntries = metadata.toSeq.sorted.map {
      case (key, value) =>
        key + ": " + StringUtils.abbreviate(redact(value), 100)
    }
    val metadataStr = Utils.truncatedString(metadataEntries, " ", ", ", "")
    s"$nodeNamePrefix$nodeName${Utils.truncatedString(output, "[", ",", "]")}$metadataStr"
  }
  override def verboseString: String = redact(super.verboseString)
  override def treeString(verbose: Boolean, addSuffix: Boolean): String = {
    redact(super.treeString(verbose, addSuffix))
  }
  /**
   * Shorthand for calling redactString() without specifying redacting rules
   */
  private def redact(text: String): String = {
    Utils.redact(sqlContext.sessionState.conf.stringRedationPattern, text)
  }
}
/** Physical plan node for scanning data from a relation. */
case class RowDataSourceScanExec(
    fullOutput: Seq[Attribute],
    requiredColumnsIndex: Seq[Int],
    filters: Set[Filter],
    handledFilters: Set[Filter],
    rdd: RDD[InternalRow],
    @transient relation: BaseRelation,
    override val tableIdentifier: Option[TableIdentifier])
  extends DataSourceScanExec {
  // Output is the subset of fullOutput selected by requiredColumnsIndex, in index order.
  def output: Seq[Attribute] = requiredColumnsIndex.map(fullOutput)
  override lazy val metrics =
    Map("numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))
  // Non-codegen path: project each row to UnsafeRow and count it.
  protected override def doExecute(): RDD[InternalRow] = {
    val numOutputRows = longMetric("numOutputRows")
    rdd.mapPartitionsWithIndexInternal { (index, iter) =>
      val proj = UnsafeProjection.create(schema)
      proj.initialize(index)
      iter.map( r => {
        numOutputRows += 1
        proj(r)
      })
    }
  }
  override def inputRDDs(): Seq[RDD[InternalRow]] = {
    rdd :: Nil
  }
  // Whole-stage codegen: emit a loop that drains the input iterator, bumping the row counter.
  override protected def doProduce(ctx: CodegenContext): String = {
    val numOutputRows = metricTerm(ctx, "numOutputRows")
    // PhysicalRDD always just has one input
    val input = ctx.addMutableState("scala.collection.Iterator", "input", v => s"$v = inputs[0];")
    val exprRows = output.zipWithIndex.map{ case (a, i) =>
      BoundReference(i, a.dataType, a.nullable)
    }
    val row = ctx.freshName("row")
    ctx.INPUT_ROW = row
    ctx.currentVars = null
    val columnsRowInput = exprRows.map(_.genCode(ctx))
    s"""
       |while ($input.hasNext()) {
       |  InternalRow $row = (InternalRow) $input.next();
       |  $numOutputRows.add(1);
       |  ${consume(ctx, columnsRowInput).trim}
       |  if (shouldStop()) return;
       |}
     """.stripMargin
  }
  // Filters pushed down to (and handled by) the source are marked with a leading '*'.
  override val metadata: Map[String, String] = {
    val markedFilters = for (filter <- filters) yield {
      if (handledFilters.contains(filter)) s"*$filter" else s"$filter"
    }
    Map(
      "ReadSchema" -> output.toStructType.catalogString,
      "PushedFilters" -> markedFilters.mkString("[", ", ", "]"))
  }
  // Don't care about `rdd` and `tableIdentifier` when canonicalizing.
  override def doCanonicalize(): SparkPlan =
    copy(
      fullOutput.map(QueryPlan.normalizeExprId(_, fullOutput)),
      rdd = null,
      tableIdentifier = None)
}
/**
* Physical plan node for scanning data from HadoopFsRelations.
*
* @param relation The file-based relation to scan.
* @param output Output attributes of the scan, including data attributes and partition attributes.
* @param requiredSchema Required schema of the underlying relation, excluding partition columns.
* @param partitionFilters Predicates to use for partition pruning.
* @param dataFilters Filters on non-partition columns.
* @param tableIdentifier identifier for the table in the metastore.
*/
case class FileSourceScanExec(
@transient relation: HadoopFsRelation,
output: Seq[Attribute],
requiredSchema: StructType,
partitionFilters: Seq[Expression],
dataFilters: Seq[Expression],
override val tableIdentifier: Option[TableIdentifier])
extends DataSourceScanExec with ColumnarBatchScan {
  // Whether the underlying format can return ColumnarBatches for this output schema.
  override val supportsBatch: Boolean = relation.fileFormat.supportBatch(
    relation.sparkSession, StructType.fromAttributes(output))
  // The vectorized Parquet reader produces rows that are not UnsafeRows, so a
  // conversion step is needed before operators that require UnsafeRow.
  override val needsUnsafeRowConversion: Boolean = {
    if (relation.fileFormat.isInstanceOf[ParquetSource]) {
      SparkSession.getActiveSession.get.sessionState.conf.parquetVectorizedReaderEnabled
    } else {
      false
    }
  }
  // Concrete column-vector class names used for batch reads, if the format declares them.
  override def vectorTypes: Option[Seq[String]] =
    relation.fileFormat.vectorTypes(
      requiredSchema = requiredSchema,
      partitionSchema = relation.partitionSchema,
      relation.sparkSession.sessionState.conf)
  // Partitions surviving partition pruning; evaluated once. Since listing happens at
  // planning time, the file-count and metadata-time metrics are posted to the driver eagerly.
  @transient private lazy val selectedPartitions: Seq[PartitionDirectory] = {
    val optimizerMetadataTimeNs = relation.location.metadataOpsTimeNs.getOrElse(0L)
    val startTime = System.nanoTime()
    val ret = relation.location.listFiles(partitionFilters, dataFilters)
    // Charge any optimizer-side metadata time to the same metric.
    val timeTakenMs = ((System.nanoTime() - startTime) + optimizerMetadataTimeNs) / 1000 / 1000
    metrics("numFiles").add(ret.map(_.files.size.toLong).sum)
    metrics("metadataTime").add(timeTakenMs)
    val executionId = sparkContext.getLocalProperty(SQLExecution.EXECUTION_ID_KEY)
    SQLMetrics.postDriverMetricUpdates(sparkContext, executionId,
      metrics("numFiles") :: metrics("metadataTime") :: Nil)
    ret
  }
  // Derives physical partitioning/ordering from the bucket spec (when bucketing is enabled):
  // hash partitioning when all bucket columns are read, plus a sort order when each bucket
  // additionally maps to a single file.
  override val (outputPartitioning, outputOrdering): (Partitioning, Seq[SortOrder]) = {
    val bucketSpec = if (relation.sparkSession.sessionState.conf.bucketingEnabled) {
      relation.bucketSpec
    } else {
      None
    }
    bucketSpec match {
      case Some(spec) =>
        // For bucketed columns:
        // -----------------------
        // `HashPartitioning` would be used only when:
        // 1. ALL the bucketing columns are being read from the table
        //
        // For sorted columns:
        // ---------------------
        // Sort ordering should be used when ALL these criteria's match:
        // 1. `HashPartitioning` is being used
        // 2. A prefix (or all) of the sort columns are being read from the table.
        //
        // Sort ordering would be over the prefix subset of `sort columns` being read
        // from the table.
        // eg.
        // Assume (col0, col2, col3) are the columns read from the table
        // If sort columns are (col0, col1), then sort ordering would be considered as (col0)
        // If sort columns are (col1, col0), then sort ordering would be empty as per rule #2
        // above
        def toAttribute(colName: String): Option[Attribute] =
          output.find(_.name == colName)
        val bucketColumns = spec.bucketColumnNames.flatMap(n => toAttribute(n))
        if (bucketColumns.size == spec.bucketColumnNames.size) {
          val partitioning = HashPartitioning(bucketColumns, spec.numBuckets)
          val sortColumns =
            spec.sortColumnNames.map(x => toAttribute(x)).takeWhile(x => x.isDefined).map(_.get)
          val sortOrder = if (sortColumns.nonEmpty) {
            // In case of bucketing, its possible to have multiple files belonging to the
            // same bucket in a given relation. Each of these files are locally sorted
            // but those files combined together are not globally sorted. Given that,
            // the RDD partition will not be sorted even if the relation has sort columns set
            // Current solution is to check if all the buckets have a single file in it
            val files = selectedPartitions.flatMap(partition => partition.files)
            val bucketToFilesGrouping =
              files.map(_.getPath.getName).groupBy(file => BucketingUtils.getBucketId(file))
            val singleFilePartitions = bucketToFilesGrouping.forall(p => p._2.length <= 1)
            if (singleFilePartitions) {
              // TODO Currently Spark does not support writing columns sorting in descending order
              // so using Ascending order. This can be fixed in future
              sortColumns.map(attribute => SortOrder(attribute, Ascending))
            } else {
              Nil
            }
          } else {
            Nil
          }
          (partitioning, sortOrder)
        } else {
          (UnknownPartitioning(0), Nil)
        }
      case _ =>
        (UnknownPartitioning(0), Nil)
    }
  }
  // Data filters translated to data-source Filter instances; untranslatable predicates are dropped.
  @transient
  private val pushedDownFilters = dataFilters.flatMap(DataSourceStrategy.translateFilter)
  logInfo(s"Pushed Filters: ${pushedDownFilters.mkString(",")}")
  // Human-readable scan details shown in explain output and the UI.
  override val metadata: Map[String, String] = {
    def seqToString(seq: Seq[Any]) = seq.mkString("[", ", ", "]")
    val location = relation.location
    val locationDesc =
      location.getClass.getSimpleName + seqToString(location.rootPaths)
    val metadata =
      Map(
        "Format" -> relation.fileFormat.toString,
        "ReadSchema" -> requiredSchema.catalogString,
        "Batched" -> supportsBatch.toString,
        "PartitionFilters" -> seqToString(partitionFilters),
        "PushedFilters" -> seqToString(pushedDownFilters),
        "Location" -> locationDesc)
    // PartitionCount is only meaningful for partitioned relations.
    val withOptPartitionCount =
      relation.partitionSchemaOption.map { _ =>
        metadata + ("PartitionCount" -> selectedPartitions.size.toString)
      } getOrElse {
        metadata
      }
    withOptPartitionCount
  }
  // Builds the scan RDD: a per-file reader closure, dispatched to the bucketed or
  // non-bucketed layout depending on the relation's bucket spec.
  private lazy val inputRDD: RDD[InternalRow] = {
    val readFile: (PartitionedFile) => Iterator[InternalRow] =
      relation.fileFormat.buildReaderWithPartitionValues(
        sparkSession = relation.sparkSession,
        dataSchema = relation.dataSchema,
        partitionSchema = relation.partitionSchema,
        requiredSchema = requiredSchema,
        filters = pushedDownFilters,
        options = relation.options,
        hadoopConf = relation.sparkSession.sessionState.newHadoopConfWithOptions(relation.options))
    relation.bucketSpec match {
      case Some(bucketing) if relation.sparkSession.sessionState.conf.bucketingEnabled =>
        createBucketedReadRDD(bucketing, readFile, selectedPartitions, relation)
      case _ =>
        createNonBucketedReadRDD(readFile, selectedPartitions, relation)
    }
  }
  override def inputRDDs(): Seq[RDD[InternalRow]] = {
    inputRDD :: Nil
  }
  override lazy val metrics =
    Map("numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"),
      "numFiles" -> SQLMetrics.createMetric(sparkContext, "number of files"),
      "metadataTime" -> SQLMetrics.createMetric(sparkContext, "metadata time (ms)"),
      "scanTime" -> SQLMetrics.createTimingMetric(sparkContext, "scan time"))
  // Executes the scan.  When columnar batches are supported this node is wrapped in a
  // WholeStageCodegenExec and executed through the codegen path; otherwise rows are
  // produced directly, optionally converted to UnsafeRow, while counting output rows.
  protected override def doExecute(): RDD[InternalRow] = {
    if (supportsBatch) {
      // in the case of fallback, this batched scan should never fail because of:
      // 1) only primitive types are supported
      // 2) the number of columns should be smaller than spark.sql.codegen.maxFields
      WholeStageCodegenExec(this)(codegenStageId = 0).execute()
    } else {
      val numOutputRows = longMetric("numOutputRows")
      if (needsUnsafeRowConversion) {
        // Row-based path with conversion: project each row to UnsafeRow, one
        // projection per partition (initialized with the partition index).
        inputRDD.mapPartitionsWithIndexInternal { (index, iter) =>
          val proj = UnsafeProjection.create(schema)
          proj.initialize(index)
          iter.map( r => {
            numOutputRows += 1
            proj(r)
          })
        }
      } else {
        // Rows are already in the expected format; just count them as they pass.
        inputRDD.map { r =>
          numOutputRows += 1
          r
        }
      }
    }
  }
override val nodeNamePrefix: String = "File"
  /**
   * Create an RDD for bucketed reads.
   * The non-bucketed variant of this function is [[createNonBucketedReadRDD]].
   *
   * The algorithm is pretty simple: each RDD partition being returned should include all the files
   * with the same bucket id from all the given Hive partitions.
   *
   * @param bucketSpec the bucketing spec.
   * @param readFile a function to read each (part of a) file.
   * @param selectedPartitions Hive-style partition that are part of the read.
   * @param fsRelation [[HadoopFsRelation]] associated with the read.
   */
  private def createBucketedReadRDD(
      bucketSpec: BucketSpec,
      readFile: (PartitionedFile) => Iterator[InternalRow],
      selectedPartitions: Seq[PartitionDirectory],
      fsRelation: HadoopFsRelation): RDD[InternalRow] = {
    logInfo(s"Planning with ${bucketSpec.numBuckets} buckets")
    // Wrap every selected file whole (offset 0 to length) and group the wrappers by
    // the bucket id encoded in the file name; a non-bucket file name is a hard error.
    val bucketed =
      selectedPartitions.flatMap { p =>
        p.files.map { f =>
          val hosts = getBlockHosts(getBlockLocations(f), 0, f.getLen)
          PartitionedFile(p.values, f.getPath.toUri.toString, 0, f.getLen, hosts)
        }
      }.groupBy { f =>
        BucketingUtils
          .getBucketId(new Path(f.filePath).getName)
          .getOrElse(sys.error(s"Invalid bucket file ${f.filePath}"))
      }
    // One FilePartition per bucket id; buckets with no files yield empty partitions,
    // so the RDD always has exactly numBuckets partitions.
    val filePartitions = Seq.tabulate(bucketSpec.numBuckets) { bucketId =>
      FilePartition(bucketId, bucketed.getOrElse(bucketId, Nil))
    }
    new FileScanRDD(fsRelation.sparkSession, readFile, filePartitions)
  }
  /**
   * Create an RDD for non-bucketed reads.
   * The bucketed variant of this function is [[createBucketedReadRDD]].
   *
   * Files are split (when the format allows) into chunks of at most `maxSplitBytes`,
   * sorted by decreasing size, and then bin-packed into partitions using a
   * "Next Fit Decreasing" strategy; each file also carries a fixed `openCostInBytes`
   * overhead so many tiny files don't all land in one partition.
   *
   * @param readFile a function to read each (part of a) file.
   * @param selectedPartitions Hive-style partition that are part of the read.
   * @param fsRelation [[HadoopFsRelation]] associated with the read.
   */
  private def createNonBucketedReadRDD(
      readFile: (PartitionedFile) => Iterator[InternalRow],
      selectedPartitions: Seq[PartitionDirectory],
      fsRelation: HadoopFsRelation): RDD[InternalRow] = {
    val defaultMaxSplitBytes =
      fsRelation.sparkSession.sessionState.conf.filesMaxPartitionBytes
    val openCostInBytes = fsRelation.sparkSession.sessionState.conf.filesOpenCostInBytes
    val defaultParallelism = fsRelation.sparkSession.sparkContext.defaultParallelism
    // Target split size: bounded above by the configured max and below by the open
    // cost, aiming at roughly one split per core.
    val totalBytes = selectedPartitions.flatMap(_.files.map(_.getLen + openCostInBytes)).sum
    val bytesPerCore = totalBytes / defaultParallelism
    val maxSplitBytes = Math.min(defaultMaxSplitBytes, Math.max(openCostInBytes, bytesPerCore))
    logInfo(s"Planning scan with bin packing, max size: $maxSplitBytes bytes, " +
      s"open cost is considered as scanning $openCostInBytes bytes.")
    // Splittable files are chopped into maxSplitBytes-sized pieces; others are read
    // whole.  Each piece records the hosts of the HDFS block it mostly overlaps.
    val splitFiles = selectedPartitions.flatMap { partition =>
      partition.files.flatMap { file =>
        val blockLocations = getBlockLocations(file)
        if (fsRelation.fileFormat.isSplitable(
            fsRelation.sparkSession, fsRelation.options, file.getPath)) {
          (0L until file.getLen by maxSplitBytes).map { offset =>
            val remaining = file.getLen - offset
            val size = if (remaining > maxSplitBytes) maxSplitBytes else remaining
            val hosts = getBlockHosts(blockLocations, offset, size)
            PartitionedFile(
              partition.values, file.getPath.toUri.toString, offset, size, hosts)
          }
        } else {
          val hosts = getBlockHosts(blockLocations, 0, file.getLen)
          Seq(PartitionedFile(
            partition.values, file.getPath.toUri.toString, 0, file.getLen, hosts))
        }
      }
    }.toArray.sortBy(_.length)(implicitly[Ordering[Long]].reverse)
    val partitions = new ArrayBuffer[FilePartition]
    val currentFiles = new ArrayBuffer[PartitionedFile]
    var currentSize = 0L
    /** Close the current partition and move to the next. */
    def closePartition(): Unit = {
      if (currentFiles.nonEmpty) {
        val newPartition =
          FilePartition(
            partitions.size,
            currentFiles.toArray.toSeq) // Copy to a new Array.
        partitions += newPartition
      }
      currentFiles.clear()
      currentSize = 0
    }
    // Assign files to partitions using "Next Fit Decreasing"
    splitFiles.foreach { file =>
      if (currentSize + file.length > maxSplitBytes) {
        closePartition()
      }
      // Add the given file to the current partition.
      currentSize += file.length + openCostInBytes
      currentFiles += file
    }
    closePartition()
    new FileScanRDD(fsRelation.sparkSession, readFile, partitions)
  }
private def getBlockLocations(file: FileStatus): Array[BlockLocation] = file match {
case f: LocatedFileStatus => f.getBlockLocations
case f => Array.empty[BlockLocation]
}
// Given locations of all blocks of a single file, `blockLocations`, and an `(offset, length)`
// pair that represents a segment of the same file, find out the block that contains the largest
// fraction the segment, and returns location hosts of that block. If no such block can be found,
// returns an empty array.
private def getBlockHosts(
blockLocations: Array[BlockLocation], offset: Long, length: Long): Array[String] = {
val candidates = blockLocations.map {
// The fragment starts from a position within this block
case b if b.getOffset <= offset && offset < b.getOffset + b.getLength =>
b.getHosts -> (b.getOffset + b.getLength - offset).min(length)
// The fragment ends at a position within this block
case b if offset <= b.getOffset && offset + length < b.getLength =>
b.getHosts -> (offset + length - b.getOffset).min(length)
// The fragment fully contains this block
case b if offset <= b.getOffset && b.getOffset + b.getLength <= offset + length =>
b.getHosts -> b.getLength
// The fragment doesn't intersect with this block
case b =>
b.getHosts -> 0L
}.filter { case (hosts, size) =>
size > 0L
}
if (candidates.isEmpty) {
Array.empty[String]
} else {
val (hosts, _) = candidates.maxBy { case (_, size) => size }
hosts
}
}
  // Canonical form used for plan comparison/caching: expression ids in the output and
  // in both filter lists are normalized, and the optional last argument is dropped.
  override def doCanonicalize(): FileSourceScanExec = {
    FileSourceScanExec(
      relation,
      output.map(QueryPlan.normalizeExprId(_, output)),
      requiredSchema,
      QueryPlan.normalizePredicates(partitionFilters, output),
      QueryPlan.normalizePredicates(dataFilters, output),
      None)
  }
}
| ddna1021/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/DataSourceScanExec.scala | Scala | apache-2.0 | 20,095 |
package xyz.hyperreal.prolog
import xyz.hyperreal.bvm.VM
import xyz.hyperreal.char_reader.CharReader
import scala.collection.mutable
object Compilation {
  // When true, DebugInst markers describing the source construct being compiled
  // are interleaved with the generated instructions.
  var debug = false
  // Procedure indicators implemented directly by the compiler; user clauses may
  // not redefine them (checked in phase1 alongside Builtin and Math procedures).
  val reserved =
    Set(
      indicator("true", 0),
      indicator("fail", 0),
      indicator("false", 0),
      indicator("repeat", 0),
      indicator("is", 2)
    )
  /**
   * Compiles a parsed Prolog source into `prog` in two passes:
   * phase1 collects clauses (and processes imports), phase2 generates code.
   */
  def compile(ast: PrologAST, prog: Program): Unit = {
    phase1(ast, prog)
    phase2(prog)
  }
  /**
   * Pass 1: walks the source AST, loads imported resources, and registers every
   * clause with `prog` under its head's indicator (name/arity).  Redefining a
   * builtin, math or reserved procedure is a compile-time error.
   */
  def phase1(ast: PrologAST, prog: Program): Unit =
    ast match {
      case SourceAST(clauses) => clauses foreach (phase1(_, prog))
      // `:- import(name)` directives, with the name given as an atom or a string.
      case ClauseAST(StructureAST(r, ":-", List(StructureAST(r1, "import", List(AtomAST(_, name)))))) =>
        prog.loadResource(name)
      case ClauseAST(StructureAST(r, ":-", List(StructureAST(r1, "import", List(StringAST(_, name)))))) =>
        prog.loadResource(name)
      // Rule with a structure head: head :- body.
      case ClauseAST(clause @ StructureAST(r, ":-", List(head @ StructureAST(h, name, args), body))) =>
        val f = indicator(name, args.length)
        if (Builtin.exists(f) || Math.exists(f) || reserved(f))
          r.error(s"builtin procedure '$f' can't be redefined")
        prog.clause(f, clause, head, body)
      // Rule with an atom head (arity 0).
      case ClauseAST(clause @ StructureAST(r, ":-", List(head @ AtomAST(h, name), body))) =>
        val f = indicator(name, 0)
        if (Builtin.exists(f) || Math.exists(f) || reserved(f))
          r.error(s"builtin procedure '$f' can't be redefined")
        prog.clause(f, clause, head, body)
      // Fact with arguments: registered with a `true` body.
      case ClauseAST(clause @ StructureAST(r, name, args)) =>
        val f = indicator(name, args.length)
        if (Builtin.exists(f) || Math.exists(f) || reserved(f))
          r.error(s"builtin procedure '$f' can't be redefined")
        prog.clause(f, clause, clause, TRUE)
      // Atomic fact (arity 0): also registered with a `true` body.
      case ClauseAST(clause @ AtomAST(r, name)) =>
        val f = indicator(name, 0)
        if (Builtin.exists(f) || Math.exists(f) || reserved(f))
          r.error(s"builtin procedure '$f' can't be redefined")
        prog.clause(f, clause, clause, TRUE)
    }
  /**
   * Pass 2: generates code for every procedure that has no code yet.  Each clause
   * except the last is preceded by a CutChoiceInst choice point; a placeholder slot
   * (`prog += null`) is appended after each clause and later backpatched with a
   * JumpInst to the next clause's block, chaining the clauses together.
   */
  def phase2(implicit prog: Program): Unit =
    prog.procedures foreach {
      // Only compile procedures whose block (and first clause block) is still empty.
      case proc @ Procedure(f, block, pub, clauses) if (block == null || block.length == 0) && (clauses.isEmpty || clauses.head.block.length == 0) =>
        if (!pub)
          proc.block = prog.block(f.toString)
        // Backpatching state: the block and index of the pending jump placeholder.
        var jumpblock: Block = null
        var jumpidx = 0
        for ((c, i) <- clauses.init.zipWithIndex) {
          if (pub)
            prog.block(s"$f ${i + 1}")
          if (jumpblock ne null)
            jumpblock(jumpidx) = JumpInst(c.block)
          prog.patch((ptr, len) => CutChoiceInst(len - ptr - 1)) {
            compileClause(c.ast)
          }
          jumpblock = c.block
          jumpidx = c.block.length
          prog += null
        }
        if (pub)
          prog.block(s"$f ${clauses.length}")
        if (jumpblock ne null)
          jumpblock(jumpidx) = JumpInst(clauses.last.block)
        // Last clause gets no choice point: failure here fails the whole call.
        compileClause(clauses.last.ast)
      case _ =>
    }
def dbg(msg: String, pos: CharReader)(implicit prog: Program): Any =
if (debug)
prog += DebugInst(msg, pos)
  /**
   * Generates code for a single clause given as an AST.  A fresh [[Vars]] frame is
   * allocated; the FrameInst at the start of the clause is patched afterwards with
   * the final variable count.  Returns the number of frame variables used.
   */
  def compileClause(ast: TermAST)(implicit prog: Program, vars: Vars): Int = {
    implicit val vars: Vars = new Vars
    ast match {
      // Rule with arguments: unify head args (in reverse stack order), then the body.
      case StructureAST(r, ":-", List(StructureAST(pos, f, args), body)) =>
        dbg(s"rule $f/${args.length}", pos)
        prog.patch((_, _) => FrameInst(vars.count)) {
          args.reverse foreach compileHead
          compileGoal(body, prog)
        }
        prog += ReturnInst
      // Rule with an atom head: body only.
      case StructureAST(r, ":-", List(AtomAST(pos, n), body)) =>
        dbg(s"rule $n/0", pos)
        prog.patch((_, _) => FrameInst(vars.count)) {
          compileGoal(body, prog)
        }
        prog += ReturnInst
      // Fact with arguments: head unification only.
      case StructureAST(r, f, args) =>
        dbg(s"fact $f/${args.length}", r)
        prog.patch((_, _) => FrameInst(vars.count)) {
          args.reverse foreach compileHead
        }
        prog += ReturnInst
      // Atomic fact: empty frame, immediate return.
      case AtomAST(r, name) =>
        dbg(s"fact $name/0", r)
        prog += FrameInst(0)
        prog += ReturnInst
    }
    vars.count
  }
  /**
   * Generates code for a clause given as run-time Prolog data (e.g. from assert).
   * Mirrors the AST-based overload above.
   * NOTE(review): the StructureAST/AtomAST cases below look like leftovers from the
   * AST overload — they only fire if an AST node is passed as `term: Any`; confirm
   * whether they are intentional.
   */
  def compileClause(term: Any)(implicit prog: Program): Int = {
    implicit val vars: Vars = new Vars
    term match {
      case Structure(Indicator(Symbol(":-"), 2), Array(Structure(f, args), body)) =>
        prog.patch((_, _) => FrameInst(vars.count)) {
          args.reverse foreach compileHead
          compileGoal(body, prog)
        }
        prog += ReturnInst
      case StructureAST(r, ":-", List(AtomAST(pos, n), body)) =>
        prog.patch((_, _) => FrameInst(vars.count)) {
          compileGoal(body, prog)
        }
        prog += ReturnInst
      case StructureAST(r, f, args) =>
        prog.patch((_, _) => FrameInst(vars.count)) {
          args.reverse foreach compileHead
        }
        prog += ReturnInst
      case AtomAST(r, name) =>
        prog += FrameInst(0)
        prog += ReturnInst
    }
    vars.count
  }
  /**
   * Generates head-argument unification code for one argument of a clause head.
   * Atoms and numbers are pushed and unified against the incoming argument;
   * variables bind via VarUnifyInst; anonymous variables simply drop the argument;
   * structures check the functor and then unify each element in turn.
   * The large commented-out sections are inactive tuple/list/alternation handling.
   */
  def compileHead(term: TermAST)(implicit prog: Program, vars: Vars): Unit =
    term match {
      // case TupleStructureAST( _, args ) =>
      //   code += TypeCheckInst( struc, pos )
      //
      //   args.zipWithIndex foreach {
      //     case (e, i) =>
      //       if (i < args.length - 1)
      //         code += DupInst
      //
      //       code += TupleElementInst( i )
      //       compileHead( e, pos, namespaces )
      //   }
      // case ListStructureAST( _, l ) =>
      //   code += TypeCheckInst( term, pos )
      //
      //   l foreach { e =>
      //     code += DupInst
      //     code += EmptyInst
      //     code += BranchIfNotInst( 1 )
      //     code += FailInst
      //     code += DupInst
      //     code += ListHeadInst
      //     compileHead( e, pos, namespaces )
      //     code += ListTailInst
      //   }
      //
      //   code += EmptyInst
      //   code += BranchIfInst( 1 )
      //   code += FailInst
      // case ConsStructureAST( _, head, tail ) =>
      //   code += TypeCheckInst( term, pos )
      //   code += DupInst
      //   code += ListHeadInst
      //   compileHead( head, pos, namespaces )
      //   code += ListTailInst
      //   compileHead( tail, pos, namespaces )
      case AtomAST(r, "[]") =>
        dbg("get nil", r)
        prog += NilUnifyInst
      case AtomAST(r, n) =>
        dbg(s"get atom $n", r)
        prog += PushInst(Symbol(n))
        prog += UnifyInst
      case AnonymousAST(r) =>
        dbg("get anonymous", r)
        prog += DropInst
      case VariableAST(r, name) =>
        dbg(s"get variable $name", r)
        prog += VarUnifyInst(vars.num(name))
      case StructureAST(r, name, args) =>
        dbg(s"get structure $name/${args.length}", r)
        prog += FunctorInst(Indicator(Symbol(name), args.length))
        // The structure is duplicated before each element except the last, so the
        // final ElementUnifyInst consumes it.
        args.zipWithIndex foreach {
          case (e, i) =>
            dbg(s"get arg $i", e.pos)
            if (i < args.length - 1)
              prog += DupInst
            compileTerm(e)
            prog += ElementUnifyInst(i)
        }
      // case AlternationStructureAST( l ) =>
      //   val jumps = new ArrayBuffer[Int]
      //
      //   for (s <- l.init) {
      //     val backptr = code.length
      //
      //     code += null
      //     compileHead( s, pos, namespaces )
      //     jumps += code.length
      //     code += null
      //     code(backptr) = ChoiceInst( code.length - backptr - 1 )
      //   }
      //
      //   compileHead( l.last, pos, namespaces )
      //
      //   for (b <- jumps)
      //     code(b) = BranchInst( code.length - b - 1 )
      case n: NumericAST =>
        dbg(s"get number ${n.v}", n.pos)
        prog += PushInst(n.v)
        prog += UnifyInst
    }
  /**
   * Head-argument compilation for the run-time-data clause path.
   * NOTE(review): although the parameter is typed `Any`, every case matches AST
   * node types — identical to the AST overload above; confirm whether run-time
   * Structure/Symbol heads should be handled here as well.
   */
  def compileHead(term: Any)(implicit prog: Program, vars: Vars): Unit =
    term match {
      case AtomAST(r, "[]") =>
        dbg("get nil", r)
        prog += NilUnifyInst
      case AtomAST(r, n) =>
        dbg(s"get atom $n", r)
        prog += PushInst(Symbol(n))
        prog += UnifyInst
      case AnonymousAST(r) =>
        dbg("get anonymous", r)
        prog += DropInst
      case VariableAST(r, name) =>
        dbg(s"get variable $name", r)
        prog += VarUnifyInst(vars.num(name))
      case StructureAST(r, name, args) =>
        dbg(s"get structure $name/${args.length}", r)
        prog += FunctorInst(Indicator(Symbol(name), args.length))
        args.zipWithIndex foreach {
          case (e, i) =>
            dbg(s"get arg $i", e.pos)
            if (i < args.length - 1)
              prog += DupInst
            compileTerm(e)
            prog += ElementUnifyInst(i)
        }
      case n: NumericAST =>
        dbg(s"get number ${n.v}", n.pos)
        prog += PushInst(n.v)
        prog += UnifyInst
    }
  /** True if `term` contains no variables (anonymous or named) at any depth. */
  def ground(term: TermAST): Boolean =
    term match {
      case StructureAST(_, _, args) => args forall ground
      case AtomAST(_, _) | _: NumericAST => true
      case AnonymousAST(_) | VariableAST(_, _) => false
    }
  /**
   * Converts a ground AST term into its run-time representation.
   * NOTE(review): the variable/anonymous case returns the sentinel `false`; this is
   * only reachable when called on non-ground terms — callers guard with [[ground]]
   * (see compileTerm), so the sentinel should never escape.
   */
  def toTerm(term: TermAST): Any =
    term match {
      case StructureAST(_, name, args) => Structure(indicator(name, args.length), args map toTerm toArray)
      case AtomAST(_, name) => Symbol(name)
      case n: NumericAST => n.v
      case AnonymousAST(_) | VariableAST(_, _) => false
    }
  /**
   * Emits code that pushes the value of an AST term onto the stack.
   * Ground structures are converted once at compile time and pushed as constants;
   * non-ground structures push their arguments and build the structure at run time.
   */
  def compileTerm(term: TermAST)(implicit prog: Program, vars: Vars): Unit =
    term match {
      case s: StructureAST if ground(s) =>
        dbg(s"put structure", s.pos)
        prog += PushInst(toTerm(s))
      case StructureAST(r, name, args) =>
        dbg(s"put structure", r)
        args foreach compileTerm
        prog += StructureInst(indicator(name, args.length))
      case AtomAST(r, name) =>
        dbg("put atom", r)
        prog += PushInst(Symbol(name))
      case AnonymousAST(r) =>
        dbg("put anonymous", r)
        prog += PushVarInst(vars.anon)
      case VariableAST(r, name) =>
        dbg("put variable", r)
        prog += PushVarInst(vars.num(name))
      case n: NumericAST =>
        dbg("put number", n.pos)
        prog += PushInst(n.v)
      case StringAST(r, s) =>
        dbg("put string", r)
        prog += PushInst(s)
    }
  /**
   * Prepares an arithmetic expression for compilation.  Each variable's first
   * occurrence inside the expression is flagged for in-place evaluation; repeated
   * occurrences are renamed (a quote is appended, mutating the AST node) and an
   * EvalInst/VarUnifyInst pair is emitted so the evaluated value is shared.
   */
  def compileArithmetic(expr: TermAST)(implicit prog: Program, vars: Vars) = {
    val seen = new mutable.HashMap[String, VariableAST]
    val exprvars = new mutable.HashMap[String, (CharReader, Int, Int)]
    def addvar(term: TermAST)(implicit vars: Vars): Unit =
      term match {
        // First occurrence: remember it and mark it for evaluation.
        case v @ VariableAST(_, name) if !seen.contains(name) =>
          seen(name) = v
          v.eval = true
        // Later occurrence: variable must already exist in the clause; rename both
        // occurrences (NB: mutates the AST nodes) and record the eval pairing.
        case v @ VariableAST(r, name) =>
          vars get name match {
            case None => r.error(s"variable '$name' does not occur previously in the clause")
            case Some(n) =>
              seen(name).name += '\\''
              seen(name).eval = false
              v.name += '\\''
              exprvars(name) = (r, n, vars.num(v.name))
          }
        case StructureAST(_, _, args) => args foreach addvar
        case _ =>
      }
    addvar(expr)
    for ((n, (r, v, v1)) <- exprvars if vars eval n) {
      prog += EvalInst(r, n, v)
      prog += VarUnifyInst(v1)
    }
  }
  /**
   * Emits code evaluating an arithmetic expression, leaving the result on the stack.
   * Built-in operators compile to dedicated instructions; other functions/constants
   * are resolved against the [[Math]] library by indicator; anything else is a
   * compile-time error.
   */
  def compileExpression(expr: TermAST)(implicit prog: Program, vars: Vars): Unit =
    expr match {
      case x: NumericAST => prog += PushInst(x.v)
      // Variable flagged by compileArithmetic: evaluate in place.
      case v @ VariableAST(pos, name) if v.eval => prog += EvalInst(pos, name, vars.num(name))
      case VariableAST(_, name) => prog += PushVarInst(vars.num(name))
      case StructureAST(pos, op @ ("+" | "-" | "*" | "/" | "mod"), List(left, right)) =>
        compileExpression(left)
        compileExpression(right)
        prog +=
          (op match {
            case "+" => AddInst
            case "-" => SubInst
            case "*" => MulInst
            case "/" => DivInst
            case "mod" => ModInst
          })
      // Unary minus.
      case StructureAST(pos, op @ "-", List(arg)) =>
        compileExpression(arg)
        prog +=
          (op match {
            case "-" => NegInst
          })
      case StructureAST(_, name, args) if Math exists indicator(name, args.length) =>
        val f = indicator(name, args.length)
        args foreach compileExpression
        prog += NativeInst(Math.function(f), Vector(), f, NATIVE_MATH)
      case StructureAST(pos, name, args) => pos.error(s"function $name/${args.length} not found")
      case AtomAST(_, name) if Math exists indicator(name, 0) =>
        val f = indicator(name, 0)
        prog += NativeInst(Math.function(f), Vector(), f, NATIVE_MATH)
      case AtomAST(pos, name) => pos.error(s"constant or system value '$name' not found")
    }
  /**
   * Generates code for one goal of a clause body (AST form).
   *
   * Control constructs — conjunction, disjunction, if-then[-else], negation as
   * failure, call/once, cut, repeat, unification and the arithmetic/term
   * comparisons — are compiled inline.  Anything else becomes a procedure call,
   * resolved in order: user procedure in `lookup`, builtin predicate, and finally
   * an indirect call resolved at run time.  Mark/Unmark bracket sub-goals whose
   * choice points must be discarded; the patch offsets are instruction-count
   * sensitive (see the inline comments).
   *
   * NOTE(review): several dbg strings are reused verbatim from other cases (e.g.
   * "term equals" for @>, "built-in" for a user-defined atom procedure) — debug
   * labels only, but worth confirming.
   */
  def compileGoal(ast: TermAST, lookup: Program)(implicit prog: Program, vars: Vars): Unit =
    ast match {
      case StructureAST(r1, ";", List(StructureAST(r, "->", List(goal1, goal2)), goal3)) =>
        dbg(s"if-then-else", r)
        prog.patch((ptr, len) => MarkInst(len - ptr)) { // need to skip over the branch
          compileGoal(goal1, lookup)
          prog += UnmarkInst
          dbg(s"then part", r)
          compileGoal(goal2, lookup)
        }
        prog.patch((ptr, len) => BranchInst(len - ptr - 1)) {
          dbg(s"else part", r1)
          compileGoal(goal3, lookup)
        }
      case StructureAST(r, "->", List(goal1, goal2)) =>
        dbg(s"if-then", r)
        prog.patch((ptr, len) => MarkInst(len - ptr + 1)) { // need to skip over the unmark/branch
          compileGoal(goal1, lookup)
        }
        prog += UnmarkInst
        prog += BranchInst(1)
        prog += FailInst
        dbg(s"then part", r)
        compileGoal(goal2, lookup)
      case StructureAST(r, "\\\\+", List(term @ (AtomAST(_, _) | StructureAST(_, _, _)))) =>
        dbg(s"not provable", r)
        prog.patch((ptr, len) => MarkInst(len - ptr + 1)) { // need to skip over the unmark/fail
          compileGoal(term, lookup)
        }
        prog += UnmarkInst
        prog += FailInst
      case StructureAST(r, "var", List(term)) =>
        dbg("var", r)
        compileTerm(term)
        prog += VarInst
      case StructureAST(r, "nonvar", List(term)) =>
        dbg("nonvar", r)
        compileTerm(term)
        prog += NonvarInst
      // call/1 with a statically known callable: compile the goal inline.
      case StructureAST(r, "call", List(term @ (AtomAST(_, _) | StructureAST(_, _, _)))) =>
        dbg(s"call", r)
        prog.patch((ptr, len) => MarkInst(len - ptr + 1)) { // need to skip over the unmark/fail
          compileGoal(term, lookup)
        }
        prog += UnmarkInst
      // call/1 with a variable: compile the bound term at run time.
      case StructureAST(r, "call", List(VariableAST(pos, name))) =>
        dbg(s"call (compile)", r)
        prog += PushFrameInst
        prog += PushVarInst(vars.num(name))
        prog += NativeInst(Runtime.compileCall, Vector(), Indicator(Symbol("$compileCall"), 0), NATIVE_RUNTIME)
        prog += MarkInst(2)
        prog += CallBlockInst
        prog += UnmarkInst
      case StructureAST(r, "call", List(arg)) => r.error(s"call: term should be callable: $arg")
      case StructureAST(r, "once", List(term @ (AtomAST(_, _) | StructureAST(_, _, _)))) =>
        dbg(s"once", r)
        prog.patch((ptr, len) => MarkInst(len - ptr)) { // need to skip over the unmark
          compileGoal(term, lookup)
        }
        prog += UnmarkInst
      case StructureAST(r, "once", List(VariableAST(pos, name))) =>
        dbg(s"once (compile)", r)
        prog += PushFrameInst
        prog += PushVarInst(vars.num(name))
        prog += NativeInst(Runtime.compileCall, Vector(), Indicator(Symbol("$compileCall"), 0), NATIVE_RUNTIME)
        prog += MarkInst(2)
        prog += CallBlockInst
        prog += UnmarkInst
      case StructureAST(r, "once", List(arg)) => r.error(s"once: term should be callable: $arg")
      case StructureAST(_, ",", List(left, right)) =>
        compileGoal(left, lookup)
        compileGoal(right, lookup)
      case StructureAST(r, ";", List(left, right)) =>
        dbg("disjunction", r)
        prog.patch((ptr, len) => ChoiceInst(len - ptr)) { // need to skip over the branch
          compileGoal(left, lookup)
        }
        prog.patch((ptr, len) => BranchInst(len - ptr - 1)) {
          compileGoal(right, lookup)
        }
      case AtomAST(_, "true") => // no code to emit for true/0
      case AtomAST(r, "false" | "fail") =>
        dbg("fail", r)
        prog += FailInst
      case AtomAST(r, "!") =>
        dbg("cut", r)
        prog += CutInst
      case AtomAST(r, "repeat") =>
        dbg("repeat", r)
        prog += ChoiceInst(-1)
      case StructureAST(pos, "==", List(left, right)) =>
        dbg("term equals", pos)
        compileTerm(left)
        compileTerm(right)
        prog += TermEqInst
      // \== is compiled as the negation of ==.
      case StructureAST(pos, "\\\\==", List(left, right)) =>
        dbg("term equals", pos)
        prog.patch((ptr, len) => MarkInst(len - ptr + 1)) { // need to skip over the unmark/fail
          compileTerm(left)
          compileTerm(right)
          prog += TermEqInst
        }
        prog += UnmarkInst
        prog += FailInst
      case StructureAST(pos, "@<", List(left, right)) =>
        dbg("term less than", pos)
        compileTerm(left)
        compileTerm(right)
        prog += TermLtInst
      // @> is compiled as the negation of @=<.
      case StructureAST(pos, "@>", List(left, right)) =>
        dbg("term equals", pos)
        prog.patch((ptr, len) => MarkInst(len - ptr + 1)) { // need to skip over the unmark/fail
          compileTerm(left)
          compileTerm(right)
          prog += TermLeInst
        }
        prog += UnmarkInst
        prog += FailInst
      case StructureAST(pos, "@=<", List(left, right)) =>
        dbg("term less than or equal", pos)
        compileTerm(left)
        compileTerm(right)
        prog += TermLeInst
      // @>= is compiled as the negation of @<.
      case StructureAST(pos, "@>=", List(left, right)) =>
        dbg("term greater than or equal", pos)
        prog.patch((ptr, len) => MarkInst(len - ptr + 1)) { // need to skip over the unmark/fail
          compileTerm(left)
          compileTerm(right)
          prog += TermLtInst
        }
        prog += UnmarkInst
        prog += FailInst
      // Unification with a variable on either side binds directly.
      case StructureAST(pos, "=", List(VariableAST(_, lname), right)) =>
        dbg("unify", pos)
        compileTerm(right)
        prog += VarUnifyInst(vars.num(lname))
      case StructureAST(pos, "=", List(left, VariableAST(_, rname))) =>
        dbg("unify", pos)
        compileTerm(left)
        prog += VarUnifyInst(vars.num(rname))
      case StructureAST(pos, "=", List(left, right)) =>
        dbg("unify", pos)
        compileTerm(left)
        compileTerm(right)
        prog += UnifyInst
      case StructureAST(pos, "\\\\=", List(left, right)) =>
        dbg("not unifiable", pos)
        prog.patch((ptr, len) => MarkInst(len - ptr - 1)) {
          compileTerm(left)
          compileTerm(right)
          prog += UnifyInst
          prog += UnmarkInst
          prog += FailInst
        }
      case StructureAST(pos, "is", List(VariableAST(_, lname), expr)) =>
        compileArithmetic(expr)
        compileExpression(expr)
        prog += VarUnifyInst(vars.num(lname))
      case StructureAST(_, "is", List(head, _)) => head.pos.error(s"variable was expected")
      case StructureAST(pos, "=:=", List(left, right)) =>
        compileArithmetic(ast)
        compileExpression(left)
        compileExpression(right)
        prog += EqInst
      case StructureAST(pos, "=\\\\=", List(left, right)) =>
        compileArithmetic(ast)
        compileExpression(left)
        compileExpression(right)
        prog += NeInst
      // NOTE(review): the ordering comparisons push right before left, unlike =:=.
      case StructureAST(pos, "<", List(left, right)) =>
        compileArithmetic(ast)
        compileExpression(right)
        compileExpression(left)
        prog += LtInst
      case StructureAST(pos, "=<", List(left, right)) =>
        compileArithmetic(ast)
        compileExpression(right)
        compileExpression(left)
        prog += LeInst
      case StructureAST(pos, ">", List(left, right)) =>
        compileArithmetic(ast)
        compileExpression(right)
        compileExpression(left)
        prog += GtInst
      case StructureAST(pos, ">=", List(left, right)) =>
        compileArithmetic(ast)
        compileExpression(right)
        compileExpression(left)
        prog += GeInst
      // Procedure calls: user-defined, builtin, then indirect (resolved at run time).
      case StructureAST(r, name, args) if lookup.defined(name, args.length) =>
        val f = indicator(name, args.length)
        dbg(s"procedure $f", r)
        prog += PushFrameInst
        args foreach compileTerm
        prog += CallProcedureInst(lookup procedure f)
      case StructureAST(r, name, args) if Builtin exists indicator(name, args.length) =>
        val f = indicator(name, args.length)
        dbg(s"built-in $f", r)
        args foreach compileTerm
        prog += NativeInst(Builtin.predicate(f), args map (_.pos) toVector, f, NATIVE_PREDICATE)
      case StructureAST(pos, name, args) =>
        val f = indicator(name, args.length)
        dbg(s"call procedure (indirect) $f", pos)
        prog += PushFrameInst
        args foreach compileTerm
        prog += CallIndirectInst(pos, f)
      case AtomAST(r, name) if lookup.defined(name, 0) =>
        val f = indicator(name, 0)
        dbg(s"built-in $f", r)
        prog += PushFrameInst
        prog += CallProcedureInst(lookup procedure f)
      case AtomAST(r, name) if Builtin exists indicator(name, 0) =>
        val f = indicator(name, 0)
        dbg(s"built-in $f", r)
        prog += NativeInst(Builtin.predicate(f), Vector(), f, NATIVE_PREDICATE)
      case AtomAST(r, name) =>
        val f = indicator(name, 0)
        dbg(s"procedure (indirect) $f", r)
        prog += PushFrameInst
        prog += CallIndirectInst(r, indicator(name, 0))
      case _ => sys.error(s"illegal goal term: $ast")
    }
  /**
   * Pushes the value of a run-time term onto the stack: ground structures as
   * constants, non-ground structures built element by element; VM variables and
   * all other values are pushed directly.
   */
  def compileTerm(term: Any)(implicit prog: Program, vars: Vars): Unit =
    term match {
      case s: Structure if groundTerm(s) => prog += PushInst(s)
      case Structure(f, args) =>
        args foreach compileTerm
        prog += StructureInst(f)
      // case v: VM#Variable if v.name == "_" => prog += VarInst( vars.anon )
      case v: VM#Variable => prog += PushInst(v) //VarInst( vars.num(v.name) )
      case _ => prog += PushInst(term)
    }
/*
def compileArithmetic( expr: Any )( implicit prog: Program, vars: Vars ) {
val seen = new mutable.HashMap[String, VariableAST]
val exprvars = new mutable.HashMap[String, (CharReader, Int, Int)]
def addvar( term: TermAST )( implicit vars: Vars ): Unit =
term match {
case v@VariableAST( _, name ) if !seen.contains(name) =>
seen(name) = v
v.eval = true
case v@VariableAST( r, name ) =>
vars get name match {
case None => r.error( s"variable '$name' does not occur previously in the clause" )
case Some( n ) =>
seen(name).name += '\\''
seen(name).eval = false
v.name += '\\''
exprvars(name) = (r, n, vars.num( v.name ))
}
case StructureAST( _, _, args ) => args foreach addvar
case _ =>
}
addvar( expr )
for ((n, (r, v, v1)) <- exprvars if vars eval n) {
prog += EvalInst( r, n, v )
prog += VarUnifyInst( v1 )
}
}
def compileExpression( expr: Any )( implicit prog: Program, vars: Vars ): Unit =
expr match {
case x: NumericAST => prog += PushInst( x.v )
case v@VariableAST( pos, name ) if v.eval => prog += EvalInst( pos, name, vars.num(name) )
case VariableAST( _, name ) => prog += VarInst( vars.num(name) )
case StructureAST( pos, op@("+"|"-"|"*"|"/"|"mod"), List(left, right) ) =>
compileExpression( left )
compileExpression( right )
prog +=
(op match {
case "+" => AddInst
case "-" => SubInst
case "*" => MulInst
case "/" => DivInst
case "mod" => ModInst
})
case StructureAST( pos, op@"-", List(arg) ) =>
compileExpression( arg )
prog +=
(op match {
case "-" => NegInst
})
case StructureAST( _, name, args ) if Math exists functor( name, args.length ) =>
args foreach compileExpression
prog += NativeInst( Math.function(functor(name, args.length)) )
case StructureAST( pos, name, args ) => pos.error( s"function $name/${args.length} not found" )
case AtomAST( _, name ) if Math exists functor( name, 0 ) =>
prog += NativeInst( Math.function(functor( name, 0)) )
case AtomAST( pos, name ) => pos.error( s"constant '$name' not found" )
}
*/
  /**
   * Generates code for a goal given as run-time Prolog data (e.g. asserted clauses),
   * mirroring the AST-based [[compileGoal]].  The commented-out cases (call/once,
   * `is` and the arithmetic comparisons) have not been ported from the AST version,
   * so those constructs fall through to the generic call cases at the bottom.
   */
  def compileGoal(data: Any, lookup: Program)(implicit prog: Program, vars: Vars): Unit =
    data match {
      case Structure(Indicator(Symbol(";"), 2), Array(Structure(Indicator(Symbol("->"), 2), Array(goal1, goal2)), goal3)) =>
        prog.patch((ptr, len) => MarkInst(len - ptr)) { // need to skip over the branch
          compileGoal(goal1, lookup)
          prog += UnmarkInst
          compileGoal(goal2, lookup)
        }
        prog.patch((ptr, len) => BranchInst(len - ptr - 1)) {
          compileGoal(goal3, lookup)
        }
      case Structure(Indicator(Symbol("->"), 2), Array(goal1, goal2)) =>
        prog.patch((ptr, len) => MarkInst(len - ptr + 1)) { // need to skip over the unmark/branch
          compileGoal(goal1, lookup)
        }
        prog += UnmarkInst
        prog += BranchInst(1)
        prog += FailInst
        compileGoal(goal2, lookup)
      case Structure(Indicator(Symbol("\\\\+"), 1), Array(term @ (_: Symbol | _: Structure))) =>
        prog.patch((ptr, len) => MarkInst(len - ptr + 1)) { // need to skip over the unmark/fail
          compileGoal(term, lookup)
        }
        prog += UnmarkInst
        prog += FailInst
      //      case StructureAST( r, "call", List(term@(AtomAST(_, _) | StructureAST( _, _, _ ))) ) =>
      //        dbg( s"call", r )
      //        prog.patch( (ptr, len) => MarkInst(len - ptr + 1) ) { // need to skip over the unmark/fail
      //          compileBody( term ) }
      //        prog += UnmarkInst
      //      case StructureAST( r, "once", List(term@(AtomAST(_, _) | StructureAST( _, _, _ ))) ) =>
      //        dbg( s"once", r )
      //        prog.patch( (ptr, len) => MarkInst(len - ptr) ) { // need to skip over the unmark
      //          compileGoal( term, lookup ) }
      //        prog += UnmarkInst
      case Structure(Indicator(Symbol(","), 2), Array(left, right)) =>
        compileGoal(left, lookup)
        compileGoal(right, lookup)
      case Structure(Indicator(Symbol(";"), 2), Array(left, right)) =>
        prog.patch((ptr, len) => ChoiceInst(len - ptr)) { // need to skip over the branch
          compileGoal(left, lookup)
        }
        prog.patch((ptr, len) => BranchInst(len - ptr - 1)) {
          compileGoal(right, lookup)
        }
      case Symbol("true") => // no code to emit for true/0
      case Symbol("false") | Symbol("fail") => prog += FailInst
      case Symbol("!") => prog += CutInst
      case Symbol("repeat") => prog += ChoiceInst(-1)
      //      case Structure( Functor(Symbol("="), 2), Array(v: VM#Variable, right) ) =>
      //        compileTerm( right )
      //        prog += VarUnifyInst( vars.num(lname) )
      //      case StructureAST( pos, "=", List(left, VariableAST(_, rname)) ) =>
      //        compileTerm( left )
      //        prog += VarUnifyInst( vars.num(rname) )
      case Structure(Indicator(Symbol("="), 2), Array(left, right)) =>
        compileTerm(left)
        compileTerm(right)
        prog += UnifyInst
      case Structure(Indicator(Symbol("\\\\="), 2), Array(left, right)) =>
        prog.patch((ptr, len) => MarkInst(len - ptr - 1)) {
          compileTerm(left)
          compileTerm(right)
          prog += UnifyInst
          prog += UnmarkInst
          prog += FailInst
        }
      // Procedure calls: user-defined, builtin, then indirect (resolved at run time).
      case Structure(f, args) if lookup defined f =>
        prog += PushFrameInst
        args foreach compileTerm
        prog += CallProcedureInst(lookup.procedure(f))
      case Structure(f, args) if Builtin exists f =>
        args foreach compileTerm
        prog += NativeInst(Builtin.predicate(f), Vector.fill(args.length)(null), f, NATIVE_PREDICATE)
      case Structure(f, args) =>
        prog += PushFrameInst
        args foreach compileTerm
        prog += CallIndirectInst(null, f)
      case a: Symbol if lookup defined Indicator(a, 0) =>
        prog += PushFrameInst
        prog += CallProcedureInst(lookup.procedure(Indicator(a, 0)))
      case a: Symbol if Builtin exists Indicator(a, 0) =>
        val f = Indicator(a, 0)
        prog += NativeInst(Builtin predicate f, Vector(), f, NATIVE_PREDICATE)
      case a: Symbol =>
        prog += PushFrameInst
        prog += CallIndirectInst(null, Indicator(a, 0))
    }
}
| edadma/funl | prolog/src/main/scala/xyz/hyperreal/prolog/Compilation.scala | Scala | mit | 30,318 |
// Deliberate negative (error) test for the compiler: the `_*}` pattern and the
// trailing `}` are intentionally malformed, and the inline `// error:` comments
// mark the diagnostics the compiler is expected to emit.  Do NOT "fix" this code.
object Test {
  def main(args: Array[String]) = {
    val is = List(1,2,3)
    is match {
      case List(1, _*} =>    // error: pattern expected
    }
  }
} // error: eof expected, but '}' found
| dotty-staging/dotty | tests/neg/t5702-neg-bad-brace.scala | Scala | apache-2.0 | 196 |
package edu.gemini.qv.plugin.chart.ui
import java.awt.Color
import javax.swing.UIManager
import edu.gemini.qv.plugin.chart.Axis
import edu.gemini.qv.plugin.filter.core._
import edu.gemini.qv.plugin.filter.ui.CategoriesFilter
import edu.gemini.qv.plugin.filter.ui.FilterElement.FilterElementChanged2
import edu.gemini.qv.plugin.ui.QvGui
import edu.gemini.qv.plugin.{QvContext, QvStore}
import scala.swing.GridBagPanel.Fill._
import scala.swing.Swing._
import scala.swing._
import scala.swing.event._
object AxisEditor {
  /** Builds an [[AxisEditor]] for the axis named `initial`, backed by a fresh edit panel for `axis`. */
  def apply(ctx: QvContext, initial: String, axis: Axis) = {
    new AxisEditor(ctx, initial, new AxisEditorPanel(ctx, axis))
  }
}
/**
* Editor for axes of charts and categorized tables.
*/
class AxisEditor(ctx: QvContext, initial: String, panel: AxisEditorPanel) extends
  // TODO: The dynamic on-the-fly axes should become part of the default axes, so we don't need to treat them separately.
  ElementEditor("Axis", initial, (QvStore.DefaultAxes ++ Axis.Dynamics).map(_.label).toSet, QvStore.axes.map(_.label).toSet, Some(panel)) {
  // The axis currently being edited, as reflected by the embedded panel.
  def axis = panel.axis
  // Removes the axis with the editor's current name from the store.
  def delete() {
    QvStore.removeAxis(elementName)
  }
  // Stores the edited axis under the editor's current name, keeping the panel's groups.
  def save() {
    val newAxis = Axis(elementName, panel.axis.groups)
    QvStore.addAxis(newAxis)
  }
}
class AxisEditorPanel(ctx: QvContext, var axis: Axis) extends SplitPane(Orientation.Vertical) {
  // Left side: filter editor for the active group. Right side: the group list.
  // Both are replaced wholesale whenever the selection or axis structure changes.
  var groupEditor = new GroupsPanel(0)
  var filterEditor = new CategoriesFilter(ctx, groupEditor.active.filter.elements) {
    border = EmptyBorder(5, 5, 5, 5)
  }
  leftComponent = filterEditor
  rightComponent = groupEditor
  listenTo(groupEditor, filterEditor)
  // Rebuilds the right-hand group panel, preserving the divider position
  // and the currently active group index.
  def replaceGroupEditor() {
    // remove old
    val storedLoc = dividerLocation
    deafTo(groupEditor)
    // create and insert new
    groupEditor = new GroupsPanel(groupEditor.activeIx)
    rightComponent = groupEditor
    listenTo(groupEditor)
    dividerLocation = storedLoc
  }
  // Rebuilds the left-hand filter panel for the active group, preserving the
  // divider position and the selected filter tab.
  def replaceFilterEditor() {
    // remove old
    deafTo(filterEditor)
    // create and insert new
    val storedLoc = dividerLocation
    val storedPanel = filterEditor.selection.index
    filterEditor = new CategoriesFilter(ctx, groupEditor.active.filter.elements) {
      border = EmptyBorder(5, 5, 5, 5)
    }
    filterEditor.selection.index = storedPanel
    leftComponent = filterEditor
    listenTo(filterEditor)
    dividerLocation = storedLoc
  }
  // Selecting a group swaps in its filter editor; edits in the filter editor
  // are written back to the active group.
  reactions += {
    case GroupSelected(g) => {
      replaceFilterEditor()
    }
    case FilterElementChanged2 => {
      groupEditor.active.filter = filterEditor.filter
    }
  }
  // Right-hand panel: one button row per group plus "Delete All" and a filler.
  class GroupsPanel(var activeIx: Int) extends GridBagPanel {
    val editors = axis.groups.zipWithIndex.map{case (f, ix) => new GroupEditor(f, ix)}
    // Active group editor; the index is clamped in case groups were removed.
    def active: GroupEditor = {
      activeIx = Math.min(activeIx, editors.size-1)
      activeIx = Math.max(activeIx, 0)
      editors(activeIx)
    }
    active.select()
    border = EmptyBorder(5, 5, 5, 5)
    editors.foreach(listenTo(_))
    deafTo(this)
    // Re-publish group clicks after moving the visual selection.
    reactions += {
      case GroupSelected(g) =>
        active.deselect()
        activeIx = g.ix
        active.select()
        publish(new GroupSelected(g))
    }
    private var yPos = 0
    editors.foreach { e =>
      layout(e.groupButton) = new Constraints() {gridx=0; gridy=yPos; weightx=1.0; fill=Horizontal}
      layout(e.deleteButton) = new Constraints() {gridx=1; gridy=yPos}
      layout(e.addButton) = new Constraints() {gridx=2; gridy=yPos}
      layout(e.upButton) = new Constraints() {gridx=3; gridy=yPos}
      layout(e.downButton) = new Constraints() {gridx=4; gridy=yPos}
      yPos += 1
    }
    layout(Button("Delete All") {deleteAll()}) = new Constraints() {gridx=1; gridy=yPos; gridwidth=4; fill=Horizontal}
    yPos += 1
    // filler
    layout(Swing.VGlue) = new Constraints() {gridx=0; gridy=yPos; weighty=1.0; gridwidth=5; fill=Vertical}
    // Resets the axis to a single empty placeholder group and rebuilds both panels.
    def deleteAll() {
      axis = Axis(axis.label, Seq(new EmptyFilter("<<New>>")))
      replaceGroupEditor()
      replaceFilterEditor()
    }
  }
  // Published when a group row is clicked or programmatically selected.
  case class GroupSelected(group: GroupEditor) extends Event
  // One group row: main select button plus delete/add/up/down actions.
  // All actions mutate the enclosing panel's `axis` and rebuild the panels.
  class GroupEditor(private var _filter: Filter, val ix: Int) extends Publisher {
    val groupButton = groupButtonF(_filter)
    def select() = groupButton.background = Color.gray
    def deselect() = groupButton.background = UIManager.getColor("Button.background")
    listenTo(groupButton.mouse.clicks)
    reactions += {
      case _: MouseClicked => publish(new GroupSelected(GroupEditor.this))
    }
    def filter = _filter
    // Writing the filter updates the button label and the axis' group at this index.
    def filter_= (f: Filter): Unit = {
      _filter = f
      groupButton.text = f.name
      axis = Axis(axis.label, axis.groups.updated(ix, f))
    }
    def groupButtonF(filter: Filter) = new Button {
      text = filter.name
    }
    // remove the group from the x or y axis
    def deleteButton = new Button {
      action = new Action("") {
        icon = QvGui.DelIcon
        def apply() = {
          axis = removeFromAxis(axis, ix)
          replaceGroupEditor()
          replaceFilterEditor()
        }
      }
    }
    // insert new empty group
    def addButton = new Button {
      action = new Action("") {
        icon = QvGui.AddIcon
        def apply() = {
          axis = insertIntoAxis(axis, ix+1)
          groupEditor.activeIx = ix+1
          replaceGroupEditor()
          replaceFilterEditor()
          revalidate()
        }
      }
    }
    // move this group one position up (no-op for the first group)
    def upButton = new Button {
      action = new Action("") {
        icon = QvGui.UpIcon
        def apply() = {
          if (ix > 0) {
            axis = swapOnAxis(axis, ix-1, ix)
            groupEditor.activeIx = ix-1
            replaceGroupEditor()
          }
        }
      }
    }
    // move this group one position down (no-op for the last group)
    def downButton = new Button {
      action = new Action("") {
        icon = QvGui.DownIcon
        def apply() = {
          if (ix < groupEditor.editors.size-1) {
            axis = swapOnAxis(axis, ix+1, ix)
            groupEditor.activeIx = ix+1
            replaceGroupEditor()
          }
        }
      }
    }
    // Pure helpers producing a new Axis with groups swapped / removed / inserted.
    def swapOnAxis(a: Axis, i: Int, j: Int) = Axis(a.label, a.groups.updated(i, a.groups(j)).updated(j, a.groups(i)))
    def removeFromAxis(a: Axis, ix: Int) = {
      val shortenedGroups = a.groups.zipWithIndex.filter(_._2 != ix).unzip._1
      // Never leave the axis without groups: fall back to one empty placeholder.
      val newGroups = if (shortenedGroups.isEmpty) Seq(new EmptyFilter("<<New>>")) else shortenedGroups
      Axis(a.label, newGroups)
    }
    def insertIntoAxis(a: Axis, ix: Int) = Axis(a.label, (a.groups.slice(0, ix) :+ new EmptyFilter("<<New>>")) ++ a.groups.slice(ix, a.groups.length))
  }
}
| arturog8m/ocs | bundle/edu.gemini.qv.plugin/src/main/scala/edu/gemini/qv/plugin/chart/ui/AxisEditor.scala | Scala | bsd-3-clause | 6,535 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.utils.tf.loaders
import java.nio.ByteOrder
import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.dllib.nn.tf.{Fill => FillOps}
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.utils.tf.Context
import org.tensorflow.framework.NodeDef
import scala.reflect.ClassTag
/**
 * Loader for the TensorFlow `Fill` op.
 *
 * `build` ignores the node definition and byte order entirely and simply
 * instantiates a [[FillOps]] layer; presumably the op's inputs (dims, value)
 * are wired in as runtime inputs of that layer — confirm in FillOps.
 */
class Fill extends TensorflowOpsLoader {
  override def build[T: ClassTag](nodeDef: NodeDef, byteOrder: ByteOrder,
    context: Context[T])(implicit ev: TensorNumeric[T]): Module[T] = {
    FillOps[T]()
  }
}
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/utils/tf/loaders/Fill.scala | Scala | apache-2.0 | 1,206 |
/**
* (c) Copyright 2013 WibiData, Inc.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kiji.express.repl
import com.twitter.scalding.Args
import com.twitter.scalding.Hdfs
import com.twitter.scalding.Job
import com.twitter.scalding.Local
import com.twitter.scalding.Mode
import com.twitter.scalding.NullSource
import com.twitter.scalding.Tsv
import org.apache.hadoop.hbase.HBaseConfiguration
import org.junit.rules.TemporaryFolder
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.kiji.express.Implicits
import org.kiji.express.avro.SimpleRecord
import org.kiji.express.flow.EntityId
import org.kiji.express.flow.FlowCell
import org.kiji.express.flow.KijiInput
import org.kiji.express.flow.KijiOutput
import org.kiji.express.flow.util.ResourceUtil
import org.kiji.express.flow.util.TestingResourceUtil
import org.kiji.express.KijiSuite
import org.kiji.schema.KijiClientTest
import org.kiji.schema.KijiURI
import org.kiji.schema.layout.KijiTableLayout
import org.kiji.schema.layout.KijiTableLayouts
import org.kiji.schema.util.InstanceBuilder
@RunWith(classOf[JUnitRunner])
class KijiPipeToolSuite extends KijiClientTest with KijiSuite {
  // Hook into KijiClientTest since methods marked with JUnit's @Before and @After annotations won't
  // run when using ScalaTest.
  setupKijiTest()
  // Create test Kiji table.
  // URI of a freshly built test table, pre-populated with four rows in
  // family:column1 ("hello" x3, "world" x1) that the word-count job below expects.
  val uri: String = {
    /** Table layout to use for tests. */
    val layout: KijiTableLayout = TestingResourceUtil.layout(KijiTableLayouts.SIMPLE_TWO_COLUMNS)
    val instanceUri = new InstanceBuilder(getKiji)
      .withTable(layout.getName, layout)
      .withRow("row01").withFamily("family").withQualifier("column1").withValue(1L, "hello")
      .withRow("row02").withFamily("family").withQualifier("column1").withValue(2L, "hello")
      .withRow("row03").withFamily("family").withQualifier("column1").withValue(1L, "world")
      .withRow("row04").withFamily("family").withQualifier("column1").withValue(3L, "hello")
      .build()
      .getURI
    val tableUri = KijiURI.newBuilder(instanceUri).withTableName(layout.getName).build()
    tableUri.toString
  }
  test("A KijiPipeTool can be used to obtain a Scalding job that is run in local mode.") {
    Implicits.mode = Local(strictSources = true)
    KijiPipeToolSuite.jobToRun(Mode.putMode(Implicits.mode, Args(Nil)), uri).run
  }
  test("A KijiPipeTool can be used to obtain a Scalding job that is run with Hadoop.") {
    Implicits.mode = Hdfs(strict = true, conf = HBaseConfiguration.create())
    KijiPipeToolSuite.jobToRun(Mode.putMode(Implicits.mode, Args(Nil)), uri).run
  }
  test("A KijiPipe can be implicitly converted to a KijiPipeTool,") {
    // Run test case in local mode so we can specify the input file.
    Implicits.mode = Local(strictSources = true)
    val tempFolder = new TemporaryFolder()
    tempFolder.create()
    val inputFile = tempFolder.newFile("input-source")
    {
      import Implicits._
      import ReplImplicits._
      // Implicitly create a KijiPipe, then call KijiPipeTool's run() method on it.
      Tsv(inputFile.getAbsolutePath, fields = ('l, 's)).read
        .packGenericRecordTo(('l, 's) -> 'record)(SimpleRecord.getClassSchema)
        .insert('entityId, EntityId("foo"))
        .write(KijiOutput.builder.withTableURI(uri).build)
        .run()
    }
  }
}
object KijiPipeToolSuite {
  // A job obtained by converting a Cascading Pipe to a KijiPipe, which is then used to obtain
  // a Scalding Job from the pipe.
  // NOTE: the expected word counts are asserted inside the job's own map phase,
  // so a wrong result fails the running job rather than the calling test body.
  def jobToRun(args: Args, uri: String): Job = {
    import Implicits._
    import ReplImplicits._
    // Setup input to bind values from the "family:column1" column to the symbol 'word.
    KijiInput.builder
      .withTableURI(uri)
      .withColumns("family:column1" -> 'word)
      .build
      // Sanitize the word.
      .map('word -> 'cleanword) { words: Seq[FlowCell[CharSequence]] =>
        words.head.datum
          .toString
          .toLowerCase
      }
      // Count the occurrences of each word.
      .groupBy('cleanword) { occurences => occurences.size('count) }
      .groupAll { _.toList[(String, Int)](('cleanword, 'count) -> 'results) }
      .map('results -> ()) { results: Seq[(String, Int)] =>
        val outMap = results.toMap
        // Validate that the output is as expected.
        assert(3 == outMap("hello"))
        assert(1 == outMap("world"))
      }
      // Write the result to a file.
      .write(NullSource)
      .getJob(args)
  }
}
| kijiproject/kiji-express | kiji-express-tools/src/test/scala/org/kiji/express/repl/KijiPipeToolSuite.scala | Scala | apache-2.0 | 5,194 |
/*
* Copyright 2012-2015 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.money.logging
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{ OneInstancePerTest, Matchers, WordSpec }
import org.slf4j.Logger
// Verifies that TraceLogging forwards exceptions to its logger only when
// shouldLogExceptions is enabled. OneInstancePerTest gives each test its own
// instance, and therefore its own fresh mockLogger.
class TraceLoggingSpec extends WordSpec with Matchers with MockitoSugar with OneInstancePerTest {
  val mockLogger = mock[Logger]
  "TraceLogging" should {
    "capture exceptions into a log" in {
      // Logging enabled: the throwable must reach logger.error with this exact message.
      val testTraceLogging = new TraceLogging {
        override lazy val shouldLogExceptions: Boolean = true
        override val logger: Logger = mockLogger
      }
      val t = mock[Throwable]
      testTraceLogging.logException(t)
      verify(mockLogger).error("Tracing exception", t)
    }
    "not capture exceptions if log exceptions is not enabled" in {
      // Logging disabled: the logger must never be touched.
      val testTraceLogging = new TraceLogging {
        override lazy val shouldLogExceptions: Boolean = false
        override val logger: Logger = mockLogger
      }
      val t = mock[Throwable]
      testTraceLogging.logException(t)
      verifyZeroInteractions(mockLogger)
    }
  }
}
| ipapa/money | money-core/src/test/scala/com/comcast/money/logging/TraceLoggingSpec.scala | Scala | apache-2.0 | 1,680 |
package wandou.math.indicator
import java.awt.Color
/**
*
* @author Caoyuan Deng
*/
trait Plottable {
  /** Rendering style of this element (see [[Plot]]). */
  def plot: Plot
  /** Color of the idx-th output variable. */
  def getColor(idx: Int): Color
  /** Sets the color of the idx-th output variable. */
  def setColor(idx: Int, color: Color)
  /** Drawing order/layer of this plottable; settable via `layer_=`. */
  def layer: Int
  def layer_=(order: Int)
}
| wandoulabs/wandou-math | wandou-math/src/main/scala/wandou/math/indicator/Plottable.scala | Scala | apache-2.0 | 244 |
package net.gumbix.bioinf.string.alignment
import net.gumbix.bioinf.phylo.{JoinedTaxon, NeighborJoiningMetric, NeighborJoiningTree, Taxon}
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
/**
* Implementation of a variant of the CLUSTAL algorithm.
* @author Markus Gumbel (m.gumbel@hs-mannheim.de)
*/
class Clustal(strings: Array[String], ll: Boolean = false)
  extends AbstractMultipleAlignment(strings)
  with ProgressiveAlignment {
  logLevel = ll
  // Accumulates, per (possibly joined) taxon, the aligned strings built so far.
  // Populated first in distMetric (one singleton list per input sequence),
  // then extended by align() for every join of the guide tree.
  private val maMap = new mutable.HashMap[Taxon, List[AlignedString]]
  /**
   * Calculate the distance metrics for the neighbor joining
   * algorithm.
   * TODO Warning: Side effect as maMap is also created.
   */
  val distMetric = {
    val taxa = new ArrayBuffer[Taxon]()
    // Create some id for the sequences.
    for (i <- 0 until strings.size) {
      val taxon = new Taxon(i.toString)
      val as = new AlignedString(strings(i))
      maMap.put(taxon, List(as))
      taxa += taxon
    }
    logln("Primary pairwise alignments:")
    logln(mkAlignmentTable())
    val m = Array.ofDim[Double](alignments.size, alignments.size)
    for (i <- 0 until alignments.size; j <- 0 until alignments.size) {
      m(i)(j) = alignments(i)(j).similarity
    }
    // TODO improve distance matrix:
    // Convert similarities to distances by subtracting from the global maximum,
    // so the most similar pair gets the smallest distance.
    val maximum = m.flatten.max // Max. value in scores.
    for (i <- 0 until alignments.size; j <- 0 until alignments.size) {
      m(i)(j) = maximum - m(i)(j)
    }
    val njm = new NeighborJoiningMetric(taxa.toArray, m)
    logln("\\nDistance matrix:")
    logln(njm.mkMatrixString)
    njm
  }
  // The final multiple alignment: a direct pairwise alignment for exactly two
  // inputs, otherwise a progressive alignment driven by the NJ guide tree.
  val multipleAlignment = {
    /**
     * Insert the gaps in all aligned strings of a MSA.
     * @param msa
     * @param gaps
     */
    def insertGapsInMSA(msa: List[AlignedString], gaps: List[Int]) {
      for (as <- msa) {
        // TODO clarify why reverse
        insertGaps(as, gaps.reverse)
      }
    }
    /**
     * Align two sequences or multiple alignments or
     * any combination of both.
     * @param t Taxon indicating either a single sequence
     * or a nested taxon referring to two taxa.
     */
    def align(t: Taxon) {
      val jt = t.asInstanceOf[JoinedTaxon]
      val t1 = jt.taxa(0)
      val t2 = jt.taxa(1)
      // We can ensure that the taxa are available:
      val msa1 = maMap.get(t1).get
      val msa2 = maMap.get(t2).get
      // Align the two consensus strings; their gap positions are then
      // propagated into every member of the corresponding MSA.
      val c1 = consensusFromList(msa1)
      val c2 = consensusFromList(msa2)
      logln("\\nAlign " + t1 + " and " + t2 + ":")
      logln(" " + msa1.mkString("\\n "))
      logln("c: " + c1.toString)
      logln()
      logln(" " + msa2.mkString("\\n "))
      logln("c: " + c2.toString)
      val a = new Alignment(c1, c2, mode)
      val (as1, as2) = a.alignedStrings()
      val ins1 = as1.gaps().toList
      val ins2 = as2.gaps().toList
      logln("\\nPairwise alignment btw. consensus:")
      logln(a.makeAlignmentString(a.solution))
      insertGapsInMSA(msa1, ins1) // Insert gaps in the MSAs.
      insertGapsInMSA(msa2, ins2)
      val msa = msa1 ::: msa2 // join both (multiple) alignments
      maMap.put(t, msa)
    }
    if (strings.size == 2) { // no real msa?
      val (as1, as2) = alignments(0)(1).alignedStrings()
      Array(as1, as2)
    } else {
      val m = new NeighborJoiningTree(distMetric, ll)
      m.allJoins.foreach(align(_)) // Go through all sequences.
      // One remaining node:
      val ft1 = m.allEdges(m.allEdges.size - 1)._1
      val ft2 = m.allJoins(m.allJoins.size - 1)
      val jt = new JoinedTaxon(List(ft1, ft2).toArray)
      align(jt)
      logln()
      maMap(jt).toArray
    }
  }
}
| markusgumbel/scalabioalg | core/src/main/scala/net/gumbix/bioinf/string/alignment/Clustal.scala | Scala | apache-2.0 | 3,630 |
/*
* Part of NDLA article-api.
* Copyright (C) 2016 NDLA
*
* See LICENSE
*
*/
package no.ndla.articleapi.model.api
import org.scalatra.swagger.annotations.{ApiModel, ApiModelProperty}
import scala.annotation.meta.field
/** Searchable tags of an article for one language (Swagger-annotated API model). */
@ApiModel(description = "Description of the tags of the article")
case class ArticleTag(@(ApiModelProperty @field)(description = "The searchable tag.") tags: Seq[String],
                      @(ApiModelProperty @field)(
                        description = "ISO 639-1 code that represents the language used in tag") language: String)
| NDLANO/article-api | src/main/scala/no/ndla/articleapi/model/api/ArticleTag.scala | Scala | gpl-3.0 | 564 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.spi.v2_3
// This class should live here, but until we have to touch
// disk, let's have it in the compiler. Convenient.
// Singleton alias exposing the compiler's hardcoded graph-statistics values
// from the SPI package.
case object HardcodedGraphStatistics
  extends org.neo4j.cypher.internal.compiler.v2_3.HardcodedGraphStatisticsValues
| HuangLS/neo4j | community/cypher/cypher/src/main/scala/org/neo4j/cypher/internal/spi/v2_3/HardcodedGraphStatistics.scala | Scala | apache-2.0 | 1,067 |
package scalarules.test.junit
import org.junit.Test
// Non-test helper class in a test source file. The "Helpre" typo is kept:
// renaming would change the compiled class name this target references.
class SomeHelpreForTest
// A single trivially passing test, so the JUnit target does not fail
// for containing no tests (as the class name states).
class SingleTestSoTargetWillNotFailDueToNoTestsTest {
  @Test
  def someTest: Unit =
    println("passing")
}
| bazelbuild/rules_scala | test/src/main/scala/scalarules/test/junit/JunitNoTests.scala | Scala | apache-2.0 | 189 |
package es.weso.rdf.nodes
/** Common interface of RDF literal nodes (typed, plain and language-tagged). */
trait Literal extends RDFNode {
  /** Datatype IRI of this literal. */
  def dataType: IRI
  /** True only for language-tagged literals. */
  def isLangLiteral: Boolean
  /** True if this literal carries a language tag matching `lang`. */
  def hasLang(lang: Lang): Boolean
  override def getLexicalForm: String
}
/** Literal with an explicit datatype IRI, rendered as "lexicalForm"^^datatype. */
case class DatatypeLiteral(lexicalForm: String, dataType: IRI) extends Literal {
  override def isLangLiteral = false
  override def hasLang(lang: Lang) = false
  override def toString: String = {
    "\\"" + lexicalForm + "\\"^^" + dataType
  }
  override def getLexicalForm = lexicalForm
}
// It should be better to inherit from DatatypeLiteral,
// but case-to-case inheritance is prohibited in Scala
/** xsd:integer literal; its string rendering is just the number itself. */
case class IntegerLiteral(int: Integer) extends Literal {
  val dataType = RDFNode.IntegerDatatypeIRI
  val lexicalForm = int.toString
  override def isLangLiteral = false
  override def hasLang(lang: Lang) = false
  override def toString: String = lexicalForm
  override def getLexicalForm = lexicalForm
}
/** xsd:decimal literal; its string rendering is just the number itself. */
case class DecimalLiteral(decimal: BigDecimal) extends Literal {
  val dataType = RDFNode.DecimalDatatypeIRI
  val lexicalForm = decimal.toString
  override def isLangLiteral = false
  override def hasLang(lang: Lang) = false
  override def toString: String = lexicalForm
  override def getLexicalForm = lexicalForm
}
/** xsd:double literal; its string rendering is just the number itself. */
case class DoubleLiteral(double: Double) extends Literal {
  val dataType = RDFNode.DoubleDatatypeIRI
  val lexicalForm = double.toString
  override def isLangLiteral = false
  override def hasLang(lang: Lang) = false
  override def toString: String = lexicalForm
  override def getLexicalForm = lexicalForm
}
/** Plain xsd:string literal, rendered in double quotes. */
case class StringLiteral(lexicalForm: String) extends Literal {
  val dataType = RDFNode.StringDatatypeIRI
  override def isLangLiteral = false
  override def hasLang(lang: Lang) = false
  override def toString: String = {
    // TODO: Check if literal contains extended chars
    "\\"" + lexicalForm + "\\""
  }
  override def getLexicalForm = lexicalForm
}
/** xsd:boolean literal, rendered as the bare token "true" or "false". */
case class BooleanLiteral(bool: Boolean) extends Literal {
  val dataType = RDFNode.BooleanDatatypeIRI
  val lexicalForm = if (bool) "true" else "false"
  override def isLangLiteral = false
  override def hasLang(lang: Lang) = false
  override def toString: String = lexicalForm
  override def getLexicalForm = lexicalForm
}
/** Language-tagged literal (rdf:langString), rendered as "lexicalForm"@lang. */
case class LangLiteral(lexicalForm: String, lang: Lang) extends Literal {
  lazy val dataType = RDFNode.LangStringDatatypeIRI
  def isLangLiteral = true
  // Delegates to Lang.matchLanguage (case-insensitive tag comparison).
  def hasLang(l: Lang) = lang.matchLanguage(l)
  override def toString: String = {
    val lex = "\\"" + lexicalForm + "\\""
    lex + lang
  }
  override def getLexicalForm = lexicalForm
}
/** Language tag wrapper with case-insensitive equality. */
case class Lang(lang: String) {
  // This should be the right regular expression for lang.
  // We don't use this expression because the specification does not also.
  val langtag_ex: String = "(\\\\A[xX]([\\\\x2d]\\\\p{Alnum}{1,8})*\\\\z)" +
    "|(((\\\\A\\\\p{Alpha}{2,8}(?=\\\\x2d|\\\\z)){1}" +
    "(([\\\\x2d]\\\\p{Alpha}{3})(?=\\\\x2d|\\\\z)){0,3}" +
    "([\\\\x2d]\\\\p{Alpha}{4}(?=\\\\x2d|\\\\z))?" +
    "([\\\\x2d](\\\\p{Alpha}{2}|\\\\d{3})(?=\\\\x2d|\\\\z))?" +
    "([\\\\x2d](\\\\d\\\\p{Alnum}{3}|\\\\p{Alnum}{5,8})(?=\\\\x2d|\\\\z))*)" +
    "(([\\\\x2d]([a-wyzA-WYZ](?=\\\\x2d))([\\\\x2d](\\\\p{Alnum}{2,8})+)*))*" +
    "([\\\\x2d][xX]([\\\\x2d]\\\\p{Alnum}{1,8})*)?)\\\\z"
  // TODO. Specification defines other ways to match languages
  /** Case-insensitive comparison of the two raw tags. */
  def matchLanguage(other: Lang): Boolean =
    lang.toLowerCase == other.lang.toLowerCase
  /** Empty tags render as "", non-empty ones as "@tag". */
  override def toString: String =
    if (lang == "") "" else "@" + lang
  // Custom equality so that tags differing only in case are equal
  // (overrides the case-class-generated equals/hashCode pair).
  override def equals(o: Any): Boolean = o match {
    case Lang(otherTag) => otherTag.toLowerCase == lang.toLowerCase
    case _              => false
  }
  // Must stay consistent with the case-insensitive equals above.
  override def hashCode: Int = lang.toLowerCase.hashCode
}
| labra/SRDF | srdf/shared/src/main/scala/es/weso/rdf/nodes/Literal.scala | Scala | mit | 3,836 |
package views.html
package game
import lila.api.Context
import lila.app.templating.Environment._
import lila.app.ui.ScalatagsTemplate._
import controllers.routes
object importGame {
  // Hint shown to anonymous users next to the "request analysis" checkbox:
  // links to signup, rendered only when ctx.isAnon.
  private def analyseHelp(implicit ctx: Context) =
    ctx.isAnon option a(cls := "blue", href := routes.Auth.signup)(trans.youNeedAnAccountToDoThat())
  // Renders the PGN import page: paste area, file upload, and an
  // analysis-request checkbox (disabled for anonymous users).
  def apply(form: play.api.data.Form[_])(implicit ctx: Context) =
    views.html.base.layout(
      title = trans.importGame.txt(),
      moreCss = cssTag("importer"),
      moreJs = jsTag("importer.js"),
      openGraph = lila.app.ui
        .OpenGraph(
          title = "Paste PGN chess game",
          url = s"$netBaseUrl${routes.Importer.importGame.url}",
          description = trans.importGameExplanation.txt()
        )
        .some
    ) {
      main(cls := "importer page-small box box-pad")(
        h1(trans.importGame()),
        p(cls := "explanation")(trans.importGameExplanation()),
        standardFlash(),
        postForm(cls := "form3 import", action := routes.Importer.sendGame)(
          form3.group(form("pgn"), trans.pasteThePgnStringHere())(form3.textarea(_)()),
          // If a PGN was already submitted, re-run preprocessing and show the
          // parse error (if any) inline above the remaining fields.
          form("pgn").value flatMap { pgn =>
            lila.importer
              .ImportData(pgn, none)
              .preprocess(none)
              .fold(
                err =>
                  frag(
                    pre(cls := "error")(err),
                    br,
                    br
                  ).some,
                _ => none
              )
          },
          form3.group(form("pgnFile"), raw("Or upload a PGN file"), klass = "upload") { f =>
            form3.file.pgn(f.name)
          },
          form3.checkbox(
            form("analyse"),
            trans.requestAComputerAnalysis(),
            help = Some(analyseHelp),
            disabled = ctx.isAnon
          ),
          form3.action(form3.submit(trans.importGame(), "".some))
        )
      )
    }
}
| luanlv/lila | app/views/game/importGame.scala | Scala | mit | 1,935 |
package debop4s.core.utils
import java.io._
import java.nio.charset.Charset
import debop4s.core._
import org.slf4j.LoggerFactory
import scala.annotation.tailrec
/**
* debop4s.core.tools.Streams
* @author 배성혁 sunghyouk.bae@gmail.com
* @since 2013. 12. 9. 오후 11:00
*/
object Streams {

  private[this] lazy val log = LoggerFactory.getLogger(getClass)

  /** Default transfer buffer size in bytes. */
  private[this] val BUFFER_SIZE: Int = 4096

  /**
   * Copies all remaining bytes of `inputStream` into `outputStream`.
   * Neither stream is closed by this method.
   *
   * Fix: the transfer buffer is now allocated once and reused. The previous
   * implementation allocated a fresh `bufferSize` array on every recursive
   * call, i.e. one garbage buffer per chunk copied.
   *
   * @param bufferSize size of the reused transfer buffer (default 4096)
   */
  def copy(inputStream: InputStream, outputStream: OutputStream, bufferSize: Int = BUFFER_SIZE): Unit = {
    val buffer = new Array[Byte](bufferSize)
    @tailrec
    def loop(): Unit =
      inputStream.read(buffer, 0, buffer.length) match {
        case -1 => () // end of stream reached
        case n =>
          outputStream.write(buffer, 0, n)
          loop()
      }
    loop()
  }

  /** Wraps `bytes` in an in-memory `InputStream`. */
  def toInputStream(bytes: Array[Byte]): InputStream =
    new ByteArrayInputStream(bytes)

  /** Encodes `str` with `cs` (UTF-8 by default) and exposes it as an `InputStream`. */
  def toInputStream(str: String, cs: Charset = Charsets.UTF_8): InputStream =
    if (Strings.isEmpty(str)) new ByteArrayInputStream(Array.emptyByteArray)
    else toInputStream(str.getBytes(cs))

  /** Drains `is` completely into a new in-memory `OutputStream`; `is` is not closed. */
  def toOutputStream(is: InputStream): OutputStream = {
    val bos = new ByteArrayOutputStream()
    copy(is, bos)
    bos
  }

  /** Copies `bytes` (null-safe) into a new in-memory `OutputStream`. */
  def toOutputStream(bytes: Array[Byte]): OutputStream =
    if (bytes == null || bytes.length == 0) new ByteArrayOutputStream()
    else {
      val os = new ByteArrayOutputStream(bytes.length)
      using(new ByteArrayInputStream(bytes)) { is =>
        copy(is, os)
      }
      os
    }

  /** Encodes `str` with `cs` (UTF-8 by default) into a new in-memory `OutputStream`. */
  def toOutputStream(str: String, cs: Charset = Charsets.UTF_8): OutputStream =
    if (Strings.isEmpty(str)) new ByteArrayOutputStream()
    else toOutputStream(str.getBytes(cs))

  /** Reads `is` (null-safe) to the end and returns its bytes; `is` is not closed. */
  def toByteArray(is: InputStream): Array[Byte] =
    if (is == null) Array.emptyByteArray
    else using(new ByteArrayOutputStream()) { os =>
      copy(is, os)
      os.toByteArray
    }

  /** Decodes the whole stream as UTF-8 text ("" for null input). */
  def toString(is: InputStream): String =
    if (is == null) ""
    else Strings.getUtf8String(toByteArray(is))

  /** Decodes the whole stream as text in charset `cs` ("" for null input). */
  def toString(is: InputStream, cs: Charset): String =
    if (is == null) ""
    else new String(toByteArray(is), cs)
}
| debop/debop4s | debop4s-core/src/main/scala/debop4s/core/utils/Streams.scala | Scala | apache-2.0 | 2,207 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.sources
import java.util
import org.apache.flink.table.api.TableException
import org.apache.flink.table.descriptors.ConnectorDescriptorValidator.{CONNECTOR_PROPERTY_VERSION, CONNECTOR_TYPE}
import org.apache.flink.table.descriptors.OldCsvValidator._
import org.apache.flink.table.descriptors.FileSystemValidator.{CONNECTOR_PATH, CONNECTOR_TYPE_VALUE}
import org.apache.flink.table.descriptors.FormatDescriptorValidator.{FORMAT_PROPERTY_VERSION, FORMAT_TYPE}
import org.apache.flink.table.descriptors.SchemaValidator.SCHEMA
import org.apache.flink.table.descriptors._
import org.apache.flink.table.factories.TableFactory
import org.apache.flink.table.util.JavaScalaConversionUtil.toScala
/**
* Factory base for creating configured instances of [[CsvTableSource]].
*/
abstract class CsvTableSourceFactoryBase extends TableFactory {
  // Factory is matched for: filesystem connector + (old) CSV format,
  // both at property version 1.
  override def requiredContext(): util.Map[String, String] = {
    val context = new util.HashMap[String, String]()
    context.put(CONNECTOR_TYPE, CONNECTOR_TYPE_VALUE)
    context.put(FORMAT_TYPE, FORMAT_TYPE_VALUE)
    context.put(CONNECTOR_PROPERTY_VERSION, "1")
    context.put(FORMAT_PROPERTY_VERSION, "1")
    context
  }
  // Whitelist of descriptor properties this factory understands.
  override def supportedProperties(): util.List[String] = {
    val properties = new util.ArrayList[String]()
    // connector
    properties.add(CONNECTOR_PATH)
    // format
    properties.add(s"$FORMAT_FIELDS.#.${DescriptorProperties.TABLE_SCHEMA_TYPE}")
    properties.add(s"$FORMAT_FIELDS.#.${DescriptorProperties.TABLE_SCHEMA_NAME}")
    properties.add(FORMAT_FIELD_DELIMITER)
    properties.add(FORMAT_LINE_DELIMITER)
    properties.add(FORMAT_QUOTE_CHARACTER)
    properties.add(FORMAT_COMMENT_PREFIX)
    properties.add(FORMAT_IGNORE_FIRST_LINE)
    properties.add(FORMAT_IGNORE_PARSE_ERRORS)
    // NOTE(review): CONNECTOR_PATH is already added above — duplicate entry.
    properties.add(CONNECTOR_PATH)
    // schema
    properties.add(s"$SCHEMA.#.${DescriptorProperties.TABLE_SCHEMA_TYPE}")
    properties.add(s"$SCHEMA.#.${DescriptorProperties.TABLE_SCHEMA_NAME}")
    properties
  }
  // Validates the descriptor properties and translates them into a configured
  // CsvTableSource via its builder. Optional properties are applied only when present.
  protected def createTableSource(
      isStreaming: Boolean,
      properties: util.Map[String, String])
    : CsvTableSource = {
    val params = new DescriptorProperties()
    params.putProperties(properties)
    // validate
    new FileSystemValidator().validate(params)
    new OldCsvValidator().validate(params)
    new SchemaValidator(
      isStreaming,
      supportsSourceTimestamps = false,
      supportsSourceWatermarks = false).validate(params)
    // build
    val csvTableSourceBuilder = new CsvTableSource.Builder
    val formatSchema = params.getTableSchema(FORMAT_FIELDS)
    val tableSchema = params.getTableSchema(SCHEMA)
    // the CsvTableSource needs some rework first
    // for now the schema must be equal to the encoding
    if (!formatSchema.equals(tableSchema)) {
      throw new TableException(
        "Encodings that differ from the schema are not supported yet for CsvTableSources.")
    }
    toScala(params.getOptionalString(CONNECTOR_PATH))
      .foreach(csvTableSourceBuilder.path)
    toScala(params.getOptionalString(FORMAT_FIELD_DELIMITER))
      .foreach(csvTableSourceBuilder.fieldDelimiter)
    toScala(params.getOptionalString(FORMAT_LINE_DELIMITER))
      .foreach(csvTableSourceBuilder.lineDelimiter)
    formatSchema.getFieldNames.zip(formatSchema.getFieldTypes).foreach { case (name, tpe) =>
      csvTableSourceBuilder.field(name, tpe)
    }
    toScala(params.getOptionalCharacter(FORMAT_QUOTE_CHARACTER))
      .foreach(csvTableSourceBuilder.quoteCharacter)
    toScala(params.getOptionalString(FORMAT_COMMENT_PREFIX))
      .foreach(csvTableSourceBuilder.commentPrefix)
    toScala(params.getOptionalBoolean(FORMAT_IGNORE_FIRST_LINE)).foreach { flag =>
      if (flag) {
        csvTableSourceBuilder.ignoreFirstLine()
      }
    }
    toScala(params.getOptionalBoolean(FORMAT_IGNORE_PARSE_ERRORS)).foreach { flag =>
      if (flag) {
        csvTableSourceBuilder.ignoreParseErrors()
      }
    }
    csvTableSourceBuilder.build()
  }
}
| ueshin/apache-flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/sources/CsvTableSourceFactoryBase.scala | Scala | apache-2.0 | 4,830 |
package com.outr.arango.api.model
import io.circe.Json
/**
 * HTTP 200 response model for the general-graph "modify edge" API example.
 * Presumably `new`/`old` carry the post-/pre-update edge representations when
 * requested — confirm against the ArangoDB HTTP API documentation.
 */
case class GeneralGraphEdgeModifyHttpExamplesRc200(error: Boolean,
                                                   code: Option[Int] = None,
                                                   edge: Option[EdgeRepresentation] = None,
                                                   `new`: Option[EdgeRepresentation] = None,
                                                   old: Option[EdgeRepresentation] = None) | outr/arangodb-scala | api/src/main/scala/com/outr/arango/api/model/GeneralGraphEdgeModifyHttpExamplesRc200.scala | Scala | mit | 477 |
package src.main.scala.utils.conversion
object ConvertTimeToSeconds {

  /**
   * Converts a lax "H:m:s" timestamp into a number of seconds.
   *
   * Each component is a plain integer and may exceed the usual clock ranges:
   * GTFS arrival times such as "27:01:35" (past midnight, i.e. on the next
   * service day) are valid input.
   *
   * @param in_s string in the lax "H:m:s" format
   * @return the total number of seconds
   */
  def convertTimeStrToSeconds(in_s: String): Int =
    in_s.split(":") match {
      case Array(hours, minutes, seconds) =>
        hours.toInt * 3600 + minutes.toInt * 60 + seconds.toInt
    }

  /**
   * Alias for [[convertTimeStrToSeconds]] so the object can be applied directly.
   *
   * @param in_s string in the lax "H:m:s" format
   * @return the total number of seconds
   */
  def apply(in_s: String): Int = convertTimeStrToSeconds(in_s)

} // object ConvertTimeToSeconds
| je-nunez/urban_planning_on_gtfs_traffic_congestion | src/main/scala/utils/conversion/ConvertTimeToSeconds.scala | Scala | gpl-2.0 | 1,173 |
package sparkutil
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
/**
 * Spark utility helpers.
 * Created by Administrator on 2016/12/15.
 */
object Util {
  /**
   * Shared Spark configuration, built once on first access.
   * The system properties only need to be set before the SparkContext is
   * created; setting them (and allocating a fresh SparkConf) on every call,
   * as the original `def` did, was wasteful and order-sensitive.
   */
  lazy val conf: SparkConf = {
    System.setProperty("hadoop.home.dir", "D:/Program Files/hadoop_winutils")
    System.setProperty("spark.master", "local")
    val sparkConf: SparkConf = new SparkConf()
    sparkConf.setAppName(Util.getClass.getSimpleName)
    sparkConf
  }

  /**
   * Shared SparkContext, created exactly once.
   * Spark permits only one active SparkContext per JVM: the original
   * `def sc` constructed a new context on every access, so the second call
   * (e.g. calling loadFile twice) would fail at runtime. A lazy val keeps
   * the call-site syntax (`Util.sc`) while fixing that.
   */
  lazy val sc: SparkContext = new SparkContext(conf)

  /**
   * Load a file or directory (local or HDFS) as an RDD of text lines.
   *
   * @param path file or directory path
   * @return RDD with one element per line of input
   */
  def loadFile(path: String): RDD[String] = sc.textFile(path)

  /**
   * Load files as whole documents rather than line by line.
   *
   * @param path file or directory path
   * @return RDD of (file name, file content) pairs
   */
  def loadWholeFile(path: String): RDD[(String, String)] = sc.wholeTextFiles(path)
}
| monsonlee/BigData | Project6_SparkBasic5个小问题/sparkutil/Util.scala | Scala | gpl-3.0 | 1,064 |
package scala.meta
import junit.framework.Test
import org.jetbrains.plugins.scala.DependencyManagerBase._
import org.jetbrains.plugins.scala.{LatestScalaVersions, ScalaVersion}
import org.jetbrains.plugins.scala.base.ScalaSdkOwner
import org.jetbrains.plugins.scala.base.libraryLoaders.{IvyManagedLoader, LibraryLoader, ScalaSDKLoader}
import scala.meta.intellij.MetaExpansionsManager.META_MINOR_VERSION
// Test fixture mix-in for scalameta-related tests: pins the Scala SDK version
// and provides the library loaders (Scala SDK + the scalameta artifact) that
// the test project needs. Self-typed to Test so it can only be mixed into tests.
trait ScalaMetaTestBase extends ScalaSdkOwner { this: Test =>
  // Only run these tests against Scala 2.12.3 (the version scalameta expansion supports here).
  override protected def supportedIn(version: ScalaVersion): Boolean =
    version == LatestScalaVersions.Scala_2_12.withMinor(3)
  // scala-reflect must be on the compiler classpath for meta expansion;
  // protobuf-java is excluded to avoid a classpath conflict with the IDE's own copy.
  override def librariesLoaders: Seq[LibraryLoader] = Seq(
    ScalaSDKLoader(includeScalaReflectIntoCompilerClasspath = true),
    IvyManagedLoader(("org.scalameta" %% "scalameta" % META_MINOR_VERSION).transitive().exclude("com.google.protobuf:protobuf-java"))
  )
} | JetBrains/intellij-scala | scala/scala-impl/test/scala/meta/ScalaMetaTestBase.scala | Scala | apache-2.0 | 869 |
package actors
import akka.actor._
import play.api.libs.json._
import play.api.mvc.RequestHeader
import json._
import models.PLM
import models.User
import log.PLMLogger
import spies._
import plm.core.model.lesson.Exercise
import plm.core.model.lesson.Lesson
import plm.core.model.lesson.Lecture
import plm.core.lang.ProgrammingLanguage
import plm.universe.Entity
import plm.universe.World
import plm.universe.IWorldView
import plm.universe.GridWorld
import plm.universe.GridWorldCell
import plm.universe.bugglequest.BuggleWorld
import plm.universe.bugglequest.AbstractBuggle
import plm.universe.bugglequest.BuggleWorldCell
import play.api.Play.current
import play.api.i18n.Lang
import play.api.Logger
import java.util.UUID
import models.daos.UserDAOMongoImpl
/**
 * Companion factory building the Props used to create the WebSocket-backing
 * actor, for anonymous (git-ID-only) and authenticated sessions.
 */
object PLMActor {
  // Anonymous session: identity is just a git ID; language settings are optional.
  def props(actorUUID: String, gitID: String, newUser: Boolean, preferredLang: Option[Lang], lastProgLang: Option[String])(out: ActorRef) = Props(new PLMActor(actorUUID, gitID, newUser, preferredLang, lastProgLang, out))
  // Authenticated session: settings are taken from the stored User.
  def propsWithUser(actorUUID: String, user: User)(out: ActorRef) = Props(new PLMActor(actorUUID, user, out))
}
/**
 * WebSocket-facing actor: decodes JSON commands arriving from the browser,
 * drives the PLM game engine accordingly, and pushes JSON replies back
 * through `out`. Holds per-connection mutable session state (current user,
 * git identity, preferred language, registered execution spies).
 */
class PLMActor(actorUUID: String, gitID: String, newUser: Boolean, preferredLang: Option[Lang], lastProgLang: Option[String], out: ActorRef) extends Actor {
  var availableLangs: Seq[Lang] = Lang.availables
  var plmLogger: PLMLogger = new PLMLogger(this)
  // Spies are wired to the PLM game in initSpies; null until then.
  var resultSpy: ExecutionResultListener = null
  var progLangSpy: ProgLangListener = null
  var humanLangSpy: HumanLangListener = null
  var registeredSpies: List[ExecutionSpy] = null
  // null while the session is anonymous.
  var currentUser: User = null
  // Falls back to English when the client supplied no preference.
  var currentPreferredLang: Lang = preferredLang.getOrElse(Lang("en"))
  var currentGitID: String = null
  setCurrentGitID(gitID, newUser)
  var plm: PLM = new PLM(currentGitID, plmLogger, currentPreferredLang.toLocale, lastProgLang)
  initSpies
  registerActor
  // Auxiliary constructor for authenticated sessions: delegates to the primary
  // constructor, then records the user (which also re-sends the git ID).
  def this(actorUUID: String, user: User, out: ActorRef) {
    this(actorUUID, user.gitID.toString, false, user.preferredLang, user.lastProgLang, out)
    setCurrentUser(user)
  }
  // Dispatches on the "cmd" field of each incoming JSON message.
  def receive = {
    case msg: JsValue =>
      Logger.debug("Received a message")
      Logger.debug(msg.toString())
      var cmd: Option[String] = (msg \\ "cmd").asOpt[String]
      cmd.getOrElse(None) match {
        case "signIn" | "signUp" =>
          // NOTE(review): .get assumes the client always sends a valid "user" payload.
          setCurrentUser((msg \\ "user").asOpt[User].get)
          registeredSpies.foreach { spy => spy.unregister }
          plm.setUserUUID(currentGitID)
          currentUser.preferredLang.getOrElse(None) match {
            case newLang: Lang =>
              currentPreferredLang = newLang
              plm.setLang(currentPreferredLang)
            case _ =>
              savePreferredLang()
          }
          plm.setProgrammingLanguage(currentUser.lastProgLang.getOrElse("Java"))
        case "signOut" =>
          clearCurrentUser()
          registeredSpies.foreach { spy => spy.unregister }
          plm.setUserUUID(currentGitID)
        case "getLessons" =>
          sendMessage("lessons", Json.obj(
            "lessons" -> LessonToJson.lessonsWrite(plm.lessons)
          ))
        case "setProgrammingLanguage" =>
          var optProgrammingLanguage: Option[String] = (msg \\ "args" \\ "programmingLanguage").asOpt[String]
          (optProgrammingLanguage.getOrElse(None)) match {
            case programmingLanguage: String =>
              plm.setProgrammingLanguage(programmingLanguage)
              // Persisted only when a user is signed in (see saveLastProgLang).
              saveLastProgLang(programmingLanguage)
            case _ =>
              Logger.debug("getExercise: non-correct JSON")
          }
        case "setLang" =>
          var optLang: Option[String] = (msg \\ "args" \\ "lang").asOpt[String]
          (optLang.getOrElse(None)) match {
            case lang: String =>
              currentPreferredLang = Lang(lang)
              plm.setLang(currentPreferredLang)
              savePreferredLang()
            case _ =>
              Logger.debug("getExercise: non-correct JSON")
          }
        case "getExercise" =>
          var optLessonID: Option[String] = (msg \\ "args" \\ "lessonID").asOpt[String]
          var optExerciseID: Option[String] = (msg \\ "args" \\ "exerciseID").asOpt[String]
          var lecture: Lecture = null;
          var executionSpy: ExecutionSpy = new ExecutionSpy(this, "operations")
          var demoExecutionSpy: ExecutionSpy = new ExecutionSpy(this, "demoOperations")
          // With both IDs: switch to that exercise; with only a lesson ID: switch lesson.
          (optLessonID.getOrElse(None), optExerciseID.getOrElse(None)) match {
            case (lessonID:String, exerciseID: String) =>
              lecture = plm.switchExercise(lessonID, exerciseID, executionSpy, demoExecutionSpy)
            case (lessonID:String, _) =>
              lecture = plm.switchLesson(lessonID, executionSpy, demoExecutionSpy)
            case (_, _) =>
              Logger.debug("getExercise: non-correct JSON")
          }
          if(lecture != null) {
            sendMessage("exercise", Json.obj(
              "exercise" -> LectureToJson.lectureWrites(lecture, plm.programmingLanguage, plm.getStudentCode, plm.getInitialWorlds, plm.getSelectedWorldID)
            ))
          }
        case "runExercise" =>
          var optLessonID: Option[String] = (msg \\ "args" \\ "lessonID").asOpt[String]
          var optExerciseID: Option[String] = (msg \\ "args" \\ "exerciseID").asOpt[String]
          var optCode: Option[String] = (msg \\ "args" \\ "code").asOpt[String]
          (optLessonID.getOrElse(None), optExerciseID.getOrElse(None), optCode.getOrElse(None)) match {
            case (lessonID:String, exerciseID: String, code: String) =>
              plm.runExercise(lessonID, exerciseID, code)
            case (_, _, _) =>
              Logger.debug("runExercise: non-correctJSON")
          }
        case "runDemo" =>
          var optLessonID: Option[String] = (msg \\ "args" \\ "lessonID").asOpt[String]
          var optExerciseID: Option[String] = (msg \\ "args" \\ "exerciseID").asOpt[String]
          (optLessonID.getOrElse(None), optExerciseID.getOrElse(None)) match {
            case (lessonID:String, exerciseID: String) =>
              plm.runDemo(lessonID, exerciseID)
            case (_, _) =>
              Logger.debug("runDemo: non-correctJSON")
          }
        case "stopExecution" =>
          plm.stopExecution
        case "revertExercise" =>
          var lecture = plm.revertExercise
          sendMessage("exercise", Json.obj(
            "exercise" -> LectureToJson.lectureWrites(lecture, plm.programmingLanguage, plm.getStudentCode, plm.getInitialWorlds, plm.getSelectedWorldID)
          ))
        case "getExercises" =>
          if(plm.currentExercise != null) {
            var lectures = plm.game.getCurrentLesson.getRootLectures.toArray(Array[Lecture]())
            sendMessage("exercises", Json.obj(
              "exercises" -> ExerciseToJson.exercisesWrite(lectures)
            ))
          }
        case "getLangs" =>
          sendMessage("langs", Json.obj(
            "selected" -> LangToJson.langWrite(currentPreferredLang),
            "availables" -> LangToJson.langsWrite(availableLangs)
          ))
        case _ =>
          Logger.debug("cmd: non-correct JSON")
      }
  }
  // Wraps a command name and its arguments in the wire envelope {cmd, args}.
  def createMessage(cmdName: String, mapArgs: JsValue): JsValue = {
    return Json.obj(
      "cmd" -> cmdName,
      "args" -> mapArgs
    )
  }
  // Pushes a command to the client through the WebSocket.
  def sendMessage(cmdName: String, mapArgs: JsValue) {
    out ! createMessage(cmdName, mapArgs)
  }
  // Records the signed-in user, notifies the client, and adopts the user's git ID.
  def setCurrentUser(newUser: User) {
    currentUser = newUser
    sendMessage("user", Json.obj(
        "user" -> currentUser
      )
    )
    setCurrentGitID(currentUser.gitID.toString, false)
  }
  // Drops the signed-in user and switches to a fresh anonymous git identity.
  def clearCurrentUser() {
    currentUser = null
    sendMessage("user", Json.obj())
    currentGitID = UUID.randomUUID.toString
    setCurrentGitID(currentGitID, true)
  }
  // Updates the git ID; `toSend` controls whether the client is told about it.
  def setCurrentGitID(newGitID: String, toSend: Boolean) {
    currentGitID = newGitID;
    if(toSend) {
      sendMessage("gitID", Json.obj(
          "gitID" -> currentGitID
        )
      )
    }
  }
  // Attaches the result / programming-language / human-language listeners to the game.
  def initSpies() {
    resultSpy = new ExecutionResultListener(this, plm.game)
    plm.game.addGameStateListener(resultSpy)
    progLangSpy = new ProgLangListener(this, plm)
    plm.game.addProgLangListener(progLangSpy, true)
    humanLangSpy = new HumanLangListener(this, plm)
    plm.game.addHumanLangListener(humanLangSpy, true)
    registeredSpies = List()
  }
  // Registers this actor in the global map and tells the client its UUID.
  def registerActor() {
    ActorsMap.add(actorUUID, self)
    sendMessage("actorUUID", Json.obj(
        "actorUUID" -> actorUUID
      )
    )
  }
  // Tracks a spy so it can be unregistered on sign-in/out and on shutdown.
  def registerSpy(spy: ExecutionSpy) {
    registeredSpies = registeredSpies ::: List(spy)
  }
  // Persists the last used programming language, but only for signed-in users.
  def saveLastProgLang(programmingLanguage: String) {
    if(currentUser != null) {
      currentUser = currentUser.copy(
          lastProgLang = Some(programmingLanguage)
      )
      UserDAOMongoImpl.save(currentUser)
    }
  }
  // Persists the preferred human language, but only for signed-in users.
  def savePreferredLang() {
    if(currentUser != null) {
      currentUser = currentUser.copy(
          preferredLang = Some(currentPreferredLang)
      )
      UserDAOMongoImpl.save(currentUser)
    }
  }
  // WebSocket closed: deregister the actor and detach all listeners from the game.
  override def postStop() = {
    Logger.debug("postStop: websocket closed - removing the spies")
    ActorsMap.remove(actorUUID)
    plm.game.removeGameStateListener(resultSpy)
    plm.game.removeProgLangListener(progLangSpy)
    registeredSpies.foreach { spy => spy.unregister }
  }
} | BaptisteMounier/webPLM | app/actors/PLMActor.scala | Scala | agpl-3.0 | 9,385 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive
import java.net.URL
import org.apache.spark.SparkFunSuite
/**
* Verify that some classes load and that others are not found on the classpath.
*
* This is used to detect classpath and shading conflicts.
*/
class ClasspathDependenciesSuite extends SparkFunSuite {
  private val classloader = this.getClass.getClassLoader
  // Asserts the class is present on the classpath (both as a resource and loadable).
  private def assertLoads(classname: String): Unit = {
    val resourceURL: URL = Option(findResource(classname)).getOrElse {
      fail(s"Class $classname not found as ${resourceName(classname)}")
    }
    logInfo(s"Class $classname at $resourceURL")
    classloader.loadClass(classname)
  }
  // Locates the .class resource for the given class name; null when absent.
  private def findResource(classname: String): URL = {
    val resource = resourceName(classname)
    classloader.getResource(resource)
  }
  // Maps a fully-qualified class name to its classpath resource path.
  private def resourceName(classname: String): String = {
    classname.replace(".", "/") + ".class"
  }
  // Asserts the class is absent: neither visible as a resource nor loadable.
  private def assertClassNotFound(classname: String): Unit = {
    Option(findResource(classname)).foreach { resourceURL =>
      fail(s"Class $classname found at $resourceURL")
    }
    intercept[ClassNotFoundException] {
      classloader.loadClass(classname)
    }
  }
  // Hive 2.3+ uses unshaded protobuf; older Hive relocates it under org.apache.hive.
  test("shaded Protobuf") {
    if (HiveUtils.isHive23) {
      assertLoads("com.google.protobuf.ServiceException")
    } else {
      assertLoads("org.apache.hive.com.google.protobuf.ServiceException")
    }
  }
  // Same shading story for Kryo.
  test("shaded Kryo") {
    if (HiveUtils.isHive23) {
      assertLoads("com.esotericsoftware.kryo.Kryo")
    } else {
      assertLoads("org.apache.hive.com.esotericsoftware.kryo.Kryo")
    }
  }
  test("hive-common") {
    assertLoads("org.apache.hadoop.hive.conf.HiveConf")
  }
  test("hive-exec") {
    assertLoads("org.apache.hadoop.hive.ql.CommandNeedRetryException")
  }
  private val STD_INSTANTIATOR = "org.objenesis.strategy.StdInstantiatorStrategy"
  // These shaded copies must NOT leak onto the classpath.
  test("Forbidden Dependencies") {
    assertClassNotFound("com.esotericsoftware.shaded." + STD_INSTANTIATOR)
    assertClassNotFound("org.apache.hive.com.esotericsoftware.shaded." + STD_INSTANTIATOR)
  }
  // Parquet classes moved from `parquet.*` to `org.apache.parquet.*` with Hive 2.3.
  test("parquet-hadoop-bundle") {
    if (HiveUtils.isHive23) {
      assertLoads("org.apache.parquet.hadoop.ParquetOutputFormat")
      assertLoads("org.apache.parquet.hadoop.ParquetInputFormat")
    } else {
      assertLoads("parquet.hadoop.ParquetOutputFormat")
      assertLoads("parquet.hadoop.ParquetInputFormat")
    }
  }
}
| pgandhi999/spark | sql/hive/src/test/scala/org/apache/spark/sql/hive/ClasspathDependenciesSuite.scala | Scala | apache-2.0 | 3,198 |
package katas.scala.doors
import org.junit.Test
import org.scalatest.Matchers
class Doors13 extends Matchers {
  /**
   * "100 doors" kata with 10 doors: a walker with step size s toggles every
   * s-th door, so door n is toggled once per divisor of n and ends up open
   * exactly when its divisor count is odd (i.e. n is a perfect square).
   */
  @Test def `walking doors`() {
    val amount = 10
    val doors = Range.inclusive(1, amount).map { door =>
      // Count the walkers (step sizes 1..amount) that touch this door;
      // the door is open iff it was toggled an odd number of times.
      Range.inclusive(1, amount).count(step => door % step == 0) % 2 == 1
    }
    doors should equal(Seq(true, false, false, true, false, false, false, false, true, false))
  }
} | dkandalov/katas | scala/src/katas/scala/doors/Doors13.scala | Scala | unlicense | 470 |
/* Copyright 2009-2021 EPFL, Lausanne */
package stainless
package extraction
package methods
/**
 * Tree definitions for the `methods` extraction phase: adds object-oriented
 * method nodes (`This`, `Super`, `MethodInvocation`), the flags that tag
 * functions as methods/accessors/fields, and symbol-table well-formedness
 * checks for method and type-member overrides.
 */
trait Trees extends throwing.Trees { self =>
  // When the unapply function is itself a method (with a single receiver),
  // scrutinee extraction becomes a method call on that receiver.
  override protected def unapplyScrut(scrut: Expr, up: UnapplyPattern)(using s: Symbols): Expr =
    if (s.lookupFunction(up.id).exists(_.flags.exists { case IsMethodOf(_) => true case _ => false }) && up.recs.size == 1) {
      MethodInvocation(up.recs.head, up.id, up.tps, Seq(scrut))
    } else {
      super.unapplyScrut(scrut, up)
    }
  // Same idea for the accessors of an unapplied value.
  override protected def unapplyAccessor(unapplied: Expr, id: Identifier, up: UnapplyPattern)(using s: Symbols): Expr =
    if (s.lookupFunction(id).exists(_.flags.exists { case IsMethodOf(_) => true case _ => false })) {
      MethodInvocation(unapplied, id, Seq(), Seq())
    } else {
      super.unapplyAccessor(unapplied, id, up)
    }
  /** $encodingof `this` */
  case class This(ct: ClassType) extends Expr with Terminal {
    def getType(using Symbols): Type = ct.getType
  }
  /** $encodingof `super` */
  case class Super(ct: ClassType) extends Expr with Terminal {
    def getType(using Symbols): Type = ct.getType
  }
  /** $encodingof `receiver.id[tps](args)` */
  case class MethodInvocation(receiver: Expr, id: Identifier, tps: Seq[Type], args: Seq[Expr]) extends Expr with CachingTyped {
    // Typing: resolve the method in the class (or an ancestor) that declares it,
    // instantiate parameter/return types under both class and method type
    // substitutions, and substitute `this` with the receiver's class type.
    protected def computeType(using s: Symbols): Type = widenTypeParameter(receiver) match {
      case ct: ClassType =>
        val optTfd = s.lookupFunction(id)
          .filter(fd => tps.size == fd.tparams.size && args.size == fd.params.size)
          .map(_.typed(tps))
        val optTcd = s.lookupClass(ct.id)
          .filter(cd => ct.tps.size == cd.tparams.size)
          .map(_.typed(ct.tps))
        (optTfd zip optTcd).headOption.flatMap { case (tfd, ctcd) =>
          tfd.fd.flags.collectFirst { case IsMethodOf(cid) => cid }
            .flatMap(cid => (ctcd +: ctcd.ancestors).find(_.id == cid))
            .map { tcd =>
              val tpSubst = tcd.tpSubst ++ tfd.tpSubst
              val it = new InstantiateThis(ctcd.toType)
              val instParams = tfd.fd.params.map { vd =>
                it.transform(typeOps.instantiateType(vd.getType, tpSubst))
              }
              val fdTpe = it.transform(typeOps.instantiateType(tfd.fd.getType, tpSubst))
              checkParamTypes(args, instParams, fdTpe)
            }
        }.getOrElse(Untyped)
      case _ => Untyped
    }
  }
  // Rewrites type selections on `this` so they refer to the given concrete class type.
  private[this] class InstantiateThis(override val s: self.type,
                                      override val t: self.type,
                                      thisType: ClassType) extends oo.ConcreteTreeTransformer(s, t) {
    def this(thisType: ClassType) = this(self, self, thisType)
    override def transform(tpe: Type): Type = tpe match {
      case TypeSelect(Some(This(_)), sel) =>
        super.transform(TypeSelect(Some(This(thisType)), sel))
      case _ => super.transform(tpe)
    }
  }
  type Symbols >: Null <: MethodsAbstractSymbols
  trait MethodsAbstractSymbols
    extends ImperativeAbstractSymbols
       with methods.DependencyGraph
       with methods.TypeOps { self0: Symbols =>
    // The only value that can be assigned to `trees`, but that has to be
    // done in a concrete class explicitly overriding `trees`
    // Otherwise, we can run into initialization issue.
    protected val trees: self.type
    // More or less the same here
    protected val symbols: this.type
    import symbols.given
    // Instantiate `this` inside a method's body before running the base checks.
    override protected def ensureWellFormedFunction(fd: FunDef): Unit = {
      val res = fd.getClassDef.fold(fd) { cd =>
        val it = new InstantiateThis(cd.typed.toType)
        it.transform(fd)
      }
      super.ensureWellFormedFunction(res)
    }
    // Class-level well-formedness: field initialization order, abstract-member
    // coverage in concrete classes, and override compatibility checks.
    override protected def ensureWellFormedClass(cd: ClassDef) = {
      super.ensureWellFormedClass(cd)
      val methods = cd.methods.map(getFunction)
      // `fds`: the (non-lazy) val fields, which have a definition position.
      val fds = methods.filter(fd => !fd.getFieldDefPosition.isEmpty && fd.isField(isLazy = false))
      val fids = fds.map(_.id).toSet
      for (fd <- fds) {
        val fid = fd.id
        val position = fd.getFieldDefPosition.get
        exprOps.fold[Unit] {
          // allow access to fields defined previously
          case (MethodInvocation(This(_), xid, _, Seq()), _)
            if fids(xid) && lookupFunction(xid).forall {
              fd => (fd.isAccessor || fd.isField) && fd.getFieldDefPosition.forall(_ < position)
            } =>
          // allways allow access to constructor params
          case (ClassSelector(This(_), xid), _) =>
          // check that methods and functions don't access fields defined previously
          case (MethodInvocation(rec, xid, _, _), subs) =>
            val _ = subs.toList // force visit to children
            for (tid <- ((transitiveCallees(xid) + xid) & fids).find(tid => getFunction(tid).getFieldDefPosition.exists(_ >= position)))
              throw NotWellFormedException(fd,
                Some(s"field `$fid` can only refer to previous fields, not to `$tid`")
              )
          case (FunctionInvocation(xid, _, _), subs) =>
            val _ = subs.toList // force visit to children
            for (tid <- (transitiveCallees(xid) & fids).find(tid => getFunction(tid).getFieldDefPosition.exists(_ >= position)))
              throw NotWellFormedException(fd,
                Some(s"field `$fid` can only refer to previous fields, not to `$tid`")
              )
          case (_, subs) =>
            val _ = subs.toList // force visit to children
            ()
        }(fd.fullBody)
      }
      // Check that abstract methods are overridden by a method, a lazy val, or a constructor parameter (but not by a val)
      if (!cd.isAbstract) {
        val remainingAbstract = (cd +: cd.ancestors.map(_.cd)).reverse.foldLeft(Set.empty[Symbol]) {
          case (abstractSymbols, acd) =>
            val concreteSymbols = acd.methods
              .map(id => id.symbol -> getFunction(id))
              .filter { case (_, fd) => !fd.isAbstract }
              // fd.getFieldDefPosition is empty for lazy val's, non-empty for val's
              .filter { case (_, fd) =>
                (!fd.isAccessor || fd.isAccessorOfParam(acd)) && fd.getFieldDefPosition.isEmpty
              }
              .map(_._1).toSet
            val newAbstractSymbols = acd.methods.filter(id => getFunction(id).isAbstract).map(_.symbol).toSet
            abstractSymbols -- concreteSymbols ++ newAbstractSymbols
        }
        if (remainingAbstract.nonEmpty) {
          throw NotWellFormedException(cd,
            Some("abstract methods " + remainingAbstract.map(_.name).toList.sorted.mkString(", ") + " were not overridden by a method, a lazy val, or a constructor parameter"))
        }
      }
      val ancestors = cd.ancestors(using this).map(cd => cd.id -> cd).toMap
      // Check that type members overrides are well-typed
      cd.typeMembers.foreach { id =>
        firstSuperTypeMember(id).foreach { sid =>
          val td = getTypeDef(id)
          val std = getTypeDef(sid)
          val cid = std.flags
            .collectFirst { case IsTypeMemberOf(cid) => cid }
            .getOrElse(throw NotWellFormedException(std, Some(s"must be a type member")))
          if (!(ancestors contains cid))
            throw NotWellFormedException(std, Some(s"first super is not a method of an ancestor"))
          val acd = ancestors(cid)
          if (td.isAbstract && !std.isAbstract)
            throw NotWellFormedException(td, Some(s"cannot override concrete type with abstract type."))
          if (std.isFinal)
            throw NotWellFormedException(td, Some(s"cannot override final type: $std"))
          if (td.tparams.size != std.tparams.size)
            throw NotWellFormedException(td, Some(s"type parameters length are not equal"))
        }
      }
      // Check that method overrides are well-typed
      cd.methods.foreach { id =>
        firstSuperMethod(id).foreach { sid =>
          val sfd = getFunction(sid)
          val cid = sfd.flags
            .collectFirst { case IsMethodOf(cid) => cid }
            .getOrElse(throw NotWellFormedException(sfd, Some(s"must be a method")))
          if (!(ancestors contains cid))
            throw NotWellFormedException(sfd, Some(s"first super is not a method of an ancestor"))
          val acd = ancestors(cid)
          val fd = getFunction(id)
          if (fd.isAbstract && !sfd.isAbstract)
            throw NotWellFormedException(fd, Some(s"cannot override concrete function with abstract function."))
          if (sfd.isFinal)
            throw NotWellFormedException(fd, Some(s"cannot override final function:\\n$sfd"))
          if (fd.tparams.size != sfd.tparams.size)
            throw NotWellFormedException(fd, Some(s"type parameters length are not equal"))
          val it = new InstantiateThis(cd.typed.toType)
          val tpSubst = (fd.typeArgs zip sfd.typeArgs).toMap
          // Type-parameter bounds must be compatible (contravariant lower, covariant upper).
          (fd.typeArgs zip sfd.typeArgs).foreach { case (tp, stp) =>
            val TypeBounds(lo, hi, _) = tp.bounds
            val TypeBounds(slo, shi, _) = stp.bounds
            if (!isSubtypeOf(
              it.transform(typeOps.instantiateType(lo, tpSubst)),
              it.transform(typeOps.instantiateType(slo, acd.tpSubst)))) {
              throw NotWellFormedException(fd, Some(s"TODO"))
            }
            if (!isSubtypeOf(
              it.transform(typeOps.instantiateType(shi, acd.tpSubst)),
              it.transform(typeOps.instantiateType(hi, tpSubst)))) {
              throw NotWellFormedException(fd, Some("TODO"))
            }
          }
          if (fd.params.size != sfd.params.size)
            throw NotWellFormedException(fd, Some("Method override does not have the same number of parameters as parent"))
          // Parameters must be contravariant w.r.t. the overridden method.
          (fd.params zip sfd.params).foreach { case (vd, svd) =>
            val aTpe = it.transform(typeOps.instantiateType(svd.getType, acd.tpSubst))
            val tpe = it.transform(typeOps.instantiateType(vd.getType, tpSubst))
            if (!isSubtypeOf(aTpe, tpe))
              throw NotWellFormedException(fd, Some(s"Parameter ${vd.id} of type $tpe is not a subtype of ancestor $aTpe"))
          }
          // Return type must be covariant w.r.t. the overridden method.
          val t1 = it.transform(typeOps.instantiateType(fd.getType, tpSubst))
          val t2 = it.transform(typeOps.instantiateType(sfd.getType, acd.tpSubst))
          if (!isSubtypeOf(t1.getType, t2.getType))
            throw NotWellFormedException(fd, Some(s"return type ${t1} is not a subtype of ${t2}"))
        }
      }
    }
  }
  // Flags attached to functions/classes by this phase.
  case class IsAccessor(id: Option[Identifier]) extends Flag("accessor", id.toSeq)
  case class IsMethodOf(id: Identifier) extends Flag("method", Seq(id))
  case object ValueClass extends Flag("valueClass", Seq.empty)
  case class FieldDefPosition(i: Int) extends Flag("fieldDefPosition", Seq(i))
  // Convenience accessors over ClassDef flags and its methods.
  implicit class ClassDefWrapper(cd: ClassDef) {
    def isSealed: Boolean = cd.flags contains IsSealed
    def isAbstract: Boolean = cd.flags contains IsAbstract
    def isLibrary: Boolean = cd.flags contains Library
    def isGhost: Boolean = cd.flags contains Ghost
    def isValueClass: Boolean = cd.flags contains ValueClass
    def methods(using s: Symbols): Seq[SymbolIdentifier] = {
      s.functions.values
        .filter(_.flags contains IsMethodOf(cd.id))
        .map(_.id.asInstanceOf[SymbolIdentifier]).toSeq
    }
    def invariant(using s: Symbols): Option[FunDef] = {
      methods map s.functions find (_.flags contains IsInvariant)
    }
  }
  // Convenience accessors over FunDef flags.
  extension (fd: FunDef) {
    def isMethod: Boolean =
      fd.flags exists { case IsMethodOf(_) => true case _ => false }
    def isGhost: Boolean = fd.flags contains Ghost
    def isSynthetic: Boolean = fd.flags contains Synthetic
    def getClassId: Option[Identifier] =
      fd.flags collectFirst { case IsMethodOf(id) => id }
    def getFieldDefPosition: Option[Int] =
      fd.flags collectFirst { case FieldDefPosition(i) => i }
    def getClassDef(using s: Symbols): Option[ClassDef] =
      getClassId flatMap s.lookupClass
    def isAccessor: Boolean =
      fd.flags exists { case IsAccessor(_) => true case _ => false }
    def isAccessorOfParam(cd: ClassDef)(using Symbols): Boolean =
      fd.flags exists {
        case IsAccessor(Some(id)) => cd.fields.map(_.id).contains(id)
        case _ => false
      }
    def isAccessor(id: Identifier): Boolean =
      fd.flags exists { case IsAccessor(Some(id2)) if id == id2 => true case _ => false }
    def isField: Boolean =
      fd.flags exists { case IsField(_) => true case _ => false }
    def isField(isLazy: Boolean): Boolean =
      fd.flags exists { case IsField(`isLazy`) => true case _ => false }
    def isSetter: Boolean = isAccessor && fd.id.name.endsWith("_=") && fd.params.size == 1
    def isGetter: Boolean = isAccessor && fd.params.isEmpty
    def isFinal: Boolean = fd.flags contains Final
    def isAbstract(using Symbols): Boolean = {
      (fd.flags contains IsAbstract) ||
      (!isExtern && !hasBody && !isSynthetic && fd.getClassDef.forall(_.isAbstract))
    }
    def hasBody: Boolean = exprOps.BodyWithSpecs(fd.fullBody).hasBody
    def isInvariant: Boolean = fd.flags contains IsInvariant
    def isExtern: Boolean = fd.flags contains Extern
    def isLaw: Boolean = fd.flags contains Law
    def isLibrary: Boolean = fd.flags contains Library
  }
  override def getDeconstructor(that: inox.ast.Trees): inox.ast.TreeDeconstructor { val s: self.type; val t: that.type } = that match {
    case tree: (Trees & that.type) => // The `& that.type` trick allows to convince scala that `tree` and `that` are actually equal...
      class DeconstructorImpl(override val s: self.type, override val t: tree.type & that.type) extends ConcreteTreeDeconstructor(s, t)
      new DeconstructorImpl(self, tree)
    case _ => super.getDeconstructor(that)
  }
  override val exprOps: ExprOps { val trees: self.type } = {
    class ExprOpsImpl(override val trees: self.type) extends ExprOps(trees)
    new ExprOpsImpl(self)
  }
}
class ExprOps(override val trees: Trees) extends throwing.ExprOps(trees) {
  import trees._
  /** Freshen the type parameters, fields, methods and type members of the given [[ClassDef]].
    *
    * Returns the enclosing function, class, methods and type members with all
    * of the class' type parameters and field identifiers replaced by fresh
    * copies, and every internal reference (field selections, accessor flags)
    * rewritten consistently.
    */
  def freshenClass(enclosingFn: FunDef, cd: ClassDef, methods: Seq[FunDef], typeMembers: Seq[TypeDef]): (FunDef, ClassDef, Seq[FunDef], Seq[TypeDef]) = {
    val typeArgs = freshenTypeParams(cd.typeArgs)
    val tpSubst = (cd.typeArgs zip typeArgs).toMap
    // Build fresh field ValDefs plus the old-id -> new-ValDef substitution.
    val (fieldSubst, fields) = cd.fields
      .map(vd => vd.copy(tpe = typeOps.instantiateType(vd.tpe, tpSubst)))
      .foldLeft((Map[Identifier, ValDef](), Seq[ValDef]())) { case ((paramSubst, params), vd) =>
        val nvd = ValDef(vd.id.freshen, vd.tpe, vd.flags).copiedFrom(vd)
        (paramSubst + (vd.id -> nvd), params :+ nvd)
      }
    // Freshens types and rewrites field selections to the renamed fields.
    val freshener = new TypeFreshener(tpSubst) {
      override def transform(e: Expr): Expr = e match {
        case ClassSelector(rec, id) if fieldSubst contains id =>
          ClassSelector(transform(rec), fieldSubst(id).id).copiedFrom(e)
        case _ => super.transform(e)
      }
    }
    val freshCd = new ClassDef(
      cd.id,
      typeArgs.map(TypeParameterDef(_)),
      cd.parents.map(ct => typeOps.instantiateType(ct, tpSubst).asInstanceOf[ClassType]),
      fields,
      cd.flags
    ).copiedFrom(cd)
    val freshMethods = methods map { fd =>
      // Accessor methods must track their renamed field in the IsAccessor flag
      // (and, for the accessor named after the field, rename the method itself).
      val oldFldId = fd.flags.collectFirst {
        case IsAccessor(Some(fldId)) if cd.fields.exists(_.id == fldId) => fldId
      }
      val newFd = oldFldId match {
        case Some(oldFldId) =>
          val fldNewId = fieldSubst(oldFldId).id
          val newFlags = fd.flags.filter {
            case IsAccessor(Some(`oldFldId`)) => false
            case _ => true
          } :+ IsAccessor(Some(fldNewId))
          val newFdId =
            if (fd.id == oldFldId) fldNewId
            else fd.id
          fd.copy(id = newFdId, flags = newFlags)
        case None =>
          fd
      }
      freshenSignature(freshener.transform(newFd))
    }
    val freshTypeMembers = typeMembers map (freshener.transform(_))
    (freshener.transform(enclosingFn), freshCd, freshMethods, freshTypeMembers)
  }
}
// Pretty-printing for the method-phase nodes: class bodies (with their methods
// and type members when symbols are available), method calls, `this`, `super`.
trait Printer extends throwing.Printer {
  protected val trees: Trees
  import trees._
  override def ppBody(tree: Tree)(using ctx: PrinterContext): Unit = tree match {
    case cd: ClassDef =>
      super.ppBody(cd)
      // Only print the class body when a symbol table is attached to the printer.
      ctx.opts.symbols.foreach { case (given Symbols) =>
        if (cd.methods.nonEmpty || cd.typeMembers.nonEmpty) {
          p""" {
              |  ${typeDefs(cd.typeMembers)}
              |
              |  ${functions(cd.methods)}
              |}"""
        }
      }
    case MethodInvocation(caller, id, tps, args) =>
      p"$caller.$id${nary(tps, ", ", "[", "]")}"
      if (args.nonEmpty) {
        // TODO: handle implicit arguments and/or default values
        p"($args)"
      }
    case This(_) => p"this"
    case Super(_) => p"super"
    case _ => super.ppBody(tree)
  }
  // A method invocation's receiver needs parentheses; its arguments do not.
  override protected def requiresParentheses(ex: Tree, within: Option[Tree]): Boolean = (ex, within) match {
    case (_, Some(MethodInvocation(_, _, _, args))) => !args.contains(ex)
    case _ => super.requiresParentheses(ex, within)
  }
}
// Generic deconstruction/reconstruction of the method-phase nodes and flags,
// used to port trees between two (possibly different) Trees instances.
trait TreeDeconstructor extends throwing.TreeDeconstructor {
  protected val s: Trees
  protected val t: Trees
  override def deconstruct(e: s.Expr): Deconstructed[t.Expr] = e match {
    case s.MethodInvocation(rec, id, tps, args) =>
      // Receiver travels as the first sub-expression; rebuilt as es(0).
      (Seq(id), Seq(), rec +: args, tps, Seq(), (ids, _, es, tps, _) => t.MethodInvocation(es(0), ids.head, tps, es.tail))
    case s.This(ct) =>
      (Seq(), Seq(), Seq(), Seq(ct), Seq(), (_, _, _, tps, _) => t.This(tps.head.asInstanceOf[t.ClassType]))
    case s.Super(ct) =>
      (Seq(), Seq(), Seq(), Seq(ct), Seq(), (_, _, _, tps, _) => t.Super(tps.head.asInstanceOf[t.ClassType]))
    case _ => super.deconstruct(e)
  }
  override def deconstruct(f: s.Flag): DeconstructedFlag = f match {
    case s.IsMethodOf(id) => (Seq(id), Seq(), Seq(), (ids, _, _) => t.IsMethodOf(ids.head))
    case s.IsAccessor(id) => (id.toSeq, Seq(), Seq(), (ids, _, _) => t.IsAccessor(ids.headOption))
    case s.ValueClass => (Seq(), Seq(), Seq(), (_, _, _) => t.ValueClass)
    case s.FieldDefPosition(i) => (Seq(), Seq(), Seq(), (_, _, _) => t.FieldDefPosition(i))
    case _ => super.deconstruct(f)
  }
}
class ConcreteTreeDeconstructor(override val s: Trees, override val t: Trees) extends TreeDeconstructor | epfl-lara/stainless | core/src/main/scala/stainless/extraction/methods/Trees.scala | Scala | apache-2.0 | 18,491 |
// Positive compiler test: a `sealed` class may be extended within the same
// source file, including from nested scopes. Each `Bar` below exercises one
// such scope (local block, inner class, inner object) and must be accepted.
// NOTE(review): the code is the test artifact itself — do not "simplify" it.
sealed class Foo
class Test {
  // extension inside a method-local block
  def f = {
    class Bar extends Foo
  }
  // extension inside an inner class
  class C {
    class Bar extends Foo
  }
  // extension inside an inner object
  object O {
    class Bar extends Foo
  }
}
| som-snytt/dotty | tests/pos/i3149.scala | Scala | apache-2.0 | 161 |
/*
* Copyright 2015 - 2017 Pablo Alcaraz
* Mysql to Postgres converter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.mysql2postgresql.converter.writer
import java.nio.file.Path
import com.github.mysql2postgresql.converter.GeneratedStatements
import com.github.mysql2postgresql.converter.conversion.ConversionError
import org.springframework.context.annotation.Profile
import org.springframework.stereotype.Component
/**
 * A WriterStep used for testing: instead of writing SQL to disk it buffers
 * header/statement/footer text in StringBuilders and records conversion
 * errors, so assertions can inspect the produced output.
 * Active only under the Spring "test" profile.
 */
@Component
@Profile(Array("test"))
class TestWriterStep extends WriterStep {
  // Buffers inspected by tests; reset in before().
  val sbHeader = new StringBuilder
  val sbStatements = new StringBuilder
  val sbFooter = new StringBuilder
  // Errors reported by the conversion, captured in after().
  var errors = IndexedSeq[ConversionError]()
  // Guards against handle() being called without a prior before().
  var beforeCalled = false
  /**
   * Process generatedStatements to be written as SQL sentences: headers and
   * footers are appended one per line with a ';' terminator, the main
   * statement is followed by its post-statements on the same line.
   *
   * @param generatedStatements the statements produced by the converter
   */
  override def handle(generatedStatements: GeneratedStatements): Unit = {
    if (!beforeCalled) {
      throw new RuntimeException("WriterStep.before() was not called. Fix it please.")
    }
    generatedStatements.header.values.foreach(
      statement => {
        sbHeader.append(statement)
        sbHeader.append(";\n")
      }
    )
    sbStatements.append(generatedStatements.statement.toString + ";")
    // Post-statements are appended verbatim, on the same line as the statement.
    generatedStatements.postStatement.foreach(
      statement => {
        sbStatements.append(statement)
      }
    )
    sbStatements.append("\n")
    generatedStatements.footer.foreach(
      statement => {
        sbFooter.append(statement)
        sbFooter.append(";\n")
      }
    )
  }
  /**
   * Called after all processed statements.
   *
   * @param errors the conversion errors accumulated by the pipeline
   */
  override def after(errors: IndexedSeq[ConversionError]): Unit = {
    this.errors = errors
  }
  /**
   * Called before processing converted statements; resets all buffers
   * so the instance can be reused across tests.
   *
   * @param outputPath ignored by this test implementation
   */
  override def before(outputPath: Path): Unit = {
    sbHeader.clear()
    sbStatements.clear()
    sbFooter.clear()
    errors = IndexedSeq[ConversionError]()
    beforeCalled = true
  }
  /**
   * Called when conversion process is finished (it could finish
   * because an error). Good to close files, release resources, etc.
   * Nothing to release here: the buffers stay available for assertions.
   */
  override def close(): Unit = {
    // empty
  }
}
| pabloa/mysql2postgresql | src/test/scala/com/github/mysql2postgresql/converter/writer/TestWriterStep.scala | Scala | apache-2.0 | 2,763 |
package suggestions
package gui
import scala.collection.mutable.ListBuffer
import scala.collection.JavaConverters._
import scala.concurrent._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.swing._
import scala.util.{ Try, Success, Failure }
import scala.swing.event._
import swing.Swing._
import javax.swing.UIManager
import Orientation._
import rx.subscriptions.CompositeSubscription
import rx.lang.scala.Observable
import rx.lang.scala.Subscription
import observablex._
import search._
import rx.lang.scala.subscriptions.Subscription
object WikipediaSuggest extends SimpleSwingApplication with ConcreteSwingApi with ConcreteWikipediaApi {
  // Best-effort: switch to the native look and feel; the broad Throwable
  // catch deliberately swallows failures so the app still starts.
  {
    try {
      UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName())
    } catch {
      case t: Throwable =>
    }
  }
  def top = new MainFrame {
    /* gui setup */
    title = "Query Wikipedia"
    minimumSize = new Dimension(900, 600)
    val button = new Button("Get") {
      icon = new javax.swing.ImageIcon(javax.imageio.ImageIO.read(this.getClass.getResourceAsStream("/suggestions/wiki-icon.png")))
    }
    val searchTermField = new TextField
    val suggestionList = new ListView(ListBuffer[String]())
    val status = new Label(" ")
    // Renders the fetched Wikipedia page as read-only HTML.
    val editorpane = new EditorPane {
      import javax.swing.border._
      border = new EtchedBorder(EtchedBorder.LOWERED)
      editable = false
      peer.setContentType("text/html")
    }
    // Layout: search box + suggestion list + Get button on the left,
    // page viewer on the right, status bar along the bottom.
    contents = new BoxPanel(orientation = Vertical) {
      border = EmptyBorder(top = 5, left = 5, bottom = 5, right = 5)
      contents += new BoxPanel(orientation = Horizontal) {
        contents += new BoxPanel(orientation = Vertical) {
          maximumSize = new Dimension(240, 900)
          border = EmptyBorder(top = 10, left = 10, bottom = 10, right = 10)
          contents += new BoxPanel(orientation = Horizontal) {
            maximumSize = new Dimension(640, 30)
            border = EmptyBorder(top = 5, left = 0, bottom = 5, right = 0)
            contents += searchTermField
          }
          contents += new ScrollPane(suggestionList)
          contents += new BorderPanel {
            maximumSize = new Dimension(640, 30)
            add(button, BorderPanel.Position.Center)
          }
        }
        contents += new ScrollPane(editorpane)
      }
      contents += status
    }
    // All GUI mutations below are scheduled back onto the Swing EDT.
    val eventScheduler = SchedulerEx.SwingEventThreadScheduler
    /**
     * Observables
     * You may find the following methods useful when manipulating GUI elements:
     *  `myListView.listData = aList` : sets the content of `myListView` to `aList`
     *  `myTextField.text = "react"` : sets the content of `myTextField` to "react"
     *  `myListView.selection.items` returns a list of selected items from `myListView`
     *  `myEditorPane.text = "act"` : sets the content of `myEditorPane` to "act"
     */
    // TO IMPLEMENT
    // Every edit of the search field, as a stream of the field's text.
    val searchTerms: Observable[String] = searchTermField.textValues
    // TO IMPLEMENT
    // For each term, fetch suggestions (1s timeout); failures are captured
    // as Failure values instead of terminating the stream.
    val suggestions: Observable[Try[List[String]]] = searchTerms.concatRecovered(wikiSuggestResponseStream(_).timedOut(1))
    // TO IMPLEMENT
    // Successful suggestion lists populate the list view; failures go to
    // the status bar.
    val suggestionSubscription: Subscription = suggestions.observeOn(eventScheduler) subscribe {
      x =>
        x match {
          case Success(lst) => suggestionList.listData = lst
          case Failure(t) => status.text = t.getMessage
        }
    }
    // TO IMPLEMENT
    // Each click of the Get button emits the currently selected list items.
    val selections: Observable[String] = button.clicks.map({
      button => Observable(suggestionList.selection.items: _*)
    }).concat
    // TO IMPLEMENT
    // Fetch the Wikipedia page for each sanitized selection (2s timeout).
    val pages: Observable[Try[String]] = selections.sanitized.concatRecovered {
      x => ObservableEx(wikipediaPage(x)).timedOut(2)
    }
    // TO IMPLEMENT
    // Render fetched pages in the editor pane; report failures in the status bar.
    val pageSubscription: Subscription = pages.observeOn(eventScheduler) subscribe {
      _ match {
        case Success(str) => editorpane.text = str
        case Failure(t) => status.text = t.getMessage
      }
    }
  }
}
/** WikipediaApi backed by the real `Search` implementation. */
trait ConcreteWikipediaApi extends WikipediaApi {
  def wikipediaSuggestion(term: String) = Search.wikipediaSuggestion(term)
  def wikipediaPage(term: String) = Search.wikipediaPage(term)
}
/** Binds the abstract SwingApi members to the real scala-swing types. */
trait ConcreteSwingApi extends SwingApi {
  type ValueChanged = scala.swing.event.ValueChanged
  object ValueChanged {
    /** Matches a text-change event and yields the originating text field. */
    def unapply(x: Event): Option[TextField] =
      PartialFunction.condOpt(x) { case vc: ValueChanged => vc.source.asInstanceOf[TextField] }
  }
  type ButtonClicked = scala.swing.event.ButtonClicked
  object ButtonClicked {
    /** Matches a button click and yields the button that fired it. */
    def unapply(x: Event): Option[Button] =
      PartialFunction.condOpt(x) { case bc: ButtonClicked => bc.source.asInstanceOf[Button] }
  }
  type TextField = scala.swing.TextField
  type Button = scala.swing.Button
}
| 4DD8A19D69F5324F9D49D17EF78BBBCC/Princip_les_of_Rea_ctive_Progra_mming | suggestions/src/main/scala/suggestions/gui/WikipediaSuggest.scala | Scala | mit | 4,699 |
// Stainless verification benchmark: checks that `copy` on a case class whose
// constructor carries a precondition is verified against that invariant.
object CopyInv {
  // Class invariant: every Foo holds a strictly positive value.
  case class Foo(x: BigInt) {
    require(x > 0)
  }
  // y > 1 implies y > 0, so the copied Foo satisfies Foo's precondition.
  def prop(foo: Foo, y: BigInt) = {
    require(y > 1)
    foo.copy(x = y)
  }
}
| epfl-lara/stainless | frontends/benchmarks/verification/valid/MicroTests/CopyInv.scala | Scala | apache-2.0 | 155 |
package org.refptr.iscala
package widgets
/** CSS `border-style` keyword set used for widget styling. */
sealed trait BorderStyle extends EnumType with SnakeCase
@enum object BorderStyle extends Enumerated[BorderStyle] {
    case object None extends BorderStyle
    case object Hidden extends BorderStyle
    case object Dotted extends BorderStyle
    case object Dashed extends BorderStyle
    case object Solid extends BorderStyle
    case object Double extends BorderStyle
    case object Groove extends BorderStyle
    case object Ridge extends BorderStyle
    case object Inset extends BorderStyle
    case object Outset extends BorderStyle
    case object Initial extends BorderStyle
    case object Inherit extends BorderStyle
}
/** CSS `font-style` keyword set used for widget styling. */
sealed trait FontStyle extends EnumType with SnakeCase
@enum object FontStyle extends Enumerated[FontStyle] {
    case object Normal extends FontStyle
    case object Italic extends FontStyle
    case object Oblique extends FontStyle
    case object Initial extends FontStyle
    case object Inherit extends FontStyle
}
/** CSS `font-weight` keyword set, including the numeric 100-900 weights. */
sealed trait FontWeight extends EnumType with SnakeCase
@enum object FontWeight extends Enumerated[FontWeight] {
    case object Normal extends FontWeight
    case object Bold extends FontWeight
    case object Bolder extends FontWeight
    case object Lighter extends FontWeight
    case object Initial extends FontWeight
    case object Inherit extends FontWeight
    case object `100` extends FontWeight
    case object `200` extends FontWeight
    case object `300` extends FontWeight
    case object `400` extends FontWeight
    case object `500` extends FontWeight
    case object `600` extends FontWeight
    case object `700` extends FontWeight
    case object `800` extends FontWeight
    case object `900` extends FontWeight
}
/** Bootstrap-style contextual classes for buttons. */
sealed trait ButtonStyle extends EnumType with SnakeCase
@enum object ButtonStyle extends Enumerated[ButtonStyle] {
    case object Primary extends ButtonStyle
    case object Success extends ButtonStyle
    case object Info extends ButtonStyle
    case object Warning extends ButtonStyle
    case object Danger extends ButtonStyle
}
/** Bootstrap-style contextual classes for box containers. */
sealed trait BoxStyle extends EnumType with SnakeCase
@enum object BoxStyle extends Enumerated[BoxStyle] {
    case object Success extends BoxStyle
    case object Info extends BoxStyle
    case object Warning extends BoxStyle
    case object Danger extends BoxStyle
}
/** Layout direction for container widgets. */
sealed trait Orientation extends EnumType with SnakeCase
@enum object Orientation extends Enumerated[Orientation] {
    case object Horizontal extends Orientation
    case object Vertical extends Orientation
}
/** Flexbox-style alignment positions for child widgets. */
sealed trait Location extends EnumType with SnakeCase
@enum object Location extends Enumerated[Location] {
    case object Start extends Location
    case object Center extends Location
    case object End extends Location
    case object Baseline extends Location
    case object Stretch extends Location
}
/** CSS `overflow` keyword set used for widget styling. */
sealed trait Overflow extends EnumType with SnakeCase
@enum object Overflow extends Enumerated[Overflow] {
    case object Visible extends Overflow
    case object Hidden extends Overflow
    case object Scroll extends Overflow
    case object Auto extends Overflow
    case object Initial extends Overflow
    case object Inherit extends Overflow
}
| nkhuyu/IScala | lib/src/main/scala/Enums.scala | Scala | mit | 3,209 |
package screact.swing
import scutil.lang.*
import scutil.gui.SwingUtil.*
import screact.*
import screact.extra.Blocker
/** used to connect Swing widgets to the reactive world */
object SwingWidget {
	/** simply emit events from some Connectable */
	def events[T](connect:Effect[T]=>Disposer):Events[T] = {
		require(insideEdt, "SwingWidget events may not be constructed outside the EDT")
		val events = new SourceEvents[T]
		// BETTER call this at some time
		// NOTE(review): the Disposer returned by connect is never invoked,
		// so this wiring cannot be torn down; `val _` only suppresses the
		// unused-value warning.
		val disposer = connect(events.emit)
		val _ = disposer
		events
	}
	/** Signal values by some getter, changing on events from some Connectable */
	def signal[T,X](connect:Effect[X]=>Disposer, getter:Thunk[T]):Signal[T] =
		events(connect) tag getter() hold getter()
	/**
	wraps a swing component to take an input Signal and emit change Events.
	the input signal determines the state of the component.
	change events are only fired on user interaction, but not on changes
	of the input signal.
	*/
	def transformer[S,T,X](input:Signal[S], connect:Effect[X]=>Disposer, getter:Thunk[T], setter:Effect[S])(using ob:Observing):Events[T] = {
		require(insideEdt, "SwingWidget transformer may not be constructed outside the EDT")
		// the blocker breaks the feedback loop: widget changes made while
		// pushing the input signal into the component are not re-emitted
		val blocker = new Blocker
		val events = new WidgetEvents[T]
		input observeNow { it =>
			blocker exclusive {
				// only touch the widget when its state actually differs
				if (getter() != it) {
					setter(it)
				}
			}
		}
		// BETTER call this at some time
		// NOTE(review): as in events() above, the Disposer is dropped.
		val disposer = connect { _ =>
			blocker attempt {
				events emit getter()
			}
		}
		val _ = disposer
		events
	}
	//------------------------------------------------------------------------------
	/** in contrast to SourceEvents, this allows multiple calls to emit within the same cycle. the last emit wins. */
	private final class WidgetEvents[T] extends Events[T] { outer =>
		// value visible to the reactive engine for the current cycle
		var msg:Option[T] = None
		// value accumulated between emit() and the scheduled engine update
		var delayed:Option[T] = None
		def emit(value:T):Unit = {
			// only the first emit of a burst schedules an engine update;
			// later emits just overwrite delayed, so the last one wins
			val first = delayed.isEmpty
			delayed = Some(value)
			if (first) {
				// TODO use the (Swing-)Domain to schedule
				edt {
					engine schedule thunk {
						msg = delayed
						delayed = None
						Some(outer)
					}
				}
			}
		}
		def calculate():Unit = {} // msg does not change in here
		def reset():Unit = {
			msg = None
		}
		// init() // not necessary, we don't have dependencies
	}
}
| ritschwumm/screact | src/main/scala/screact/swing/SwingWidget.scala | Scala | bsd-2-clause | 2,279 |
package controllers
import scala.language.higherKinds
import play.api.i18n.I18nSupport
import play.api.mvc._
import controllers.sugar.Requests.{AuthRequest, AuthedProjectRequest, OreRequest}
import controllers.sugar.{Actions, Requests}
import ore.db.Model
import ore.db.access.ModelView
import ore.db.impl.OrePostgresDriver.api._
import ore.db.impl.schema.VersionTable
import ore.models.organization.Organization
import ore.models.project.{Project, Version, Visibility}
import ore.permission.Permission
import util.syntax._
import zio.IO
/**
* Represents a Secured base Controller for this application.
*/
abstract class OreBaseController(implicit val oreComponents: OreControllerComponents)
    extends AbstractController(oreComponents)
    with Actions
    with I18nSupport {
  // Asset resolver exposed implicitly for the twirl templates rendered below.
  implicit val assetsFinder: AssetsFinder = oreComponents.assetsFinder
  /** Standard 404 response rendering the shared "not found" error page. */
  override def notFound(implicit request: OreRequest[_]): Result = NotFound(views.html.errors.notFound())
  /**
    * Gets a project with the specified author and slug, or returns a notFound.
    *
    * @param author   Project author
    * @param slug     Project slug
    * @param request  Incoming request
    * @return NotFound or project
    */
  def getProject(author: String, slug: String)(implicit request: OreRequest[_]): IO[Result, Model[Project]] =
    projects.withSlug(author, slug).get.orElseFail(notFound)
  // Slick predicate looking a version up by its version string
  // (case-insensitive); hidden versions only match when the requester is
  // allowed to see hidden content.
  private def versionFindFunc(versionString: String, canSeeHiden: Boolean): VersionTable => Rep[Boolean] = v => {
    val versionMatches = v.versionString.toLowerCase === versionString.toLowerCase
    val isVisible = if (canSeeHiden) true.bind else v.visibility === (Visibility.Public: Visibility)
    versionMatches && isVisible
  }
  /**
    * Gets a project with the specified versionString, or returns a notFound.
    *
    * @param project        Project to get version from
    * @param versionString  VersionString
    * @param request        Incoming request
    * @return NotFound or function result
    */
  def getVersion(project: Model[Project], versionString: String)(
      implicit request: OreRequest[_]
  ): IO[Result, Model[Version]] =
    project
      .versions(ModelView.now(Version))
      .find(versionFindFunc(versionString, request.headerData.globalPerm(Permission.SeeHidden)))
      .toZIOWithError(notFound)
  /**
    * Gets a version with the specified author, project slug and version string
    * or returns a notFound.
    *
    * @param author         Project author
    * @param slug           Project slug
    * @param versionString  VersionString
    * @param request        Incoming request
    * @return NotFound or project
    */
  def getProjectVersion(author: String, slug: String, versionString: String)(
      implicit request: OreRequest[_]
  ): IO[Result, Model[Version]] =
    for {
      project <- getProject(author, slug)
      version <- getVersion(project, versionString)
    } yield version
  /** Base action that wraps every request in an [[OreRequest]]. */
  def OreAction: ActionBuilder[OreRequest, AnyContent] = Action.andThen(oreAction)
  /** Ensures a request is authenticated */
  def Authenticated: ActionBuilder[AuthRequest, AnyContent] = Action.andThen(authAction)
  /** Ensures a user's account is unlocked */
  def UserLock(redirect: Call = ShowHome): ActionBuilder[AuthRequest, AnyContent] =
    Authenticated.andThen(userLock(redirect))
  /**
    * Retrieves, processes, and adds a [[Project]] to a request.
    *
    * @param author Project owner
    * @param slug   Project slug
    * @return Request with a project if found, NotFound otherwise.
    */
  def ProjectAction(author: String, slug: String): ActionBuilder[Requests.ProjectRequest, AnyContent] =
    OreAction.andThen(projectAction(author, slug))
  /**
    * Retrieves, processes, and adds a [[Project]] to a request.
    *
    * @param pluginId The project's unique plugin ID
    * @return Request with a project if found, NotFound otherwise
    */
  def ProjectAction(pluginId: String): ActionBuilder[Requests.ProjectRequest, AnyContent] =
    OreAction.andThen(projectAction(pluginId))
  /**
    * Ensures a request is authenticated and retrieves, processes, and adds a
    * [[Project]] to a request.
    *
    * @param author Project owner
    * @param slug   Project slug
    * @return Authenticated request with a project if found, NotFound otherwise.
    */
  def AuthedProjectAction(
      author: String,
      slug: String,
      requireUnlock: Boolean = false
  ): ActionBuilder[AuthedProjectRequest, AnyContent] = {
    // when an unlocked account is required, redirect back to the project page
    val first = if (requireUnlock) UserLock(ShowProject(author, slug)) else Authenticated
    first.andThen(authedProjectAction(author, slug))
  }
  /**
    * Retrieves an [[Organization]] and adds it to the request.
    *
    * @param organization Organization to retrieve
    * @return Request with organization if found, NotFound otherwise
    */
  def OrganizationAction(organization: String): ActionBuilder[Requests.OrganizationRequest, AnyContent] =
    OreAction.andThen(organizationAction(organization))
  /**
    * Ensures a request is authenticated and retrieves and adds a
    * [[Organization]] to the request.
    *
    * @param organization Organization to retrieve
    * @return Authenticated request with Organization if found, NotFound otherwise
    */
  def AuthedOrganizationAction(
      organization: String,
      requireUnlock: Boolean = false
  ): ActionBuilder[Requests.AuthedOrganizationRequest, AnyContent] = {
    val first = if (requireUnlock) UserLock(ShowUser(organization)) else Authenticated
    first.andThen(authedOrganizationAction(organization))
  }
  /**
    * A request that ensures that a user has permission to edit a specified
    * profile.
    *
    * @param username User to check
    * @return [[OreAction]] if has permission
    */
  def UserEditAction(username: String): ActionBuilder[AuthRequest, AnyContent] =
    Authenticated.andThen(userEditAction(username))
  /**
    * Represents an action that requires a user to reenter their password.
    *
    * @param username Username to verify
    * @param sso      Incoming SSO payload
    * @param sig      Incoming SSO signature
    * @return None if verified, Unauthorized otherwise
    */
  def VerifiedAction(
      username: String,
      sso: Option[String],
      sig: Option[String]
  ): ActionBuilder[AuthRequest, AnyContent] = UserEditAction(username).andThen(verifiedAction(sso, sig))
}
| SpongePowered/Ore | orePlayCommon/app/controllers/OreBaseController.scala | Scala | mit | 6,465 |
import playground.X
def test(x: Int) = X.power(x, 2)
| dotty-staging/dotty | tests/pos-macros/i8208/Test_2.scala | Scala | apache-2.0 | 53 |
/*
* Copyright 2011 Hui Wen Han, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.huiwen.prefz
import com.twitter.gizzard.nameserver.MultiForwarder
import com.twitter.gizzard.shards.{RoutingNode, ShardException}
import me.huiwen.prefz.shards.{Shard, ReadWriteShardAdapter}
/**
 * Resolves the shard responsible for a (user, graph) pair via gizzard's
 * forwarding tables.
 */
class ForwardingManager(val forwarder: MultiForwarder[Shard]) {
  /**
   * Finds the shard owning `userId` within `graphId` and wraps it in a
   * [[ReadWriteShardAdapter]].
   *
   * @throws ShardException if the forwarder cannot resolve a shard
   */
  @throws(classOf[ShardException])
  def find(userId: Long, graphId: Int): Shard =
    new ReadWriteShardAdapter(findNode(userId, graphId))

  /**
   * Looks up the raw routing node for `userId` within `graphId`.
   *
   * Note the argument order: the forwarder is keyed as (graphId, userId).
   *
   * @throws ShardException if the forwarder cannot resolve a shard
   */
  @throws(classOf[ShardException])
  def findNode(userId: Long, graphId: Int): RoutingNode[Shard] =
    forwarder.find(graphId, userId)
}
| huiwenhan/PrefStore | src/main/scala/me/huiwen/prefz/ForwardingManager.scala | Scala | apache-2.0 | 1,143 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cogx.cogmath.circuit
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.FunSuite
import org.scalatest.MustMatchers
import scala.language.reflectiveCalls
/** Test code for the circuit package.
*
* Because the circuit classes are mutually recursive and abstract (a problem
* with graph-like structures in general), the package is tested as a unit.
*
* @author Greg Snider
*/
@RunWith(classOf[JUnitRunner])
class CircuitSpec extends FunSuite with MustMatchers {
  /** Test version of Node subclass. */
  class TestNode(val index: Int, in: TestNode*)
          extends Node[TestNode](in.toArray)
  {
    override def toString = "node " + index
  }
  /** Test version of Circuit subclass. */
  class TestCircuit extends Circuit[TestNode]
  // Set to true to dump circuits to stdout during the "print" tests.
  val Verbose = false
  /** A simple circuit with 2 roots, inputs at the bottom.
    *
    * Structure (see constructor calls below): roots are 5 (inputs 4, 2)
    * and 8 (inputs 2, 7); 4 reads 3 and 1; 2 reads 1; 3 and 1 both read 0;
    * 7 reads 6; 0 and 6 are primary inputs.
    */
  val circuit = new TestCircuit {
    val node0 = new TestNode(0)
    val node1 = new TestNode(1, node0)
    val node2 = new TestNode(2, node1)
    val node3 = new TestNode(3, node0)
    val node4 = new TestNode(4, node3, node1)
    val node5 = new TestNode(5, node4, node2)   // root
    val node6 = new TestNode(6)
    val node7 = new TestNode(7, node6)
    val node8 = new TestNode(8, node2, node7)   // root
  }
  test("roots") {
    require(circuit.roots.length == 2, "expected 2 roots")
    require(circuit.roots(0) == circuit.node5)
    require(circuit.roots(1) == circuit.node8)
  }
  test("preorder traversal") {
    // Test for pre-order traversal.
    val traversalOrder = Array(5, 4, 3, 0, 1, 2, 8, 7, 6)
    var index = 0
    circuit.traversePreorder((node: TestNode) => {
      require(node.index == traversalOrder(index), "node " + node.index)
      index += 1
    })
  }
  test("postorder traversal") {
    // Test for pre-order traversal.
    val traversalOrder = Array(0, 3, 1, 4, 2, 5, 6, 7, 8)
    var index = 0
    circuit.traversePostorder((node: TestNode) => {
      require(node.index == traversalOrder(index))
      index += 1
    })
  }
  test("print") {
    if (Verbose)
      circuit.print
  }
  test("steal output") {
    // Replace node 4 with a new node, node 9.
    // Node 9 connects to the same inputs as node 4 then steals its outputs
    val node9 = new TestNode(9, circuit.node3, circuit.node1)
    node9.stealOutputsFrom(circuit.node4)
    if (Verbose) {
      println("after stealing")
      circuit.print
    }
    // Test for pre-order traversal. Note the substitution of 9 for 4
    val traversalOrder = Array(5, 9, 3, 0, 1, 2, 8, 7, 6)
    var index = 0
    circuit.traversePreorder((node: TestNode) => {
      require(node.index == traversalOrder(index), "node " + node.index)
      index += 1
    })
    // Now remove node 3 by having node 1 steal its outputs
    require(circuit.size == 9)
    circuit.node1.stealOutputsFrom(circuit.node3)
    require(circuit.size == 8)
    // Test for pre-order traversal.
    val traversalOrder2 = Array(5, 9, 1, 0, 2, 8, 7, 6)
    index = 0
    circuit.traversePreorder((node: TestNode) => {
      require(node.index == traversalOrder2(index), "node " + node.index)
      index += 1
    })
  }
  test("steal internal") {
    // Two identical sums feeding a top sum; stealing merges the duplicates.
    val circuit = new TestCircuit {
      val in1 = new TestNode(0)
      val in2 = new TestNode(1)
      val sum1 = new TestNode(2, in1, in2)
      val sum2 = new TestNode(3, in1, in2)
      val topSum = new TestNode(4, sum1, sum2)
    }
    require(circuit.size == 5)
    //println("CircuitSpec: before stealing")
    //circuit.print
    circuit.sum1.stealOutputsFrom(circuit.sum2)
    require(circuit.size == 4)
    //println("CircuitSpec: after stealing")
    //circuit.print
    // sum2 must have been removed from the circuit entirely.
    circuit.traversePreorder {
      node => require(!(node eq circuit.sum2))
    }
  }
  test("steal recursive") {
    // Stealing from the end of a chain should garbage-collect the whole
    // now-unused chain (n2 and n1), but never a primary input (n0).
    val circuit = new TestCircuit {
      val n0 = new TestNode(0)
      val n1 = new TestNode(1, n0)
      val n2 = new TestNode(2, n1)
    }
    require(circuit.size == 3)
    val n3 = new TestNode(3, circuit.n0)
    require(circuit.size == 4)
    require(circuit.contains(circuit.n0))
    require(circuit.contains(circuit.n1))
    require(circuit.contains(circuit.n2))
    require(circuit.contains(n3))
    n3 stealOutputsFrom circuit.n2
    // n2 is now useless, as is n1. n0 is primary input and cannot be removed.
    require(circuit.contains(circuit.n0))
    require(!circuit.contains(circuit.n1))
    require(!circuit.contains(circuit.n2))
    require(circuit.contains(n3))
  }
}
} | hpe-cct/cct-core | src/test/scala/cogx/cogmath/circuit/CircuitSpec.scala | Scala | apache-2.0 | 5,339 |
/*
* Copyright 2012-2014 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.xfinity.sirius.api.impl.bridge
import akka.actor._
import com.comcast.xfinity.sirius.api.{SiriusConfiguration, SiriusResult}
import com.comcast.xfinity.sirius.admin.MonitoringHooks
import com.comcast.xfinity.sirius.api.impl.membership.MembershipHelper
import com.comcast.xfinity.sirius.util.RichJTreeMap
import com.comcast.xfinity.sirius.api.impl.bridge.PaxosStateBridge.ChildProvider
import com.comcast.xfinity.sirius.api.impl.state.SiriusPersistenceActor._
import com.comcast.xfinity.sirius.api.impl.state.SiriusPersistenceActor.PartialSubrange
import com.comcast.xfinity.sirius.api.impl.OrderedEvent
import com.comcast.xfinity.sirius.api.impl.paxos.PaxosMessages.DecisionHint
import com.comcast.xfinity.sirius.api.impl.paxos.PaxosMessages.Decision
import com.comcast.xfinity.sirius.api.impl.state.SiriusPersistenceActor.CompleteSubrange
import com.comcast.xfinity.sirius.api.impl.paxos.PaxosMessages.Command
import scala.language.postfixOps
import scala.concurrent.duration._
object PaxosStateBridge {
  /**
   * Factory for creating the children actors of PaxosStateBridge.
   *
   * @param config the SiriusConfiguration for this node
   */
  class ChildProvider(config: SiriusConfiguration, membershipHelper: MembershipHelper) {
    // Spawns the child actor that drives log catch-up from remote peers.
    def createCatchupSupervisor()(implicit context: ActorContext) = {
      context.actorOf(CatchupSupervisor.props(membershipHelper, config))
    }
  }
  /**
   * Create Props for a PaxosStateBridge actor.
   *
   * @param startingSeq the sequence number to start with
   * @param stateSupervisor reference to the subsystem encapsulating system state.
   * @param siriusSupervisor reference to the Sirius Supervisor Actor for routing
   *          DecisionHints to the Paxos Subsystem
   * @param membershipHelper reference to object that knows how to get a random
   *          remote cluster member
   * @param config SiriusConfiguration for this node
   * @return  Props for creating this actor, which can then be further configured
   *         (e.g. calling `.withDispatcher()` on it)
   */
  def props(startingSeq: Long,
            stateSupervisor: ActorRef,
            siriusSupervisor: ActorRef,
            membershipHelper: MembershipHelper,
            config: SiriusConfiguration): Props = {
    val childProvider = new ChildProvider(config, membershipHelper)
    // how often to proactively ask peers for log entries; defaults to 30s
    val catchupFreq = config.getProp(SiriusConfiguration.LOG_REQUEST_FREQ_SECS, 30).seconds
    Props(classOf[PaxosStateBridge], startingSeq, stateSupervisor, siriusSupervisor, childProvider, catchupFreq, config)
  }
}
/**
* Actor responsible for bridging the gap between the Paxos layer and
* the persistence layer.
*
* This Actor contains the necessary logic for assuring that events are only
* applied to the persistence layer in order. As designed currently (on
* purpose) the Paxos system will blindly deliver decisions, even if they have
* already been decided. This allows nodes that are behind to catch up. Also,
* there is no guarantee that events will arrive in order, so a later event
* may arrive before a current event.
*
* To accomplish this we buffer events that come before their time, only keeping
* the first copy of each.
*
* XXX: in its current form it does not wait for confirmation that an event has been
* committed to disk by the persistence layer, we should really add that, but for
* now this should be good enough.
*
* @param startingSeq the sequence number to start with
* @param stateSupervisor reference to the subsystem encapsulating system state.
* In the future as the code settles down we may want to have this
* directly point to the persistence layer, but for now we funnel
* everything through the state subsystem supervisor for abstraction,
* such that we can easily refactor and not worry about messing stuff
* up.
* @param siriusSupervisor reference to the Sirius Supervisor Actor for routing
* DecisionHints to the Paxos Subsystem
*/
class PaxosStateBridge(startingSeq: Long,
                       stateSupervisor: ActorRef,
                       siriusSupervisor: ActorRef,
                       childProvider: ChildProvider,
                       catchupFreq: FiniteDuration,
                       config: SiriusConfiguration)
    extends Actor with MonitoringHooks {
  implicit val executionContext = context.system.dispatcher
  // next sequence number the persistence layer expects
  var nextSeq: Long = startingSeq
  // out-of-order decisions buffered until their slot comes up, keyed by slot
  var eventBuffer = RichJTreeMap[Long, OrderedEvent]()
  val catchupSupervisor = childProvider.createCatchupSupervisor()
  // periodically kick off catch-up from peers
  val catchupSchedule = context.system.scheduler.schedule(catchupFreq, catchupFreq, self, InitiateCatchup)
  // monitor stats, for original catchup duration
  var startupCatchupDuration: Option[Long] = None
  val startupTimestamp = System.currentTimeMillis()
  override def preStart() {
    registerMonitor(new PaxosStateBridgeInfo, config)
  }
  override def postStop() {
    catchupSchedule.cancel()
    unregisterMonitors(config)
  }
  def receive = {
    // new decision for a slot we have neither applied nor buffered yet:
    // buffer it, then flush any now-contiguous prefix to the state subsystem
    case Decision(seq, Command(client, ts, op)) if seq >= nextSeq && !eventBuffer.containsKey(seq) =>
      eventBuffer.put(seq, OrderedEvent(seq, ts, op))
      while (eventBuffer.containsKey(nextSeq)) {
        stateSupervisor ! eventBuffer.remove(nextSeq)
        nextSeq += 1
      }
      // ack the originating client and tell paxos how far we have applied
      client ! SiriusResult.none()
      siriusSupervisor ! DecisionHint(nextSeq - 1)
    case InitiateCatchup =>
      catchupSupervisor ! InitiateCatchup(nextSeq)
    // a full page of log entries arrived: apply it and keep fetching
    case subrange: CompleteSubrange =>
      applySubrange(subrange)
      catchupSupervisor ! ContinueCatchup(nextSeq)
    // a partial (final) page: apply it and stop this catch-up round
    case subrange: PartialSubrange =>
      applySubrange(subrange)
      updateCatchupDuration()
      catchupSupervisor ! StopCatchup
    case EmptySubrange =>
      updateCatchupDuration()
      catchupSupervisor ! StopCatchup
  }
  // Records how long the very first catch-up after startup took; later
  // rounds leave the recorded value untouched.
  private def updateCatchupDuration() {
    if (startupCatchupDuration.isEmpty) { // some accounting: speed of our first catchup
      startupCatchupDuration = Some(System.currentTimeMillis() - startupTimestamp)
    }
  }
  // Applies a fetched range of log events, advances nextSeq past it, prunes
  // stale buffered decisions, and publishes the new progress upstream.
  private def applySubrange(subrange: PopulatedSubrange) {
    // for each useful event, send it to the stateSupervisor
    subrange.events.dropWhile(_.sequence < nextSeq)
      .foreach(stateSupervisor ! _)
    // update nextSeq
    nextSeq = subrange.rangeEnd + 1
    // dump out of the buffer events that no longer matter
    eventBuffer.dropWhile((slot, _) => slot < nextSeq)
    // let parent know about the new nextSeq
    siriusSupervisor ! DecisionHint(nextSeq - 1)
  }
  /**
   * Monitoring hooks
   */
  trait PaxosStateBridgeInfoMBean {
    def getNextSeq: Long
    def getEventBufferSize: Int
    def getStartupCatchupDuration: Option[Long]
  }
  class PaxosStateBridgeInfo extends PaxosStateBridgeInfoMBean {
    def getNextSeq = nextSeq
    def getEventBufferSize = eventBuffer.size
    def getStartupCatchupDuration = startupCatchupDuration
  }
}
| mattinger/sirius | src/main/scala/com/comcast/xfinity/sirius/api/impl/bridge/PaxosStateBridge.scala | Scala | apache-2.0 | 7,607 |
package sylvestris.core
import cats.implicits._
import org.scalacheck._, Prop._, Shapeless._
import spray.json._, DefaultJsonProtocol._
import sylvestris.core._, Graph._, Relationship._
import sylvestris.core.fixtures._, model._
//TODO make this a trait and use the withgraph business...
@SuppressWarnings(Array(
"org.brianmckenna.wartremover.warts.Any",
"org.brianmckenna.wartremover.warts.AsInstanceOf",
"org.brianmckenna.wartremover.warts.NonUnitStatements",
"org.brianmckenna.wartremover.warts.Throw"))
/**
 * ScalaCheck properties for graph node relationship operations
 * (`toOne` / `toMany`). Subclasses supply the Graph backend via withGraph.
 */
abstract class NodeOpsTest extends Properties("NodeOpsTest") {
  // Runs a check against a subclass-supplied Graph implementation.
  def withGraph[T](f: Graph => T): T
  // A one-to-one relationship is readable from both ends once both edges exist.
  property("toOne get") = forAll { (node1: Node[Content1], node2: Node[Content2]) =>
    (node1.id =!= node2.id) ==> withGraph { g =>
      implicit val oneToOne = new OneToOne[Content1, Content2]
      val toEdge = Edge(None, node1.id, Content1.nodeManifest.tag, node2.id, Content2.nodeManifest.tag)
      val fromEdge = Edge(None, node2.id, Content2.nodeManifest.tag, node1.id, Content1.nodeManifest.tag)
      runAssertIsRight(g) {
        for {
          _ <- addNode(node1)
          _ <- addNode(node2)
          _ <- addEdges(Set(toEdge))
          _ <- addEdges(Set(fromEdge))
        } yield {}
      }
      node1.toOne[Content2].value.run(g) === Some(node2).right &&
      node2.toOne[Content1].value.run(g) === Some(node1).right
    }
  }
  // A one-to-many relationship yields all children, each pointing back at the parent.
  property("toMany get") = forAll { (node1: Node[Content1], node2: Node[Content2], node3: Node[Content2]) =>
    (node1.id =!= node2.id && node2.id =!= node3.id) ==> withGraph { g =>
      implicit val oneToMany = new OneToMany[Content1, Content2]
      val edges = Set(node2, node3).map(n => Set(
        Edge(None, node1.id, Content1.nodeManifest.tag, n.id, Content2.nodeManifest.tag),
        Edge(None, n.id, Content2.nodeManifest.tag, node1.id, Content1.nodeManifest.tag))).flatten
      runAssertIsRight(g) {
        for {
          _ <- addNode(node1)
          _ <- addNode(node2)
          _ <- addNode(node3)
          _ <- addEdges(edges)
        } yield {}
      }
      node1.toMany[Content2].value.run(g) === Set(node2, node3).right &&
      node2.toOne[Content1].value.run(g) === Some(node1).right &&
      node3.toOne[Content1].value.run(g) === Some(node1).right
    }
  }
  // Setting a toOne relationship creates both the forward and reverse edges.
  property("toOne(Node) set") = forAll { (node1: Node[Content1], node2: Node[Content2]) =>
    (node1.id =!= node2.id) ==> withGraph { g =>
      implicit val oneToOne = new OneToOne[Content1, Content2]
      runAssertIsRight(g) {
        for {
          _ <- addNode(node1)
          _ <- addNode(node2)
          _ <- node1.toOne[Content2](Some(node2))
        } yield {}
      }
      val expectedTo = Edge(None, node1.id, Content1.nodeManifest.tag, node2.id, Content2.nodeManifest.tag)
      val expectedFrom = Edge(None, node2.id, Content2.nodeManifest.tag, node1.id, Content1.nodeManifest.tag)
      getEdges(node1.id, Content1.nodeManifest.tag).value.run(g) === Set(expectedTo).right &&
      getEdges(node2.id, Content2.nodeManifest.tag).value.run(g) === Set(expectedFrom).right
    }
  }
  // Clearing a toOne relationship removes the edges in both directions.
  property("toOne(Node) clear") = forAll { (node1: Node[Content1], node2: Node[Content2]) =>
    (node1.id =!= node2.id) ==> withGraph { g =>
      implicit val oneToOne = new OneToOne[Content1, Content2]
      val edges = Set(
        Edge(None, node1.id, Content1.nodeManifest.tag, node2.id, Content2.nodeManifest.tag),
        Edge(None, node2.id, Content2.nodeManifest.tag, node1.id, Content1.nodeManifest.tag))
      runAssertIsRight(g) {
        for {
          _ <- addNode(node1)
          _ <- addNode(node2)
          _ <- addEdges(edges)
        } yield {}
      }
      // TODO having to type the None is annoying
      node1.toOne[Content2](Option.empty[Node[Content2]]).value.run(g)
      getEdges(node1.id, Content1.nodeManifest.tag).value.run(g) === Set.empty[Edge].right &&
      getEdges(node2.id, Content2.nodeManifest.tag).value.run(g) === Set.empty[Edge].right
    }
  }
  // TODO toOne replace
}
/** Runs the shared NodeOps property suite against the in-memory graph backend. */
object InMemoryNodeOpsTest extends NodeOpsTest {
  // Every invocation receives a fresh, empty in-memory graph.
  def withGraph[T](f: Graph => T): T = {
    val graph = InMemoryGraph()
    f(graph)
  }
}
| janrain/sylvestris | core/src/test/scala/sylvestris/core/NodeOpsTest.scala | Scala | mit | 4,111 |
/**
* Copyright (C) 2013 Carnegie Mellon University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tdb.list
import akka.pattern.ask
import java.io.Serializable
import scala.collection.mutable.Buffer
import scala.concurrent.Await
import tdb._
import tdb.Constants._
import tdb.messages._
import tdb.TDB._
/**
 * An AdjustableList split into per-datastore partitions, each a DoubleChunkList.
 * Every bulk operation fans out over `partitions`, running each partition's work
 * with a placement hint (`parWithHint`) towards that partition's datastore and
 * recursing in parallel over the remaining partitions.
 *
 * @param partitions the underlying per-datastore chunk lists
 * @param conf       list configuration, forwarded when building result lists
 */
class PartitionedDoubleChunkList[T, U]
    (val partitions: Buffer[DoubleChunkList[T, U]],
     conf: ListConf)
  extends AdjustableList[T, U] with Serializable {
  Log.debug("new PartitionedDoubleChunkList")

  /**
   * Maps whole chunks through `f`, partition by partition, in parallel.
   * NOTE(review): innerMap recurses before appending the current partition's
   * result, so the result buffer holds partitions in reverse order relative to
   * `partitions` — confirm downstream code does not rely on ordering.
   */
  override def chunkMap[V, W](f: Iterable[(T, U)] => (V, W))
      (implicit c: Context): AdjustableList[V, W] = {
    def innerMap(i: Int)(implicit c: Context): Buffer[DoubleList[V, W]] = {
      if (i < partitions.size) {
        // Left branch maps partition i near its datastore; right branch handles the rest.
        val (mappedPartition, mappedRest) = parWithHint({
          c => partitions(i).chunkMap(f)(c)
        }, partitions(i).datastoreId)({
          c => innerMap(i + 1)(c)
        })
        mappedRest += mappedPartition
      } else {
        Buffer[DoubleList[V, W]]()
      }
    }
    new PartitionedDoubleList(innerMap(0))
  }

  // Not implemented for this list type.
  def filter(pred: ((T, U)) => Boolean)
      (implicit c: Context): PartitionedDoubleChunkList[T, U] = ???

  /** flatMaps each partition in parallel; same recursion/ordering shape as chunkMap. */
  def flatMap[V, W](f: ((T, U)) => Iterable[(V, W)])
      (implicit c: Context): PartitionedDoubleChunkList[V, W] = {
    def innerMap(i: Int)(implicit c: Context): Buffer[DoubleChunkList[V, W]] = {
      if (i < partitions.size) {
        val (mappedPartition, mappedRest) = parWithHint({
          c => partitions(i).flatMap(f)(c)
        }, partitions(i).datastoreId)({
          c => innerMap(i + 1)(c)
        })
        mappedRest += mappedPartition
      } else {
        Buffer[DoubleChunkList[V, W]]()
      }
    }
    new PartitionedDoubleChunkList(innerMap(0), conf)
  }

  /** Applies `f` to every chunk of every partition, in parallel across partitions. */
  override def foreachChunk(f: (Iterable[(T, U)], Context) => Unit)
      (implicit c: Context): Unit = {
    def innerForeach(i: Int)(implicit c: Context) {
      if (i < partitions.size) {
        // NOTE(review): the destructured (mappedPartition, mappedRest) values are
        // unused here; parWithHint is invoked purely for its parallel side effects.
        val (mappedPartition, mappedRest) = parWithHint({
          c => partitions(i).foreachChunk(f)(c)
        }, partitions(i).datastoreId)({
          c => innerForeach(i + 1)(c)
        })
      }
    }
    innerForeach(0)
  }

  /** Applies `f` to every element of every partition, in parallel across partitions. */
  override def foreach(f: ((T, U), Context) => Unit)
      (implicit c: Context): Unit = {
    def innerForeach(i: Int)(implicit c: Context) {
      if (i < partitions.size) {
        // NOTE(review): destructured values unused here as well, see foreachChunk.
        val (mappedPartition, mappedRest) = parWithHint({
          c => partitions(i).foreach(f)(c)
        }, partitions(i).datastoreId)({
          c => innerForeach(i + 1)(c)
        })
      }
    }
    innerForeach(0)
  }

  // Not implemented for this list type.
  def join[V](that: AdjustableList[T, V], condition: ((T, V), (T, U)) => Boolean)
      (implicit c: Context): PartitionedDoubleChunkList[T, (U, V)] = ???

  /** Maps each element through `f`, partition by partition; same shape as flatMap. */
  def map[V, W](f: ((T, U)) => (V, W))
      (implicit c: Context): PartitionedDoubleChunkList[V, W] = {
    def innerMap(i: Int)(implicit c: Context): Buffer[DoubleChunkList[V, W]] = {
      if (i < partitions.size) {
        val (mappedPartition, mappedRest) = parWithHint({
          c => partitions(i).map(f)(c)
        }, partitions(i).datastoreId)({
          c => innerMap(i + 1)(c)
        })
        mappedRest += mappedPartition
      } else {
        Buffer[DoubleChunkList[V, W]]()
      }
    }
    new PartitionedDoubleChunkList(innerMap(0), conf)
  }

  /** Maps only the values through `f`, keeping keys; same shape as map. */
  override def mapValues[V](f: U => V)
      (implicit c: Context): PartitionedDoubleChunkList[T, V] = {
    def innerMap(i: Int)(implicit c: Context): Buffer[DoubleChunkList[T, V]] = {
      if (i < partitions.size) {
        val (mappedPartition, mappedRest) = parWithHint({
          c => partitions(i).mapValues(f)(c)
        }, partitions(i).datastoreId)({
          c => innerMap(i + 1)(c)
        })
        mappedRest += mappedPartition
      } else {
        Buffer[DoubleChunkList[T, V]]()
      }
    }
    new PartitionedDoubleChunkList(innerMap(0), conf)
  }

  /**
   * Reduces the whole list with `f`. Each partition is reduced near its own
   * datastore; when another un-reduced partition lives on the same datastore it
   * is chained next (newNextOption) to keep work co-located. A null written at
   * the recursion's end marks "no value"; the final mod combines pairwise,
   * treating null as the identity.
   */
  def reduce(f: ((T, U), (T, U)) => (T, U))
      (implicit c: Context): Mod[(T, U)] = {
    def innerReduce
        (next: DoubleChunkList[T, U],
         remaining: Buffer[DoubleChunkList[T, U]])
        (implicit c: Context): Mod[(T, U)] = {
      // Prefer to continue with a partition on the same datastore as `next`.
      val newNextOption = remaining.find(_.datastoreId == next.datastoreId)
      val (reducedPartition, reducedRest) =
        newNextOption match {
          case Some(newNext) =>
            parWithHint({
              c => next.reduce(f)(c)
            }, next.datastoreId)({
              c => innerReduce(newNext, remaining - newNext)(c)
            }, newNext.datastoreId)
          case None =>
            if (remaining.size > 0) {
              // No co-located partition left: continue with the first remaining one.
              parWithHint({
                c => next.reduce(f)(c)
              }, next.datastoreId)({
                c => innerReduce(remaining(0), remaining.tail)(c)
              }, remaining(0).datastoreId)
            } else {
              // Base case: nothing left to reduce; null acts as the identity below.
              parWithHint({
                c => next.reduce(f)(c)
              }, next.datastoreId)({
                c => mod { write[(T, U)](null)(c) }(c)
              }, next.datastoreId)
            }
        }
      // Combine this partition's result with the rest, skipping null (empty) sides.
      mod {
        read(reducedPartition) {
          case null =>
            read(reducedRest) {
              case null => write(null)
              case rest => write(rest)
            }
          case partition =>
            read(reducedRest) {
              case null => write(partition)
              case rest => write(f(partition, rest))
            }
        }
      }
    }
    parWithHint({
      c => innerReduce(partitions(0), partitions.tail)(c)
    }, partitions(0).datastoreId)({
      c =>
    })._1
  }

  /* Meta Operations */

  /** Collects all partitions' contents into one buffer, in partition order. */
  def toBuffer(mutator: Mutator): Buffer[(T, U)] = {
    val buf = Buffer[(T, U)]()
    for (partition <- partitions) {
      buf ++= partition.toBuffer(mutator)
    }
    buf
  }

  /** Debug rendering: concatenated per-partition buffer dumps. */
  def toString(mutator: Mutator): String = {
    val buf = new StringBuffer()
    for (partition <- partitions) {
      buf.append(partition.toBuffer(mutator).toString)
    }
    buf.toString()
  }
}
| twmarshall/tdb | core/src/main/scala/tdb/list/PartitionedDoubleChunkList.scala | Scala | apache-2.0 | 6,483 |
/*
,i::,
:;;;;;;;
;:,,::;.
1ft1;::;1tL
t1;::;1,
:;::; _____ __ ___ __
fCLff ;:: tfLLC / ___/ / |/ /____ _ _____ / /_
CLft11 :,, i1tffLi \\__ \\ ____ / /|_/ // __ `// ___// __ \\
1t1i .;; .1tf ___/ //___// / / // /_/ // /__ / / / /
CLt1i :,: .1tfL. /____/ /_/ /_/ \\__,_/ \\___//_/ /_/
Lft1,:;: , 1tfL:
;it1i ,,,:::;;;::1tti s_mach.datadiff
.t1i .,::;;; ;1tt Copyright (c) 2014 S-Mach, Inc.
Lft11ii;::;ii1tfL: Author: lance.gatlin@gmail.com
.L1 1tt1ttt,,Li
...1LLLL...
*/
package s_mach.datadiff
import scala.language.experimental.macros
import scala.reflect.macros.blackbox
/**
* A type class for computing the differences between two instances of a type
* @tparam A the type to compute differences on
* @tparam P a type for a "patch" which represents the differences between any
* two instances of A
*/
trait DataDiff[A,P] {
  /** Member alias for the patch type, allowing callers to refer to `someDiff.Patch`. */
  type Patch = P
  /** The value of P that represents no change */
  val noChange : P
  /**
   * Compute the difference between two values. Result is a patch that if
   * applied to the original value results in the new value.
   * @param oldValue the original value
   * @param newValue the new value
   * @return If oldValue and newValue are different, P (that is not equal to
   * noChange). Otherwise, noChange
   */
  def calcDiff(oldValue: A, newValue: A) : P
  /**
   * Apply a patch (generated by a prior call to calcDiff) to a value. If patch
   * is equal to noChange, then value is returned unmodified.
   * @param value the value to apply the patch to
   * @param patch the patch to apply
   * @return the new value with the patch applied
   */
  def applyPatch(value: A, patch: P) : A
}
object DataDiff {
  /**
   * Generate a DataDiff implementation for a product type
   * @tparam A the value type
   * @tparam P the patch type for the value type
   * @return the DataDiff implementation
   */
  def forProductType[A <: Product, P <: Product] : DataDiff[A,P] =
    macro macroForProductType[A,P]
  // Note: Scala requires this to be public
  // Macro implementation backing forProductType: delegates code generation to
  // DataDiffMacroBuilderImpl.
  def macroForProductType[A:c.WeakTypeTag,P:c.WeakTypeTag](
    c: blackbox.Context
  ) : c.Expr[DataDiff[A,P]] = {
    val builder = new impl.DataDiffMacroBuilderImpl(c)
    // Cast re-aligns the path-dependent Expr type from the builder's context
    // with this method's context — presumably the same runtime context; confirm.
    builder.build[A,P].asInstanceOf[c.Expr[DataDiff[A,P]]]
  }
}
| S-Mach/s_mach.datadiff | datadiff-core/src/main/scala/s_mach/datadiff/DataDiff.scala | Scala | mit | 2,526 |
package cz.augi.gsonscala
import java.lang.reflect.ParameterizedType
import com.google.gson.reflect.TypeToken
import com.google.gson.stream.{JsonReader, JsonWriter}
import com.google.gson.{Gson, TypeAdapter, TypeAdapterFactory}
/** Gson factory that supplies an [[OptionTypeAdapter]] for any scala.Option type. */
object OptionTypeAdapterFactory extends TypeAdapterFactory {
  // Gson's factory contract: return null for types this factory does not handle.
  override def create[T](gson: Gson, t: TypeToken[T]): TypeAdapter[T] = {
    val isOption = classOf[Option[_]].isAssignableFrom(t.getRawType)
    if (isOption) new OptionTypeAdapter(gson, t) else null
  }
}
/**
 * Gson TypeAdapter for scala.Option: Some(v) is written as v itself, None as an
 * explicit JSON null; reading maps JSON null to None and anything else to Some.
 */
class OptionTypeAdapter[T](gson: Gson, t: TypeToken[T]) extends TypeAdapter[T] {
  // Element type A of the Option[A] being adapted, recovered from the generic signature.
  val innerType = t.getType.asInstanceOf[ParameterizedType].getActualTypeArguments()(0)

  // Force an explicit JSON null even when the writer is configured to suppress
  // nulls, so that read(...) is invoked symmetrically during deserialization.
  private def writeForcedNull(out: JsonWriter): Unit = {
    val previous = out.getSerializeNulls
    out.setSerializeNulls(true)
    out.nullValue()
    out.setSerializeNulls(previous)
  }

  override def write(out: JsonWriter, value: T): Unit =
    value match {
      case opt: Option[_] =>
        opt match {
          case Some(inner) => gson.toJson(inner, innerType, out)
          case None => writeForcedNull(out)
        }
    }

  // A JSON null deserializes to None; any other value becomes Some(...).
  override def read(in: JsonReader): T = Option(gson.fromJson(in, innerType)).asInstanceOf[T]
}
| augi/gson-scala | src/main/scala/cz/augi/gsonscala/OptionTypeAdapter.scala | Scala | mit | 1,159 |
package org.embulk.parser.xpath2
import com.ximpleware.VTDNav
object VTD {

  /**
   * Saves the navigator's cursor state, runs `f`, and restores the state afterwards.
   *
   * `nav.push()` is deliberately performed before entering the `try`: the original
   * version pushed inside the `try`, so a `push()` that threw would still run the
   * `finally` and `pop()` a frame that was never pushed, corrupting the navigator's
   * context stack. With push outside, `pop()` only runs once a push has succeeded.
   *
   * @param nav the VTD navigator whose context is saved/restored
   * @param f   the computation to run within the saved context (evaluated lazily)
   * @return the result of `f`
   */
  final def withinContext[A](nav: VTDNav)(f: => A): A = {
    nav.push()
    try f
    finally nav.pop()
  }
}
| maji-KY/embulk-parser-xpath2 | src/main/scala/org/embulk/parser/xpath2/VTD.scala | Scala | gpl-2.0 | 186 |
package org.openurp.edu.teach.ws.code
import org.beangle.data.jpa.dao.OqlBuilder
import org.beangle.data.model.dao.EntityDao
import org.beangle.webmvc.api.annotation.response
import org.openurp.code.BaseCode
import org.beangle.data.model.Entity
import org.beangle.commons.http.accept.ContentNegotiationManager
import org.beangle.webmvc.entity.action.RestfulService
/** Base read-only web service for code tables: lists entities of type T with a request-driven sort order. */
class AbstractWS[T <: BaseCode] extends RestfulService[T] {

  @response
  override def index(): Any = {
    val query = OqlBuilder.from(entityType, "code")
    // Sort order comes from the request parameter, defaulting to the entity code.
    query.orderBy(get("orderBy", "code.code"))
    // Let subclasses narrow the result set before searching.
    buildQuery(query)
    //if (this.isRequestCsv) put("properties", List(classOf[BaseCode] -> List("id", "code","name"), classOf[Entity[_]] -> List("id")))
    entityDao.search(query)
  }

  // Hook for subclasses to add restrictions; the default adds none.
  def buildQuery(builder: OqlBuilder[T]): Unit = ()
}
/*
,i::,
:;;;;;;;
;:,,::;.
1ft1;::;1tL
t1;::;1,
:;::; _____ __ ___ __
fCLff ;:: tfLLC / ___/ / |/ /____ _ _____ / /_
CLft11 :,, i1tffLi \\__ \\ ____ / /|_/ // __ `// ___// __ \\
1t1i .;; .1tf ___/ //___// / / // /_/ // /__ / / / /
CLt1i :,: .1tfL. /____/ /_/ /_/ \\__,_/ \\___//_/ /_/
Lft1,:;: , 1tfL:
;it1i ,,,:::;;;::1tti s_mach.datadiff
.t1i .,::;;; ;1tt Copyright (c) 2014 S-Mach, Inc.
Lft11ii;::;ii1tfL: Author: lance.gatlin@gmail.com
.L1 1tt1ttt,,Li
...1LLLL...
*/
package s_mach.datadiff.impl
import s_mach.datadiff.{OptionPatch, DataDiff}
class AppliedInnerPatchToNoneException extends RuntimeException
/**
 * DataDiff for Option[A], built on an inner DataDiff[A,P]:
 *  - Some -> Some delegates to the inner diff and wraps any real change;
 *  - Some -> None yields SetNone, None -> Some carries the new value whole;
 *  - applying ApplyInnerPatch to a None throws AppliedInnerPatchToNoneException.
 */
class OptionDataDiffImpl[A,P](implicit
  aDiff: DataDiff[A,P]
) extends DataDiff[Option[A],OptionPatch[A,P]] {
  override val noChange = OptionPatch.noChange

  override def calcDiff(optOldValue: Option[A], optNewValue: Option[A]): OptionPatch[A,P] =
    (optOldValue, optNewValue) match {
      // Both set: delegate to the inner diff; only wrap it if it is a real change.
      case (Some(oldValue), Some(newValue)) =>
        val innerPatch = aDiff.calcDiff(oldValue, newValue)
        if (innerPatch == aDiff.noChange) OptionPatch.noChange
        else OptionPatch.ApplyInnerPatch(innerPatch)
      // Value was cleared.
      case (Some(_), None) => OptionPatch.SetNone
      // Value was introduced: there is no old value to diff against, so carry it whole.
      case (None, Some(newValue)) => OptionPatch.SetValue(newValue)
      // Unset on both sides: nothing changed.
      case (None, None) => noChange
    }

  override def applyPatch(optValue: Option[A], patch: OptionPatch[A,P]): Option[A] =
    patch match {
      case OptionPatch.NoChange => optValue
      case OptionPatch.ApplyInnerPatch(aPatch) =>
        // An inner patch is only meaningful against an existing value.
        val value = optValue.getOrElse(throw new AppliedInnerPatchToNoneException)
        Some(aDiff.applyPatch(value, aPatch))
      case OptionPatch.SetValue(aValue) => Some(aValue)
      case OptionPatch.SetNone => None
    }
}
| S-Mach/s_mach.datadiff | datadiff/src/main/scala/s_mach/datadiff/impl/OptionDataDiffImpl.scala | Scala | mit | 2,660 |
/* Copyright (c) 2015 Andrée Ekroth.
* Distributed under the MIT License (MIT).
* See accompanying file LICENSE or copy at
* http://opensource.org/licenses/MIT
*/
package com.github.ekroth
package songkick
/** Objects corresponding to Songkick's object model.
*/
trait Objects {
  import scala.collection.immutable.Seq
  import spray.json._
  import DefaultJsonProtocol._
  // Enriches any JsonFormat with `withTipe`, which renames the JSON field "type"
  // to the model field "tipe" on read (and back on write). `type` is a reserved
  // word in Scala, so the case classes use `tipe` instead (see Event below).
  implicit class RichJsonFormat[T](val underlying: JsonFormat[T]) {
    // Renames top-level field `in` to `out` in a JsObject; other values pass through.
    private def fix(in: String, out: String, obj: JsValue): JsValue = obj match {
      case JsObject(fields) => JsObject(fields.map {
        case (`in`, v) => (out, v)
        case x => x
      })
      case x => x
    }
    def withTipe: JsonFormat[T] = new JsonFormat[T] {
      override def write(obj: T): JsValue = fix("tipe", "type", underlying.write(obj))
      override def read(obj: JsValue): T = underlying.read(fix("type", "tipe", obj))
    }
  }
  // Each model below pairs a case class with a companion holding its spray-json
  // format; jsonFormatN relies on the case class's field order, so keep them in sync.
  object DisplayName {
    implicit val displayNameFormat = jsonFormat1(DisplayName.apply).withTipe
  }
  case class DisplayName(displayName: String)
  object MetroArea {
    implicit val metroAreaFormat = jsonFormat7(MetroArea.apply).withTipe
  }
  case class MetroArea(uri: String, displayName: String, country: DisplayName, id: Int, lng: Option[Double], lat: Option[Double], state: Option[DisplayName])
  object City {
    implicit val cityFormat = jsonFormat4(City.apply).withTipe
  }
  case class City(displayName: String, country: DisplayName, lng: Option[Double], lat: Option[Double])
  object LocationArea {
    implicit val locationAreaFormat = jsonFormat2(LocationArea.apply).withTipe
  }
  case class LocationArea(city: City, metroArea: MetroArea)
  object Date {
    implicit val dateFormat = jsonFormat3(Date.apply).withTipe
  }
  case class Date(time: Option[String], date: String, datetime: Option[String])
  object Location {
    implicit val locationFormat = jsonFormat3(Location.apply).withTipe
  }
  case class Location(city: String, lng: Option[Double], lat: Option[Double])
  object Venue {
    implicit val venueFormat = jsonFormat6(Venue.apply).withTipe
  }
  case class Venue(id: Option[Int], displayName: String, uri: Option[String], lng: Option[Double], lat: Option[Double], metroArea: MetroArea)
  object Artist {
    implicit val artistFormat = jsonFormat3(Artist.apply).withTipe
  }
  case class Artist(uri: String, displayName: String, id: Int/*, identifier: Seq[JsValue]*/)
  object Performance {
    implicit val performanceFormat = jsonFormat5(Performance.apply).withTipe
  }
  case class Performance(artist: Artist, displayName: String, billingIndex: Int, id: Int, billing: String)
  object Event {
    implicit val eventFormat = jsonFormat8(Event.apply).withTipe
  }
  // `tipe` carries the JSON "type" field, renamed by withTipe.
  case class Event(id: Int, tipe: String, uri: String, displayName: String, start: Date, /*performance: Seq[Performance],*/ location: Location, venue: Venue, popularity: Double)
  object ResultsPage {
    implicit def resultsPageFormat[T : JsonFormat] = jsonFormat4(ResultsPage.apply[T]).withTipe
    // An empty page positioned at `page`; used when a query yields no results.
    def emptyAt(page: Int): ResultsPage[Nothing] = ResultsPage(0, 0, page, Map.empty)
    val empty = emptyAt(1)
  }
  case class ResultsPage[+T](totalEntries: Int, perPage: Int, page: Int, results: Map[String, Seq[T]])
}
| ekroth/play-songkick | src/main/scala/com/github/ekroth/songkick/Objects.scala | Scala | mit | 3,246 |
package wom.types
import wom.WomExpressionException
import wom.values.WomValue
import scala.runtime.ScalaRunTime
import scala.util.{Failure, Success, Try}
class WomTypeException(message: String) extends RuntimeException(message)
trait WomType {
  /**
   * Method to be overridden by implementation classes defining a partial function
   * for the conversion of raw input values to specific implementation class value types.
   * i.e. `WomBooleanType` should define a partial function that knows how to
   * construct `WomBoolean`s for inputs of supported types and contents. Values for which
   * the partial function is not defined are assumed to not be convertible to the target type.
   */
  protected def coercion: PartialFunction[Any, WomValue]
  // True when this type knows how to coerce the given raw value.
  def coercionDefined(any: Any) = coercion.isDefinedAt(any)

  /**
   * Public interface for a `Try`-wrapped conversion of an input of type `Any` to
   * a `WomValue`.
   *
   * A WomValue already of this type passes through unchanged; inputs without a
   * defined coercion fail with a descriptive IllegalArgumentException (values are
   * truncated to 3 elements for the message); otherwise the coercion is attempted.
   */
  def coerceRawValue(any: Any): Try[WomValue] = {
    any match {
      case womValue: WomValue if womValue.womType == this => Success(womValue)
      case womValue: WomValue if !coercion.isDefinedAt(any) => Failure(new IllegalArgumentException(
        s"No coercion defined from '${WomValue.takeMaxElements(womValue, 3).toWomString}' of type" +
          s" '${womValue.womType.toDisplayString}' to '$toDisplayString'."))
      case _ if !coercion.isDefinedAt(any) => Failure(new IllegalArgumentException(
        s"No coercion defined from '${ScalaRunTime.stringOf(any, 3)}' of type" +
          s" '${Option(any.getClass.getCanonicalName).getOrElse(any.getClass.getName)}' to '$toDisplayString'."))
      case _ => Try(coercion(any))
    }
  }

  // Whether values of `otherType` may be coerced to this type; conservative default.
  def isCoerceableFrom(otherType: WomType): Boolean = false
  // Human-readable name of this type, used in error messages.
  def toDisplayString: String

  // Failure used by the operator type-evaluation methods below.
  def invalid(operation: String) = Failure(new WomExpressionException(s"Type evaluation cannot determine type from expression: $operation"))
  // Type-level evaluation of workflow expression operators: each method returns
  // the result type of applying the operator to operands of these types, or a
  // Failure for unsupported combinations. Subtypes override the ones they support.
  def add(rhs: WomType): Try[WomType] = invalid(s"$this + $rhs")
  def subtract(rhs: WomType): Try[WomType] = invalid(s"$this - $rhs")
  def multiply(rhs: WomType): Try[WomType] = invalid(s"$this * $rhs")
  def divide(rhs: WomType): Try[WomType] = invalid(s"$this / $rhs")
  def mod(rhs: WomType): Try[WomType] = invalid(s"$this % $rhs")
  def equals(rhs: WomType): Try[WomType] = invalid(s"$this == $rhs")
  // != is typeable exactly when == is, and always yields Boolean.
  def notEquals(rhs: WomType): Try[WomType] = equals(rhs) map { _ => WomBooleanType}
  def lessThan(rhs: WomType): Try[WomType] = invalid(s"$this < $rhs")
  // <= is typeable when either < or == is typeable for these operand types.
  def lessThanOrEqual(rhs: WomType): Try[WomType] = (lessThan(rhs), equals(rhs)) match {
    case (Success(b:WomType), _) if b == WomBooleanType => Success(WomBooleanType)
    case (_, Success(b:WomType)) if b == WomBooleanType => Success(WomBooleanType)
    case (_, _) => invalid(s"$this <= $rhs")
  }
  def greaterThan(rhs: WomType): Try[WomType] = invalid(s"$this > $rhs")
  // >= mirrors <=: typeable when either > or == is typeable.
  def greaterThanOrEqual(rhs: WomType): Try[WomType] = (greaterThan(rhs), equals(rhs)) match {
    case (Success(b:WomType), _) if b == WomBooleanType => Success(WomBooleanType)
    case (_, Success(b:WomType)) if b == WomBooleanType => Success(WomBooleanType)
    case (_, _) => invalid(s"$this >= $rhs")
  }
  def or(rhs: WomType): Try[WomType] = invalid(s"$this || $rhs")
  def and(rhs: WomType): Try[WomType] = invalid(s"$this && $rhs")
  def not: Try[WomType] = invalid(s"!$this")
  def unaryPlus: Try[WomType] = invalid(s"+$this")
  def unaryMinus: Try[WomType] = invalid(s"-$this")
}
object WomType {
  /* This is in the order of coercion from non-wom types */
  val womTypeCoercionOrder: Seq[WomType] = Seq(
    WomStringType, WomIntegerType, WomFloatType, WomMapType(WomAnyType, WomAnyType),
    WomArrayType(WomAnyType), WomBooleanType, WomObjectType
  )

  /** The single type shared by all values, or their lowest common subtype. */
  def homogeneousTypeFromValues(values: Iterable[WomValue]): WomType =
    homogeneousTypeFromTypes(values.map(_.womType))

  /** Collapses a collection of types to one: WomNothingType when empty, the type
    * itself when uniform, otherwise the lowest common subtype. */
  def homogeneousTypeFromTypes(types: Iterable[WomType]): WomType = {
    val distinct = types.toSet
    if (distinct.isEmpty) WomNothingType
    else if (distinct.size == 1) distinct.head
    else lowestCommonSubtype(types)
  }

  /** First type every other type coerces to; WomAnyType when none qualifies. */
  def lowestCommonSubtype(types: Iterable[WomType]): WomType =
    types
      .find(t1 => types.forall(t2 => t1.isCoerceableFrom(t2)))
      .getOrElse(WomAnyType)
}
| ohsu-comp-bio/cromwell | wom/src/main/scala/wom/types/WomType.scala | Scala | bsd-3-clause | 4,263 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.oap.execution
import com.intel.oap.ColumnarPluginConfig
import com.intel.oap.vectorized._
import org.apache.spark._
import org.apache.spark.rdd.RDD
import org.apache.spark.util._
import org.apache.spark.sql.connector.read.{
InputPartition,
PartitionReader,
PartitionReaderFactory
}
import org.apache.spark.sql.execution.datasources.{FilePartition, PartitionedFile}
import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics}
import org.apache.spark.sql.vectorized.{ColumnarBatch, ColumnVector}
import org.apache.spark.sql.execution.datasources.v2.VectorizedFilePartitionReaderHandler
import org.apache.spark.sql.execution.datasources.v2.arrow.SparkMemoryUtils
import org.apache.spark.sql.execution.datasources.v2.parquet.ParquetPartitionReaderFactory
// Wraps a DataSource V2 InputPartition as a Spark RDD Partition.
class DataSourceRDDPartition(val index: Int, val inputPartition: InputPartition)
    extends Partition
    with Serializable
// TODO: we should have 2 RDDs: an RDD[InternalRow] for row-based scan, an `RDD[ColumnarBatch]` for
// columnar scan.
/**
 * RDD over ColumnarBatch produced from DataSource V2 input partitions, with
 * NUMA binding, scan-time / batch-count / input-size metrics, and a special
 * vectorized path for Parquet when columnar reads are enabled.
 */
class ColumnarDataSourceRDD(
    sc: SparkContext,
    @transient private val inputPartitions: Seq[InputPartition],
    partitionReaderFactory: PartitionReaderFactory,
    columnarReads: Boolean,
    scanTime: SQLMetric,
    numInputBatches: SQLMetric,
    inputSize: SQLMetric,
    tmp_dir: String)
    extends RDD[ColumnarBatch](sc, Nil) {
  val numaBindingInfo = ColumnarPluginConfig.getConf(sc.getConf).numaBindingInfo

  // One RDD partition per input partition, indexed in order.
  override protected def getPartitions: Array[Partition] = {
    inputPartitions.zipWithIndex.map {
      case (inputPartition, index) => new DataSourceRDDPartition(index, inputPartition)
    }.toArray
  }

  // All partitions of this RDD come from getPartitions above, so anything else is a bug.
  private def castPartition(split: Partition): DataSourceRDDPartition = split match {
    case p: DataSourceRDDPartition => p
    case _ => throw new SparkException(s"[BUG] Not a DataSourceRDDPartition: $split")
  }

  override def compute(split: Partition, context: TaskContext): Iterator[ColumnarBatch] = {
    ExecutorManager.tryTaskSet(numaBindingInfo)
    val inputPartition = castPartition(split).inputPartition
    // Account file sizes towards the inputSize metric for file-based partitions.
    inputPartition match {
      case p: FilePartition =>
        p.files.foreach { f => inputSize += f.length }
      case _ =>
    }
    // Parquet gets a dedicated vectorized reader; other sources use the factory's
    // columnar reader, or a row reader when columnar reads are disabled.
    val reader = if (columnarReads) {
      partitionReaderFactory match {
        case factory: ParquetPartitionReaderFactory =>
          VectorizedFilePartitionReaderHandler.get(inputPartition, factory, tmp_dir)
        case _ => partitionReaderFactory.createColumnarReader(inputPartition)
      }
    } else {
      partitionReaderFactory.createReader(inputPartition)
    }
    // NOTE(review): rddId appears unused below — confirm whether it can be removed.
    val rddId = this
    // Close the reader when the task finishes, even on failure.
    SparkMemoryUtils.addLeakSafeTaskCompletionListener[Unit](_ => reader.close())
    val iter = new Iterator[Any] {
      private[this] var valuePrepared = false

      override def hasNext: Boolean = {
        if (!valuePrepared) {
          try {
            // Time spent in reader.next() is folded into the scanTime metric (ms).
            val beforeScan = System.nanoTime()
            valuePrepared = reader.next()
            numInputBatches += 1
            scanTime += (System.nanoTime() - beforeScan) / (1000 * 1000)
          } catch {
            // NOTE(review): catching Throwable and returning false silently ends
            // the scan on error (and traps fatal errors like OOM) — the task
            // succeeds with truncated data instead of failing. Consider rethrowing
            // after logging, or at least narrowing to NonFatal.
            case e: Throwable =>
              val errmsg = e.getStackTrace.mkString("\\n")
              logError(s"hasNext got exception: $errmsg")
              valuePrepared = false
          }
        }
        valuePrepared
      }

      override def next(): Any = {
        if (!hasNext) {
          throw new java.util.NoSuchElementException("End of stream")
        }
        // Consume the prepared value so the next hasNext() advances the reader.
        valuePrepared = false
        reader.get()
      }
    }
    val closeableColumnarBatchIterator = new CloseableColumnBatchIterator(
      iter.asInstanceOf[Iterator[ColumnarBatch]])
    // TODO: SPARK-25083 remove the type erasure hack in data source scan
    new InterruptibleIterator(context, closeableColumnarBatchIterator)
  }

  // Delegate locality preferences to the underlying input partition.
  override def getPreferredLocations(split: Partition): Seq[String] = {
    castPartition(split).inputPartition.preferredLocations()
  }
}
| Intel-bigdata/OAP | oap-native-sql/core/src/main/scala/com/intel/oap/execution/ColumnarDataSourceRDD.scala | Scala | apache-2.0 | 4,738 |
/*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.spark.deploy.history
import java.io.InputStream
import java.security.PrivilegedAction
import com.linkedin.drelephant.analysis.{AnalyticJob, ElephantFetcher}
import com.linkedin.drelephant.configurations.fetcher.FetcherConfigurationData
import com.linkedin.drelephant.security.HadoopSecurity
import com.linkedin.drelephant.spark.legacydata.SparkApplicationData
import com.linkedin.drelephant.util.{HadoopUtils, SparkUtils, Utils}
import org.apache.commons.io.FileUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.log4j.Logger
import org.apache.spark.SparkConf
import org.apache.spark.scheduler.{ApplicationEventListener, ReplayListenerBus}
import org.apache.spark.storage.{StorageStatusListener, StorageStatusTrackingListener}
import org.apache.spark.ui.env.EnvironmentListener
import org.apache.spark.ui.exec.ExecutorsListener
import org.apache.spark.ui.jobs.JobProgressListener
import org.apache.spark.ui.storage.StorageListener
/**
* A wrapper that replays Spark event history from files and then fill proper data objects.
*/
/**
 * A wrapper that replays Spark event history from files and then fill proper data objects.
 *
 * Event logs larger than `eventLogSizeLimitMb` are not parsed ("throttled"): an
 * almost-empty data collection carrying only the application id is returned.
 * All HDFS access runs as a Hadoop-security privileged action.
 */
class SparkFSFetcher(fetcherConfData: FetcherConfigurationData) extends ElephantFetcher[SparkApplicationData] {
  import SparkFSFetcher._

  // Size cap (MB) for event logs, from fetcher config; falls back to the default.
  val eventLogSizeLimitMb =
    Option(fetcherConfData.getParamMap.get(LOG_SIZE_XML_FIELD))
      .flatMap { x => Option(Utils.getParam(x, 1)) }
      .map { _(0) }
      .getOrElse(DEFAULT_EVENT_LOG_SIZE_LIMIT_MB)
  logger.info("The event log limit of Spark application is set to " + eventLogSizeLimitMb + " MB")

  // Optional override of the event log directory URI; None means use Spark conf.
  val eventLogUri = Option(fetcherConfData.getParamMap.get(LOG_LOCATION_URI_XML_FIELD))
  logger.info("The event log location of Spark application is set to " + eventLogUri)

  private lazy val security = new HadoopSecurity()

  // Overridable indirection points, handy for tests.
  protected lazy val hadoopUtils: HadoopUtils = HadoopUtils

  protected lazy val sparkUtils: SparkUtils = SparkUtils

  protected lazy val hadoopConfiguration: Configuration = new Configuration()

  // Spark configuration loaded from the default properties file; fails fast if
  // neither SPARK_HOME nor SPARK_CONF_DIR points at one.
  protected lazy val sparkConf: SparkConf = {
    val sparkConf = new SparkConf()
    sparkUtils.getDefaultPropertiesFile() match {
      case Some(filename) => sparkConf.setAll(sparkUtils.getPropertiesFromFile(filename))
      case None => throw new IllegalStateException("can't find Spark conf; please set SPARK_HOME or SPARK_CONF_DIR")
    }
    sparkConf
  }

  // Entry point: fetch and replay the event log for the job's application id.
  def fetchData(analyticJob: AnalyticJob): SparkApplicationData = {
    val appId = analyticJob.getAppId()
    doAsPrivilegedAction { () => doFetchData(appId) }
  }

  // Runs `action` under the configured Hadoop security context (e.g. Kerberos).
  protected def doAsPrivilegedAction[T](action: () => T): T =
    security.doAs[T](new PrivilegedAction[T] { override def run(): T = action() })

  // Locates the event log for `appId`, then either replays it or throttles.
  protected def doFetchData(appId: String): SparkDataCollection = {
    val dataCollection = new SparkDataCollection()

    val (eventLogFileSystem, baseEventLogPath) =
      sparkUtils.fileSystemAndPathForEventLogDir(hadoopConfiguration, sparkConf, eventLogUri)
    val (eventLogPath, eventLogCodec) =
      sparkUtils.pathAndCodecforEventLog(sparkConf, eventLogFileSystem, baseEventLogPath, appId, None)

    // Check if the log parser should be throttled when the file is too large.
    val shouldThrottle = eventLogFileSystem.getFileStatus(eventLogPath).getLen() > (eventLogSizeLimitMb * FileUtils.ONE_MB)
    if (shouldThrottle) {
      dataCollection.throttle()
      // Since the data set is empty, we need to set the application id,
      // so that we could detect this is Spark job type
      dataCollection.getGeneralData().setApplicationId(appId)
      dataCollection.getConf().setProperty("spark.app.id", appId)
      logger.info("The event log of Spark application: " + appId + " is over the limit size of "
        + eventLogSizeLimitMb + " MB, the parsing process gets throttled.")
    } else {
      // NOTE(review): "withlogPath"/"with codec:" are missing spaces in the log
      // message; cosmetic only, left as-is here.
      logger.info("Replaying Spark logs for application: " + appId +
        " withlogPath: " + eventLogPath +
        " with codec:" + eventLogCodec)

      sparkUtils.withEventLog(eventLogFileSystem, eventLogPath, eventLogCodec) { in =>
        dataCollection.load(in, eventLogPath.toString())
      }

      logger.info("Replay completed for application: " + appId)
    }

    dataCollection
  }
}
object SparkFSFetcher {
  private val logger = Logger.getLogger(SparkFSFetcher.getClass)

  // Default cap on the size of an event log that will be parsed.
  val DEFAULT_EVENT_LOG_SIZE_LIMIT_MB = 100d; // 100MB

  // Fetcher-configuration parameter names (see fetcherConfData.getParamMap usage).
  val LOG_SIZE_XML_FIELD = "event_log_size_limit_in_mb"
  val LOG_LOCATION_URI_XML_FIELD = "event_log_location_uri"

  val DEFAULT_ATTEMPT_ID = Some("1")
}
| shankar37/dr-elephant | app/org/apache/spark/deploy/history/SparkFSFetcher.scala | Scala | apache-2.0 | 5,121 |
// the type
Function1[A,B]
// can be written as
A => B
object ArrayUtils {
  // Keeps only the elements of xs for which pred returns true; body left unimplemented.
  def filter(xs: Array[Int], pred: Int => Boolean): Array[Int] = ???
}
| agconti/scala-school | 04-functions-as-values/slides/slide034.scala | Scala | mit | 151 |
package scala
package reflect
package api
import scala.collection.immutable.ListMap
/**
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
*
* This trait provides annotation support for the reflection API.
*
* In Scala, annotations belong to one of the two categories:
*
* <ul>
* <li>''Java annotations'': annotations on definitions produced by the Java compiler, i.e., subtypes of [[java.lang.annotation.Annotation]]
* attached to program definitions. When read by Scala reflection, the [[scala.annotation.ClassfileAnnotation]] trait
* is automatically added as a subclass to every Java annotation.</li>
* <li>''Scala annotations'': annotations on definitions or types produced by the Scala compiler.</li>
* </ul>
*
* When a Scala annotation that inherits from [[scala.annotation.StaticAnnotation]] or [[scala.annotation.ClassfileAnnotation]] is compiled,
* it is stored as special attributes in the corresponding classfile, and not as a Java annotation. Note that subclassing
* just [[scala.annotation.Annotation]] is not enough to have the corresponding metadata persisted for runtime reflection.
*
* Both Java and Scala annotations are represented as typed trees carrying constructor invocations corresponding
* to the annotation. For instance, the annotation in `@ann(1, 2) class C` is represented as `q"@new ann(1, 2)"`.
*
* Unlike Java reflection, Scala reflection does not support evaluation of constructor invocations stored in annotations
* into underlying objects. For instance it's impossible to go from `@ann(1, 2) class C` to `ann(1, 2)`, so one
* has to analyze trees representing annotation arguments to manually extract corresponding values. Towards that end,
* arguments of an annotation can be obtained via `annotation.tree.children.tail`.
*
* For more information about `Annotation`s, see the [[http://docs.scala-lang.org/overviews/reflection/annotations-names-scopes.html Reflection Guide: Annotations, Names, Scopes, and More]]
*
* @contentDiagram hideNodes "*Api"
* @group ReflectionAPI
*/
trait Annotations { self: Universe =>

  /** Information about an annotation.
   *  @template
   *  @group Annotations
   */
  type Annotation >: Null <: AnyRef with AnnotationApi

  /** The constructor/extractor for `Annotation` instances.
   *  @group Extractors
   */
  val Annotation: AnnotationExtractor

  /** An extractor class to create and pattern match with syntax `Annotation(tpe, scalaArgs, javaArgs)`.
   *  Here, `tpe` is the annotation type, `scalaArgs` the payload of Scala annotations, and `javaArgs` the payload of Java annotations.
   *  @group Extractors
   */
  abstract class AnnotationExtractor {
    def apply(tree: Tree): Annotation = treeToAnnotation(tree)

    @deprecated("use `apply(tree: Tree): Annotation` instead", "2.11.0")
    def apply(tpe: Type, scalaArgs: List[Tree], javaArgs: ListMap[Name, JavaArgument]): Annotation

    @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
    def unapply(ann: Annotation): Option[(Type, List[Tree], ListMap[Name, JavaArgument])]
  }

  /** The API of `Annotation` instances.
   *  The main source of information about annotations is the [[scala.reflect.api.Annotations]] page.
   *  @group API
   */
  trait AnnotationApi {
    /** The tree underlying the annotation. */
    def tree: Tree = annotationToTree(this.asInstanceOf[Annotation])

    /** The type of the annotation. */
    @deprecated("use `tree.tpe` instead", "2.11.0")
    def tpe: Type

    /** Payload of the Scala annotation: a list of abstract syntax trees that represent the argument.
     *  Empty for Java annotations.
     */
    @deprecated("use `tree.children.tail` instead", "2.11.0")
    def scalaArgs: List[Tree]

    /** Payload of the Java annotation: a list of name-value pairs.
     *  Empty for Scala annotations.
     */
    @deprecated("use `tree.children.tail` instead", "2.11.0")
    def javaArgs: ListMap[Name, JavaArgument]
  }

  /** Internal hook: converts an `Annotation` into the tree representing its
   *  constructor invocation. Backs `AnnotationApi.tree`.
   */
  protected[scala] def annotationToTree(ann: Annotation): Tree

  /** Internal hook: wraps a constructor-invocation tree as an `Annotation`.
   *  Backs `AnnotationExtractor.apply(tree)`.
   */
  protected[scala] def treeToAnnotation(tree: Tree): Annotation

  /** A Java annotation argument
   *  @template
   *  @group Annotations
   */
  @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
  type JavaArgument >: Null <: AnyRef with JavaArgumentApi

  /** Has no special methods. Is here to provides erased identity for `CompoundType`.
   *  @group API
   */
  @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
  trait JavaArgumentApi

  /** A literal argument to a Java annotation as `"use X instead"` in `@Deprecated("use X instead")`
   *  @template
   *  @group Annotations
   */
  @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
  type LiteralArgument >: Null <: LiteralArgumentApi with JavaArgument

  /** The constructor/extractor for `LiteralArgument` instances.
   *  @group Extractors
   */
  @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
  val LiteralArgument: LiteralArgumentExtractor

  /** An extractor class to create and pattern match with syntax `LiteralArgument(value)`
   *  where `value` is the constant argument.
   *  @group Extractors
   */
  @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
  abstract class LiteralArgumentExtractor {
    @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
    def apply(value: Constant): LiteralArgument

    @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
    def unapply(arg: LiteralArgument): Option[Constant]
  }

  /** The API of `LiteralArgument` instances.
   *  The main source of information about annotations is the [[scala.reflect.api.Annotations]] page.
   *  @group API
   */
  @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
  trait LiteralArgumentApi {
    /** The underlying compile-time constant value. */
    @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
    def value: Constant
  }

  /** An array argument to a Java annotation as in `@Target(value={TYPE,FIELD,METHOD,PARAMETER})`
   *  @template
   *  @group Annotations
   */
  @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
  type ArrayArgument >: Null <: ArrayArgumentApi with JavaArgument

  /** The constructor/extractor for `ArrayArgument` instances.
   *  @group Extractors
   */
  @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
  val ArrayArgument: ArrayArgumentExtractor

  /** An extractor class to create and pattern match with syntax `ArrayArgument(args)`
   *  where `args` is the argument array.
   *  @group Extractors
   */
  @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
  abstract class ArrayArgumentExtractor {
    @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
    def apply(args: Array[JavaArgument]): ArrayArgument

    @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
    def unapply(arg: ArrayArgument): Option[Array[JavaArgument]]
  }

  /** API of `ArrayArgument` instances.
   *  The main source of information about annotations is the [[scala.reflect.api.Annotations]] page.
   *  @group API
   */
  @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
  trait ArrayArgumentApi {
    /** The underlying array of Java annotation arguments. */
    @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
    def args: Array[JavaArgument]
  }

  /** A nested annotation argument to a Java annotation as `@Nested` in `@Outer(@Nested)`.
   *  @template
   *  @group Annotations
   */
  @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
  type NestedArgument >: Null <: NestedArgumentApi with JavaArgument

  /** The constructor/extractor for `NestedArgument` instances.
   *  @group Extractors
   */
  @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
  val NestedArgument: NestedArgumentExtractor

  /** An extractor class to create and pattern match with syntax `NestedArgument(annotation)`
   *  where `annotation` is the nested annotation.
   *  @group Extractors
   */
  @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
  abstract class NestedArgumentExtractor {
    @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
    def apply(annotation: Annotation): NestedArgument

    @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
    def unapply(arg: NestedArgument): Option[Annotation]
  }

  /** API of `NestedArgument` instances.
   *  The main source of information about annotations is the [[scala.reflect.api.Annotations]] page.
   *  @group API
   */
  @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
  trait NestedArgumentApi {
    /** The underlying nested annotation. */
    @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0")
    def annotation: Annotation
  }
}
| felixmulder/scala | src/reflect/scala/reflect/api/Annotations.scala | Scala | bsd-3-clause | 9,188 |
package ml.sparkling.graph.examples
import java.io.File
import org.apache.log4j.Logger
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
import org.apache.spark.{SparkConf, SparkContext}
/**
* Created by Roman Bartusiak (roman.bartusiak@pwr.edu.pl http://riomus.github.io).
*/
object MatrixCreator extends Serializable {
  @transient val logger=Logger.getLogger(this.getClass)

  /**
   * Entry point. Parses the command line, loads the partial AAPSP/APSP outputs
   * found under `inputPath` (directories named `from*`), merges them per vertex
   * id with repeated full outer joins, and writes one dense, delimiter-separated
   * matrix row per vertex to `outputPath`.
   */
  def main(args: Array[String]) = {
    val usage =
      s"""
    Application used to create matrix from AAPSP and APSP output
    Usage:  [--app-name string(${this.getClass.getName})] [--vectorSize int(147478)] [--delimiter string(;)] [--checkpoint string(/tmp)] [--partitions int(auto)] inputPath outputPath
  """
    if (args.length == 0) {
      println(usage)
      System.exit(1)
    }
    val optionsMap = Map(('appName -> this.getClass.getName), ('vectorSize -> 147478), ('delimiter -> ";"), ('tupleDelimiter -> ":"), ('checkpoint -> "/tmp"), ('partitions -> None))
    type OptionMap = Map[Symbol, Any]
    def nextOption(map: OptionMap, list: List[String]): OptionMap = {
      list match {
        case Nil => map
        case "--app-name" :: value :: tail =>
          nextOption(map ++ Map('appName -> value), tail)
        case "--vectorSize" :: value :: tail =>
          // Store an Int: the value is read back below with asInstanceOf[Int].
          // The previous value.toLong made that cast throw ClassCastException
          // whenever --vectorSize was supplied on the command line.
          nextOption(map ++ Map('vectorSize -> value.toInt), tail)
        case "--delimiter" :: value :: tail =>
          nextOption(map ++ Map('delimiter -> value.toString), tail)
        case "--tupleDelimiter" :: value :: tail =>
          nextOption(map ++ Map('tupleDelimiter -> value.toString), tail)
        case "--checkpoint" :: value :: tail =>
          nextOption(map ++ Map('checkpoint -> value.toString), tail)
        case "--partitions" :: value :: tail =>
          nextOption(map ++ Map('partitions -> Some(value.toInt)), tail)
        case inPath :: outPath :: Nil => map ++ Map('inputPath -> inPath) ++ Map('outputPath -> outPath)
        case option :: tail => println("Unknown option " + option)
          System.exit(1);
          ???
      }
    }
    val options = nextOption(optionsMap, args.toList)
    val in = options('inputPath).asInstanceOf[String]
    val out = options('outputPath).asInstanceOf[String]
    val name = options('appName).asInstanceOf[String]
    val delimiter = options('delimiter).asInstanceOf[String]
    val tupleDelimiter = options('tupleDelimiter).asInstanceOf[String]
    val checkpoint = options('checkpoint).asInstanceOf[String]
    val partitions = options('partitions).asInstanceOf[Option[Int]]
    val vectorSize = options('vectorSize).asInstanceOf[Int]
    logger.info("Running app sparkling-graph-example")
    val sparkConf = new SparkConf().setAppName(name).set("spark.app.id", "sparkling-graph-example")
    val ctx = new SparkContext(sparkConf)
    ctx.setCheckpointDir(checkpoint)
    // Each `from*` directory holds one partial result; `index` entries are skipped.
    val parts: List[File] = new File(in).listFiles.filter(f => f.getName != "index" && f.getName.startsWith("from") && f.isDirectory).toList
    parts match {
      case head :: tail => {
        // Each input line is "<vertexId><delimiter><entry><delimiter>...".
        val startData = loadWithPartitions(ctx, head, partitions).map(s => s.split(delimiter).toList).map {
          case head :: rest => (head.toDouble.toInt, rest)
          case _ => throw new RuntimeException("Incorrect data!")
        }.cache()
        logger.info(s"Files to process ${tail.length}")
        // Fold the remaining parts in, concatenating the per-vertex entry lists.
        val outData = tail.zipWithIndex.foldLeft(startData) {
          case (data, (file, index)) => {
            logger.info(s"Processing file ${index}")
            val loadedData = loadWithPartitions(ctx, file, partitions).map(s => s.split(delimiter).toList).map {
              case head :: tail => (head.toDouble.toInt, tail)
              case _ => throw new RuntimeException("Incorrect data!")
            }.cache()
            val out = data.fullOuterJoin(loadedData).map {
              case (id, (Some(d1), Some(d2))) => (id, d1 ::: d2)
              case (id, (Some(d1), None)) => (id, d1)
              case (id, (None, Some(d2))) => (id, d2)
              case (id, (None, None)) => (id, Nil) // unreachable for fullOuterJoin; keeps the match exhaustive
            }.cache()
            // Checkpoint periodically to truncate the lineage growing with every join.
            if (index % 20 == 0) {
              out.checkpoint()
              out.foreachPartition((_) => {})
            }
            out
          }
        }
        outData.map {
          case (id, data) => {
            // Sparse (index, value) pairs -> dense row of vectorSize + 1 cells,
            // missing indices becoming 0. A Map lookup replaces the previous
            // positional indexing into the sorted list, which both crashed with
            // IndexOutOfBoundsException on sparse rows and hard-coded ";" as the
            // zero-cell delimiter regardless of the --delimiter option.
            val dataMap = stringToList(tupleDelimiter, data).toMap
            val row = (0 to vectorSize).map(i => dataMap.getOrElse(i, 0L))
            s"$id$delimiter${row.mkString("", delimiter, delimiter)}"
          }
        }.saveAsTextFile(out)
      }
      case _ => logger.error("Not enough data to create matrix!")
    }
  }

  /**
   * Loads a text file, optionally with an explicit minimum partition count,
   * persisting it serialized in memory with spill-over to disk.
   *
   * @param ctx        active Spark context
   * @param file       directory/file to read as text
   * @param partitions minimum number of partitions, or None for Spark's default
   */
  def loadWithPartitions(ctx: SparkContext, file: File, partitions: Option[Int]): RDD[String] = {
    partitions.map((p) => {
      ctx.textFile(file.getAbsolutePath, minPartitions = p)
    }).getOrElse(ctx.textFile(file.getAbsolutePath)).persist(StorageLevel.MEMORY_AND_DISK_SER)
  }

  /**
   * Parses entries of the form "index<tupleDelimiter>value" into (index, value)
   * pairs; both components may be written as decimals and are truncated.
   */
  def stringToList(tupleDelimiter: String, rest: List[String]): List[(Int, Long)] = {
    rest.map(s => {
      val splited = s.split(tupleDelimiter)
      (splited(0).toDouble.toInt, splited(1).toDouble.toLong)
    })
  }
}
| sparkling-graph/sparkling-graph | examples/src/main/scala/ml/sparkling/graph/examples/MatrixCreator.scala | Scala | bsd-2-clause | 5,352 |
/*
* Copyright © 2015 Lukas Rosenthaler, Benjamin Geer, Ivan Subotic,
* Tobias Schweizer, André Kilchenmann, and Sepideh Alassi.
*
* This file is part of Knora.
*
* Knora is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Knora is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public
* License along with Knora. If not, see <http://www.gnu.org/licenses/>.
*/
package org.knora.webapi
import spray.json.JsValue
/**
  * A trait for classes that can convert themselves into JSON using the spray-json library.
  *
  * Implementors only need to provide [[toJsValue]]; callers can then render the
  * result with spray-json (e.g. `compactPrint` or `prettyPrint`).
  */
trait Jsonable {

    /**
      * Converts this [[Jsonable]] into a [[JsValue]] (the spray-json AST).
      *
      * @return a [[JsValue]].
      */
    def toJsValue: JsValue
}
| nie-ine/Knora | webapi/src/main/scala/org/knora/webapi/Jsonable.scala | Scala | agpl-3.0 | 1,132 |
package edu.gemini.pit.ui.editor
import edu.gemini.model.p1.immutable._
import swing._
import edu.gemini.pit.ui.util.BooleanToolPreference._
import edu.gemini.pit.ui.robot.CatalogRobot
import edu.gemini.pit.model.Model
import edu.gemini.pit.catalog._
import java.awt
import awt.Color
import edu.gemini.pit.ui.util._
import javax.swing.BorderFactory
import Swing._
object SynchronousLookup {
  /** Convenience factory: builds the lookup dialog pre-filled with `name` and
   *  opens it modally relative to `parent`, returning the editor's result.
   */
  def open(name:String, parent:UIElement) = new SynchronousLookup(name).open(parent)
}
/**
 * Modal dialog that looks up `targetName` in the catalogs selected by the user
 * (Simbad, NED, Horizons) via a [[CatalogRobot]], and closes with the resolved
 * [[Target]] as its result on success.
 */
class SynchronousLookup private (targetName:String) extends ModalEditor[Target] {dialog =>
  // TODO: this probably leaks an actor
  private lazy val handler = new CatalogRobot(peer)
  handler.addListener(listener)
  // Top-level config
  title = "Catalog Lookup"
  resizable = false
  contents = Contents
  updateEnabledState()
  pack()
  // Our main content object
  object Contents extends BorderPanel {
    // Space things out a little more
    peer.setLayout(new awt.BorderLayout(8, 8))
    border = BorderFactory.createEmptyBorder(8, 8, 8, 8)
    // Add our content, defined below
    add(new Label("Select one or more catalogs:"), BorderPanel.Position.North)
    add(choices, BorderPanel.Position.Center)
    add(footer, BorderPanel.Position.South)
    // Footer is just a flow panel, more or less
    object footer extends BorderPanel {
      // Our content, defined below
      add(msg, BorderPanel.Position.North)
      add(new FlowPanel {
        contents += spinner
        contents += lookup
        contents += cancel
      }, BorderPanel.Position.East)
      // Error line (fixed height so the layout doesn't jump when text appears)
      object msg extends Label("") {
        foreground = Color.RED
        horizontalAlignment = Alignment.Left
        preferredSize = (preferredSize.width, 15)
      }
      // Spinner icon, shown only while a lookup is in flight
      object spinner extends Label {
        icon = SharedIcons.ICON_SPINNER_BLUE
        visible = false
      }
      // Lookup is our default button
      dialog.peer.getRootPane.setDefaultButton(lookup.peer)
      // Lookup and cancel buttons
      lazy val lookup:Button = Button("Lookup") {
        // We're creating a new model with a single obs with an empty target
        val t = Target.empty.copy(name = targetName)
        val m = (Model.proposal andThen Proposal.targets).set(Model.empty, List(t))
        handler.reset
        handler.bind(Some(m), done) // This will set things going
        handler.lookup(t)
      }
      // Cancel button
      lazy val cancel = Button("Cancel") {
        dialog.close()
      }
    }
    // Our choice area is just a list of checkboxes
    object choices extends GridBagPanel with Rows {
      addRow(CatalogButton(SIMBAD))
      addRow(CatalogButton(NED))
      addRow(CatalogButton(HORIZONS))
    }
    // Our custom checkbox type; persists its state in the tool preference
    private case class CatalogButton(pref:BooleanToolPreference) extends CheckBox(pref.name) {
      selected = pref.get
      action = Action(pref.name) {
        pref.set(selected)
        updateEnabledState()
      }
    }
  }
  // Fix the lookup button's enabled state: enabled iff at least one catalog is selected
  private def updateEnabledState() {
    Contents.footer.lookup.enabled = SIMBAD.get || NED.get || HORIZONS.get
  }
  // Reflects the robot's lookup state in the UI: an error message when the
  // lookup finished with a failure, or the spinner while it is still pending.
  def listener(state:CatalogRobot#State) {
    state.headOption.map {
      case (t, s) => s match {
        case Some(f) =>
          Contents.footer.msg.text = f match {
            case Offline => "Server(s) offline."
            case Error(_) => "Server error."
            case NotFound(_) => "Not found."
          }
          Contents.footer.lookup.enabled = true
          Contents.footer.spinner.visible = false
        case None =>
          Contents.footer.msg.text = ""
          Contents.footer.lookup.enabled = false
          Contents.footer.spinner.visible = true
      }
    }
  }
  // Called by the robot when the lookup completes successfully; closes the
  // dialog with the first resolved target as the editor's result.
  private def done(model:Option[Model]) {
    for {
      m <- model
      t <- m.proposal.targets.headOption
    } close(t)
  }
}
} | arturog8m/ocs | bundle/edu.gemini.pit/src/main/scala/edu/gemini/pit/ui/editor/SynchronousLookup.scala | Scala | bsd-3-clause | 3,899 |
package parsers.generic
import org.jsoup.Jsoup
import org.jsoup.nodes.Document
import org.jsoup.nodes.Element
import scala.collection.JavaConversions._
import java.net.URL
import java.io.InputStream
import parsers.base.TableParser
class HTMLTableParser(url: String, tableExpression: String, rowExpression: String, columnExpression: String) extends TableParser {

  /**
   * Downloads the page at `url` (decoded as ISO-8859-1), selects the first
   * table matching `tableExpression` and flattens it into a 2D array of cell
   * texts. Cells with a `rowspan` attribute are copied into the spanned rows,
   * and `<br>` tags are converted to newlines.
   *
   * @return a height x width array of cell texts; positions with no
   *         corresponding cell remain null.
   */
  def parse: Array[Array[String]] = {
    val document = Jsoup.parse(new URL(url).openStream(), "ISO-8859-1", url)
    val table = document.select(tableExpression).first
    val (height, width) = getTableDimensions(table)
    val array = Array.ofDim[String](height, width)
    for ((row, currentRow) <- table.select(rowExpression).view.zipWithIndex) {
      var currentColumn = 0
      for (cell <- row.select(columnExpression)) {
        // A missing or non-numeric "rowspan" attribute leaves rowspan at 0.
        var rowspan = 0
        try {
          rowspan = Integer.parseInt(cell.attr("rowspan"))
        } catch {
          case e: NumberFormatException => {} // Do nothing. Cell doesn't have a rowspan.
        }
        // Skip columns already filled by a rowspan from a previous row.
        // The bounds check must come FIRST: evaluating array(...)(currentColumn)
        // before currentColumn < width threw ArrayIndexOutOfBoundsException
        // whenever a row was completely pre-filled.
        while (currentColumn < width && array(currentRow)(currentColumn) != null) {
          currentColumn += 1
        }
        // Extra cells beyond the computed width are skipped instead of crashing.
        if (currentColumn < width) {
          // Preserve line breaks: <br> -> placeholder -> "\n" after Jsoup strips markup.
          val cellText = Jsoup.parse(cell.html.replaceAll("(?i)<br[^>]*>", "br2n")).text().replaceAll("br2n", "\n")
          array(currentRow)(currentColumn) = cellText
          if (rowspan > 0) {
            // Copy into spanned rows, clamped so a bogus rowspan cannot overflow the table.
            for (i <- currentRow + 1 until math.min(currentRow + rowspan, height)) {
              array(i)(currentColumn) = cellText
            }
          }
          currentColumn += 1
        }
      }
    }
    array
  }

  /**
   * Computes the table dimensions as (number of rows, maximum number of cells
   * in any single row). Note that rowspan-expanded cells are not counted, so a
   * sparse table may report a narrower width than it visually renders with.
   */
  def getTableDimensions(table: Element): (Int, Int) = {
    val height = table.select(rowExpression).size
    var width = 0
    for (row <- table.select(rowExpression)) {
      val length = row.select(columnExpression).size
      if (length > width) {
        width = length
      }
    }
    (height, width)
  }
}
}
| maxmouchet/vamk-timetables | parsers/src/main/scala/parsers/generic/HTMLTableParser.scala | Scala | mit | 1,848 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters the dataset and retrieves a sample of code snippets matching specific criteria, giving a quick, surface-level overview of the dataset's contents.