code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1
value | license stringclasses 15
values | size int64 5 1M |
|---|---|---|---|---|---|
package org.jetbrains.plugins.scala
package lang
package psi
package stubs
package elements
import com.intellij.psi.PsiElement
import com.intellij.psi.stubs.{IndexSink, StubElement, StubInputStream, StubOutputStream}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.imports.ScImportSelectors
import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.imports.ScImportSelectorsImpl
import org.jetbrains.plugins.scala.lang.psi.stubs.impl.ScImportSelectorsStubImpl
/**
* User: Alexander Podkhalyuzin
* Date: 20.06.2009
*/
// Stub element type for an import-selector list (the `{...}` part of an import).
// Only one datum is persisted in the stub: whether the selectors contain a wildcard.
class ScImportSelectorsElementType[Func <: ScImportSelectors]
extends ScStubElementType[ScImportSelectorsStub, ScImportSelectors]("import selectors") {
// Writes the stub to the stream; the only serialized field is the wildcard flag.
def serialize(stub: ScImportSelectorsStub, dataStream: StubOutputStream): Unit = {
dataStream.writeBoolean(stub.hasWildcard)
}
// Builds a stub from the PSI element, capturing whether the selector list has a wildcard.
def createStubImpl[ParentPsi <: PsiElement](psi: ScImportSelectors, parentStub: StubElement[ParentPsi]): ScImportSelectorsStub = {
new ScImportSelectorsStubImpl(parentStub, this, psi.hasWildcard)
}
// Reads the stub back; read order must mirror `serialize` (a single boolean).
def deserializeImpl(dataStream: StubInputStream, parentStub: Any): ScImportSelectorsStub = {
val hasWildcard = dataStream.readBoolean
new ScImportSelectorsStubImpl(parentStub.asInstanceOf[StubElement[PsiElement]], this, hasWildcard)
}
// Import selectors contribute nothing to the stub indices.
def indexStub(stub: ScImportSelectorsStub, sink: IndexSink): Unit = {}
// Creates the stub-backed PSI element for this stub.
def createPsi(stub: ScImportSelectorsStub): ScImportSelectors = {
new ScImportSelectorsImpl(stub)
}
} | LPTK/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/stubs/elements/ScImportSelectorsElementType.scala | Scala | apache-2.0 | 1,466 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.stat.distribution
import breeze.linalg.{diag, eigSym, max, DenseMatrix => BDM, DenseVector => BDV, Vector => BV}
import org.apache.spark.annotation.{DeveloperApi, Since}
import org.apache.spark.ml.impl.Utils
import org.apache.spark.ml.linalg.{Matrices, Matrix, Vector, Vectors}
/**
* This class provides basic functionality for a Multivariate Gaussian (Normal) Distribution. In
* the event that the covariance matrix is singular, the density will be computed in a
* reduced dimensional subspace under which the distribution is supported.
* (see <a href="http://en.wikipedia.org/wiki/Multivariate_normal_distribution#Degenerate_case">
* here</a>)
*
* @param mean The mean vector of the distribution
* @param cov The covariance matrix of the distribution
*/
@Since("2.0.0")
@DeveloperApi
class MultivariateGaussian @Since("2.0.0") (
@Since("2.0.0") val mean: Vector,
@Since("2.0.0") val cov: Matrix) extends Serializable {
// Fail fast on malformed inputs: sigma must be square and match the mean's length.
require(cov.numCols == cov.numRows, "Covariance matrix must be square")
require(mean.size == cov.numCols, "Mean vector length must match covariance matrix size")
/** Private constructor taking Breeze types */
private[ml] def this(mean: BDV[Double], cov: BDM[Double]) = {
this(Vectors.fromBreeze(mean), Matrices.fromBreeze(cov))
}
// Breeze copy of the mean, cached because every (log)pdf evaluation uses it.
// @transient + lazy: recomputed on first use after deserialization instead of serialized.
@transient private lazy val breezeMu = mean.asBreeze.toDenseVector
/**
* Compute distribution dependent constants:
* rootSigmaInv = D^(-1/2)^ * U.t, where sigma = U * D * U.t
* u = log((2*pi)^(-k/2)^ * det(sigma)^(-1/2)^)
*/
@transient private lazy val (rootSigmaInv: BDM[Double], u: Double) = calculateCovarianceConstants
/**
* Returns density of this multivariate Gaussian at given point, x
*/
@Since("2.0.0")
def pdf(x: Vector): Double = {
pdf(x.asBreeze)
}
/**
* Returns the log-density of this multivariate Gaussian at given point, x
*/
@Since("2.0.0")
def logpdf(x: Vector): Double = {
logpdf(x.asBreeze)
}
/** Returns density of this multivariate Gaussian at given point, x */
private[ml] def pdf(x: BV[Double]): Double = {
math.exp(logpdf(x))
}
/** Returns the log-density of this multivariate Gaussian at given point, x */
private[ml] def logpdf(x: BV[Double]): Double = {
val delta = x - breezeMu
val v = rootSigmaInv * delta
// log N(x) = u - 0.5 * ||D^(-1/2) * U.t * (x - mu)||^2 (see calculateCovarianceConstants)
u + v.t * v * -0.5
}
/**
* Calculate distribution dependent components used for the density function:
* pdf(x) = (2*pi)^(-k/2)^ * det(sigma)^(-1/2)^ * exp((-1/2) * (x-mu).t * inv(sigma) * (x-mu))
* where k is length of the mean vector.
*
* We here compute distribution-fixed parts
* log((2*pi)^(-k/2)^ * det(sigma)^(-1/2)^)
* and
* D^(-1/2)^ * U, where sigma = U * D * U.t
*
* Both the determinant and the inverse can be computed from the singular value decomposition
* of sigma. Noting that covariance matrices are always symmetric and positive semi-definite,
* we can use the eigendecomposition. We also do not compute the inverse directly; noting
* that
*
* sigma = U * D * U.t
* inv(Sigma) = U * inv(D) * U.t
* = (D^{-1/2}^ * U.t).t * (D^{-1/2}^ * U.t)
*
* and thus
*
* -0.5 * (x-mu).t * inv(Sigma) * (x-mu) = -0.5 * norm(D^{-1/2}^ * U.t * (x-mu))^2^
*
* To guard against singular covariance matrices, this method computes both the
* pseudo-determinant and the pseudo-inverse (Moore-Penrose). Singular values are considered
* to be non-zero only if they exceed a tolerance based on machine precision, matrix size, and
* relation to the maximum singular value (same tolerance used by, e.g., Octave).
*/
private def calculateCovarianceConstants: (BDM[Double], Double) = {
val eigSym.EigSym(d, u) = eigSym(cov.asBreeze.toDenseMatrix) // sigma = u * diag(d) * u.t
// For numerical stability, values are considered to be non-zero only if they exceed tol.
// This prevents any inverted value from exceeding (eps * n * max(d))^-1
val tol = Utils.EPSILON * max(d) * d.length
try {
// log(pseudo-determinant) is sum of the logs of all non-zero singular values
val logPseudoDetSigma = d.activeValuesIterator.filter(_ > tol).map(math.log).sum
// calculate the root-pseudo-inverse of the diagonal matrix of singular values
// by inverting the square root of all non-zero values
val pinvS = diag(new BDV(d.map(v => if (v > tol) math.sqrt(1.0 / v) else 0.0).toArray))
(pinvS * u.t, -0.5 * (mean.size * math.log(2.0 * math.Pi) + logPseudoDetSigma))
} catch {
// NOTE(review): presumably raised by Breeze when the decomposition degenerates;
// confirm which call above can actually throw UnsupportedOperationException.
case uex: UnsupportedOperationException =>
throw new IllegalArgumentException("Covariance matrix has no non-zero singular values")
}
}
}
| pgandhi999/spark | mllib-local/src/main/scala/org/apache/spark/ml/stat/distribution/MultivariateGaussian.scala | Scala | apache-2.0 | 5,534 |
package org.jetbrains.plugins.scala.lang.psi.light.scala
import com.intellij.psi._
import com.intellij.psi.impl.light.LightElement
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiElement
import org.jetbrains.plugins.scala.lang.psi.api.base.ScModifierList
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScTypeElement
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScAnnotation
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunctionDeclaration
import org.jetbrains.plugins.scala.lang.psi.api.statements.params._
import org.jetbrains.plugins.scala.lang.psi.types.ScType
import org.jetbrains.plugins.scala.lang.psi.types.api.TypeParameter
import org.jetbrains.plugins.scala.lang.psi.types.result._
/**
* @author Alefas
* @since 03/04/14.
*/
// Light (non-physical) PSI wrapper around a function declaration whose parameter,
// type-parameter and return types have been replaced by the supplied (already
// substituted) types. Everything else is delegated to the wrapped declaration.
class ScLightFunctionDeclaration(pTypes: Seq[Seq[ScType]], tParams: Seq[TypeParameter], rt: ScType,
val fun: ScFunctionDeclaration)
extends LightElement(fun.getManager, fun.getLanguage) with ScFunctionDeclaration {
// Point navigation (go-to-source etc.) at the physical declaration being wrapped.
setNavigationElement(fun)
override def getParent: PsiElement = fun.getParent
// Clauses and return type come from the supplied types, not from `fun` itself.
override def typeParametersClause: Option[ScTypeParamClause] = fun.typeParametersClause.map(new ScLightTypeParamClause(tParams, _))
override def paramClauses: ScParameters = new ScLightParameters(pTypes, fun)
override protected def returnTypeInner: TypeResult = Right(rt)
override def definedReturnType: TypeResult = Right(rt)
override def declaredType: TypeResult = Right(rt)
override def hasExplicitType: Boolean = true
// All members below simply delegate to the underlying physical declaration.
override def hasFinalModifier: Boolean = fun.hasFinalModifier
override def hasAbstractModifier: Boolean = fun.hasAbstractModifier
override def hasModifierPropertyScala(name: String): Boolean = fun.hasModifierPropertyScala(name)
override def getModifierList: ScModifierList = fun.getModifierList
override def returnTypeElement: Option[ScTypeElement] = fun.returnTypeElement
override def name: String = fun.name
override def toString: String = fun.toString
override def nameId: PsiElement = fun.nameId
override def hasAssign: Boolean = fun.hasAssign
override def psiAnnotations: Array[PsiAnnotation] = fun.getAnnotations
override def getApplicableAnnotations: Array[PsiAnnotation] = fun.getApplicableAnnotations
override def findAnnotation(qualifiedName: String): PsiAnnotation = fun.findAnnotation(qualifiedName)
override def addAnnotation(qualifiedName: String): PsiAnnotation = fun.addAnnotation(qualifiedName)
override def hasAnnotation(qualifiedName: String): Boolean = fun.hasAnnotation(qualifiedName)
override def annotations: Seq[ScAnnotation] = fun.annotations
override def navigate(requestFocus: Boolean): Unit = fun.navigate(requestFocus)
override def canNavigate: Boolean = fun.canNavigate
override def canNavigateToSource: Boolean = fun.canNavigateToSource
// A light element has no child tree to search.
override protected def findChildrenByClassScala[T >: Null <: ScalaPsiElement](clazz: Class[T]): Array[T] =
throw new UnsupportedOperationException("Operation on light function")
override protected def findChildByClassScala[T >: Null <: ScalaPsiElement](clazz: Class[T]): T =
throw new UnsupportedOperationException("Operation on light function")
}
| gtache/intellij-lsp | intellij-lsp-dotty/src/org/jetbrains/plugins/scala/lang/psi/light/scala/ScLightFunctionDeclaration.scala | Scala | apache-2.0 | 3,235 |
import com.typesafe.sbt.packager.archetypes.JavaAppPackaging
import com.typesafe.sbt.packager.universal.UniversalPlugin
import sbt._
import sbt.Keys._
import sbt.Path._
/**
* Externalizes the resources like with Play, but as a standalone plugin.
*/
object ExternalizedResourcesMappings extends AutoPlugin {
// Only activates when both the universal packager and the java app archetype are enabled.
override def requires: Plugins = UniversalPlugin && JavaAppPackaging
import UniversalPlugin.autoImport._
import JavaAppPackaging.autoImport._
object autoImport {
val externalizedResources = TaskKey[Seq[(File, String)]]("externalizedResources", "The resources to externalize")
val jarSansExternalized =
TaskKey[File]("jarSansExternalized", "Creates a jar file that has all the externalized resources excluded")
val externalizeResourcesExcludes = SettingKey[Seq[File]](
"externalizeResourcesExcludes",
"Resources that should not be externalized but stay in the generated jar"
)
}
import autoImport._
override val projectSettings: Seq[Def.Setting[_]] = Seq(
externalizeResourcesExcludes := Nil,
// Ship the externalized resources under conf/ in the universal package.
Universal / mappings ++= {
val resourceMappings = (Compile / externalizedResources).value
resourceMappings.map {
case (resource, path) => resource -> ("conf/" + path)
}
},
// Prepend ../conf/ so the generated start script finds externalized resources first.
scriptClasspath := {
val scriptClasspathValue = scriptClasspath.value
"../conf/" +: scriptClasspathValue
},
// Swap the regular project jar for the "sans-externalized" jar in the packaged classpath.
scriptClasspathOrdering := Def.taskDyn {
val oldValue = scriptClasspathOrdering.value
Def.task {
// Filter out the regular jar
val jar = (Runtime / packageBin).value
val jarSansExternalizedObj = (Runtime / jarSansExternalized).value
oldValue.map {
case (packageBinJar, _) if jar == packageBinJar =>
val id = projectID.value
val art = (Compile / jarSansExternalized / artifact).value
val jarName =
JavaAppPackaging.makeJarName(id.organization, id.name, id.revision, art.name, art.classifier)
jarSansExternalizedObj -> ("lib/" + jarName)
case other => other
}
}
}.value
) ++ inConfig(Compile)(externalizedSettings)
// Unmanaged resource files, made relative to their resource directories (or flattened),
// minus the resource directories themselves and any explicit excludes.
def getExternalizedResources(
rdirs: Seq[File],
unmanagedResourcesValue: Seq[File],
externalizeResourcesExcludes: Seq[File]
): Seq[(File, String)] =
(unmanagedResourcesValue --- rdirs --- externalizeResourcesExcludes).pair(relativeTo(rdirs) | flat)
// Compile-scoped settings: the externalized-resources task plus the packaging of the
// jar that excludes those resources.
private def externalizedSettings: Seq[Setting[_]] =
Defaults.packageTaskSettings(jarSansExternalized, jarSansExternalized / mappings) ++ Seq(
externalizedResources := getExternalizedResources(
unmanagedResourceDirectories.value,
unmanagedResources.value,
externalizeResourcesExcludes.value
),
jarSansExternalized / mappings := {
// packageBin mappings have all the copied resources from the classes directory
// so we need to get the copied resources, and map the source files to the destination files,
// so we can then exclude the destination files
val packageBinMappings = (packageBin / mappings).value
val externalized = externalizedResources.value.map(_._1).toSet
val copied = copyResources.value
val toExclude = copied.collect {
case (source, dest) if externalized(source) => dest
}.toSet
packageBinMappings.filterNot {
case (file, _) => toExclude(file)
}
},
jarSansExternalized / artifactClassifier := Option("sans-externalized")
)
}
| SpongePowered/Ore | project/ExternalizedResourcesMappings.scala | Scala | mit | 3,576 |
package com.guidewire.tools.chronos.client
/**
*
*/
import org.scalatest.junit.JUnitRunner
import org.scalatest.{ParallelTestExecution, BeforeAndAfterAll, SeveredStackTraces, FunSuite}
import org.junit.runner.RunWith
import org.scalatest.matchers.ShouldMatchers
import dispatch._, Defaults._
import play.api.libs.json._
import play.api.libs.functional._
import scalaz._
import com.guidewire.tools.chronos.client.api.v2.{Jobs, Schedule, Job, Chronos}
import org.joda.time.DateTime
@RunWith(classOf[JUnitRunner])
// Integration suite against a live Chronos instance; each test body is skipped
// when the configured host is unreachable (see `ignoreIfHostNotUp`).
class BasicFunctionalitySuite extends FunSuite
with ParallelTestExecution
with ShouldMatchers
with SeveredStackTraces {
import ClientScalaTest._
test("Can connect to running instance") (ignoreIfHostNotUp { (host, port, secure) =>
val jobs = blockAndValidateSuccess {
Chronos.scheduler.jobs(Connection(host, port, secure))
}
for(job <- jobs) {
job should not be null
job.name should not be ""
//println(s"${job.schedule.get.period}")
//println(s"$job")
}
})
test("Can ping running instance") (ignoreIfHostNotUp { (host, port, secure) =>
val result = blockAndValidateSuccess {
// NOTE(review): `secure` is not passed here, unlike the other tests — confirm
// whether ping should also honor the secure flag.
Chronos.debug.ping(Connection(host, port))
}
result should be (true)
})
test("Can request metrics") (ignoreIfHostNotUp { (host, port, secure) =>
val metrics = blockAndValidateSuccess {
Chronos.metrics.full(Connection(host, port, secure))
}
// Every metric family must deserialize with a non-empty name.
for(gauge <- metrics.gauges) {
gauge should not be null
gauge.name should not be ""
//println(s"$gauge")
}
for(counter <- metrics.counters) {
counter should not be null
counter.name should not be ""
//println(s"$counter")
}
for(histogram <- metrics.histograms) {
histogram should not be null
histogram.name should not be ""
//println(s"$histogram")
}
for(meter <- metrics.meters) {
meter should not be null
meter.name should not be ""
//println(s"$meter")
}
for(timer <- metrics.timers) {
timer should not be null
timer.name should not be ""
//println(s"$timer")
}
})
test("Can request dot file") (ignoreIfHostNotUp { (host, port, secure) =>
val dot = blockAndValidateSuccess {
Chronos.scheduler.graphs.dot(Connection(host, port, secure))
}
dot should not be null
//println(s"$dot")
})
// The job tests follow a delete-if-present / add / delete pattern so they are
// repeatable against a shared Chronos instance.
test("Can add simple scheduled job") (ignoreIfHostNotUp { (host, port, secure) =>
val List(_, added, cleanup) = blockAndValidateSuccess {
implicit val cn = Connection(host, port, secure)
for {
ensure <- Chronos.scheduler.jobs.delete("scalatest-scheduler-job-addScheduled", ignoreIfMissing = true)
added <- Chronos.scheduler.jobs.addScheduled(Jobs.scheduled(
name = s"scalatest-scheduler-job-addScheduled"
, command = s"echo 'scalatest-scheduler-job-addScheduled' >> /tmp/chronos-client-scala-test.txt"
, schedule = Schedule(5L, DateTime.now, "PT10S".toPeriod)
))
cleanup <- Chronos.scheduler.jobs.delete("scalatest-scheduler-job-addScheduled")
} yield List(ensure, added, cleanup)
}
withClue(s"[CLEANUP REQUIRED] Unable to fully process test on <$host:$port>: ") {
added should be (true)
cleanup should be (true)
}
})
test("Can add simple dependent job") (ignoreIfHostNotUp { (host, port, secure) =>
val List(_, _, added1, added2, cleanup1, cleanup2) = blockAndValidateSuccess {
implicit val cn = Connection(host, port, secure)
for {
ensure1 <- Chronos.scheduler.jobs.delete("scalatest-scheduler-job-adddependent-001", ignoreIfMissing = true)
ensure2 <- Chronos.scheduler.jobs.delete("scalatest-scheduler-job-adddependent", ignoreIfMissing = true)
added1 <- Chronos.scheduler.jobs.addScheduled(Jobs.scheduled(
name = s"scalatest-scheduler-job-adddependent"
, command = s"echo 'scalatest-scheduler-job-adddependent' >> /tmp/chronos-client-scala-test.txt"
, schedule = Schedule(5L, DateTime.now, "PT10S".toPeriod)
))
added2 <- Chronos.scheduler.jobs.addDependent(Jobs.dependent(
name = s"scalatest-scheduler-job-adddependent-001"
, command = s"echo 'scalatest-scheduler-job-adddependent-001' >> /tmp/chronos-client-scala-test.txt"
, parents = Set("scalatest-scheduler-job-adddependent")
))
cleanup1 <- Chronos.scheduler.jobs.delete("scalatest-scheduler-job-adddependent-001")
cleanup2 <- Chronos.scheduler.jobs.delete("scalatest-scheduler-job-adddependent")
} yield List(ensure1, ensure2, added1, added2, cleanup1, cleanup2)
}
withClue(s"[CLEANUP REQUIRED] Unable to fully process test on <$host:$port>: ") {
added1 should be (true)
added2 should be (true)
cleanup1 should be (true)
cleanup2 should be (true)
}
})
test("Can delete all tasks for a simple job") (ignoreIfHostNotUp { (host, port, secure) =>
val List(_, added, deleted, cleanup) = blockAndValidateSuccess {
implicit val cn = Connection(host, port, secure)
for {
ensure <- Chronos.scheduler.jobs.delete("scalatest-scheduler-tasks-killAll", ignoreIfMissing = true)
added <- Chronos.scheduler.jobs.addScheduled(Jobs.scheduled(
name = s"scalatest-scheduler-tasks-killAll"
, command = s"echo 'scalatest-scheduler-tasks-killAll'"
, schedule = Schedule(5L, DateTime.now, "PT10S".toPeriod)
))
deleted <- Chronos.scheduler.tasks.killAll("scalatest-scheduler-tasks-killAll")
cleanup <- Chronos.scheduler.jobs.delete("scalatest-scheduler-tasks-killAll")
} yield List(ensure, added, deleted, cleanup)
}
withClue(s"[CLEANUP REQUIRED] Unable to fully process test on <$host:$port>: ") {
added should be (true)
deleted should be (true)
cleanup should be (true)
}
})
test("Can manually start a job") (ignoreIfHostNotUp { (host, port, secure) =>
val List(_, added, started, cleanup) = blockAndValidateSuccess {
implicit val cn = Connection(host, port, secure)
for {
ensure <- Chronos.scheduler.jobs.delete("scalatest-scheduler-jobs-start", ignoreIfMissing = true)
added <- Chronos.scheduler.jobs.addScheduled(Job(
name = s"scalatest-scheduler-jobs-start"
, command = s"echo 'scalatest-scheduler-jobs-start'"
, schedule = Schedule(5L, DateTime.now, "PT10S".toPeriod)
))
started <- Chronos.scheduler.jobs.start("scalatest-scheduler-jobs-start")
cleanup <- Chronos.scheduler.jobs.delete("scalatest-scheduler-jobs-start")
} yield List(ensure, added, started, cleanup)
}
withClue(s"[CLEANUP REQUIRED] Unable to fully process test on <$host:$port>: ") {
added should be (true)
started should be (true)
cleanup should be (true)
}
})
// Offline deserialization tests driven by bundled JSON fixtures.
for(i <- 1 to 1)
test(f"Can deserialize simple scheduler jobs (/chronos-scheduler-jobs-${i}%03d.json)")(validateResourceParse(f"/chronos-scheduler-jobs-${i}%03d.json")(api.v2.Scheduler.jobs.processList))
for(i <- 1 to 1)
test(f"Can deserialize simple metrics (/chronos-metrics-${i}%03d.json)")(validateResourceParse(f"/chronos-metrics-${i}%03d.json")(api.v2.Chronos.metrics.processFull))
}
| Guidewire/chronos-client | src/test/scala/com/guidewire/tools/chronos/client/BasicFunctionalitySuite.scala | Scala | apache-2.0 | 7,641 |
package monitoring
import app.ConfigProperties._
import gov.dwp.carers.CADSHealthCheck
import gov.dwp.carers.CADSHealthCheck.Result
import play.api.http.Status
import utils.HttpWrapper
import scala.language.{implicitConversions, postfixOps}
/**
* Ping ClaimService to check connection
*/
/**
 * Health check that pings the claim service's `/ping` endpoint and reports
 * healthy only on an HTTP 200 response.
 */
class ClaimServiceConnectionCheck extends CADSHealthCheck(s"${getStringProperty("application.name", throwError = false)}", getStringProperty("application.version", throwError = false).takeWhile(_ != '-')) {

  /**
   * Performs the check. Any non-200 status — or a connection failure — is
   * reported as unhealthy rather than letting an exception escape.
   */
  override def check(): Result = {
    val url = getStringProperty("claimsServiceUrl") + "/ping"
    val timeout = getIntProperty("cs.timeout")
    try {
      val httpWrapper = new HttpWrapper
      val response = httpWrapper.get(url, timeout)
      response.getStatus match {
        case Status.OK =>
          Result.healthy
        case status =>
          Result.unhealthy(s"Claim Service ping failed: ${status} from $url with timeout $timeout.")
      }
    } catch {
      // A connection/timeout failure must surface as "unhealthy", not as a
      // thrown exception. NonFatal keeps OOM/interrupts propagating.
      case scala.util.control.NonFatal(e) =>
        Result.unhealthy(s"Claim Service ping failed: ${e.getMessage} from $url with timeout $timeout.")
    }
  }
}
| Department-for-Work-and-Pensions/CarersAllowanceStaffAccess | casa/app/monitoring/ClaimServiceConnectionCheck.scala | Scala | mit | 943 |
package concurrency
import akka.actor.ActorSystem
import configuration.SiteSettings
import scaldi.Module
import scaldi.akka.AkkaInjectable
import spray.can.server.ServerSettings
package object actor {
// Name of the dedicated dispatcher used for blocking work (configured in application.conf).
val BlockingDispatcher = "akka.blocking-dispatcher"
/** Scaldi module wiring the actor system and the actor bindings. */
class ActorsModule extends Module {
// Single ActorSystem for the app; shut down when the injector is destroyed.
bind [ActorSystem] to ActorSystem("system") destroyWith (_.shutdown())
// Provider bindings: a fresh instance per injection.
binding toProvider new HealthCheck()
binding toProvider new BackGround()
// Non-lazy binding exposing an ActorRef for the background service actor.
binding identifiedBy 'backgroundActor toNonLazy {
implicit val system = inject [ActorSystem]
AkkaInjectable.injectActorRef[BackGround]("background-service")
}
}
}
| onurzdg/spray-app | src/main/scala/concurrency/actor/package.scala | Scala | apache-2.0 | 641 |
package breeze.linalg
import breeze.generic.UFunc
import scala.reflect.ClassTag
import spire.implicits._
import breeze.storage.Zero
/**
* split the array
*
* @author stucchio
*/
/**
 * Splits a vector into `n` equally sized chunks, a vector at explicit indices,
 * or a matrix into `n` pieces along a chosen axis (analogous to numpy's `split`).
 *
 * @author stucchio
 */
object split extends UFunc {

  /** Splits `v` into `n` contiguous chunks of equal length `v.size / n`. */
  implicit def implIntVec[T: ClassTag]: Impl2[DenseVector[T], Int, Seq[DenseVector[T]]] =
    new Impl2[DenseVector[T], Int, Seq[DenseVector[T]]] {
      def apply(v: DenseVector[T], n: Int): Seq[DenseVector[T]] = {
        // FIX: `n >= 0` used to let n == 0 through, which then failed with an
        // ArithmeticException in `v.size % n`; and `n < v.size` wrongly rejected
        // the valid n == v.size case (chunks of size 1).
        require(n > 0, "number of chunks must be positive")
        require(n <= v.size, "cannot split a vector into more chunks than it has elements")
        require(v.size % n == 0, "vector length must be evenly divisible by the number of chunks")
        val individualVectorSize = v.size / n
        val result = new collection.mutable.ListBuffer[DenseVector[T]]()
        cfor(0)(k => k < n, k => k + 1)(k => {
          val offsetInOriginalVector = k * individualVectorSize
          // Copy into a fresh backing array so the chunks do not alias `v`.
          val chunk = new Array[T](individualVectorSize)
          cfor(0)(i => i < individualVectorSize, i => i + 1)(i => {
            chunk(i) = v(offsetInOriginalVector + i)
          })
          result += new DenseVector[T](chunk)
        })
        result.toSeq
      }
    }

  /**
   * Splits `v` at the given ascending indices: chunk boundaries are
   * [0, n1), [n1, n2), ..., and a final chunk [nk, v.size) when nk < v.size.
   */
  implicit def implSeqVec[T: ClassTag]: Impl2[DenseVector[T], Seq[Int], Seq[DenseVector[T]]] =
    new Impl2[DenseVector[T], Seq[Int], Seq[DenseVector[T]]] {
      def apply(v: DenseVector[T], nSeq: Seq[Int]): Seq[DenseVector[T]] = {
        require(nSeq.size < v.size, "cannot have more split points than elements")
        val result = new collection.mutable.ListBuffer[DenseVector[T]]()
        var lastN: Int = 0
        nSeq.foreach(n => {
          val chunk = new Array[T](n - lastN)
          cfor(lastN)(i => i < n, i => i + 1)(i => {
            chunk(i - lastN) = v(i)
          })
          result += new DenseVector[T](chunk)
          lastN = n
        })
        if (lastN < v.size) { // If we did not already add the last chunk to result, do it now.
          val chunk = new Array[T](v.size - lastN)
          cfor(lastN)(i => i < v.size, i => i + 1)(i => {
            chunk(i - lastN) = v(i)
          })
          result += new DenseVector[T](chunk)
        }
        result.toSeq
      }
    }

  /**
   * Splits a matrix into `n` pieces along `axis`: axis 0 partitions rows
   * (vsplit), axis 1 partitions columns (hsplit).
   */
  implicit def implIntMatrix[T: ClassTag](implicit zero: Zero[T]): Impl3[DenseMatrix[T], Int, Int, Seq[DenseMatrix[T]]] =
    new Impl3[DenseMatrix[T], Int, Int, Seq[DenseMatrix[T]]] {
      def apply(v: DenseMatrix[T], n: Int, axis: Int): Seq[DenseMatrix[T]] = axis match {
        case 0 => vsplit(v, n)
        case 1 => hsplit(v, n)
        case _ => throw new IllegalArgumentException("Matrices have only two axes.")
      }
    }
}
/**
 * Splits a vector, or a matrix column-wise, into `n` equally sized pieces
 * (analogous to numpy's `hsplit`).
 */
object hsplit extends UFunc {

  /**
   * For vectors, horizontal splitting is just ordinary splitting.
   * BUG FIX: this previously called `hsplit(v, n)` — i.e. itself — which
   * recursed forever (StackOverflowError); it now delegates to `split`.
   */
  implicit def implIntVec[T: ClassTag]: Impl2[DenseVector[T], Int, Seq[DenseVector[T]]] =
    new Impl2[DenseVector[T], Int, Seq[DenseVector[T]]] {
      def apply(v: DenseVector[T], n: Int): Seq[DenseVector[T]] = split(v, n)
    }

  /** Vector alias of `split` for splitting at explicit indices (same recursion fix as above). */
  implicit def implSeqVec[T: ClassTag]: Impl2[DenseVector[T], Seq[Int], Seq[DenseVector[T]]] =
    new Impl2[DenseVector[T], Seq[Int], Seq[DenseVector[T]]] {
      def apply(v: DenseVector[T], n: Seq[Int]): Seq[DenseVector[T]] = split(v, n)
    }

  /** Splits a matrix into `n` column blocks of equal width `v.cols / n`. */
  implicit def implIntMat[T: ClassTag](implicit zero: Zero[T]): Impl2[DenseMatrix[T], Int, Seq[DenseMatrix[T]]] =
    new Impl2[DenseMatrix[T], Int, Seq[DenseMatrix[T]]] {
      def apply(v: DenseMatrix[T], n: Int): Seq[DenseMatrix[T]] = {
        // n == 0 would divide by zero below; n == v.cols (single-column blocks) is valid.
        require(n > 0, "number of chunks must be positive")
        require(n <= v.cols, "cannot split a matrix into more column blocks than it has columns")
        require(v.cols % n == 0, "column count must be evenly divisible by the number of chunks")
        val result = new collection.mutable.ListBuffer[DenseMatrix[T]]()
        val newCols = v.cols / n
        val newSize = v.rows * newCols
        cfor(0)(k => k < n, k => k + 1)(k => {
          val offsetInOriginalMatrix = k * newCols
          val chunk = DenseMatrix.create(v.rows, newCols, new Array[T](newSize))
          cfor(0)(i => i < v.rows, i => i + 1)(i => {
            cfor(0)(j => j < newCols, j => j + 1)(j => {
              chunk(i, j) = v(i, j + offsetInOriginalMatrix)
            })
          })
          result += chunk
        })
        result.toSeq
      }
    }
}
/**
 * Splits a matrix row-wise into `n` equally sized blocks (analogous to
 * numpy's `vsplit`).
 */
object vsplit extends UFunc {

  /** Splits a matrix into `n` row blocks of equal height `v.rows / n`. */
  implicit def implIntMat[T: ClassTag](implicit zero: Zero[T]): Impl2[DenseMatrix[T], Int, Seq[DenseMatrix[T]]] =
    new Impl2[DenseMatrix[T], Int, Seq[DenseMatrix[T]]] {
      def apply(v: DenseMatrix[T], n: Int): Seq[DenseMatrix[T]] = {
        // BUG FIX: the preconditions previously validated v.cols, but a vertical
        // split partitions ROWS — a row count not divisible by n slipped through
        // whenever the column count happened to satisfy the checks.
        require(n > 0, "number of chunks must be positive")
        require(n <= v.rows, "cannot split a matrix into more row blocks than it has rows")
        require(v.rows % n == 0, "row count must be evenly divisible by the number of chunks")
        val result = new collection.mutable.ListBuffer[DenseMatrix[T]]()
        val newRows = v.rows / n
        cfor(0)(k => k < n, k => k + 1)(k => {
          val offsetInOriginalMatrix = k * newRows
          val chunk = DenseMatrix.create(newRows, v.cols, new Array[T](v.cols * newRows))
          cfor(0)(i => i < newRows, i => i + 1)(i => {
            cfor(0)(j => j < v.cols, j => j + 1)(j => {
              chunk(i, j) = v(i + offsetInOriginalMatrix, j)
            })
          })
          result += chunk
        })
        result.toSeq
      }
    }
}
| sheide/breeze | math/src/main/scala/breeze/linalg/functions/split.scala | Scala | apache-2.0 | 4,639 |
package org.jetbrains.plugins.scala.lang.completion.lookups
import com.intellij.psi.impl.light.LightElement
import com.intellij.psi.tree.IElementType
import com.intellij.psi.{PsiElement, PsiManager}
import com.intellij.util.containers.ConcurrentWeakHashMap
import org.jetbrains.plugins.scala.ScalaFileType
import org.jetbrains.plugins.scala.lang.lexer.ScalaLexer
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiElement
/**
* @author Alefas
* @since 27.03.12
*/
// Lightweight (non-physical) PSI element representing a single Scala keyword.
// Instances are created and cached via the companion object's `apply`.
class ScalaLightKeyword private (manager: PsiManager, text: String)
extends LightElement(manager, ScalaFileType.SCALA_LANGUAGE) with ScalaPsiElement {
protected def findChildrenByClassScala[T >: Null <: ScalaPsiElement](clazz: Class[T]): Array[T] =
findChildrenByClass[T](clazz)
protected def findChildByClassScala[T >: Null <: ScalaPsiElement](clazz: Class[T]): T = findChildByClass[T](clazz)
override def getText: String = text
// Lexes the keyword text and returns the element type of its first token.
def getTokenType: IElementType = {
val lexer = new ScalaLexer
lexer.start(text)
lexer.getTokenType
}
override def copy: PsiElement = new ScalaLightKeyword(getManager, text)
override def toString: String = "ScalaLightKeyword:" + text
}
object ScalaLightKeyword {
  // Weak cache of keyword elements, keyed by (manager, keyword text).
  private val keywords = new ConcurrentWeakHashMap[(PsiManager, String), ScalaLightKeyword]()

  /**
   * Returns the cached keyword element for this manager/text pair when a valid
   * one exists; otherwise creates a fresh element and caches it.
   */
  def apply(manager: PsiManager, text: String): ScalaLightKeyword = {
    val key = (manager, text)
    val cached = keywords.get(key)
    if (cached != null && cached.isValid) {
      cached
    } else {
      val created = new ScalaLightKeyword(manager, text)
      keywords.put(key, created)
      created
    }
  }
}
| triggerNZ/intellij-scala | src/org/jetbrains/plugins/scala/lang/completion/lookups/ScalaLightKeyword.scala | Scala | apache-2.0 | 1,557 |
package com.datamountaineer.streamreactor.connect.ftp.source
import com.typesafe.scalalogging.StrictLogging
import org.scalatest.BeforeAndAfter
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import scala.collection.JavaConverters._
// Integration test against an embedded FTP server: verifies that a single poll
// is capped at FtpMaxPollRecords even when slicing makes many more records available.
class ManyFilesTest extends AnyFunSuite with Matchers with BeforeAndAfter with StrictLogging {
val ftpServer = new EmbeddedFtpServer(3333)
// Workload deliberately larger than one poll: many files, each spanning many slices.
val fileCount = 132
val sliceSize = 1024
val maxPollRecords = 74
val lineSep = System.getProperty("line.separator")
// ~12000 numbered lines, so each file is far larger than one sliceSize slice.
val fileContent = (1 to 12000).map(index => s"line_${"%010d".format(index)}${lineSep}").mkString.getBytes
val fileName = "the_file_name"
val filePath = s"/folder/${fileName}"
val sourceConfig = Map(
FtpSourceConfig.Address -> s"${ftpServer.host}:${ftpServer.port}",
FtpSourceConfig.User -> ftpServer.username,
FtpSourceConfig.Password -> ftpServer.password,
FtpSourceConfig.RefreshRate -> "PT0S",
FtpSourceConfig.MonitorTail -> "/folder/:output_topic",
FtpSourceConfig.MonitorSliceSize -> sliceSize.toString,
FtpSourceConfig.FileMaxAge -> "P7D",
FtpSourceConfig.KeyStyle -> "string",
FtpSourceConfig.fileFilter -> ".*",
FtpSourceConfig.FtpMaxPollRecords -> s"${maxPollRecords}",
FtpSourceConfig.KeyStyle -> "struct"
)
test("Read only FtpMaxPollRecords even if using MonitorSliceSize") {
val fs = new FileSystem(ftpServer.rootDir).clear()
val cfg = new FtpSourceConfig(sourceConfig.asJava)
val offsets = new DummyOffsetStorage
// Populate the server's root with fileCount+1 files before starting it.
(0 to fileCount).map(index => fs.applyChanges(Seq(s"${filePath}_${index}" -> Append(fileContent))))
val poller = new FtpSourcePoller(cfg, offsets)
ftpServer.start()
val slices = poller.poll()
// A single poll must return exactly the configured cap, not everything available.
(slices.size) shouldBe (maxPollRecords)
ftpServer.stop()
}
}
| datamountaineer/stream-reactor | kafka-connect-ftp/src/test/scala/com/datamountaineer/streamreactor/connect/ftp/source/ManyFilesTest.scala | Scala | apache-2.0 | 1,823 |
package org.tensorframes.dsl
import java.io.{BufferedReader, InputStreamReader, File}
import java.nio.file.Files
import java.nio.charset.StandardCharsets
import org.tensorframes.Logging
import org.scalatest.Matchers
import scala.collection.JavaConverters._
object ExtractNodes extends Matchers with Logging {

  /**
   * Runs a python snippet that builds a TensorFlow graph and returns, for each
   * node of the resulting graph, its name mapped to the protobuf text
   * representation of that node.
   *
   * Requires a `python` executable with TensorFlow installed on the PATH.
   */
  def executeCommand(py: String): Map[String, String] = {
    val content =
      s"""
        |from __future__ import print_function
        |import tensorflow as tf
        |
        |$py
        |g = tf.get_default_graph().as_graph_def()
        |for n in g.node:
        | print(">>>>>", str(n.name), "<<<<<<")
        | print(n)
      """.stripMargin
    val f = File.createTempFile("pythonTest", ".py")
    logTrace(s"Created temp file ${f.getAbsolutePath}")
    Files.write(f.toPath, content.getBytes(StandardCharsets.UTF_8))
    // Using the standard python installation in the PATH. It needs to have TensorFlow installed.
    val p = new ProcessBuilder("python", f.getAbsolutePath).start()
    val br = new BufferedReader(new InputStreamReader(p.getInputStream))
    // Drain stdout fully before waiting for the process, and close the reader in
    // all cases (it was previously leaked). Each line is prefixed with '\n',
    // matching the original accumulation behavior.
    val res =
      try Iterator.continually(br.readLine()).takeWhile(_ != null).map("\n" + _).mkString
      finally br.close()
    p.waitFor()
    assert(p.exitValue() === 0, (p.exitValue(),
      {
        println(content)
        s"===========\n$content\n==========="
      }))
    // Output blocks are delimited by ">>>>>"; the first line of each block is
    // "<name> <<<<<<" (7 trailing characters dropped), the rest is the node's proto text.
    res.split(">>>>>").map(_.trim).filterNot(_.isEmpty).map { b =>
      val zs = b.split("\n")
      val node = zs.head.dropRight(7)
      val rest = zs.tail
      node -> rest.mkString("\n")
    }.toMap
  }

  /**
   * Builds a graph from the given DSL nodes and asserts it contains exactly the
   * same nodes (by name and proto content) as the graph produced by the supplied
   * python program.
   */
  def compareOutput(py: String, nodes: Operation*): Unit = {
    val g = TestUtilities.buildGraph(nodes.head, nodes.tail: _*)
    val m1 = g.getNodeList.asScala.map { n =>
      n.getName -> n.toString.trim
    }.toMap
    val pym = executeCommand(py)
    logTrace(s"m1 = '$m1'")
    logTrace(s"pym = '$pym'")
    assert((m1.keySet -- pym.keySet).isEmpty, {
      val diff = (m1.keySet -- pym.keySet).toSeq.sorted
      s"Found extra nodes in scala: $diff"
    })
    assert((pym.keySet -- m1.keySet).isEmpty, {
      val diff = (pym.keySet -- m1.keySet).toSeq.sorted
      s"Found extra nodes in python: $diff"
    })
    for (k <- m1.keySet) {
      assert(m1(k) === pym(k),
        s"scala=${m1(k)}\npython=${pym(k)}")
    }
  }
}
| databricks/tensorframes | src/test/scala/org/tensorframes/dsl/ExtractNodes.scala | Scala | apache-2.0 | 2,402 |
package com.danielwestheide.kontextfrei.rdd
import com.danielwestheide.kontextfrei.DCollectionPairFunctions
import org.apache.spark.Partitioner
import org.apache.spark.rdd.RDD
import scala.collection.Map
import scala.reflect.ClassTag
/** Pair-DCollection operations for Spark RDDs. Every method simply delegates
  * to the corresponding Spark `PairRDDFunctions` call, wrapped in `withSite`
  * (provided by [[RDDBase]]) so the call site recorded for the resulting RDD
  * points at the caller rather than at this wrapper.
  */
private[kontextfrei] trait RDDPairFunctions
    extends DCollectionPairFunctions[RDD] { this: RDDBase =>

  override final def cogroup[A: ClassTag, B: ClassTag, C: ClassTag](
      x: RDD[(A, B)])(y: RDD[(A, C)]): RDD[(A, (Iterable[B], Iterable[C]))] =
    withSite(x)(rdd => rdd.cogroup(y))

  override final def values[A: ClassTag, B: ClassTag](x: RDD[(A, B)]): RDD[B] =
    withSite(x)(rdd => rdd.values)

  override final def keys[A: ClassTag, B: ClassTag](x: RDD[(A, B)]): RDD[A] =
    withSite(x)(rdd => rdd.keys)

  override final def leftOuterJoin[A: ClassTag, B: ClassTag, C: ClassTag](
      x: RDD[(A, B)])(y: RDD[(A, C)]): RDD[(A, (B, Option[C]))] =
    withSite(x)(rdd => rdd.leftOuterJoin(y))

  override final def rightOuterJoin[A: ClassTag, B: ClassTag, C: ClassTag](
      x: RDD[(A, B)])(y: RDD[(A, C)]): RDD[(A, (Option[B], C))] =
    withSite(x)(rdd => rdd.rightOuterJoin(y))

  override final def fullOuterJoin[A: ClassTag, B: ClassTag, C: ClassTag](
      x: RDD[(A, B)])(y: RDD[(A, C)]): RDD[(A, (Option[B], Option[C]))] =
    withSite(x)(rdd => rdd.fullOuterJoin(y))

  override final def mapValues[A: ClassTag, B: ClassTag, C: ClassTag](
      x: RDD[(A, B)])(f: B => C): RDD[(A, C)] =
    withSite(x)(rdd => rdd.mapValues(f))

  override final def flatMapValues[A: ClassTag, B: ClassTag, C: ClassTag](
      x: RDD[(A, B)])(f: B => TraversableOnce[C]): RDD[(A, C)] =
    withSite(x)(rdd => rdd.flatMapValues(f))

  override final def reduceByKey[A: ClassTag, B: ClassTag](xs: RDD[(A, B)])(
      f: (B, B) => B): RDD[(A, B)] =
    withSite(xs)(rdd => rdd.reduceByKey(f))

  override final def foldByKey[A: ClassTag, B: ClassTag](
      xs: RDD[(A, B)])(zeroValue: B, f: (B, B) => B): RDD[(A, B)] =
    withSite(xs)(rdd => rdd.foldByKey(zeroValue)(f))

  override final def aggregateByKey[A: ClassTag, B: ClassTag, C: ClassTag](
      xs: RDD[(A, B)])(zeroValue: C)(seqOp: (C, B) => C,
                                     combOp: (C, C) => C): RDD[(A, C)] =
    withSite(xs)(rdd => rdd.aggregateByKey(zeroValue)(seqOp, combOp))

  override final def combineByKey[A: ClassTag, B: ClassTag, C: ClassTag](
      xs: RDD[(A, B)])(createCombiner: B => C)(
      mergeValue: (C, B) => C,
      mergeCombiners: (C, C) => C): RDD[(A, C)] =
    withSite(xs)(rdd => rdd.combineByKey(createCombiner, mergeValue, mergeCombiners))

  override final def countByKey[A: ClassTag, B: ClassTag](
      xs: RDD[(A, B)]): Map[A, Long] =
    withSite(xs)(rdd => rdd.countByKey())

  override final def collectAsMap[A: ClassTag, B: ClassTag](
      xs: RDD[(A, B)]): Map[A, B] =
    withSite(xs)(rdd => rdd.collectAsMap())

  override final def partitionBy[A: ClassTag, B: ClassTag](
      xs: RDD[(A, B)])(partitioner: Partitioner): RDD[(A, B)] =
    withSite(xs)(rdd => rdd.partitionBy(partitioner))
}
| dwestheide/kontextfrei | core/src/main/scala/com/danielwestheide/kontextfrei/rdd/RDDPairFunctions.scala | Scala | apache-2.0 | 2,985 |
package dotty.tools.dotc.core

// Negative compiler test: both overloads of `round` declare a default argument.
// The second definition is expected to be rejected (see the `// error` marker) --
// NOTE(review): presumably because overloaded alternatives may not both define
// default arguments; the commented-out @targetName shows that giving the second
// overload a different binary name does not lift the restriction.
def round(f: Float, digits: Int = 0): Float = ???
//@scala.annotation.targetName("roundDouble") // does not change anything
def round(d: Double, digits: Int = 0): Double = ??? // error
| dotty-staging/dotty | tests/neg/i12245.scala | Scala | apache-2.0 | 217 |
package com.twitter.finagle.netty4.channel
import io.netty.buffer.ByteBuf
import io.netty.buffer.Unpooled.wrappedBuffer
import io.netty.channel._
import java.net.InetSocketAddress
import org.junit.runner.RunWith
import org.mockito.Mockito.when
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import org.scalatest.mock.MockitoSugar
@RunWith(classOf[JUnitRunner])
class ChannelSnooperTest extends FunSuite with MockitoSugar {
val msg = "buffer content"
val msgBuffer = wrappedBuffer(msg.getBytes("UTF-8"))
test("ByteBufSnooper decodes and prints inbound and outbound messages") {
var messageCount = 0
val bbs = new ByteBufSnooper("bbs") {
override def dump(printer: (Channel, String) => Unit, ch: Channel, buf: ByteBuf): Unit = {
messageCount += 1
assert(buf == msgBuffer)
super.dump( { (_: Channel, m: String) => assert(msg == m) }, ch, buf )
}
}
bbs.channelRead(mock[ChannelHandlerContext], msgBuffer)
bbs.write(mock[ChannelHandlerContext], msgBuffer, mock[ChannelPromise])
assert(messageCount == 2)
}
trait InstrumentedSnooperCtx {
var eventCount = 0
var inboundCount = 0
var outboundCount = 0
var exnCount = 0
val ctx = mock[ChannelHandlerContext]
val cid = mock[ChannelId]
when(cid.asShortText).thenReturn("1")
val ch = mock[Channel]
when(ctx.channel()).thenReturn(ch)
when(ch.remoteAddress()).thenReturn(new InetSocketAddress(80))
when(ch.id).thenReturn(cid)
val scs = new SimpleChannelSnooper("scs") {
override def printInbound(ch: Channel, message: String): Unit =
inboundCount += 1
override def printOutbound(ch: Channel, message: String): Unit =
outboundCount += 1
override def printer(message: String, exc: Throwable): Unit =
exnCount += 1
override def printEvent(ch: Channel, eventName: String): Unit =
eventCount += 1
}
}
test("SimpleChannelSnooper prints incoming and outgoing messages") {
new InstrumentedSnooperCtx {
scs.channelRead(ctx, msgBuffer)
scs.write(ctx, msgBuffer, mock[ChannelPromise])
assert(inboundCount == 1)
assert(outboundCount == 1)
}
}
test("SimpleChannelSnooper snoops exceptionCaught") {
new InstrumentedSnooperCtx {
assert(exnCount == 0)
scs.exceptionCaught(ctx, new Exception)
assert(exnCount == 1)
}
}
// outbound events
test("SimpleChannelSnooper snoops write") {
new InstrumentedSnooperCtx {
assert(outboundCount == 0)
scs.write(ctx, msg, mock[ChannelPromise])
assert(outboundCount == 1)
}
}
test("SimpleChannelSnooper snoops disconnect") {
new InstrumentedSnooperCtx {
assert(eventCount == 0)
scs.disconnect(ctx, mock[ChannelPromise])
assert(eventCount == 1)
}
}
test("SimpleChannelSnooper snoops flush") {
new InstrumentedSnooperCtx {
assert(eventCount == 0)
scs.flush(ctx)
assert(eventCount == 1)
}
}
test("SimpleChannelSnooper snoops close") {
new InstrumentedSnooperCtx {
assert(eventCount == 0)
scs.close(ctx, mock[ChannelPromise])
assert(eventCount == 1)
}
}
test("SimpleChannelSnooper snoops deregister") {
new InstrumentedSnooperCtx {
assert(eventCount == 0)
scs.deregister(ctx, mock[ChannelPromise])
assert(eventCount == 1)
}
}
test("SimpleChannelSnooper snoops read") {
new InstrumentedSnooperCtx {
assert(eventCount == 0)
scs.read(ctx)
assert(eventCount == 1)
}
}
test("SimpleChannelSnooper snoops connect") {
new InstrumentedSnooperCtx {
assert(eventCount == 0)
scs.connect(ctx, new InetSocketAddress(0), new InetSocketAddress(0), mock[ChannelPromise])
assert(eventCount == 1)
}
}
test("SimpleChannelSnooper snoops bind") {
new InstrumentedSnooperCtx {
assert(eventCount == 0)
scs.bind(ctx, new InetSocketAddress(0), mock[ChannelPromise])
assert(eventCount == 1)
}
}
// inbound events
test("SimpleChannelSnooper snoops channelActive") {
new InstrumentedSnooperCtx {
assert(eventCount == 0)
scs.channelActive(ctx)
assert(eventCount == 1)
}
}
test("SimpleChannelSnooper snoops channelUnregistered") {
new InstrumentedSnooperCtx {
assert(eventCount == 0)
scs.channelUnregistered(ctx)
assert(eventCount == 1)
}
}
test("SimpleChannelSnooper snoops channelInactive") {
new InstrumentedSnooperCtx {
assert(eventCount == 0)
scs.channelInactive(ctx)
assert(eventCount == 1)
}
}
test("SimpleChannelSnooper snoops channelWritabilityChanged") {
new InstrumentedSnooperCtx {
assert(eventCount == 0)
scs.channelWritabilityChanged(ctx)
assert(eventCount == 1)
}
}
test("SimpleChannelSnooper snoops userEventTriggered") {
new InstrumentedSnooperCtx {
assert(eventCount == 0)
scs.userEventTriggered(ctx, new Object)
assert(eventCount == 1)
}
}
test("SimpleChannelSnooper snoops channelRegistered") {
new InstrumentedSnooperCtx {
assert(eventCount == 0)
scs.channelRegistered(ctx)
assert(eventCount == 1)
}
}
test("SimpleChannelSnooper snoops channelReadComplete") {
new InstrumentedSnooperCtx {
assert(eventCount == 0)
scs.channelReadComplete(ctx)
assert(eventCount == 1)
}
}
test("SimpleChannelSnooper snoops channelRead"){
new InstrumentedSnooperCtx {
assert(inboundCount == 0)
scs.channelRead(ctx, msg)
assert(inboundCount == 1)
}
}
}
| koshelev/finagle | finagle-netty4/src/test/scala/com/twitter/finagle/netty4/channel/ChannelSnooperTest.scala | Scala | apache-2.0 | 5,654 |
package controllers
import com.google.inject.Inject
import uk.gov.dvla.vehicles.presentation.common.controllers
import uk.gov.dvla.vehicles.presentation.common.controllers.Version.Suffix
import uk.gov.dvla.vehicles.presentation.common.webserviceclients.acquire.AcquireConfig
import uk.gov.dvla.vehicles.presentation.common.webserviceclients.emailservice.EmailServiceConfig
import uk.gov.dvla.vehicles.presentation.common.webserviceclients.addresslookup.ordnanceservey.OrdnanceSurveyConfig
import uk.gov.dvla.vehicles.presentation.common.webserviceclients.vehicleandkeeperlookup.VehicleAndKeeperLookupConfig
/**
 * Application version endpoint. Delegates to the common
 * [[uk.gov.dvla.vehicles.presentation.common.controllers.Version]] controller,
 * passing the base URL of each dependent micro-service with the shared
 * version-path suffix appended -- NOTE(review): presumably so the endpoint can
 * also report the versions of those services; confirm in the common controller.
 */
class Version @Inject()(vehicleAndKeeperLookupConfig: VehicleAndKeeperLookupConfig,
                        osAddressLookupConfig: OrdnanceSurveyConfig,
                        vehiclesAcquireConfig: AcquireConfig,
                        emailConfig: EmailServiceConfig)
  extends controllers.Version(
    emailConfig.emailServiceMicroServiceBaseUrl + Suffix,
    osAddressLookupConfig.baseUrl + Suffix,
    vehicleAndKeeperLookupConfig.vehicleAndKeeperLookupMicroServiceBaseUrl + Suffix,
    vehiclesAcquireConfig.baseUrl + Suffix
  )
| dvla/vehicles-acquire-online | app/controllers/Version.scala | Scala | mit | 1,144 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.command.management
import org.apache.spark.sql.{CarbonDatasourceHadoopRelation, Dataset, Row, SparkSession}
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.catalyst.plans.logical.{GlobalLimit, LogicalPlan}
import org.apache.spark.sql.execution.command.{AtomicRunnableCommand, DataCommand}
import org.apache.spark.storage.StorageLevel
import org.apache.carbondata.common.logging.{LogService, LogServiceFactory}
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
import org.apache.carbondata.spark.util.CarbonSparkUtil
/**
 * Runnable command backing INSERT INTO on a Carbon table. The metadata phase
 * converts the insert into an internal [[CarbonLoadDataCommand]], which the
 * data phase then executes.
 *
 * @param relation  the target Carbon table relation
 * @param child     the logical plan producing the rows to insert
 * @param overwrite true for INSERT OVERWRITE semantics
 * @param partition static partition spec (column name -> optional value)
 */
case class CarbonInsertIntoCommand(
    relation: CarbonDatasourceHadoopRelation,
    child: LogicalPlan,
    overwrite: Boolean,
    partition: Map[String, Option[String]])
  extends AtomicRunnableCommand {

  // Built during processMetadata and reused by processData.
  var loadCommand: CarbonLoadDataCommand = _

  override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
    val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getName)
    // True when the plan tree contains a GlobalLimit node anywhere.
    // (Plain method-call form; the previous `plan find { ... } isDefined`
    // relied on discouraged postfix operator notation.)
    def containsLimit(plan: LogicalPlan): Boolean = {
      plan.find {
        case _: GlobalLimit => true
        case _ => false
      }.isDefined
    }
    val isPersistEnabledUserValue = CarbonProperties.getInstance
      .getProperty(CarbonCommonConstants.CARBON_INSERT_PERSIST_ENABLED,
        CarbonCommonConstants.CARBON_INSERT_PERSIST_ENABLED_DEFAULT)
    // Persist when the user enabled it or the plan contains a limit --
    // NOTE(review): presumably to avoid re-evaluating a limited (potentially
    // non-deterministic) child plan twice; confirm intent.
    val isPersistRequired =
      isPersistEnabledUserValue.equalsIgnoreCase("true") || containsLimit(child)
    val df =
      if (isPersistRequired) {
        LOGGER.info("Persist enabled for Insert operation")
        Dataset.ofRows(sparkSession, child).persist(
          StorageLevel.fromString(
            CarbonProperties.getInstance.getInsertIntoDatasetStorageLevel))
      } else {
        Dataset.ofRows(sparkSession, child)
      }
    // The load command expects a CSV-style header; use the relation's columns.
    val header = relation.tableSchema.get.fields.map(_.name).mkString(",")
    loadCommand = CarbonLoadDataCommand(
      databaseNameOp = Some(relation.carbonRelation.databaseName),
      tableName = relation.carbonRelation.tableName,
      factPathFromUser = null,
      dimFilesPath = Seq(),
      options = scala.collection.immutable.Map("fileheader" -> header),
      isOverwriteTable = overwrite,
      inputSqlString = null,
      dataFrame = Some(df),
      updateModel = None,
      tableInfoOp = None,
      internalOptions = Map.empty,
      partition = partition)
    val load = loadCommand.processMetadata(sparkSession)
    if (isPersistRequired) {
      df.unpersist()
    }
    load
  }

  override def processData(sparkSession: SparkSession): Seq[Row] = {
    // processMetadata may not have produced a load command; nothing to do then.
    if (null != loadCommand) {
      loadCommand.processData(sparkSession)
    } else {
      Seq.empty
    }
  }
}
| jatin9896/incubator-carbondata | integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoCommand.scala | Scala | apache-2.0 | 3,582 |
/*
* Copyright 2015 the original author or authors.
* @https://github.com/scouter-project/scouter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package scouter.server.db;
import java.io.File
import scouter.server.Configure
import scouter.server.Logger
import scouter.server.core.ServerStat
import scouter.server.db.xlog.XLogDataWriter
import scouter.server.db.xlog.XLogIndex
import scouter.server.util.OftenAction
import scouter.server.util.ThreadScala
import scouter.util.DateUtil
import scouter.util.FileUtil
import scouter.util.RequestQueue
/**
 * Asynchronous writer for XLog records. Producers enqueue records via [[add]];
 * a single background thread drains the queue, appends each record to the
 * current day's data file and indexes its location by time, txid and gxid.
 * Files are rolled over whenever the record's date unit changes.
 */
object XLogWR {
    // Sub-directory (under the per-day DB root) and file name prefix.
    val dir = "/xlog"
    val prefix = "xlog"
    val queue = new RequestQueue[Data](Configure.getInstance().xlog_queue_size);
    // State owned by the writer thread: currently open day and its index/writer.
    var currentDateUnit: Long = 0
    var index: XLogIndex = null
    var writer: XLogDataWriter = null
    ThreadScala.start("scouter.server.db.XLogWR") {
        while (DBCtr.running) {
            val m = queue.get();
            ServerStat.put("xlog.db.queue",queue.size());
            try {
                // Roll to a new file set when the record belongs to another day.
                if (currentDateUnit != DateUtil.getDateUnit(m.time)) {
                    currentDateUnit = DateUtil.getDateUnit(m.time);
                    close();
                    open(DateUtil.yyyymmdd(m.time));
                }
                if (index == null) {
                    // Open failed: periodically drop queued work and force a
                    // re-open attempt on the next record.
                    OftenAction.act("XLoWR", 10) {
                        queue.clear();
                        currentDateUnit = 0;
                    }
                    Logger.println("S143", 10, "can't open ");
                } else {
                    // Append the payload, then record its file location under
                    // all three lookup keys.
                    val location = writer.write(m.data);
                    index.setByTime(m.time, location);
                    index.setByTxid(m.txid, location);
                    index.setByGxid(m.gxid, location);
                }
            } catch {
                case t: Throwable => t.printStackTrace()
            }
        }
        close()
    }
    /** Enqueues one serialized XLog record; logs when the queue is full. */
    def add(time: Long, tid: Long, gid: Long, elapsed: Int, data: Array[Byte]) {
        val ok = queue.put(new Data(time, tid, gid, elapsed, data));
        if (ok == false) {
            Logger.println("S144", 10, "queue exceeded!!");
        }
    }
    /** Immutable carrier for one queued record and its index keys. */
    class Data(_time: Long, _txid: Long, _gxid: Long, _elapsed: Int, _data: Array[Byte]) {
        val time = _time;
        val txid = _txid;
        val gxid = _gxid;
        val elapsed = _elapsed;
        val data = _data;
    }
    /** Closes and clears the current index/writer pair (safe to call twice). */
    def close() {
        FileUtil.close(index);
        FileUtil.close(writer);
        index = null;
        writer = null;
    }
    /** Opens (creating directories if needed) the index/writer for `date`. */
    def open(date: String) {
        try {
            val path = getDBPath(date);
            val f = new File(path);
            if (f.exists() == false)
                f.mkdirs();
            val file = path + "/" + prefix;
            index = XLogIndex.open(file);
            writer = XLogDataWriter.open(date, file);
        } catch {
            case e: Throwable => {
                e.printStackTrace();
                close()
            }
        }
    }
    /** DB directory for `date`: &lt;root&gt;/&lt;yyyymmdd&gt;/xlog */
    def getDBPath(date: String): String = {
        val sb = new StringBuffer();
        sb.append(DBCtr.getRootPath());
        sb.append("/").append(date).append(dir);
        return sb.toString();
    }
}
| yuyupapa/OpenSource | scouter.server/src/scouter/server/db/XLogWR.scala | Scala | apache-2.0 | 3,824 |
import scala.language.implicitConversions
import spray.json._
/** Convenience selectors over a spray-json [[JsValue]]. */
class RichJsValue(js: JsValue) {

  /** Selects field `name` from a JSON object; fails on any other JSON value. */
  def \ (name: String): JsValue = js match {
    case JsObject(fields) => fields(name)
    case other =>
      throw new IllegalArgumentException("Cannot select field "+ name +" from non-JsObject "+ other)
  }

  /** True iff this value is a JSON object containing a field called `name`. */
  def hasFieldNamed(name: String) = js match {
    case JsObject(fields) if fields.contains(name) => true
    case _ => false
  }

  /** The elements of a JSON array; fails on any other JSON value. */
  def arrayValues: List[JsValue] = js match {
    case JsArray(elements) => elements.toList
    case other =>
      throw new IllegalArgumentException("Trying to select values from non-JsArray"+ other)
  }
}
object RichJsValue {
  // Implicit enrichment of JsValue. The result type is annotated explicitly:
  // implicit methods with inferred result types are fragile (and deprecated in
  // newer Scala versions).
  implicit def enrichJsValue(js: JsValue): RichJsValue = new RichJsValue(js)
}
| jan-j/functional-programming-principles-in-scala | week-7/project/RichJsValue.scala | Scala | mit | 733 |
/*
* tuProlog - Copyright (C) 2001-2002 aliCE team at deis.unibo.it
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package com.szadowsz.gospel.core.data
import java.util
/**
 * Long represents the long-integer prolog data type.
 */
@SerialVersionUID(1L)
case class Long(value: scala.Long) extends Number {
  /**
   * Returns the value of this integer term as an Int (truncating).
   */
  override final def intValue: scala.Int = value.toInt

  /**
   * Returns the value of this integer term as a Float.
   */
  override final def floatValue: scala.Float = value.toFloat

  /**
   * Returns the value of this integer term as a Double.
   */
  override final def doubleValue: scala.Double = value.toDouble

  /**
   * Returns the value of this integer term as a Long.
   */
  override final def longValue: scala.Long = value

  /**
   * is this term a prolog integer term?
   */
  override final def isInteger = true

  /**
   * is this term a prolog real term?
   */
  override final def isReal = false

  /**
   * Returns true if this integer term is grater that the term provided.
   * For number term argument, the int value is considered.
   * NOTE(review): for non-number terms the result is true only for (non-Struct)
   * variables -- i.e. a number compares greater than an unbound Var; confirm
   * against the tuProlog standard order of terms.
   */
  override def isGreater(t: Term): Boolean = {
    t.getTerm match {
      case n : Number => value > n.longValue
      case term : Term => !term.isInstanceOf[Struct] && term.isInstanceOf[Var]
    }
  }

  /**
   * Tries to unify a term with the provided term argument.
   * This service is to be used in demonstration context.
   * Unifies when `t` is a variable (delegating to the Var) or an integer
   * Number with the same long value.
   */
  override def unify(vl1: util.List[Var], vl2: util.List[Var], t: Term, isOccursCheckEnabled: Boolean): Boolean = {
    t.getTerm match {
      case v: Var => v.unify(vl2, vl1, this, isOccursCheckEnabled)
      case term: Term => term.isInstanceOf[Number] && term.asInstanceOf[Number].isInteger && value == term.asInstanceOf[Number].longValue
    } }

  override def toString: String = java.lang.Long.toString(value)

  /**
   * Numeric comparison by long value.
   * @author Paolo Contessi
   */
  override def compareTo(o: Number): scala.Int = value.compareTo(o.longValue)

  // Occurs-check variant: delegates with occurs-check enabled.
  override private[data] def unify(varsUnifiedArg1: util.List[Var], varsUnifiedArg2: util.List[Var], t: Term) = {
    unify(varsUnifiedArg1, varsUnifiedArg2, t, true)
  }
} | zakski/project-soisceal | gospel-core/src/main/scala/com/szadowsz/gospel/core/data/Long.scala | Scala | lgpl-3.0 | 2,862 |
package tests
package implicitConversions2
class Methods //unexpected
{
def shouldBeImplicitlyAdded1: String
= ???
val shouldBeImplicitlyAdded2: String
= ???
class ShouldBeImplicitlyAdded3
type ShouldBeImplicitlyAdded4
}
class OuterClass //unexpected
{
implicit def conversionMethodWithOneParam(param: ClassWithConversionWithOneParam): Methods //unexpected
= ???
class ClassWithConversionWithOneParam //unexpected
class ClassWithConversionWithProperType extends InheritedClass //unexpected
class InheritedClass //unexpected
object InheritedClass //unexpected
{
implicit def conversionMethodWithProperType: Conversion[ClassWithConversionWithProperType, Methods] //unexpected
= ???
}
given conversionFromVal: Conversion[ClassWithConversionFromVal, Methods] with //unexpected
{
def apply(a: ClassWithConversionFromVal): Methods //unexpected
= ???
}
class ClassWithConversionFromVal //unexpected
} | dotty-staging/dotty | scaladoc-testcases/src/tests/implicitConversions2.scala | Scala | apache-2.0 | 965 |
package pl.touk.nussknacker.engine.requestresponse.http
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.server.Directives
import akka.stream.Materializer
import com.typesafe.config.{Config, ConfigFactory}
import com.typesafe.scalalogging.LazyLogging
import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport
import io.dropwizard.metrics5.MetricRegistry
import pl.touk.nussknacker.engine.lite.api.runtimecontext.LiteEngineRuntimeContextPreparer
import pl.touk.nussknacker.engine.lite.metrics.dropwizard.{DropwizardMetricsProviderFactory, LiteMetricRegistryFactory}
import pl.touk.nussknacker.engine.requestresponse.deployment.DeploymentService
import pl.touk.nussknacker.engine.requestresponse.http.logging.RequestResponseLogger
import scala.util.Try
/**
 * Entry point of the request-response runtime. Starts two HTTP servers:
 * a management server (default port 8070, overridable via the first program
 * argument) and a scenario-processing server (default port 8080, overridable
 * via the second argument).
 */
object RequestResponseHttpApp extends Directives with FailFastCirceSupport with LazyLogging with App {
  private val config = ConfigFactory.load()
  implicit val system: ActorSystem = ActorSystem("nussknacker-request-response-http", config)
  import system.dispatcher
  implicit private val materializer: Materializer = Materializer(system)
  // Ports from program arguments; fall back to defaults on absence or parse failure.
  val managementPort = Try(args(0).toInt).getOrElse(8070)
  val processesPort = Try(args(1).toInt).getOrElse(8080)
  // Metrics instance id is derived from hostname + the processing port.
  val metricRegistry = LiteMetricRegistryFactory.usingHostnameAndPortAsDefaultInstanceId(processesPort).prepareRegistry(config)
  val requestResponseApp = new RequestResponseHttpApp(config, metricRegistry)
  Http().newServerAt(
    interface = "0.0.0.0",
    port = managementPort
  ).bind(requestResponseApp.managementRoute.route)
  Http().newServerAt(
    interface = "0.0.0.0",
    port = processesPort
  ).bind(requestResponseApp.processRoute.route(RequestResponseLogger.get(Thread.currentThread.getContextClassLoader)))
}
/**
 * Wires the deployment service and exposes the two route trees served by the
 * bootstrap object: management (deploy/undeploy) and scenario processing.
 */
class RequestResponseHttpApp(config: Config, metricRegistry: MetricRegistry)(implicit as: ActorSystem)
  extends Directives with LazyLogging {
  private val contextPreparer = new LiteEngineRuntimeContextPreparer(new DropwizardMetricsProviderFactory(metricRegistry))
  private val deploymentService = DeploymentService(contextPreparer, config)
  val managementRoute = new ManagementRoute(deploymentService)
  val processRoute = new ProcessRoute(deploymentService)
}
| TouK/nussknacker | engine/lite/request-response/app/src/main/scala/pl/touk/nussknacker/engine/requestresponse/http/RequestResponseHttpApp.scala | Scala | apache-2.0 | 2,259 |
package uk.gov.dvla.iep.testing.rabbitmq
import java.text.SimpleDateFormat
import java.util.concurrent.LinkedBlockingQueue
import com.rabbitmq.client.AMQP.BasicProperties
import com.rabbitmq.client._
import org.codehaus.jackson.map.ObjectMapper
import scala.collection.JavaConversions._
import scala.collection.mutable
object QueueUtil {
  // Connection details for the local test broker.
  private val rabbitmqHost = "localhost"
  private val rabbitmqUser = "guest"
  private val rabbitmqPass = "guest"

  /** Helper for a single exchange/queue/binding configuration. */
  def apply(exchange: String, queue: String, binding: String): QueueUtil =
    new QueueUtil(QueueConfig(exchange, queue, binding) :: Nil)

  /** Creates a helper declaring each of the given queue configurations. */
  def apply(queues: QueueConfig*) = new QueueUtil(queues)
}
/**
 * Test helper wrapping a RabbitMQ connection and channel. On construction it
 * declares every configured exchange/queue/binding, then offers simple
 * synchronous send/receive operations, either as raw strings or via a Jackson
 * object mapper.
 */
class QueueUtil(queueConfigs: Seq[QueueConfig]) {

  val connection = makeConnection
  val channel = initChannel

  // Shared mapper for (de)serialising message payloads.
  private val objectMapper = getMapper

  /** Number of messages currently waiting on `queue`. */
  def msgCount(queue: String): Int =
    channel.queueDeclarePassive(queue).getMessageCount

  /** Registers and returns a buffering consumer on `queue` (manual-ack mode). */
  def consumer(queue: String): QueueingConsumer = {
    val consumer = new QueueingConsumer(channel)
    channel.basicConsume(queue, false, consumer)
    consumer
  }

  /**
   * Send using Jackson Object Mapper to produce message bytes
   */
  def send(exchange: String, msg: AnyRef, headers: mutable.Map[String, AnyRef] = mutable.Map.empty) {
    val bytes = objectMapper.writeValueAsBytes(msg)
    _send(exchange, bytes, headers)
  }

  /**
   * Send String with no object mapping
   */
  def sendString(exchange: String, msg: String, headers: mutable.Map[String, AnyRef] = mutable.Map.empty) {
    _send(exchange, msg.getBytes, headers)
  }

  /**
   * Receive String with no object mapping.
   * NOTE(review): decodes with the platform default charset while senders use
   * `String.getBytes` defaults too -- confirm both sides agree on encoding.
   */
  def receiveString(queue: String): Option[String] = {
    val response = channel.basicGet(queue, true)
    if (response == null) None
    else Some(new String(response.getBody))
  }

  /**
   * Receive using Jackson Object Mapper
   */
  def receive[T](queue: String, `type`: Class[T]): Option[T] = {
    val response = channel.basicGet(queue, true)
    if (response == null) None
    else Some(objectMapper.readValue(response.getBody, `type`))
  }

  /**
   * Receive multiple using Jackson Object Mapper, polling for the whole
   * timeout window. NOTE(review): this is a tight busy-poll with no sleep
   * between basicGet calls; acceptable for tests, but consider a short pause
   * if CPU usage matters.
   */
  def receive[T](queue: String, `type`: Class[T], timeoutInMillis: Int): List[T] = {
    val startTime: Long = System.currentTimeMillis
    var receivedList = List[T]()
    do {
      val received = receive(queue, `type`)
      if (received.isDefined) {
        receivedList :+= received.get
      }
    } while (System.currentTimeMillis - startTime < timeoutInMillis)
    receivedList
  }

  /** Acknowledges a single delivery by its tag. */
  def acknowledge(deliveryTag: Long): Unit = {
    channel.basicAck(deliveryTag, false)
  }

  /** Removes all messages from `queue`. */
  def purge(queue: String) {
    channel.queuePurge(queue)
  }

  /** Closes the channel, then the connection. */
  def close() {
    channel.close()
    connection.close()
  }

  private def makeConnection: Connection = {
    val factory = new ConnectionFactory
    factory.setHost(QueueUtil.rabbitmqHost)
    // Fixed: username/password were previously swapped (setUsername received
    // rabbitmqPass and vice versa); the bug was masked because both are "guest".
    factory.setUsername(QueueUtil.rabbitmqUser)
    factory.setPassword(QueueUtil.rabbitmqPass)
    factory.newConnection
  }

  // Declares every configured exchange, queue and binding on a fresh channel.
  private def initChannel: Channel = {
    val channel = connection.createChannel
    queueConfigs.foreach { qc =>
      channel.exchangeDeclare(qc.exchange, "direct", true)
      channel.queueDeclare(qc.name, true, false, false, null)
      channel.queueBind(qc.name, qc.exchange, qc.binding)
    }
    channel
  }

  // Looks up the configuration registered for `exchangeName`, if any.
  private def config(exchangeName: String): Option[QueueConfig] =
    queueConfigs.find(_.exchange == exchangeName)

  private def getMapper: ObjectMapper = {
    val mapper: ObjectMapper = new ObjectMapper
    mapper.setDateFormat(new SimpleDateFormat("yyyy-MM-dd"))
    mapper
  }

  // Publishes to the binding configured for `exchange`; silently does nothing
  // when the exchange is unknown.
  private def _send(exchange: String, bytes: Array[Byte], headers: mutable.Map[String, AnyRef] = mutable.Map.empty): Unit = {
    val props = new BasicProperties().builder()
    if (headers.nonEmpty) props.headers(headers)
    config(exchange).foreach { qc =>
      channel.basicPublish(qc.exchange, qc.binding, props.build, bytes)
    }
  }
}
/** Declares one exchange/queue pair plus the routing key binding them. */
case class QueueConfig(exchange: String, name: String, binding: String)

/** A message received from the broker, as buffered by QueueingConsumer.
  * NOTE(review): `body` is an Array, so case-class equality compares it by
  * reference, not content -- confirm no test relies on Delivery equality. */
case class Delivery(consumerTag: String, envelope: Envelope, properties: BasicProperties, body: Array[Byte])
/** Consumer that buffers incoming deliveries so tests can pull them synchronously. */
class QueueingConsumer(channel: Channel) extends DefaultConsumer(channel) {

  // Unbounded buffer of not-yet-consumed deliveries.
  private val pending = new LinkedBlockingQueue[Delivery]()

  /** Invoked by the client library; enqueues the delivery for later retrieval. */
  override def handleDelivery(consumerTag: String,
                              envelope: Envelope,
                              properties: BasicProperties,
                              body: Array[Byte]): Unit =
    pending.put(Delivery(consumerTag, envelope, properties, body))

  /** Blocks until a delivery is available and returns it. */
  def nextDelivery: Delivery = pending.take()
} | dvla/sdl-opensource | test-helpers/src/main/scala/uk/gov/dvla/iep/testing/rabbitmq/QueueUtil.scala | Scala | mit | 4,509 |
package com.github.mdr.mash.inference
import com.github.mdr.mash.ns.core.BooleanClass
import com.github.mdr.mash.parser.AbstractSyntax._
import com.github.mdr.mash.runtime.MashBoolean
/** A call argument enriched with inferred type information. */
sealed trait TypedArgument {
  // The argument's value/type info, when one is implied (flags without a
  // value imply boolean true).
  def argValueOpt: Option[ValueInfo]
}
object TypedArgument {

  /** A positional argument carrying its inferred value info. */
  case class PositionArg(arg: ValueInfo) extends TypedArgument {
    def argValueOpt = Some(arg)
  }

  /** A --flag, optionally with an explicit value; without one it implies true. */
  case class LongFlag(flag: String, valueOpt: Option[ValueInfo]) extends TypedArgument {
    def argValueOpt = valueOpt orElse Some(ValueInfo(Some(MashBoolean.True), Some(BooleanClass)))
  }

  /** A bundle of single-letter flags (e.g. -ab); each implies true. */
  case class ShortFlag(flags: Seq[String]) extends TypedArgument {
    val argValueOpt = Some(ValueInfo(Some(MashBoolean.True), Some(BooleanClass)))
  }

}
object TypedArguments {

  /** Type-level view of all arguments of the given invocation expression. */
  def from(invocationExpr: InvocationExpr): TypedArguments = from(invocationExpr.arguments)

  /** Type-level view of the given syntactic arguments. */
  def from(arguments: Seq[Argument]): TypedArguments = TypedArguments(arguments.map(makeTypedArg))

  // Translates one syntactic argument, capturing constant value and type info.
  private def makeTypedArg(arg: Argument): TypedArgument = arg match {
    case Argument.PositionArg(expr, _) =>
      TypedArgument.PositionArg(ValueInfo(expr.constantValueOpt, expr.typeOpt))
    case Argument.ShortFlag(flags, _) =>
      TypedArgument.ShortFlag(flags)
    case Argument.LongFlag(flag, valueOpt, _) =>
      TypedArgument.LongFlag(flag, valueOpt.map(value => ValueInfo(value.constantValueOpt, value.typeOpt)))
  }

}
/** The typed arguments of a single invocation, with flag/position accessors. */
case class TypedArguments(arguments: Seq[TypedArgument] = Seq()) {

  /** Value info of the positional arguments, in call order. */
  def positionArgs: Seq[ValueInfo] =
    arguments.collect { case TypedArgument.PositionArg(valueInfo) => valueInfo }

  /** Names of all flags supplied without an explicit value. */
  def argSet: Set[String] = arguments.flatMap {
    case TypedArgument.ShortFlag(flags)     => flags
    case TypedArgument.LongFlag(flag, None) => Seq(flag)
    case _                                  => Seq()
  }.toSet

  /** Long-flag names mapped to their (possibly absent) explicit values. */
  def argValues: Map[String, Option[ValueInfo]] = arguments.collect {
    case TypedArgument.LongFlag(flag, valueOpt) => flag -> valueOpt
  }.toMap

  /** True if `name` was supplied as a flag, with or without a value. */
  def isProvidedAsNamedArg(name: String): Boolean =
    argSet.contains(name) || argValues.contains(name)

}
| mdr/mash | src/main/scala/com/github/mdr/mash/inference/TypedArguments.scala | Scala | mit | 1,991 |
package net.tomasherman.specus.server.api.net.session
import net.tomasherman.specus.common.api.net.Packet
/**
* This file is part of Specus.
*
* Specus is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Specus is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*
* You should have received a copy of the GNU General Public License
* along with Specus. If not, see <http://www.gnu.org/licenses/>.
*
*/
/** Abstraction representing a connected client that knows how to write data
  * to the client and how to close the connection. */
trait Session {
  /** Writes a packet to the connected client.
    * @param data the packet to be written */
  def write(data: Packet)

  /** Closes the underlying connection. */
  def close()
} | tomasherman/specus | server_api/src/main/scala/net/session/Session.scala | Scala | gpl-3.0 | 1,045 |
package info.armado.ausleihe.client.transport.dataobjects.entities
import javax.xml.bind.annotation.{XmlAccessType, XmlAccessorType, XmlRootElement}
object IdentityCardDTO {
  /** A card with barcode and owner -- NOTE(review): `owner` is presumably the
    * person the card is currently bound to; confirm in the lending domain. */
  def apply(barcode: String, owner: String): IdentityCardDTO = new IdentityCardDTO(barcode, owner)

  /** A card with no current owner (owner field stays null). */
  def apply(barcode: String): IdentityCardDTO = new IdentityCardDTO(barcode, null)

  /** Extractor exposing the nullable owner field as an Option. */
  def unapply(identityCardData: IdentityCardDTO): Option[(String, Option[String])] =
    Some((identityCardData.barcode, Option(identityCardData.owner)))
}
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
class IdentityCardDTO(var barcode: String, var owner: String) extends LendingEntityDTO {

  // No-argument constructor required by JAXB.
  def this() = this(null, null)

  /** Two cards are equal when their barcodes match and they agree on the
    * (possibly absent) owner; a null owner counts as "no owner". */
  override def equals(other: Any): Boolean = other match {
    case that: IdentityCardDTO =>
      that.barcode == barcode && Option(that.owner) == Option(owner)
    case _ =>
      false
  }

  /** Hash consistent with equals: a 31-based fold over barcode and owner,
    * where null fields contribute 0. */
  override def hashCode: Int =
    Seq(barcode, owner).foldLeft(1) { (acc, field) =>
      31 * acc + (if (field == null) 0 else field.hashCode)
    }

  override def toString: String = s"IdentityCardData($barcode, ${Option(owner)})"
}
| Spielekreis-Darmstadt/lending | lending-client-interfaces/src/main/scala/info/armado/ausleihe/client/transport/dataobjects/entities/IdentityCardDTO.scala | Scala | apache-2.0 | 1,253 |
package howitworks.wip
import cats.data.OptionT
import rng.{Cmwc5, RNG}
/**
 * Exploratory spec for a bird-fight simulation built on a pure random-number
 * generator threaded through cats' State monad. The inline `mustBe` checks pin
 * down the exact pseudo-random sequences produced by Cmwc5.default.
 *
 * Fix: selectFighters previously used index i1 for BOTH fighters, so a bird
 * always fought itself; the second fighter now uses i2.
 */
class PigeonsVsEagels extends wp.Spec {
  "pigeons vs eagels" in {
    import cats.data.State
    import cats.instances.list._
    import cats.instances.option._
    import cats.syntax.traverse._
    type StateRNG[A] = State[RNG, A]
    val nextLong: State[RNG, Long] = State { rng => (rng.next, rng.run) }
    val nextPositiveLong: State[RNG, Long] = nextLong.map(math.abs)
    def nextLongWithinRange(min: Long, max: Long): State[RNG, Long] = {
      require(max >= min)
      nextPositiveLong.map(x => if(min == max) min else (x % (max-min)) + min)
    }
    def nextIntWithinRange(min: Int, max: Int): State[RNG, Int] = nextLongWithinRange(min, max).map(_.toInt)
    List.fill(1000)(nextLongWithinRange(-400L, +500L)).sequenceU.runA(Cmwc5.default).value
      .filter(x => x < -400 || x > 500) mustBe List() withClue " there must be no items out of scope"
    nextLongWithinRange(0L, 0L).runA(Cmwc5.default).value mustBe 0L
    nextLongWithinRange(10L, 10L).runA(Cmwc5.default).value mustBe 10L
    val nextDouble: State[RNG, Double] = nextLong.map(java.lang.Double.longBitsToDouble)
    //next double in [0.0, 1.0).
    val nextDouble01: State[RNG, Double] = nextLong.map(x => (x >>> 11) * 1.1102230246251565e-16)
    //next double -1.0 or +1.0
    val nextDoubleSign: State[RNG, Double] = nextLong.map(x => if (x < 0) -1.0 else 1.0)
    //next double (-1.0, 1.0)
    val `nextDouble(-1,1)`: State[RNG, Double] = for {
      d <- nextDouble01
      sign <- nextDoubleSign
    } yield d * sign
    //sanity checks
    List.fill(20)(`nextDouble(-1,1)`).sequenceU.runA(Cmwc5.default)
      .value mustBe List(-0.29316522241869947, -0.9697575525067785, 0.3068910261461185, -0.7702207907655839, 0.5553678830019705, -0.4432261118112225, -0.28424319784829355, -0.08791050640342224, -0.5670936912087755, -0.9768893061441374, -0.7029143005768838, -0.05164059676953758, -0.4367337078823904, 0.8549730811337944, -0.3433512512200432, -0.8475146263698287, 0.9332563422003102, -0.5769324196846992, -0.7697366225549156,
      -0.7114157338601429) withClue " example values differ"
    //sanity checks
    List.fill(1000)(`nextDouble(-1,1)`).sequenceU.runA(Cmwc5.default).value
      .filter(x => x < -1.0 || x > 1.0 ) mustBe List() withClue " there must be no items out of scope"
    //[min,max] (inclusive)
    def nextDoubleWithinRange(min: Double, max: Double): State[RNG, Double] = nextDouble01.map { x =>
      require(min <= max)
      val distance = max - min
      (x * distance) + min
    }
    //sanity checks
    nextDoubleWithinRange(1.2401, 1.2401).runA(Cmwc5.default).value mustBe 1.2401
    //sanity checks
    List.fill(10)(nextDouble).sequenceU.runA(Cmwc5.default).value mustBe List(3.4572161265504856E53, -1.880662019667126E-96, -1.9051316263851603E271, -1.641203669455796E-52, 2.831491392235305E70, 4.56631116003441E-160, -1.7644117361033466E25, -1.4365442467425055E286, -2.1437851331099934E-240, 5.857929779579694E211)
    //sanity checks
    List.fill(1000)(nextDoubleWithinRange(-1000.0, 21.5)).sequenceU.runA(Cmwc5.default).value
      .filter(x => x <= -1000.0 || x >= 21.5 ) mustBe List() withClue " there must be no items out of scope"
    //results in x +- variance, where min < x < max
    def nextWithinRangeWithVariance(min: Double, max: Double, `+- variance`: Double): State[RNG, Double] = for {
      base <- nextDoubleWithinRange(min, max)
      variance <- nextDoubleWithinRange(-`+- variance`, `+- variance`)
      random = base + variance
    } yield random
    //sanity checks
    List.fill(1000)(nextWithinRangeWithVariance(-1000.0, 21.5, 50.0)).sequenceU.runA(Cmwc5.default).value
      .filter(x => x <= (-1000.0-50.0) || x >= (21.5+50.0) ) mustBe List() withClue " there must be no items out of scope"
    def `nextDouble(-n,n)`(n: Double): State[RNG, Double] = `nextDouble(-1,1)`.map(x => n * x)
    //model
    type BirdKey = Int
    type Bird = Attack
    type LifePoint = Int
    case class Attack(value: Double)
    object Attack {
      val variance: Double = 50.0
      def nextAttack(min: Attack, max: Attack): State[RNG, Attack] = nextDoubleWithinRange(min.value, max.value).map(Attack.apply)
    }
    case class Bravery(value: Double)
    object Bravery {
      val variance: Double = 20.0
      def nextBravery(min: Bravery, max: Bravery): State[RNG, Bravery] = nextDoubleWithinRange(min.value, max.value).map(Bravery.apply)
    }
    case class BirdGroup(
      aliveKeys: List[BirdKey],
      attack: Map[BirdKey, Attack],
      bravery: Map[BirdKey, Bravery],
      lifePoints: Map[BirdKey, LifePoint]
    ) {
      def size: Int = attack.size
      def isAlive(key: BirdKey): Boolean = lifePoints.get(key).get > 0
    }
    def nextBirdGroup(
      size: Int,
      minAttack: Attack = Attack(1.0),
      maxAttack: Attack = Attack(100.0),
      minBravery: Bravery = Bravery(2.0),
      maxBravery: Bravery = Bravery(20.0)
    ): State[RNG, BirdGroup] = for {
      attack <- List.fill(size)(Attack.nextAttack(minAttack, maxAttack)).sequenceU
      bravery <- List.fill(size)(Bravery.nextBravery(minBravery, maxBravery)).sequenceU
    } yield BirdGroup(
      aliveKeys = (0 until size).toList,
      attack = attack.zipWithIndex.map(_.swap).toMap,
      bravery = bravery.zipWithIndex.map(_.swap).toMap,
      lifePoints = List.fill(size)(3).zipWithIndex.map(_.swap).toMap
    )
    //sanity checks
    val exampleGroupSize = 10
    val exampleGroup: BirdGroup = nextBirdGroup(exampleGroupSize).runA(Cmwc5.default).value
    exampleGroup.size mustBe exampleGroupSize
    exampleGroup.isAlive(0) mustBe true
    exampleGroup.aliveKeys.size mustBe exampleGroupSize
    exampleGroup.aliveKeys.toSet mustBe exampleGroup.attack.keySet
    exampleGroup.aliveKeys.toSet mustBe exampleGroup.bravery.keySet
    exampleGroup.aliveKeys.toSet mustBe exampleGroup.lifePoints.keySet
    exampleGroup.attack.size mustBe exampleGroupSize
    exampleGroup.bravery.size mustBe exampleGroupSize
    exampleGroup.lifePoints.size mustBe exampleGroupSize
    //TODO algebra for attack, bravery, min, max etc
    def nextIndex(list: List[_]): State[RNG, Option[Int]] =
      (if (list.isEmpty) None else Some(list.size))
        .map (size => nextIntWithinRange(0, size))
        .sequenceU
    //sanity checks
    nextIndex(Nil).runA(Cmwc5.default).value mustBe None
    nextIndex(List('a)).runA(Cmwc5.default).value mustBe Some(0)
    List.fill(1000)(nextIndex(List.fill(100)('whatever))).sequenceU.runA(Cmwc5.default)
      .value.map(_.get).filter(x => x < 0 || x > 99) mustBe List() withClue " no values out of scope, all defined"
    def next2Indexes(l: List[_]): State[RNG, Option[(Int, Int)]] = for {
      maybeIndex1 <- nextIndex(l)
      maybeIndex2 <- maybeIndex1.map(index1 => l.patch(index1, Nil, 1)).map(x => nextIndex(x)).sequenceU.map(_.flatten)
    } yield {
      maybeIndex1.flatMap(i1 => maybeIndex2.map{i2 =>
        val normalizedI2 = if(i2 < i1) i2 else i2+1
        (i1, normalizedI2)
      })
    }
    //sanity checks
    // next2Indexes(Nil).runA(Cmwc5.default).value mustBe None
    // next2Indexes(List('a)).runA(Cmwc5.default).value mustBe None
    next2Indexes(List('a, 'b)).runA(Cmwc5.default).value mustBe Some((1,0))
    next2Indexes(List('a, 'b, 'c)).runA(Cmwc5.default).value mustBe Some((1,0))
    next2Indexes(List('a, 'b, 'c, 'c)).runA(Cmwc5.default).value mustBe Some((1,0))
    List.fill(1000)(next2Indexes(List.fill(100)('whatever))).sequenceU.runA(Cmwc5.default)
      .value.map(_.get).filter{case (i1, i2) =>
        i1 == i2 ||
        i1 < 0 ||
        i2 < 0 ||
        i1 > 99 ||
        i2 > 99
      } mustBe List() withClue " no values out of scope, all defined"
    List.fill(20)(next2Indexes(List.fill(100)('whatever))).sequenceU.runA(Cmwc5.default)
      .value.map(_.get) mustBe List((17,46), (40,70), (76,78), (53,76), (8,75), (53,80), (12,47), (50,46), (98,14), (59,75), (3,78), (44,53), (53,27), (59,57), (21,65), (72,67), (35,17), (95,69), (95,48), (71,58))
    def next2Indexes2(l: List[_]): OptionT[StateRNG, (Int, Int)] = for {
      i1 <- OptionT[StateRNG, Int](nextIndex(l))
      i2 <- OptionT[StateRNG, Int](nextIndex(l.patch(i1, Nil, 1)))
    } yield {
      val normalizedI2 = if(i2 < i1) i2 else i2+1
      (i1, normalizedI2)
    }
    //sanity checks
    next2Indexes2(List('a, 'b)).value.runA(Cmwc5.default).value mustBe Some((1,0))
    next2Indexes2(List('a, 'b, 'c)).value.runA(Cmwc5.default).value mustBe Some((1,0))
    next2Indexes2(List('a, 'b, 'c, 'c)).value.runA(Cmwc5.default).value mustBe Some((1,0))
    List.fill(1000)(next2Indexes2(List.fill(100)('whatever)).value).sequenceU.runA(Cmwc5.default)
      .value.map(_.get).filter{case (i1, i2) =>
        i1 == i2 ||
        i1 < 0 ||
        i2 < 0 ||
        i1 > 99 ||
        i2 > 99
      } mustBe List() withClue " no values out of scope, all defined"
    List.fill(20)(next2Indexes2(List.fill(100)('whatever)).value).sequenceU.runA(Cmwc5.default)
      .value.map(_.get) mustBe List((17,46), (40,70), (76,78), (53,76), (8,75), (53,80), (12,47), (50,46), (98,14), (59,75), (3,78), (44,53), (53,27), (59,57), (21,65), (72,67), (35,17), (95,69), (95,48), (71,58))
    // picks two DISTINCT alive birds to fight; None when fewer than two are alive.
    // fix: the second fighter is taken from i2 -- previously i1 was used twice,
    // which made a bird always fight itself.
    def selectFighters(birdGroup: BirdGroup): State[RNG, Option[(BirdKey, BirdKey)]] = next2Indexes(birdGroup.aliveKeys)
      .map(_.map{ case (i1, i2) => (birdGroup.aliveKeys(i1), birdGroup.aliveKeys(i2))})
    //
    //
    //  def iterate(birdGroup: BirdGroup): State[RNG, BirdGroup] = {
    //
    //  }
    //
    //  /**
    //    * @return winner
    //    */
    //  def fight(a: BirdKey, b: BirdKey, birdGroup: BirdGroup): State[RNG, BirdKey] = for {
    //    augmnentAttackA <- `nextDouble(-n,n)`(Attack.variance)
    //    augmnentAttackB <- `nextDouble(-n,n)`(Attack.variance)
    //    attackA = birds(a).value + augmnentAttackA
    //    attackB = birds(b).value + augmnentAttackB
    //  } yield (if (attackA >= attackB) a else b)
  }
}
| jawp/wicked-playground | modules/server/src/test/scala/howitworks/wip/PigeonsVsEagels.scala | Scala | mit | 10,232 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cs.ucla.edu.bwaspark.worker1
import cs.ucla.edu.bwaspark.datatype._
import scala.math._
import scala.collection.mutable.MutableList
import cs.ucla.edu.bwaspark.worker1.SAPos2RefPos._
import java.util.TreeSet
import java.util.Comparator
import cs.ucla.edu.bwaspark.debug.DebugFlag._
//standalone object for generating all MEM chains for each read
object MemChain {

  /**
   * Advance the SMEM iterator and return the next batch of BWT bi-intervals
   * (super-maximal exact matches) starting at the iterator's current position.
   *
   * Mutates `itr` (start position, matches and sub buffers) as a side effect.
   * Returns null when the read is exhausted or only ambiguous (N) bases remain.
   *
   * If the longest SMEM is long (>= splitLen) and nearly unique
   * (occurrences <= splitWidth), reseeding is re-done from its middle point
   * and the surviving sub-seeds are merged into the result.
   */
  //get the next start point for forward and backward extension
  def smemNext(itr: SMemItrType, splitLen: Int, splitWidth: Int, startWidth: Int): Array[BWTIntvType] = {
    if (debugLevel > 0) {
      println("Perform function smemNext")
    }
    //if the start point has exceeded the length
    //or it has gone back to negative number
    //return null
    if (itr.start >= itr.len || itr.start < 0) null
    else {
      //skip ambiguous bases
      while (itr.start < itr.len && itr.query(itr.start) > 3) itr.start += 1
      //if all the bases left are N bases, return null
      if (itr.start == itr.len) null
      else {
        if (debugLevel > 0) {
          println("smemNext: non-trivial execution")
        }
        //skipping all the N bases, the point is actually the real start point
        var oriStart = itr.start
        //now, call the bwtSMem1 to generate the next start point
        //create a BWTSMem object to call the function
        val smemObj = new BWTSMem
        if (debugLevel > 0) {
          println("smemNext: carry out function bwtSMem1")
        }
        itr.start = smemObj.bwtSMem1(itr.bwt, itr.len, itr.query, oriStart, startWidth, itr.matches, itr.tmpVec0, itr.tmpVec1)
        if (debugLevel > 0) {
          println ("The original run of bwtSMem1")
          itr.matches.map(ele => ele.print())
        }
        if (debugLevel > 0) {
          println("smemNext: the first run of function bwtSMem1 done")
        }
        assert (itr.matches.length > 0) //in theory, there is at least one match
        //looking for the longest match
        var maxBWTIntv = itr.matches.maxBy(ele => (ele.endPoint - ele.startPoint))
        var maxLength = maxBWTIntv.endPoint - maxBWTIntv.startPoint
        var middlePointOfMax = (maxBWTIntv.endPoint + maxBWTIntv.startPoint) / 2
        if (debugLevel > 0) {
          print("Max BWT Interval: ")
          maxBWTIntv.print()
          println ("Max length: " + maxLength)
          println ("Middle point is " + middlePointOfMax)
        }
        //if the longest SMEM is unique and long
        if (splitLen > 0 && splitLen <= maxLength && maxBWTIntv.s <= splitWidth) {
          if (debugLevel > 0) {
            print("The max BWT is unique and long: ")
            maxBWTIntv.print()
          }
          //re-do the seeding process starting from the middle of the longest MEM
          val tmp = smemObj.bwtSMem1(itr.bwt, itr.len, itr.query, middlePointOfMax, (maxBWTIntv.s + 1).toInt, itr.sub, itr.tmpVec0, itr.tmpVec1)
          if (debugLevel > 0) {
            println("The reseeding run of bwtSMem1")
            itr.sub.map(ele => ele.print())
          }
          //only some seeds in the sub array can still be there
          //1)length of the seed should be no less than maxLength/2
          //2)endPoint should exceed original start point
          itr.sub = itr.sub.filter(ele => ((ele.endPoint - ele.startPoint) >= maxLength / 2) &&
            ele.endPoint > oriStart)
          //merge itr.matches and itr.sub and sort by start point (end point if start point equals)
          itr.matches = (itr.matches.++(itr.sub)).sortWith((a, b) => (if (a.startPoint < b.startPoint) true else if (a.startPoint > b.startPoint) false else a.endPoint > b.endPoint))
        }
        var res: Array[BWTIntvType] = itr.matches.toArray
        if (debugLevel > 0) {
          println("The final result of bwtSMem1 for one iteration")
          res.map(ele => ele.print())
        }
        res
      }
    }
  }

  /**
   * Build the chain tree for one read: repeatedly pull SMEM bi-intervals with
   * smemNext, turn each suffix-array occurrence into a seed, and either merge
   * the seed into the closest existing chain or start a new chain.
   *
   * Returns a TreeSet of chains ordered by reference position, or null when
   * no chain was created. Statement order matters: smemNext mutates smemItr.
   */
  //generate a chain tree for each read
  def generateChainTree(opt: MemOptType, l_pac: Long, smemItr: SMemItrType): TreeSet[MemChainType] = {
    if (debugLevel > 0) {
      println("Perform function generateChainTree")
    }
    //calculate splitLen
    val splitLen = min((opt.minSeedLen * opt.splitFactor + 0.499).toInt, smemItr.len)
    //!!!be careful!!!
    //we temporarily assign startWidth as 1 other than 2
    //but it need be fixed
    //val startWidth = { if (opt.flag & MEM_F_NO_EXACT) 2 else 1 }
    val startWidth = 1
    //the mutable list is for storing all the bi-intervals generated from specific point
    var bwtIntvOnPoint: Array[BWTIntvType] = null
    //the return value: mutable TreeSet which maintains all the chains
    var chainTree: TreeSet[MemChainType] = null
    //if bi-intervals responding to specific point are generated
    //go through each seed, either
    //1) merge it to existing chain
    //2) generate new chain from it
    if (debugLevel > 0) {
      println("generateChainTree: carry out the main while loop with start width " + startWidth + ", split length " + splitLen + ", and split width " + opt.splitWidth)
    }
    bwtIntvOnPoint = smemNext(smemItr, splitLen, opt.splitWidth, startWidth)
    if (debugLevel > 0) {
      println("generateChainTree: finish the 1st smemNext")
    }
    var idx = 0;
    while ( bwtIntvOnPoint != null ) {
      if (debugLevel > 0) {println("It is the " + idx + "th times for the while loop"); idx += 1}
      //traverse all the seeds
      for (i <- 0 until bwtIntvOnPoint.length) {
        //end - start = length of the seed
        val seedLen = bwtIntvOnPoint(i).endPoint - bwtIntvOnPoint(i).startPoint
        //ignore the seed if it it too short or too repetitive
        if (seedLen < opt.minSeedLen || bwtIntvOnPoint(i).s > opt.maxOcc) {
          //do nothing
        }
        //the statements in the else clause are the main part
        //it will traverse all the possible positions in the suffix array(reference)
        else {
          //traverse each aligned position
          for (j <- 0 until bwtIntvOnPoint(i).s.toInt) {
            //prepare for generating a new seed
            if (debugLevel > 0) println("The loop index for j-loop is: " + j)
            if (debugLevel > 0) println("The parameter for calling suffixArrayPos2ReferencePos: " + (bwtIntvOnPoint(i).k + j))
            var rBeg = suffixArrayPos2ReferencePos(smemItr.bwt, bwtIntvOnPoint(i).k + j)
            var qBeg = bwtIntvOnPoint(i).startPoint
            var len = seedLen
            //handle edge cases
            if (rBeg < l_pac && l_pac < rBeg + len) {
              //do nothing if a seed crossing the boundary
            }
            else {
              //generate a seed for this position
              val newSeed = new MemSeedType(rBeg, qBeg, len)
              //find the closest chain in the existing chain tree
              //the closest chain should satisfy
              //1)chain.pos <= seed.rbegin
              //2)the chain.pos is the largest one of all chains that satisfy 1)
              def findClosestChain(chainTree: TreeSet[MemChainType], refPoint: Long): MemChainType = {
                //if the tree is empty, return null
                if (chainTree == null) null
                else {
                  //create a temporary chain for finding the lower chain
                  //because lower.pos < tmpChain.pos
                  //having refPoint + 1 to handle if lower.pos == tmpChain.pos
                  val tmpChain = new MemChainType(refPoint + 1, null)
                  //if (debugLevel > 0) {
                  //  println("The tmpChain's refPoint is: " + (refPoint+1))
                  //  println("Display the current chain tree")
                  //  var itr = chainTree.iterator()
                  //  while (itr.hasNext) {
                  //    itr.next().print()
                  //  }
                  //}
                  val res = chainTree.lower(tmpChain)
                  val tmp = chainTree.higher(tmpChain)
                  if (debugLevel > 0) {
                    if (res == null && chainTree.size != 0) println("Chain Tree is not empty but no lower node found")
                    else { println("Lower chain found, which is:"); res.print()}
                    if (tmp == null && chainTree.size != 0) println("Chain Tree is not empty but no higher node found")
                    else { println("Higher chain found, which is:"); tmp.print()}
                  }
                  res
                }
              }
              val targetChain = findClosestChain(chainTree, newSeed.rBeg)
              if (debugLevel > 0) {
                println("New seed is: (rBeg, qBeg, len) " + rBeg + " " + qBeg + " " + len)
              }
              //test if the seed can be merged into some existing chain
              //return true/false
              //if return true, actually also DID the merging task
              //define tryMergeSeedToChain to test if a seed can be merged with some chain in the chain tree
              def tryMergeSeedToChain(opt: MemOptType, l_pac: Long, chain: MemChainType, seed: MemSeedType): Boolean = {
                //get query begin and end, reference begin and end
                //!!!to clarify!!!: the order of seeds in a chain
                //qBeg sorting? or rBeg sorting?
                if (debugLevel > 0) {
                  println("Trying merge a seed to a chain")
                  println("The seed is: (rBeg, qBeg, len) " + seed.rBeg + " " + seed.qBeg + " " + seed.len)
                  println("The chain is: ")
                  chain.print()
                }
                val qBegChain = chain.seeds.head.qBeg
                val rBegChain = chain.seeds.head.rBeg
                val qEndChain = chain.seeds.last.qBeg + chain.seeds.last.len
                val rEndChain = chain.seeds.last.rBeg + chain.seeds.last.len
                if (debugLevel > 0) println("qBeg, rBeg, qEnd, rEnd of the chain are: " + qBegChain + " " + rBegChain + " " + qEndChain + " " + rEndChain)
                //if the seed is fully contained by the chain, return true
                if (qBegChain <= seed.qBeg && qEndChain >= seed.qBeg + seed.len &&
                  rBegChain <= seed.rBeg && rEndChain >= seed.rBeg + seed.len)
                  true
                //if not in the same strand (crossing l_pac boundary), return false
                else if ( (rBegChain < l_pac || chain.seeds.last.rBeg < l_pac) &&
                  seed.rBeg >= l_pac)
                  false
                else {
                  //follow the conditions judged in original BWA test_and_merge function
                  val x: Int = seed.qBeg - chain.seeds.last.qBeg // always non-negtive???
                  val y: Int = (seed.rBeg - chain.seeds.last.rBeg).toInt
                  if (y >= 0 &&
                    x - y <= opt.w &&
                    y - x <= opt.w &&
                    x - chain.seeds.last.len.toInt < opt.maxChainGap &&
                    y - chain.seeds.last.len.toInt < opt.maxChainGap) {
                    //all the conditions are satisfied? growing the chain
                    chain.seeds += seed
                    true //return true
                  }
                  else false
                }
              }
              val isMergable = if (targetChain == null) false else tryMergeSeedToChain(opt, l_pac, targetChain, newSeed)
              if (debugLevel > 0) {
                if (!isMergable) println("Cannot be merged to any existing chain")
                else targetChain.print()
              }
              //add the seed as a new chain if not mergable
              if (!isMergable) {
                val newSeedList = MutableList[MemSeedType](newSeed)
                val newChain = new MemChainType(rBeg, newSeedList)
                //Push the new chain to the chain tree
                //1)if the chainTree is empty
                if (chainTree == null) {
                  //using java style to new a TreeSet[MemChainType]
                  chainTree = new TreeSet[MemChainType](new Comparator[MemChainType]() {
                    def compare(a: MemChainType, b: MemChainType): Int = {
                      if (a.pos > b.pos) 1
                      else if (a.pos < b. pos) -1
                      else 0
                    }
                  } )
                  //insert the chain to the tree
                  chainTree.add(newChain)
                }
                //2)if the chainTree is not empty, directly add it
                else chainTree.add(newChain)
              }
            }
          }
        }
      }
      bwtIntvOnPoint = smemNext(smemItr, splitLen, opt.splitWidth, startWidth)
    }
    //finally, return the tree
    if (debugLevel > 0) {
      println("End function generateChainTree")
    }
    chainTree
  }

  /**
   * Flatten the chain tree into an array (in tree order) and materialize each
   * chain's seed list into its seedsRefArray. Returns null for a null tree.
   */
  def traverseChainTree(chainTree: TreeSet[MemChainType]): Array[MemChainType] = {
    //if the tree is empty, return null
    if (chainTree == null) null
    //else, it's gonna be a simple map() task
    else {
      val itr = chainTree.iterator
      val chains = new Array[MemChainType](chainTree.size).map(ele => itr.next)
      chains.foreach(ele => {
        ele.seedsRefArray = ele.seeds.toArray
      } )
      chains
    }
  }

  /**
   * Entry point: generate all MEM chains for one read (2-bit encoded in `seq`,
   * of length `len`). Returns null when the read is shorter than the minimum
   * seed length or when no chains could be built.
   */
  //generate chains for each read
  def generateChains(opt: MemOptType, bwt: BWTType, l_pac: Long, len: Int, seq: Array[Byte]): Array[MemChainType] = {
    if (debugLevel > 0) {
      println("Perform function generateChains")
    }
    //if the query is shorter than the seed length, no match, return null
    if (len < opt.minSeedLen) {
      if (debugLevel > 0) {
        println("Warning: the length of read is too short")
        println("End function generateChains")
      }
      null
    }
    //the else part the real meaty part for this function
    else {
      //generate a SMemItrType object for smemNext
      val smemItr = new SMemItrType(bwt,
        seq,
        0, //the first startpoint for smemNext is 0
        len,
        new MutableList[BWTIntvType](), //matches array
        new MutableList[BWTIntvType](), //sub array
        new MutableList[BWTIntvType](), //temporary array 0
        new MutableList[BWTIntvType]()) //temporary array 1
      //generate a tree for all chains
      if (debugLevel > 0) {
        println("generateChains 1st: generate a tree of chains (start)")
      }
      val chainTree = generateChainTree(opt, l_pac, smemItr)
      if (debugLevel > 0) {
        println("generateChains 1st: generate a tree of chains (end)")
      }
      //return value, the chains to be generated for a read
      if (debugLevel > 0) {
        println("generateChains 2nd: transform the chain tree into a chain array (start)")
      }
      val chains = traverseChainTree(chainTree)
      if (debugLevel > 0) {
        println("generateChains 2nd: transform the chain tree into a chain array (end)")
      }
      if (debugLevel > 0) {
        println("End function generateChains (no warning)")
      }
      chains
    }
  }
}
| ytchen0323/cloud-scale-bwamem | src/main/scala/cs/ucla/edu/bwaspark/worker1/MemChain.scala | Scala | apache-2.0 | 16,117 |
package org.jetbrains.plugins.scala
package console
import com.intellij.execution.Executor
import com.intellij.execution.configurations._
import com.intellij.execution.filters.TextConsoleBuilderImpl
import com.intellij.execution.runners.ExecutionEnvironment
import com.intellij.execution.ui.ConsoleView
import com.intellij.openapi.options.SettingsEditor
import com.intellij.openapi.project.Project
import com.intellij.openapi.util.JDOMExternalizer
import org.jdom.Element
import org.jetbrains.plugins.scala.runner.BaseRunConfiguration
/**
* User: Alexander Podkhalyuzin
* Date: 10.02.2009
*/
class ScalaConsoleRunConfiguration(project: Project, configurationFactory: ConfigurationFactory, name: String)
        extends BaseRunConfiguration(project, configurationFactory, name) {

  // runner class launched in the forked JVM
  val mainClass = "org.jetbrains.plugins.scala.compiler.rt.ConsoleRunner"

  // Copies the settings entered in the configuration form into this configuration.
  def apply(params: ScalaConsoleRunConfigurationForm) {
    setModule(params.getModule)
    javaOptions = params.getJavaOptions
    consoleArgs = params.getConsoleArgs
    workingDirectory = params.getWorkingDirectory
  }

  // Builds the run profile state: a JVM command line whose output is attached
  // to a Scala language console (with the prompt suppressed).
  def getState(executor: Executor, env: ExecutionEnvironment): RunProfileState = {
    val builder = new TextConsoleBuilderImpl(project) {
      override def getConsole: ConsoleView = {
        val scalaConsole = new ScalaLanguageConsole(project, ScalaLanguageConsoleView.SCALA_CONSOLE)
        scalaConsole.setPrompt(null)
        scalaConsole
      }
    }
    val commandLineState = new JavaCommandLineState(env) {
      protected override def createJavaParameters: JavaParameters = {
        val parameters = createParams
        parameters.getProgramParametersList.addParametersString(consoleArgs)
        parameters
      }
    }
    commandLineState.setConsoleBuilder(builder)
    commandLineState
  }

  def getConfigurationEditor: SettingsEditor[_ <: RunConfiguration] = new ScalaConsoleRunConfigurationEditor(project, this)

  // Persists the console arguments in addition to the base-class settings.
  override def writeExternal(element: Element) {
    super.writeExternal(element)
    JDOMExternalizer.write(element, "consoleArgs", consoleArgs)
  }

  // Restores the console arguments in addition to the base-class settings.
  override def readExternal(element: Element) {
    super.readExternal(element)
    consoleArgs = JDOMExternalizer.readString(element, "consoleArgs")
  }
}
| triggerNZ/intellij-scala | src/org/jetbrains/plugins/scala/console/ScalaConsoleRunConfiguration.scala | Scala | apache-2.0 | 2,193 |
/*
* Copyright © 2014 Teo Klestrup, Carl Dybdahl
*
* This file is part of Republix.
*
* Republix is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Republix is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Republix. If not, see <http://www.gnu.org/licenses/>.
*/
package republix
import republix.sim._
import republix.io._
package object game {
import shapeless._
	// client -> game: commands a player client sends to the game server
	sealed trait Command
	// commands that are only valid inside a particular game phase
	sealed trait PhaseCommand extends Command
	// first message from a client: announces the party name it wants to play
	case class Intro(party: String) extends Command
	case class SendChat(chat: String) extends Command
	case class SetReady(ready: Boolean) extends PhaseCommand
	// proposes a new intensity for a law node
	// (semantics of update = None not visible here -- presumably "no change/repeal"; confirm)
	case class ProposeAmendment(law: GameNode, update: Option[Intensity]) extends PhaseCommand
	case class CancelChanges(law: GameNode) extends PhaseCommand
	case class VoteFor(proposer: Party) extends PhaseCommand
	// game -> client: updates pushed by the server to connected clients
	sealed trait Update
	sealed trait GenericUpdate extends Update
	sealed trait PhaseUpdate extends Update
	// tells a client which party it controls
	case class YouAre(party: Party) extends GenericUpdate
	case class IntroModel(model: GameModel) extends GenericUpdate
	case class Chat(chat: String) extends GenericUpdate
	case class SwitchPhase(newPhase: GamePhase, state: GameState) extends GenericUpdate
	case class NewParty(party: Party) extends PhaseUpdate
	case class CountryIs(country: Country) extends PhaseUpdate
	case class SetProposals(proposals: Map[(Party, GameNode), Option[Intensity]]) extends PhaseUpdate
	// case classes because it works better with shapeless
	sealed trait GamePhase
	case class NewsPhase() extends GamePhase
	case class LobbyPhase() extends GamePhase
	case class LawsPhase() extends GamePhase
	case class VotePhase(proposals: Map[(Party, GameNode), Option[Intensity]]) extends GamePhase
	case class ElectionPhase() extends GamePhase
	// todo: use id along with name
	case class Party(name: String)
	import Serial._
	// wire-format serializers; most instances are derived generically via shapeless
	implicit val countrySerial: Serial[Country] = TypeClass[Serial, Country]
	// Intensity is encoded through its underlying Double
	implicit val serialIntensity: Serial[Intensity] =
		serialInstance.project(doubleSerial,
			(x: Intensity) => x.intensity,
			Intensity.apply _)
	implicit val serialOptionIntensity: Serial[Option[Intensity]] = Serial.optionSerial(serialIntensity)
	implicit val serialNode: Serial[GameNode] = TypeClass[Serial, GameNode]
	// hand-written serializer: each Link variant is tagged with a leading byte (0/1/2)
	implicit val serialLink: Serial[Link] = new Serial[Link] { // scalac is a buggy mess
		val sd = doubleSerial
		val si = serialIntensity
		def serialize(l: Link) = l match {
			case LogisticLink(a, b) => ByteString(0.toByte) ++ sd.serialize(a) ++ sd.serialize(b)
			case LinearLink(a, b) => ByteString(1.toByte) ++ sd.serialize(a) ++ sd.serialize(b)
			case DiscontinuousLink(a, b, c) => ByteString(2.toByte) ++ si.serialize(a) ++ si.serialize(b) ++ si.serialize(c)
		}
		// NOTE(review): a tag byte other than 0/1/2 raises a MatchError here rather
		// than producing an empty result -- confirm whether corrupt/hostile input
		// can reach this point.
		def deserialize(bs: ByteString) = for {
			(kind, bs1) <- bs.extract
			res <- kind match {
				case 0 => for {
					(a, bs2) <- sd.deserialize(bs1)
					(b, bs3) <- sd.deserialize(bs2)
				} yield (LogisticLink(a, b), bs3)
				case 1 => for {
					(a, bs2) <- sd.deserialize(bs1)
					(b, bs3) <- sd.deserialize(bs2)
				} yield (LinearLink(a, b), bs3)
				case 2 => for {
					(a, bs2) <- si.deserialize(bs1)
					(b, bs3) <- si.deserialize(bs2)
					(c, bs4) <- si.deserialize(bs3)
				} yield (DiscontinuousLink(a, b, c), bs4)
			}
		} yield res
	}
	implicit val serialState: Serial[GameState] = TypeClass[Serial, GameState]
	implicit val serialModel: Serial[GameModel] = TypeClass[Serial, GameModel]
	implicit val serialParty: Serial[Party] = TypeClass[Serial, Party]
	implicit val serialPhase: Serial[GamePhase] = TypeClass[Serial, GamePhase]
	implicit val serialCommand: Serial[Command] = TypeClass[Serial, Command]
	implicit val serialUpdate: Serial[Update] = TypeClass[Serial, Update]
} | teozkr/republix-online | republix/src/main/scala/republix/game/package.scala | Scala | agpl-3.0 | 4,200 |
package at.forsyte.apalache.tla.lir.io
import at.forsyte.apalache.tla.lir._
import at.forsyte.apalache.tla.lir.oper.{TlaControlOper, TlaFunOper, TlaOper}
import at.forsyte.apalache.tla.lir.values._
import scala.collection.immutable.HashMap
import scala.collection.mutable.LinkedHashMap
/**
* <p>A reader of TlaEx and TlaModule from JSON, for interoperability with external tools.</p>
* @author Andrey Kuprianov
**/
object JsonReader {
def readModule(from: ujson.Readable): TlaModule = {
parseModule(ujson.read(from))
}
def readExpr(from: ujson.Readable): TlaEx = {
parseJson(ujson.read(from))
}
// expect ujson.Value to be an encoding of a module
def parseModule(v: ujson.Value): TlaModule = {
// it should be a JSON object
val m = v.objOpt match {
case Some(value) => value
case None => throw new Exception("incorrect TLA+ JSON: expecting a module object")
}
if(!m.contains("module") || !m.contains("declarations"))
throw new Exception("incorrect TLA+ JSON: malformed module object")
new TlaModule(parseStr(m("module")), parseDecls(m("declarations")))
}
  // inverse lookup tables (JSON key -> TLA+ operator), obtained by flipping
  // the operator -> key maps that JsonWriter uses when encoding
  val unaryOps = JsonWriter.unaryOps.map(_.swap)
  val naryOps = JsonWriter.naryOps.map(_.swap)
  val binaryOps = JsonWriter.binaryOps.map(_.swap)
  val naryPairOps = JsonWriter.naryPairOps.map(_.swap)
  val functionalOps = JsonWriter.functionalOps.map(_.swap)
  val boundedPredOps = JsonWriter.boundedPredOps.map(_.swap)
  val unboundedPredOps = JsonWriter.unboundedPredOps.map(_.swap)
  val stutterOps = JsonWriter.stutterOps.map(_.swap)
  val fairnessOps = JsonWriter.fairnessOps.map(_.swap)
  // keys that parseExpr handles with dedicated logic instead of a lookup table
  val otherOps = Set("id", "str", "int", "set", "applyFun", "applyOp", "if", "case", "let")

  // names of the predefined TLA+ sets
  val sets = HashMap(
    "BOOLEAN" -> TlaBoolSet,
    "Int" -> TlaIntSet,
    "Nat" -> TlaNatSet,
    "Real" -> TlaRealSet,
    "STRING" -> TlaStrSet
  )
// parse arbitrary ujson.Value
def parseJson(v: ujson.Value): TlaEx = {
v match {
case ujson.Str(value) => NameEx(value)
case ujson.Num(value) =>
if(value.isValidInt) ValEx(TlaInt(value.toInt))
else throw new Exception("incorrect TLA+ JSON: wrong number")
case ujson.Bool(value) => ValEx(TlaBool(value))
case ujson.Obj(value) => parseExpr(value)
case _ => throw new Exception("incorrect TLA+ JSON: unexpected input")
}
}
  // expect ujson.Value to be an encoding of TLA+ expression object
  //
  // Exactly one key of the object must belong to one of the known operator
  // tables (or to otherOps); that key selects the operator and its value
  // carries the (first) argument. A trailing "label" key, if present, wraps
  // the parsed expression in a label operator.
  def parseExpr(m: LinkedHashMap[String, ujson.Value]): TlaEx = {
    // intersect the object's keys with each operator family
    val unary = m.keySet & unaryOps.keySet
    val binary = m.keySet & binaryOps.keySet
    val nary = m.keySet & naryOps.keySet
    val naryPair = m.keySet & naryPairOps.keySet
    val functional = m.keySet & functionalOps.keySet
    val boundedPred = m.keySet & boundedPredOps.keySet
    val unboundedPred = m.keySet & unboundedPredOps.keySet
    val stutter = m.keySet & stutterOps.keySet
    val fairness = m.keySet & fairnessOps.keySet
    val other = m.keySet & otherOps
    // total number of recognized keys; must be exactly 1
    // (note: the leading "+" on the continuation line is a unary plus -- harmless)
    val ourKeys = unary.size + binary.size + nary.size + naryPair.size + functional.size +
      + boundedPred.size + unboundedPred.size + stutter.size + fairness.size + other.size
    val expr =
      if(ourKeys < 1)
        throw new Exception("incorrect TLA+ JSON: expected expression, but none found")
      else if(ourKeys > 1)
        throw new Exception("incorrect TLA+ JSON: multiple matching expressions")
      else if(unary.nonEmpty)
        OperEx(unaryOps(unary.head), parseJson(m(unary.head)))
      else if(binary.nonEmpty) {
        // binary operators carry their second operand under "arg"
        if(!m.contains("arg"))
          throw new Exception("incorrect TLA+ JSON: expecting 'arg'")
        OperEx(binaryOps(binary.head), parseJson(m(binary.head)), parseJson(m("arg")))
      } else if(nary.nonEmpty)
        OperEx(naryOps(nary.head), parseArray(m(nary.head)):_*)
      else if(naryPair.nonEmpty) {
        OperEx(naryPairOps(naryPair.head), parsePairs(m(naryPair.head)) :_*)
      }
      else if(functional.nonEmpty) {
        // functional operators: body under the operator key, bindings under "where"
        if(!m.contains("where"))
          throw new Exception("incorrect TLA+ JSON: expecting 'where'")
        OperEx(functionalOps(functional.head), parseJson(m(functional.head)) +: parsePairs(m("where")) :_*)
      }
      else if(unboundedPred.nonEmpty) {
        if(!m.contains("that"))
          throw new Exception("incorrect TLA+ JSON: expecting 'that'")
        OperEx(unboundedPredOps(unboundedPred.head), parseJson(m(unboundedPred.head)), parseJson(m("that")))
      }
      else if(boundedPred.nonEmpty) {
        // bounded predicate: (name, set) pair under the operator key, body under "that"
        val nameSet = parsePair(m(boundedPred.head))
        if(!m.contains("that"))
          throw new Exception("incorrect TLA+ JSON: expecting 'that'")
        OperEx(boundedPredOps(boundedPred.head), nameSet(0), nameSet(1), parseJson(m("that")))
      }
      else if(stutter.nonEmpty) {
        if(!m.contains("vars"))
          throw new Exception("incorrect TLA+ JSON: expecting 'vars'")
        OperEx(stutterOps(stutter.head), parseJson(m(stutter.head)), parseJson(m("vars")))
      }
      else if(fairness.nonEmpty) {
        // note the argument order: vars first, then the action
        if(!m.contains("vars"))
          throw new Exception("incorrect TLA+ JSON: expecting 'vars'")
        OperEx(fairnessOps(fairness.head), parseJson(m("vars")), parseJson(m(fairness.head)))
      }
      else if(other.nonEmpty) {
        // specially-encoded constructs
        other.head match {
          case "id" => NameEx(parseStr(m("id")))
          case "str" => ValEx(TlaStr(parseStr(m("str"))))
          case "int" => ValEx(TlaInt(BigInt(parseStr(m("int")))))
          case "set" => {
            val set = parseStr(m("set"))
            if(sets.contains(set))
              ValEx(sets(set))
            else
              throw new Exception("can't parse TLA+ JSON: reference to unknown set")
          }
          case "applyFun" => {
            if(!m.contains("arg"))
              throw new Exception("incorrect TLA+ JSON: expecting 'arg'")
            OperEx(TlaFunOper.app, parseJson(m("applyFun")), parseJson(m("arg")))
          }
          case "applyOp" => {
            if(!m.contains("args"))
              throw new Exception("incorrect TLA+ JSON: expecting 'args'")
            val name = parseStr(m("applyOp"))
            val args = parseArray(m("args"))
            // "recFunRef" is a nullary reference to a recursive function
            if(name == "recFunRef") {
              if(args.nonEmpty)
                throw new Exception("incorrect TLA+ JSON: found arguments for 'recFunRef'")
              OperEx(TlaFunOper.recFunRef)
            }
            else
              OperEx(TlaOper.apply, NameEx(name) +: args:_*)
          }
          case "if" => {
            if(!m.contains("then") || !m.contains("else"))
              throw new Exception("incorrect TLA+ JSON: malformed 'if'")
            OperEx(TlaControlOper.ifThenElse, parseJson(m("if")), parseJson(m("then")), parseJson(m("else")))
          }
          case "case" => {
            // with the optional "other" key the default branch comes first
            if(m.contains("other"))
              OperEx(TlaControlOper.caseWithOther, parseJson(m("other")) +: parsePairs(m("case")) :_*)
            else
              OperEx(TlaControlOper.caseNoOther, parsePairs(m("case")):_*)
          }
          case "let" => {
            if(!m.contains("body"))
              throw new Exception("incorrect TLA+ JSON: malformed 'let'")
            LetInEx(parseJson(m("body")), parseOperDecls(m("let")):_*)
          }
          case _ =>
            throw new Exception("can't parse TLA+ JSON: unknown JSON key")
        }
      }
      else
        throw new Exception("can't parse TLA+ JSON: cannot find a known JSON key")
    // an optional "label" wraps the expression: label(expr, name, args...)
    if(m.contains("label")) {
      val (name, args) = parseLabel(m("label"))
      OperEx(TlaOper.label, (expr +: ValEx(TlaStr(name)) +: args) :_*)
    }
    else
      expr
  }
// expect ujson.Value to be a string
def parseStr(v: ujson.Value): String = {
// it should be a JSON string
v.strOpt match {
case Some(value) => value
case None => throw new Exception("incorrect TLA+ JSON: expecting string")
}
}
// expect ujson.Value to be an encoding of TLA+ expression array
def parseArray(v: ujson.Value): Seq[TlaEx] = {
// it should be a JSON array
val arr = v.arrOpt match {
case Some(value) => value
case None => throw new Exception("incorrect TLA+ JSON: expecting expression array")
}
arr.map(parseJson)
}
// expect ujson.Value to be an encoding of a set of pairs of expressions
def parsePairs(v: ujson.Value): Seq[TlaEx] = {
// it should be a JSON array
val arr = v.arrOpt match {
case Some(value) => value
case None => throw new Exception("incorrect TLA+ JSON: expecting array of pairs")
}
arr.map(parsePair).flatten
}
// expect ujson.Value to be an encoding of a pair of expressions
def parsePair(v: ujson.Value): Seq[TlaEx] = {
val m = v.objOpt match {
case Some(value) => value
case None => throw new Exception("incorrect TLA+ JSON: expecting a key-value object")
}
if(!m.contains("key") || !m.contains("value"))
throw new Exception("incorrect TLA+ JSON: malformed key-value object")
val key = parseJson(m("key"))
val value = parseJson(m("value"))
Seq(key, value)
}
// expect ujson.Value to be an encoding of a label
def parseLabel(v: ujson.Value): (String, Seq[TlaEx]) = {
// it should be a JSON object
val m = v.objOpt match {
case Some(value) => value
case None => throw new Exception("incorrect TLA+ JSON: expecting a label object")
}
if(!m.contains("name") || !m.contains("args"))
throw new Exception("incorrect TLA+ JSON: malformed label")
val name = parseStr(m("name"))
val args = parseArray(m("args"))
(name, args.map {
case NameEx(str) => ValEx(TlaStr(str)) // change back from NameEx to ValEx
case _ => throw new Exception("incorrect TLA+ JSON: malformed label")
})
}
// expect ujson.Value to be an encoding of TLA+ declarations array
def parseDecls(v: ujson.Value): Seq[TlaDecl] = {
// it should be a JSON array
val arr = v.arrOpt match {
case Some(value) => value
case None => throw new Exception("incorrect TLA+ JSON: expecting declaration array")
}
arr.map(parseDecl)
}
def parseDecl(v: ujson.Value): TlaDecl = {
val m = v.objOpt match {
case Some(value) => value
case None => throw new Exception("incorrect TLA+ JSON: expecting a declaration object")
}
if(m.contains("constant"))
TlaConstDecl(parseStr(m("constant")))
else if(m.contains("variable"))
TlaVarDecl(parseStr(m("variable")))
else if(m.contains("assume"))
TlaAssumeDecl(parseJson(m("assume")))
else if(m.contains("operator")) {
if(!m.contains("body") || !m.contains("params"))
throw new Exception("incorrect TLA+ JSON: malformed operator declaration")
TlaOperDecl(parseStr(m("operator")), parseParams(m("params")).toList, parseJson(m("body")))
}
else
throw new Exception("incorrect TLA+ JSON: malformed declaration object")
}
// expect ujson.Value to be an encoding of TLA+ operator declarations array
def parseOperDecls(v: ujson.Value): Seq[TlaOperDecl] = {
// it should be a JSON array
val arr = v.arrOpt match {
case Some(value) => value
case None => throw new Exception("incorrect TLA+ JSON: expecting declaration array")
}
arr.map(parseOperDecl)
}
def parseOperDecl(v: ujson.Value): TlaOperDecl = {
val m = v.objOpt match {
case Some(value) => value
case None => throw new Exception("incorrect TLA+ JSON: expecting a declaration object")
}
if(!m.contains("operator") || !m.contains("body") || !m.contains("params"))
throw new Exception("incorrect TLA+ JSON: malformed operator declaration")
TlaOperDecl(parseStr(m("operator")), parseParams(m("params")).toList, parseJson(m("body")))
}
// expect ujson.Value to be an encoding of TLA+ params array
def parseParams(v: ujson.Value): Seq[FormalParam] = {
// it should be a JSON array
val arr = v.arrOpt match {
case Some(value) => value
case None => throw new Exception("incorrect TLA+ JSON: expecting parameter array")
}
arr.map(parseParam)
}
// expect ujson.Value to be an encoding of a parameter
def parseParam(v: ujson.Value): FormalParam = {
// it should be a JSON object
val m = v.objOpt match {
case Some(value) => value
case None => throw new Exception("incorrect TLA+ JSON: expecting a parameter object")
}
if(!m.contains("name") || m("name").strOpt == None
|| !m.contains("arity") || m("arity").numOpt == None || !m("arity").num.isValidInt)
throw new Exception("incorrect TLA+ JSON: malformed parameter")
val arity = m("arity").num.toInt
if(arity == 0)
SimpleFormalParam(m("name").str)
else
OperFormalParam(m("name").str, arity)
}
}
| konnov/apalache | tlair/src/main/scala/at/forsyte/apalache/tla/lir/io/JsonReader.scala | Scala | apache-2.0 | 12,545 |
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js Test Suite **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-js.org/ **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\\* */
package org.scalajs.testsuite.javalib.io
import java.io._
import scala.language.implicitConversions
import org.junit.Test
import org.junit.Assert._
import org.scalajs.testsuite.utils.AssertThrows._
/**
 * Behavioral test suite shared by `InputStream` implementations.
 *
 * Concrete suites supply `mkStream`; every test then runs against a fresh
 * 50-byte stream containing the byte values 1 to 50.
 */
trait CommonStreamsTests {
  /** Creates the stream under test, one byte per element of `seq`. */
  def mkStream(seq: Seq[Int]): InputStream
  private val length = 50
  // A fresh stream holding bytes 1..50, created anew for each test.
  private def newStream: InputStream = mkStream(1 to length)
  private implicit def seqToArray(seq: Seq[Int]): Array[Byte] =
    seq.toArray.map(_.toByte)
  // Fixed: the method previously declared a stray second empty parameter
  // list (`def should_provide_read()(): Unit`); the JVM signature is the
  // same, but the extra list was a typo inconsistent with the other tests.
  @Test def should_provide_read(): Unit = {
    val stream = newStream
    for (i <- 1 to length)
      assertEquals(i, stream.read())
    // past end-of-stream, read() must keep returning -1
    for (_ <- 1 to 5)
      assertEquals(-1, stream.read())
  }
  @Test def should_provide_read_from_buf(): Unit = {
    val stream = newStream
    val buf = new Array[Byte](10)
    assertEquals(10, stream.read(buf))
    assertArrayEquals(1 to 10, buf)
    assertEquals(35L, stream.skip(35))
    // only 5 bytes remain; the tail of buf keeps its previous contents
    assertEquals(5, stream.read(buf))
    assertArrayEquals((46 to 50) ++ (6 to 10), buf)
    assertEquals(-1, stream.read(buf))
    assertEquals(-1, stream.read())
  }
  @Test def should_provide_full_argument_read(): Unit = {
    val stream = newStream
    val buf = new Array[Byte](20)
    assertEquals(5, stream.read(buf, 10, 5))
    assertArrayEquals(Seq.fill(10)(0) ++ (1 to 5) ++ Seq.fill(5)(0), buf)
    assertEquals(20, stream.read(buf, 0, 20))
    assertArrayEquals(6 to 25, buf)
    // a zero-length read must return 0 and leave the buffer untouched
    assertEquals(0, stream.read(buf, 10, 0))
    assertArrayEquals(6 to 25, buf)
    // out-of-range offset/length combinations must throw without side effects
    expectThrows(classOf[IndexOutOfBoundsException], stream.read(buf, -1, 0))
    expectThrows(classOf[IndexOutOfBoundsException], stream.read(buf, 0, -1))
    expectThrows(classOf[IndexOutOfBoundsException], stream.read(buf, 100, 0))
    expectThrows(classOf[IndexOutOfBoundsException], stream.read(buf, 10, 100))
    assertArrayEquals(6 to 25, buf)
    assertEquals(20L, stream.skip(20))
    assertEquals(5, stream.read(buf, 0, 10))
    assertArrayEquals((46 to 50) ++ (11 to 25), buf)
    assertEquals(-1, stream.read(buf, 0, 10))
    assertArrayEquals((46 to 50) ++ (11 to 25), buf)
  }
  @Test def should_provide_available(): Unit = {
    val stream = newStream
    def mySkip(n: Int) = for (_ <- 1 to n) assertNotEquals(stream.read(), -1)
    def check(n: Int) = assertEquals(n, stream.available)
    check(50)
    mySkip(5)
    check(45)
    assertEquals(10L, stream.skip(10))
    check(35)
    mySkip(30)
    check(5)
    // skipping past the end only consumes what is left
    assertEquals(5L, stream.skip(20))
    check(0)
  }
  @Test def should_provide_skip(): Unit = {
    val stream = newStream
    assertEquals(7L, stream.skip(7))
    for (i <- 8 to 32)
      assertEquals(i, stream.read())
    assertEquals(0L, stream.skip(0))
    assertEquals(33, stream.read())
    // a negative skip is a no-op
    assertEquals(0L, stream.skip(-4))
    assertEquals(34, stream.read())
    assertEquals(16L, stream.skip(30))
    assertEquals(0L, stream.skip(30))
  }
  @Test def should_return_true_from_markSupported(): Unit = {
    assertTrue(newStream.markSupported)
  }
  @Test def should_provide_no_op_close(): Unit = {
    val stream = newStream
    // close() must not prevent further reads on these streams
    for (i <- 1 to length) {
      stream.close()
      assertEquals(i, stream.read())
    }
  }
  @Test def should_provide_mark_and_reset(): Unit = {
    val stream = newStream
    def read(range: Range) = for (i <- range) assertEquals(i, stream.read())
    read(1 to 10)
    stream.reset() // mark must be 0 at creation
    read(1 to 5)
    stream.mark(length)
    read(6 to 22)
    stream.reset()
    read(6 to 20)
    stream.reset()
    read(6 to 25)
    stream.reset()
    assertEquals(40L, stream.skip(40))
    stream.mark(length)
    read(46 to 50)
    stream.reset()
    read(46 to 50)
    stream.mark(length)
    assertEquals(-1, stream.read())
    stream.reset()
    assertEquals(-1, stream.read())
  }
  @Test def should_return_positive_integers_when_calling_read(): Unit = {
    // bytes are unsigned on read(): -1 as a byte comes back as 255, etc.
    val stream = mkStream(Seq(-1, -2, -3))
    assertEquals(255, stream.read())
    assertEquals(254, stream.read())
    assertEquals(253, stream.read())
    assertEquals(-1, stream.read())
  }
}
| lrytz/scala-js | test-suite/shared/src/test/scala/org/scalajs/testsuite/javalib/io/CommonStreamsTests.scala | Scala | bsd-3-clause | 4,557 |
package im.actor.server.user
import akka.actor.ActorSystem
import akka.util.Timeout
import im.actor.server.{ KeyValueMappings, models, persist }
import shardakka.ShardakkaExtension
import slick.dbio.DBIO
import scala.concurrent.{ ExecutionContext, Future }
object ContactsUtils {
  /** Key in the `LocalNames` key-value mapping under which `ownerUserId`'s
    * custom (local) name for contact `contactId` is stored.
    */
  def localNameKey(ownerUserId: Int, contactId: Int): String = s"${ownerUserId}_${contactId}"
  /** Registers (or updates) a plain contact: first records the local name in
    * the key-value store, then upserts the contact row.
    * Returns the number of affected database rows.
    */
  def addContact(
    ownerUserId: Int,
    userId: Int,
    name: Option[String],
    accessSalt: String
  )(implicit ec: ExecutionContext, timeout: Timeout, system: ActorSystem): DBIO[Int] =
    for {
      _ ← DBIO.from(registerLocalName(ownerUserId, userId, name))
      contact = models.contact.UserContact(ownerUserId, userId, name, accessSalt, isDeleted = false)
      result ← persist.contact.UserContact.insertOrUpdate(contact)
    } yield result
  /** Same as the plain overload, but also stores the contact's phone number. */
  def addContact(
    ownerUserId: Int,
    userId: Int,
    phoneNumber: Long,
    name: Option[String],
    accessSalt: String
  )(implicit ec: ExecutionContext, timeout: Timeout, system: ActorSystem): DBIO[Int] =
    for {
      _ ← DBIO.from(registerLocalName(ownerUserId, userId, name))
      contact = models.contact.UserPhoneContact(phoneNumber, ownerUserId, userId, name, accessSalt, isDeleted = false)
      result ← persist.contact.UserPhoneContact.insertOrUpdate(contact)
    } yield result
  /** Same as the plain overload, but also stores the contact's email address. */
  def addContact(
    ownerUserId: Int,
    userId: Int,
    email: String,
    name: Option[String],
    accessSalt: String
  )(implicit ec: ExecutionContext, timeout: Timeout, system: ActorSystem): DBIO[Int] =
    for {
      _ ← DBIO.from(registerLocalName(ownerUserId, userId, name))
      contact = models.contact.UserEmailContact(email, ownerUserId, userId, name, accessSalt, isDeleted = false)
      result ← persist.contact.UserEmailContact.insertOrUpdate(contact)
    } yield result
  /** Deletes the contact row, clearing the stored local name first. */
  def deleteContact(ownerUserId: Int, userId: Int)(implicit ec: ExecutionContext, timeout: Timeout, system: ActorSystem): DBIO[Int] =
    for {
      _ ← DBIO.from(registerLocalName(ownerUserId, userId, None))
      result ← persist.contact.UserContact.delete(ownerUserId, userId)
    } yield result
  /** Renames a contact both in the key-value store and in the contact row. */
  def updateName(ownerUserId: Int, userId: Int, name: Option[String])(implicit ec: ExecutionContext, timeout: Timeout, system: ActorSystem): DBIO[Int] =
    for {
      _ ← DBIO.from(registerLocalName(ownerUserId, userId, name))
      result ← persist.contact.UserContact.updateName(ownerUserId, userId, name)
    } yield result
  /** Stores `name` as the local name of `userId` for `ownerUserId`;
    * a `None` name deletes any previously stored entry.
    */
  def registerLocalName(ownerUserId: Int, userId: Int, name: Option[String])(implicit ec: ExecutionContext, timeout: Timeout, system: ActorSystem): Future[Unit] = {
    val kv = ShardakkaExtension(system).simpleKeyValue(KeyValueMappings.LocalNames)
    val contactKey = localNameKey(ownerUserId, userId)
    name map { n ⇒ kv.upsert(contactKey, n) } getOrElse kv.delete(contactKey)
  }
}
| lzpfmh/actor-platform | actor-server/actor-core/src/main/scala/im/actor/server/user/ContactsUtils.scala | Scala | mit | 2,919 |
/*
* Copyright (C) 2009-2018 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.data.format
import java.sql.Timestamp
import java.time._
import java.time.format.DateTimeFormatter
import java.util.UUID
import play.api.data._
import annotation.implicitNotFound
/**
* Handles field binding and unbinding.
*/
@implicitNotFound(
  msg = "Cannot find Formatter type class for ${T}. Perhaps you will need to import play.api.data.format.Formats._ "
)
trait Formatter[T] {
  /**
   * The expected format of the field, if any, expressed as a message key
   * with its arguments (e.g. `"format.numeric" -> Nil`).
   */
  val format: Option[(String, Seq[Any])] = None
  /**
   * Binds this field, i.e. constructs a concrete value from submitted data.
   *
   * @param key the field key
   * @param data the submitted data
   * @return Either a concrete value of type T or a set of errors if the binding failed.
   */
  def bind(key: String, data: Map[String, String]): Either[Seq[FormError], T]
  /**
   * Unbinds this field, i.e. transforms a concrete value to plain data.
   *
   * @param key the field key
   * @param value the value to unbind
   * @return either the plain data or a set of errors if unbinding failed
   */
  def unbind(key: String, value: T): Map[String, String]
}
/** This object defines several default formatters. */
object Formats {
  /**
   * Formatter for ignored values.
   *
   * @param value As we ignore this parameter in binding/unbinding we have to provide a default value.
   */
  def ignoredFormat[A](value: A): Formatter[A] = new Formatter[A] {
    def bind(key: String, data: Map[String, String]) = Right(value)
    def unbind(key: String, value: A) = Map.empty
  }
  /**
   * Default formatter for the `String` type.
   */
  implicit def stringFormat: Formatter[String] = new Formatter[String] {
    def bind(key: String, data: Map[String, String]) = data.get(key).toRight(Seq(FormError(key, "error.required", Nil)))
    def unbind(key: String, value: String) = Map(key -> value)
  }
  /**
   * Default formatter for the `Char` type.
   */
  implicit def charFormat: Formatter[Char] = new Formatter[Char] {
    // accepts exactly one character; a single space is rejected as if missing
    def bind(key: String, data: Map[String, String]) =
      data.get(key).filter(s => s.length == 1 && s != " ").map(s => Right(s.charAt(0))).getOrElse(
        Left(Seq(FormError(key, "error.required", Nil)))
      )
    def unbind(key: String, value: Char) = Map(key -> value.toString)
  }
  /**
   * Helper for formatters binders
   * @param parse Function parsing a String value into a T value, throwing an exception in case of failure
   * @param errMsg Error message key to report in case of parsing failure
   * @param errArgs Arguments of the error message in case of parsing failure
   * @param key Key name of the field to parse
   * @param data Field data
   * @return Either the parsed value, or a single `FormError` for `key` if parsing threw
   */
  def parsing[T](parse: String => T, errMsg: String, errArgs: Seq[Any])(key: String, data: Map[String, String]): Either[Seq[FormError], T] = {
    stringFormat.bind(key, data).right.flatMap { s =>
      scala.util.control.Exception.allCatch[T]
        .either(parse(s))
        .left.map(e => Seq(FormError(key, errMsg, errArgs)))
    }
  }
  // Shared machinery for the numeric formatters below; `real` selects the
  // real-number vs integer message keys.
  private def numberFormatter[T](convert: String => T, real: Boolean = false): Formatter[T] = {
    val (formatString, errorString) = if (real) ("format.real", "error.real") else ("format.numeric", "error.number")
    new Formatter[T] {
      override val format = Some(formatString -> Nil)
      def bind(key: String, data: Map[String, String]) =
        parsing(convert, errorString, Nil)(key, data)
      def unbind(key: String, value: T) = Map(key -> value.toString)
    }
  }
  /**
   * Default formatter for the `Long` type.
   */
  implicit def longFormat: Formatter[Long] = numberFormatter(_.toLong)
  /**
   * Default formatter for the `Int` type.
   */
  implicit def intFormat: Formatter[Int] = numberFormatter(_.toInt)
  /**
   * Default formatter for the `Short` type.
   */
  implicit def shortFormat: Formatter[Short] = numberFormatter(_.toShort)
  /**
   * Default formatter for the `Byte` type.
   */
  implicit def byteFormat: Formatter[Byte] = numberFormatter(_.toByte)
  /**
   * Default formatter for the `Float` type.
   */
  implicit def floatFormat: Formatter[Float] = numberFormatter(_.toFloat, real = true)
  /**
   * Default formatter for the `Double` type.
   */
  implicit def doubleFormat: Formatter[Double] = numberFormatter(_.toDouble, real = true)
  /**
   * Formatter for the `BigDecimal` type.
   *
   * @param precision an optional (precision, scale) pair; when present, values
   *                  exceeding the allowed number of integer digits are rejected
   *                  and accepted values are rescaled to `scale`.
   */
  def bigDecimalFormat(precision: Option[(Int, Int)]): Formatter[BigDecimal] = new Formatter[BigDecimal] {
    override val format = Some(("format.real", Nil))
    def bind(key: String, data: Map[String, String]) = {
      Formats.stringFormat.bind(key, data).right.flatMap { s =>
        scala.util.control.Exception.allCatch[BigDecimal]
          .either {
            val bd = BigDecimal(s)
            precision.map({
              case (p, s) =>
                if (bd.precision - bd.scale > p - s) {
                  throw new java.lang.ArithmeticException("Invalid precision")
                }
                bd.setScale(s)
            }).getOrElse(bd)
          }
          .left.map { e =>
            Seq(
              precision match {
                case Some((p, s)) => FormError(key, "error.real.precision", Seq(p, s))
                case None => FormError(key, "error.real", Nil)
              }
            )
          }
      }
    }
    def unbind(key: String, value: BigDecimal) = Map(key -> precision.map({ p => value.setScale(p._2) }).getOrElse(value).toString)
  }
  /**
   * Default formatter for the `BigDecimal` type with no precision
   */
  implicit val bigDecimalFormat: Formatter[BigDecimal] = bigDecimalFormat(None)
  /**
   * Default formatter for the `Boolean` type.
   */
  implicit def booleanFormat: Formatter[Boolean] = new Formatter[Boolean] {
    override val format = Some(("format.boolean", Nil))
    def bind(key: String, data: Map[String, String]) = {
      // a missing key binds to false; anything other than "true"/"false" is an error
      Right(data.getOrElse(key, "false")).right.flatMap {
        case "true" => Right(true)
        case "false" => Right(false)
        case _ => Left(Seq(FormError(key, "error.boolean", Nil)))
      }
    }
    def unbind(key: String, value: Boolean) = Map(key -> value.toString)
  }
  import java.util.{ Date, TimeZone }
  /**
   * Formatter for the `java.util.Date` type.
   *
   * @param pattern a date pattern, as specified in `java.time.format.DateTimeFormatter`.
   * @param timeZone the `java.util.TimeZone` to use for parsing and formatting
   */
  def dateFormat(pattern: String, timeZone: TimeZone = TimeZone.getDefault): Formatter[Date] = new Formatter[Date] {
    val javaTimeZone = timeZone.toZoneId
    val formatter = DateTimeFormatter.ofPattern(pattern)
    def dateParse(data: String) = {
      // parse at UTC first, then keep the same local date-time in the target zone
      val instant = PlayDate.parse(data, formatter).toZonedDateTime(ZoneOffset.UTC)
      Date.from(instant.withZoneSameLocal(javaTimeZone).toInstant)
    }
    override val format = Some(("format.date", Seq(pattern)))
    def bind(key: String, data: Map[String, String]) = parsing(dateParse, "error.date", Nil)(key, data)
    def unbind(key: String, value: Date) = Map(key -> formatter.format(value.toInstant.atZone(javaTimeZone)))
  }
  /**
   * Default formatter for the `java.util.Date` type with pattern `yyyy-MM-dd`.
   */
  implicit val dateFormat: Formatter[Date] = dateFormat("yyyy-MM-dd")
  @deprecated("Use sqlDateFormat(pattern). SQL dates do not have time zones.", "2.6.2")
  def sqlDateFormat(pattern: String, timeZone: java.util.TimeZone): Formatter[java.sql.Date] = sqlDateFormat(pattern)
  // Added for bincompat
  @deprecated("This method will be removed when sqlDateFormat(pattern, timeZone) is removed.", "2.6.2")
  private[format] def sqlDateFormat$default$2: java.util.TimeZone = java.util.TimeZone.getDefault
  /**
   * Formatter for the `java.sql.Date` type.
   *
   * @param pattern a date pattern as specified in `java.time.DateTimeFormatter`.
   */
  def sqlDateFormat(pattern: String): Formatter[java.sql.Date] = new Formatter[java.sql.Date] {
    // delegate to the LocalDate formatter; SQL dates carry no time zone
    private val dateFormatter: Formatter[LocalDate] = localDateFormat(pattern)
    override val format = Some(("format.date", Seq(pattern)))
    def bind(key: String, data: Map[String, String]) = {
      dateFormatter.bind(key, data).right.map(d => java.sql.Date.valueOf(d))
    }
    def unbind(key: String, value: java.sql.Date) = dateFormatter.unbind(key, value.toLocalDate)
  }
  /**
   * Default formatter for `java.sql.Date` type with pattern `yyyy-MM-dd`.
   */
  implicit val sqlDateFormat: Formatter[java.sql.Date] = sqlDateFormat("yyyy-MM-dd")
  /**
   * Formatter for the `java.sql.Timestamp` type.
   *
   * @param pattern a date pattern as specified in `java.time.DateTimeFormatter`.
   * @param timeZone the `java.util.TimeZone` to use for parsing and formatting
   */
  def sqlTimestampFormat(pattern: String, timeZone: TimeZone = TimeZone.getDefault): Formatter[java.sql.Timestamp] = new Formatter[java.sql.Timestamp] {
    import java.time.LocalDateTime
    private val formatter = java.time.format.DateTimeFormatter.ofPattern(pattern).withZone(timeZone.toZoneId)
    private def timestampParse(data: String) = java.sql.Timestamp.valueOf(LocalDateTime.parse(data, formatter))
    override val format = Some(("format.timestamp", Seq(pattern)))
    override def bind(key: String, data: Map[String, String]): Either[Seq[FormError], Timestamp] = parsing(timestampParse, "error.timestamp", Nil)(key, data)
    override def unbind(key: String, value: java.sql.Timestamp) = Map(key -> value.toLocalDateTime.format(formatter))
  }
  /**
   * Default formatter for `java.sql.Timestamp` type with pattern `yyyy-MM-dd HH:mm:ss`.
   */
  implicit val sqlTimestampFormat: Formatter[java.sql.Timestamp] = sqlTimestampFormat("yyyy-MM-dd HH:mm:ss")
  /**
   * Formatter for the `java.time.LocalDate` type.
   *
   * @param pattern a date pattern as specified in `java.time.format.DateTimeFormatter`.
   */
  def localDateFormat(pattern: String): Formatter[java.time.LocalDate] = new Formatter[java.time.LocalDate] {
    import java.time.LocalDate
    val formatter = java.time.format.DateTimeFormatter.ofPattern(pattern)
    def localDateParse(data: String) = LocalDate.parse(data, formatter)
    override val format = Some(("format.date", Seq(pattern)))
    def bind(key: String, data: Map[String, String]) = parsing(localDateParse, "error.date", Nil)(key, data)
    def unbind(key: String, value: LocalDate) = Map(key -> value.format(formatter))
  }
  /**
   * Default formatter for `java.time.LocalDate` type with pattern `yyyy-MM-dd`.
   */
  implicit val localDateFormat: Formatter[java.time.LocalDate] = localDateFormat("yyyy-MM-dd")
  /**
   * Formatter for the `java.time.LocalDateTime` type.
   *
   * @param pattern a date pattern as specified in `java.time.format.DateTimeFormatter`.
   * @param zoneId the `java.time.ZoneId` to use for parsing and formatting
   */
  def localDateTimeFormat(pattern: String, zoneId: java.time.ZoneId = java.time.ZoneId.systemDefault()): Formatter[java.time.LocalDateTime] = new Formatter[java.time.LocalDateTime] {
    import java.time.LocalDateTime
    val formatter = java.time.format.DateTimeFormatter.ofPattern(pattern).withZone(zoneId)
    def localDateTimeParse(data: String) = LocalDateTime.parse(data, formatter)
    override val format = Some(("format.localDateTime", Seq(pattern)))
    def bind(key: String, data: Map[String, String]) = parsing(localDateTimeParse, "error.localDateTime", Nil)(key, data)
    def unbind(key: String, value: LocalDateTime) = Map(key -> value.format(formatter))
  }
  /**
   * Default formatter for `java.time.LocalDateTime` type with pattern `yyyy-MM-dd HH:mm:ss`.
   */
  implicit val localDateTimeFormat: Formatter[java.time.LocalDateTime] = localDateTimeFormat("yyyy-MM-dd HH:mm:ss")
  /**
   * Formatter for the `java.time.LocalTime` type.
   *
   * @param pattern a date pattern as specified in `java.time.format.DateTimeFormatter`.
   */
  def localTimeFormat(pattern: String): Formatter[java.time.LocalTime] = new Formatter[java.time.LocalTime] {
    import java.time.LocalTime
    val formatter = java.time.format.DateTimeFormatter.ofPattern(pattern)
    def localTimeParse(data: String) = LocalTime.parse(data, formatter)
    override val format = Some(("format.localTime", Seq(pattern)))
    def bind(key: String, data: Map[String, String]) = parsing(localTimeParse, "error.localTime", Nil)(key, data)
    def unbind(key: String, value: LocalTime) = Map(key -> value.format(formatter))
  }
  /**
   * Default formatter for `java.time.LocalTime` type with pattern `HH:mm:ss`.
   */
  implicit val localTimeFormat: Formatter[java.time.LocalTime] = localTimeFormat("HH:mm:ss")
  /**
   * Default formatter for the `java.util.UUID` type.
   */
  implicit def uuidFormat: Formatter[UUID] = new Formatter[UUID] {
    override val format = Some(("format.uuid", Nil))
    override def bind(key: String, data: Map[String, String]) = parsing(UUID.fromString, "error.uuid", Nil)(key, data)
    override def unbind(key: String, value: UUID) = Map(key -> value.toString)
  }
}
| Shenker93/playframework | framework/src/play/src/main/scala/play/api/data/format/Format.scala | Scala | apache-2.0 | 13,025 |
/**
* Copyright 2011-2012 @WalmartLabs, a division of Wal-Mart Stores, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.walmartlabs.mupd8
import org.jboss.netty.handler.codec.replay.ReplayingDecoder
import com.walmartlabs.mupd8.network.common.Decoder.DecodingState._
import com.walmartlabs.mupd8.network.common.Decoder.DecodingState
import com.walmartlabs.mupd8.network.common._
import com.walmartlabs.mupd8.GT._
import com.walmartlabs.mupd8.Misc._
import org.jboss.netty.channel.ChannelHandlerContext
import org.jboss.netty.channel.Channel
import org.jboss.netty.buffer.ChannelBuffer
/** Netty pipeline stage that reassembles a PerformerPacket from raw bytes.
 *
 * Extends `ReplayingDecoder`, which re-invokes `decode` from the last
 * `checkpoint` whenever the buffer underflows, so the partially decoded
 * fields below must be kept as mutable state between invocations.
 */
class Decoder(val appRun: AppRuntime) extends ReplayingDecoder[DecodingState](PRIORITY) {
  // Accumulators for the packet currently being decoded.
  var pri: Priority = -1
  var pid: Int = -1
  var key: Key = Key(new Array[Byte](0))
  var event: Event = Array()
  var stream: Array[Byte] = Array()
  reset()
  // Clears all accumulators and rewinds the state machine to PRIORITY.
  private def reset() {
    checkpoint(PRIORITY)
    pri = -1
    pid = -1
    key = Key(new Array[Byte](0))
    event = Array()
    stream = Array()
  }
  /** Decodes one packet, advancing through
   * PRIORITY -> PERFORMERID -> KEY_LENGTH -> KEY -> EVENT_LENGTH -> EVENT ->
   * STREAM_LENGTH -> STREAM; a checkpoint is taken after each field so that
   * a buffer underflow replays from the right state.
   */
  protected def decode(ctx: ChannelHandlerContext, channel: Channel, buffer: ChannelBuffer, stateParam: DecodingState): AnyRef = {
    var p: PerformerPacket = null
    var state = stateParam
    do {
      // (state.## : @scala.annotation.switch) match {
      state match {
        case PRIORITY =>
          pri = buffer.readInt
          checkpoint(PERFORMERID)
        case PERFORMERID =>
          pid = buffer.readInt
          checkpoint(KEY_LENGTH)
        case KEY_LENGTH =>
          val keyLen = buffer.readInt
          if (keyLen < 0) {
            throw new Exception("Invalid key size")
          }
          key = Key(new Array[Byte](keyLen))
          checkpoint(KEY)
        case KEY =>
          buffer.readBytes(key.value, 0, key.value.length)
          checkpoint(EVENT_LENGTH)
        case EVENT_LENGTH =>
          val eventLen = buffer.readInt
          if (eventLen < 0) {
            throw new Exception("Invalid event size")
          }
          event = new Array[Byte](eventLen)
          checkpoint(EVENT)
        case EVENT =>
          buffer.readBytes(event, 0, event.length)
          checkpoint(STREAM_LENGTH)
        case STREAM_LENGTH =>
          val streamLen = buffer.readInt
          if (streamLen < 0) {
            throw new Exception("Invalid stream size")
          }
          stream = new Array[Byte](streamLen)
          checkpoint(STREAM)
        case STREAM =>
          // last field read: build the packet and reset for the next one
          buffer.readBytes(stream, 0, stream.length)
          p = PerformerPacket(pri, pid, key, event, str(stream), appRun)
          reset()
        case _ =>
          throw new Exception("Unknown decoding state: " + state)
      }
      state = getState
    } while (state != PRIORITY)
    // try { return p } finally { reset() }
    p
  }
}
| walmartlabs/mupd8 | src/main/scala/com/walmartlabs/mupd8/Decoder.scala | Scala | apache-2.0 | 3,298 |
/*
* Copyright 2014 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s
package blazecore
package websocket
import cats.effect._
import cats.effect.std.Dispatcher
import cats.effect.std.Semaphore
import cats.syntax.all._
import fs2._
import fs2.concurrent.SignallingRef
import org.http4s.blaze.pipeline.Command.EOF
import org.http4s.blaze.pipeline.LeafBuilder
import org.http4s.blaze.pipeline.TailStage
import org.http4s.blaze.pipeline.TrunkBuilder
import org.http4s.blaze.util.Execution.directec
import org.http4s.blaze.util.Execution.trampoline
import org.http4s.websocket.ReservedOpcodeException
import org.http4s.websocket.UnknownOpcodeException
import org.http4s.websocket.WebSocket
import org.http4s.websocket.WebSocketCombinedPipe
import org.http4s.websocket.WebSocketFrame
import org.http4s.websocket.WebSocketFrame._
import org.http4s.websocket.WebSocketSeparatePipe
import java.net.ProtocolException
import java.util.concurrent.atomic.AtomicBoolean
import scala.concurrent.ExecutionContext
import scala.util.Failure
import scala.util.Success
/** Blaze pipeline stage bridging a blaze WebSocket connection to an http4s
 * [[WebSocket]].
 *
 * Enforces the close handshake of RFC 6455: at most one Close frame is ever
 * written (guarded by `sentClose`), and `deadSignal` terminates the streams
 * once the handshake completes. `writeSemaphore` serializes channel writes.
 */
private[http4s] class Http4sWSStage[F[_]](
    ws: WebSocket[F],
    sentClose: AtomicBoolean,
    deadSignal: SignallingRef[F, Boolean],
    writeSemaphore: Semaphore[F],
    dispatcher: Dispatcher[F],
)(implicit F: Async[F])
    extends TailStage[WebSocketFrame] {
  def name: String = "Http4s WebSocket Stage"
  // ////////////////////// Source and Sink generators ////////////////////////
  val isClosed: F[Boolean] = F.delay(sentClose.get())
  // Atomically flips sentClose false -> true; yields false if already closed.
  val setClosed: F[Boolean] = F.delay(sentClose.compareAndSet(false, true))
  // Writes a frame; a Close frame is written only if we win the CAS on sentClose.
  def evalFrame(frame: WebSocketFrame): F[Unit] = frame match {
    case c: Close => setClosed.ifM(writeFrame(c, directec), F.unit)
    case _ => writeFrame(frame, directec)
  }
  // Sink function: drop outgoing frames once a Close has already been sent.
  def snkFun(frame: WebSocketFrame): F[Unit] = isClosed.ifM(F.unit, evalFrame(frame))
  // Serializes writes with the semaphore so frames are not interleaved.
  private[this] def writeFrame(frame: WebSocketFrame, ec: ExecutionContext): F[Unit] =
    writeSemaphore.permit.use { _ =>
      F.async_[Unit] { cb =>
        channelWrite(frame).onComplete {
          case Success(res) => cb(Right(res))
          case Failure(t) => cb(Left(t))
        }(ec)
      }
    }
  // Reads a single frame from the channel, completing on the trampoline EC.
  private[this] def readFrameTrampoline: F[WebSocketFrame] =
    F.async_[WebSocketFrame] { cb =>
      channelRead().onComplete {
        case Success(ws) => cb(Right(ws))
        case Failure(exception) => cb(Left(exception))
      }(trampoline)
    }
  /** Read from our websocket.
    *
    * To stay faithful to the RFC, the following must hold:
    *
    * - If we receive a ping frame, we MUST reply with a pong frame
    * - If we receive a pong frame, we don't need to forward it.
    * - If we receive a close frame, it means either one of two things:
    *   - We sent a close frame prior, meaning we do not need to reply with one. Just end the stream
    *   - We are the first to receive a close frame, so we try to atomically check a boolean flag,
    *     to prevent sending two close frames. Regardless, we set the signal for termination of
    *     the stream afterwards
    *
    * @return A websocket frame, or a possible IO error.
    */
  private[this] def handleRead(): F[WebSocketFrame] = {
    def maybeSendClose(c: Close): F[Unit] =
      F.delay(sentClose.compareAndSet(false, true)).flatMap { cond =>
        if (cond) writeFrame(c, trampoline)
        else F.unit
      } >> deadSignal.set(true)
    readFrameTrampoline
      .recoverWith {
        // Protocol-level failures are translated into Close frames with the
        // status codes mandated by RFC 6455 (1002/1003).
        case t: ReservedOpcodeException =>
          F.delay(logger.error(t)("Decoded a websocket frame with a reserved opcode")) *>
            F.fromEither(Close(1003))
        case t: UnknownOpcodeException =>
          F.delay(logger.error(t)("Decoded a websocket frame with an unknown opcode")) *>
            F.fromEither(Close(1002))
        case t: ProtocolException =>
          F.delay(logger.error(t)("Websocket protocol violation")) *> F.fromEither(Close(1002))
      }
      .flatMap {
        case c: Close =>
          for {
            s <- F.delay(sentClose.get())
            // If we sent a close signal, we don't need to reply with one
            _ <- if (s) deadSignal.set(true) else maybeSendClose(c)
          } yield c
        case p @ Ping(d) =>
          // Reply to ping frame immediately
          writeFrame(Pong(d), trampoline) >> F.pure(p)
        case rest =>
          F.pure(rest)
      }
  }
  /** The websocket input stream
    *
    * Note: On receiving a close, we MUST send a close back, as stated in section
    * 5.5.1 of the websocket spec: https://datatracker.ietf.org/doc/html/rfc6455#section-5.5.1
    *
    * @return
    */
  def inputstream: Stream[F, WebSocketFrame] =
    Stream.repeatEval(handleRead())
  // ////////////////////// Startup and Shutdown ////////////////////////
  override protected def stageStartup(): Unit = {
    super.stageStartup()
    // Effect to send a close to the other endpoint
    val sendClose: F[Unit] = F.delay(closePipeline(None))
    val receiveSent: Stream[F, WebSocketFrame] =
      ws match {
        case WebSocketSeparatePipe(send, receive, _) =>
          // We don't need to terminate if the send stream terminates.
          send.concurrently(receive(inputstream))
        case WebSocketCombinedPipe(receiveSend, _) =>
          receiveSend(inputstream)
      }
    val wsStream =
      receiveSent
        .evalMap(snkFun)
        .drain
        .interruptWhen(deadSignal)
        .onFinalizeWeak(
          ws.onClose.attempt.void
        ) // Doing it this way ensures `sendClose` is sent no matter what
        .onFinalizeWeak(sendClose)
        .compile
        .drain
    val result = F.handleErrorWith(wsStream) {
      case EOF =>
        F.delay(stageShutdown())
      case t =>
        F.delay(logger.error(t)("Error closing Web Socket"))
    }
    dispatcher.unsafeRunAndForget(result)
  }
  override protected def stageShutdown(): Unit = {
    val fa = F.handleError(deadSignal.set(true)) { t =>
      logger.error(t)("Error setting dead signal")
    }
    dispatcher.unsafeRunAndForget(fa)
    super.stageShutdown()
  }
}
object Http4sWSStage {
  /** Caps the stage with a serializing trunk so frame traffic is buffered. */
  def bufferingSegment[F[_]](stage: Http4sWSStage[F]): LeafBuilder[WebSocketFrame] = {
    val serializer = new SerializingStage[WebSocketFrame]
    TrunkBuilder(serializer).cap(stage)
  }
  /** Builds a stage, allocating the single-permit write semaphore it needs. */
  def apply[F[_]](
      ws: WebSocket[F],
      sentClose: AtomicBoolean,
      deadSignal: SignallingRef[F, Boolean],
      dispatcher: Dispatcher[F],
  )(implicit F: Async[F]): F[Http4sWSStage[F]] =
    for {
      writeSemaphore <- Semaphore[F](1L)
    } yield new Http4sWSStage(ws, sentClose, deadSignal, writeSemaphore, dispatcher)
}
| http4s/http4s | blaze-core/src/main/scala/org/http4s/blazecore/websocket/Http4sWSStage.scala | Scala | apache-2.0 | 7,077 |
package unluac.decompile.statement
import java.util
import unluac.decompile.{Declaration, Output}
/** Decompiled `local a, b, c` statement: prints a comma-separated
  * declaration list. Assumes at least one declaration is present.
  */
class Declare(val decls: util.List[Declaration]) extends Statement {
  def print(out: Output): Unit = {
    out.print("local ")
    out.print(decls.get(0).name)
    // Remaining names are comma-separated after the first.
    for (index <- 1 until decls.size) {
      out.print(", ")
      out.print(decls.get(index).name)
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming.continuous.shuffle
import java.util.concurrent._
import java.util.concurrent.atomic.AtomicBoolean
import org.apache.spark.internal.Logging
import org.apache.spark.rpc.{RpcCallContext, RpcEnv, ThreadSafeRpcEndpoint}
import org.apache.spark.sql.catalyst.expressions.UnsafeRow
import org.apache.spark.util.NextIterator
/**
* Messages for the RPCContinuousShuffleReader endpoint. Either an incoming row or an epoch marker.
*
* Each message comes tagged with writerId, identifying which writer the message is coming
* from. The receiver will only begin the next epoch once all writers have sent an epoch
* marker ending the current epoch.
*/
// Sealed message hierarchy handled by RPCContinuousShuffleReader; `writerId`
// identifies the shuffle writer that produced the message.
private[shuffle] sealed trait RPCContinuousShuffleMessage extends Serializable {
  def writerId: Int
}
// A single shuffle row sent by the given writer.
private[shuffle] case class ReceiverRow(writerId: Int, row: UnsafeRow)
  extends RPCContinuousShuffleMessage
// Marks the end of the given writer's current epoch.
private[shuffle] case class ReceiverEpochMarker(writerId: Int) extends RPCContinuousShuffleMessage
/**
* RPC endpoint for receiving rows into a continuous processing shuffle task. Continuous shuffle
* writers will send rows here, with continuous shuffle readers polling for new rows as needed.
*
* TODO: Support multiple source tasks. We need to output a single epoch marker once all
* source tasks have sent one.
*/
// RPC endpoint buffering incoming shuffle messages in one bounded queue per
// writer; `read()` drains those queues until every writer has delivered an
// epoch marker. The queue take in completionTask blocks, so poll timeouts in
// getNext() are the only way progress warnings surface.
private[shuffle] class RPCContinuousShuffleReader(
    queueSize: Int,
    numShuffleWriters: Int,
    epochIntervalMs: Long,
    override val rpcEnv: RpcEnv)
  extends ThreadSafeRpcEndpoint with ContinuousShuffleReader with Logging {
  // Note that this queue will be drained from the main task thread and populated in the RPC
  // response thread.
  private val queues = Array.fill(numShuffleWriters) {
    new ArrayBlockingQueue[RPCContinuousShuffleMessage](queueSize)
  }
  // Exposed for testing to determine if the endpoint gets stopped on task end.
  private[shuffle] val stopped = new AtomicBoolean(false)
  override def onStop(): Unit = {
    stopped.set(true)
  }
  override def receiveAndReply(context: RpcCallContext): PartialFunction[Any, Unit] = {
    case r: RPCContinuousShuffleMessage =>
      // Note that this will block a thread the shared RPC handler pool!
      // The TCP based shuffle handler (SPARK-24541) will avoid this problem.
      queues(r.writerId).put(r)
      context.reply(())
  }
  override def read(): Iterator[UnsafeRow] = {
    new NextIterator[UnsafeRow] {
      // An array of flags for whether each writer ID has gotten an epoch marker.
      private val writerEpochMarkersReceived = Array.fill(numShuffleWriters)(false)
      // One worker thread per writer; each blocks on that writer's queue.
      private val executor = Executors.newFixedThreadPool(numShuffleWriters)
      private val completion = new ExecutorCompletionService[RPCContinuousShuffleMessage](executor)
      private def completionTask(writerId: Int) = new Callable[RPCContinuousShuffleMessage] {
        override def call(): RPCContinuousShuffleMessage = queues(writerId).take()
      }
      // Initialize by submitting tasks to read the first row from each writer.
      (0 until numShuffleWriters).foreach(writerId => completion.submit(completionTask(writerId)))
      /**
       * In each call to getNext(), we pull the next row available in the completion queue, and then
       * submit another task to read the next row from the writer which returned it.
       *
       * When a writer sends an epoch marker, we note that it's finished and don't submit another
       * task for it in this epoch. The iterator is over once all writers have sent an epoch marker.
       */
      override def getNext(): UnsafeRow = {
        var nextRow: UnsafeRow = null
        while (!finished && nextRow == null) {
          completion.poll(epochIntervalMs, TimeUnit.MILLISECONDS) match {
            case null =>
              // Try again if the poll didn't wait long enough to get a real result.
              // But we should be getting at least an epoch marker every checkpoint interval.
              val writerIdsUncommitted = writerEpochMarkersReceived.zipWithIndex.collect {
                case (flag, idx) if !flag => idx
              }
              logWarning(
                s"Completion service failed to make progress after $epochIntervalMs ms. Waiting " +
                  s"for writers $writerIdsUncommitted to send epoch markers.")
            // The completion service guarantees this future will be available immediately.
            case future => future.get() match {
              case ReceiverRow(writerId, r) =>
                // Start reading the next element in the queue we just took from.
                completion.submit(completionTask(writerId))
                nextRow = r
              case ReceiverEpochMarker(writerId) =>
                // Don't read any more from this queue. If all the writers have sent epoch markers,
                // the epoch is over; otherwise we need to loop again to poll from the remaining
                // writers.
                writerEpochMarkersReceived(writerId) = true
                if (writerEpochMarkersReceived.forall(_ == true)) {
                  finished = true
                }
            }
          }
        }
        nextRow
      }
      override def close(): Unit = {
        // Interrupts the blocked queue takes; the iterator cannot be reused.
        executor.shutdownNow()
      }
    }
  }
}
| bravo-zhang/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/continuous/shuffle/RPCContinuousShuffleReader.scala | Scala | apache-2.0 | 6,102 |
package cs.ucla.edu.bwaspark.worker1
import cs.ucla.edu.bwaspark.datatype._
import scala.collection.mutable.MutableList
import java.util.TreeSet
import java.util.Comparator
import cs.ucla.edu.bwaspark.worker1.MemChain._
import cs.ucla.edu.bwaspark.worker1.MemChainFilter._
import cs.ucla.edu.bwaspark.worker1.MemChainToAlignBatched._
import cs.ucla.edu.bwaspark.worker1.MemSortAndDedup._
import cs.ucla.edu.avro.fastq._
import cs.ucla.edu.bwaspark.debug.DebugFlag._
// Profiling
import cs.ucla.edu.bwaspark.profiling.SWBatchTimeBreakdown
//this standalone object defines the main job of BWA MEM:
//1)for each read, generate all the possible seed chains
//2)using SW algorithm to extend each chain to all possible aligns
object BWAMemWorker1Batched {
  //the function which do the main task
  /** Runs BWA-MEM worker stage 1 on a batch of reads: encodes each read,
    * generates and filters seed chains, computes Smith-Waterman pre-results,
    * extends chains to alignments in batch, then sorts/deduplicates the
    * resulting alignment regions (stored into `readRetArray`, currently not
    * returned). Returns only the timing breakdown of each phase.
    *
    * NOTE(review): the `if (true)`/`else { assert(false); null }` wrapper is a
    * leftover from a planned paired-end branch (see the commented-out
    * `opt.flag & MEM_F_PE` check) — the else branch is unreachable.
    */
  def bwaMemWorker1Batched(opt: MemOptType, //BWA MEM options
                           bwt: BWTType, //BWT and Suffix Array
                           bns: BNTSeqType, //.ann, .amb files
                           pac: Array[Byte], //.pac file uint8_t
                           pes: Array[MemPeStat], //pes array
                           seqArray: Array[FASTQRecord], //the batched reads
                           numOfReads: Int //the number of the batched reads
                             ): SWBatchTimeBreakdown = {
    //): Array[ReadType] = { //all possible alignments for all the reads
    //for paired alignment, to add
    //!!!to add!!!
    //for now, we only focus on single sequence alignment
    //if (!(opt.flag & MEM_F_PE)) {
    if (true) {
      // ***** PROFILING *****
      var profileData = new SWBatchTimeBreakdown
      val startTime = System.currentTimeMillis
      //pre-process: transform A/C/G/T to 0,1,2,3
      // '-' maps to 5 (gap) and any other character to 4 (unknown base).
      def locusEncode(locus: Char): Byte = {
        //transforming from A/C/G/T to 0,1,2,3
        locus match {
          case 'A' => 0
          case 'a' => 0
          case 'C' => 1
          case 'c' => 1
          case 'G' => 2
          case 'g' => 2
          case 'T' => 3
          case 't' => 3
          case '-' => 5
          case _ => 4
        }
      }
      //println(seq)
      //val read: Array[Byte] = seq.toCharArray.map(ele => locusEncode(ele))
      //val readArray: Array[Array[Byte]] = seqArray.map(ele => ele.toCharArray.map(locus => locusEncode(locus)))
      //val readArray = seqArray.map(ele => (new String(ele.getSeq.array)).toCharArray.map(locus => locusEncode(locus)))
      // Encoded bases and lengths for every read in the batch.
      val readArray = new Array[Array[Byte]](numOfReads)
      for (i <- 0 until numOfReads) readArray(i) = (new String(seqArray(i).getSeq.array)).toCharArray.map(locus => locusEncode(locus))
      //val lenArray = seqArray.map(ele => ele.getSeqLength.toInt)
      val lenArray = new Array[Int](numOfReads)
      for (i <- 0 until numOfReads) lenArray(i) = seqArray(i).getSeqLength.toInt
      //for (i <- 0 until numOfReads) {
      //  readArray(i).foreach(ele => {
      //    if(ele.toInt == 0) print('A')
      //    else if (ele.toInt == 1) print('C')
      //    else if (ele.toInt == 2) print('G')
      //    else if (ele.toInt == 3) print('T')
      //    else print('N')
      //  })
      //  println()
      //  println(lenArray(i))
      //}
      //first step: generate all possible MEM chains for this read
      //val chains = generateChains(opt, bwt, bns.l_pac, len, read)
      //val chainsArray = new Array[Array[MemChainType]](numOfReads)
      // Step 1+2 fused: generate seed chains per read and filter them immediately.
      val chainsFilteredArray = new Array[Array[MemChainType]](numOfReads)
      var i = 0;
      while (i < numOfReads) {
        chainsFilteredArray(i) = memChainFilter(opt, generateChains(opt, bwt, bns.l_pac, lenArray(i), readArray(i)))
        i = i+1;
      }
      // ***** PROFILING *******
      val generatedChainEndTime = System.currentTimeMillis
      profileData.generatedChainTime = generatedChainEndTime - startTime
      //second step: filter chains
      //val chainsFiltered = memChainFilter(opt, chains)
      //val chainsFilteredArray = chainsArray.map( ele => memChainFilter(opt, ele) )
      // Per-read result holders (regs filled in later stages).
      val readRetArray = new Array[ReadType](numOfReads)
      i = 0;
      while (i < numOfReads) {
        readRetArray(i) = new ReadType
        readRetArray(i).seq = seqArray(i)
        i = i+1
      }
      // Pre-compute SW inputs and size the alignment-region arrays by the
      // total seed count of each read's filtered chains.
      val preResultsOfSW = new Array[Array[SWPreResultType]](numOfReads)
      val numOfSeedsArray = new Array[Int](numOfReads)
      val regArrays = new Array[MemAlnRegArrayType](numOfReads)
      i = 0;
      while (i < numOfReads) {
        if (chainsFilteredArray(i) == null) {
          preResultsOfSW(i) = null
          numOfSeedsArray(i) = 0
          //regArrays(i) = new MemAlnRegArrayType
          //regArrays(i).maxLength = 0
          //regArrays(i).regs = null
          regArrays(i) = null
        }
        else {
          preResultsOfSW(i) = new Array[SWPreResultType](chainsFilteredArray(i).length)
          var j = 0;
          while (j < chainsFilteredArray(i).length) {
            preResultsOfSW(i)(j)= calPreResultsOfSW(opt, bns.l_pac, pac, lenArray(i), readArray(i), chainsFilteredArray(i)(j))
            j = j+1
          }
          numOfSeedsArray(i) = 0
          chainsFilteredArray(i).foreach(chain => {
            numOfSeedsArray(i) += chain.seeds.length
          } )
          if (debugLevel == 1) println("Finished the calculation of pre-results of Smith-Waterman")
          if (debugLevel == 1) println("The number of reads in this pack is: " + numOfReads)
          regArrays(i) = new MemAlnRegArrayType
          regArrays(i).maxLength = numOfSeedsArray(i)
          regArrays(i).regs = new Array[MemAlnRegType](numOfSeedsArray(i))
        }
        i = i+1;
      }
      if (debugLevel == 1) println("Finished the pre-processing part")
      // ***** PROFILING *******
      val filterChainEndTime = System.currentTimeMillis
      profileData.filterChainTime = filterChainEndTime - generatedChainEndTime
      //memChainToAlnBatched(opt, bns.l_pac, pac, lenArray, readArray, numOfReads, preResultsOfSW, chainsFilteredArray, regArrays)
      // Batched chain-to-alignment extension; fills regArrays in place.
      memChainToAlnBatched(opt, bns.l_pac, pac, lenArray, readArray, numOfReads, preResultsOfSW, chainsFilteredArray, regArrays, profileData)
      // ***** PROFILING *******
      val chainToAlnEndTime = System.currentTimeMillis
      profileData.chainToAlnTime = chainToAlnEndTime - filterChainEndTime
      if (debugLevel == 1) println("Finished the batched-processing part")
      // Drop null region slots, then sort and deduplicate per read.
      regArrays.foreach(ele => {if (ele != null) ele.regs = ele.regs.filter(r => (r != null))})
      regArrays.foreach(ele => {if (ele != null) ele.maxLength = ele.regs.length})
      i = 0;
      while (i < numOfReads) {
        if (regArrays(i) == null) readRetArray(i).regs = null
        else readRetArray(i).regs = memSortAndDedup(regArrays(i), opt.maskLevelRedun).regs
        i = i+1
      }
      //readRetArray
      // ***** PROFILING *******
      val sortAndDedupEndTime = System.currentTimeMillis
      profileData.sortAndDedupTime = sortAndDedupEndTime - chainToAlnEndTime
      profileData
    }
    else {
      assert (false)
      null
    }
  }
}
| peterpengwei/bwa-spark-fpga | src/main/scala/cs/ucla/edu/bwaspark/worker1/BWAMemWorker1Batched.scala | Scala | gpl-2.0 | 6,948 |
package io.getquill
import io.getquill.context.cassandra.CassandraContext
import io.getquill.context.cassandra.CqlIdiom
// Mirror context variant with query probing enabled.
// NOTE(review): `CassandraMirrorContext` is declared with a `Naming` type
// parameter below but is extended here without a type argument — verify this
// matches the intended version of the context hierarchy.
class CassandraMirrorContextWithQueryProbing extends CassandraMirrorContext with QueryProbing
// Mirror (compile-check/testing) context for the Cassandra CQL dialect.
class CassandraMirrorContext[Naming <: NamingStrategy]
  extends MirrorContext[CqlIdiom, Naming] with CassandraContext[Naming]
package com.twitter.bijection.twitter_util
import scala.concurrent.ExecutionContext
import com.twitter.util.{FuturePool, Future, Try, Return, Throw, Promise}
/**
 * FuturePool adapter for ExecutionContext: runs each task on the given
 * Scala `ExecutionContext` and exposes the result as a Twitter `Future`.
 * Failures are reported to the context *and* surfaced on the returned future.
 *
 * @author
 *   Moses Nakamura
 */
class ScalaFuturePool(context: ExecutionContext) extends FuturePool {
  override def apply[A](f: => A): Future[A] = {
    val promise = Promise[A]()
    context.execute(new Runnable() {
      override def run(): Unit =
        Try(f) match {
          case Return(value) => promise.setValue(value)
          case Throw(error) => {
            context.reportFailure(error)
            promise.setException(error)
          }
        }
    })
    promise
  }
}
| twitter/bijection | bijection-util/src/main/scala/com/twitter/bijection/twitter_util/ScalaFuturePool.scala | Scala | apache-2.0 | 701 |
package uk.co.morleydev.zander.client.test.unit.data.map
import java.io.File
import uk.co.morleydev.zander.client.data.map.GetCachedSourceLocation
import uk.co.morleydev.zander.client.test.gen.GenModel
import uk.co.morleydev.zander.client.test.unit.UnitTest
class GetCacheSourceLocationTests extends UnitTest {
  describe("Given a cache root") {
    val cacheRoot = new File("some/cache/path")
    val getCachedSourceLocation = new GetCachedSourceLocation(cacheRoot)
    describe("When getting the cache location for source") {
      val project = GenModel.arg.genProject()
      val actual = getCachedSourceLocation(project)
      it("Then the expected cache location is returned") {
        // Source is cached under "<cacheRoot>/<project>/src".
        assert(actual == new File(cacheRoot, s"$project/src"))
      }
    }
  }
}
| MorleyDev/zander.client | src/test/scala/uk/co/morleydev/zander/client/test/unit/data/map/GetCacheSourceLocationTests.scala | Scala | mit | 798 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.internal.config
import java.util.concurrent.TimeUnit
import org.apache.spark.network.util.ByteUnit
// Registry of all `spark.ui.*` (and related ACL) configuration entries.
// Each entry records its key, documentation, the version it was added in,
// its type, and its default.
private[spark] object UI {

  val UI_SHOW_CONSOLE_PROGRESS = ConfigBuilder("spark.ui.showConsoleProgress")
    .doc("When true, show the progress bar in the console.")
    .version("1.2.1")
    .booleanConf
    .createWithDefault(false)

  val UI_CONSOLE_PROGRESS_UPDATE_INTERVAL =
    ConfigBuilder("spark.ui.consoleProgress.update.interval")
      .version("2.1.0")
      .timeConf(TimeUnit.MILLISECONDS)
      .createWithDefault(200)

  val UI_ENABLED = ConfigBuilder("spark.ui.enabled")
    .doc("Whether to run the web UI for the Spark application.")
    .version("1.1.1")
    .booleanConf
    .createWithDefault(true)

  val UI_PORT = ConfigBuilder("spark.ui.port")
    .doc("Port for your application's dashboard, which shows memory and workload data.")
    .version("0.7.0")
    .intConf
    .createWithDefault(4040)

  val UI_FILTERS = ConfigBuilder("spark.ui.filters")
    .doc("Comma separated list of filter class names to apply to the Spark Web UI.")
    .version("1.0.0")
    .stringConf
    .toSequence
    .createWithDefault(Nil)

  val UI_ALLOW_FRAMING_FROM = ConfigBuilder("spark.ui.allowFramingFrom")
    .version("1.6.0")
    .stringConf
    .createOptional

  // Reverse-proxy settings (master proxies worker/application UIs).
  val UI_REVERSE_PROXY = ConfigBuilder("spark.ui.reverseProxy")
    .doc("Enable running Spark Master as reverse proxy for worker and application UIs. " +
      "In this mode, Spark master will reverse proxy the worker and application UIs to enable " +
      "access without requiring direct access to their hosts. Use it with caution, as worker " +
      "and application UI will not be accessible directly, you will only be able to access them" +
      "through spark master/proxy public URL. This setting affects all the workers and " +
      "application UIs running in the cluster and must be set on all the workers, drivers " +
      " and masters.")
    .version("2.1.0")
    .booleanConf
    .createWithDefault(false)

  val UI_REVERSE_PROXY_URL = ConfigBuilder("spark.ui.reverseProxyUrl")
    .doc("This is the URL where your proxy is running. This URL is for proxy which is running " +
      "in front of Spark Master. This is useful when running proxy for authentication e.g. " +
      "OAuth proxy. Make sure this is a complete URL including scheme (http/https) and port to " +
      "reach your proxy.")
    .version("2.1.0")
    .stringConf
    .createOptional

  val UI_KILL_ENABLED = ConfigBuilder("spark.ui.killEnabled")
    .doc("Allows jobs and stages to be killed from the web UI.")
    .version("1.0.0")
    .booleanConf
    .createWithDefault(true)

  val UI_THREAD_DUMPS_ENABLED = ConfigBuilder("spark.ui.threadDumpsEnabled")
    .version("1.2.0")
    .booleanConf
    .createWithDefault(true)

  val UI_PROMETHEUS_ENABLED = ConfigBuilder("spark.ui.prometheus.enabled")
    .internal()
    .doc("Expose executor metrics at /metrics/executors/prometheus. " +
      "For master/worker/driver metrics, you need to configure `conf/metrics.properties`.")
    .version("3.0.0")
    .booleanConf
    .createWithDefault(false)

  // HTTP security-header settings.
  val UI_X_XSS_PROTECTION = ConfigBuilder("spark.ui.xXssProtection")
    .doc("Value for HTTP X-XSS-Protection response header")
    .version("2.3.0")
    .stringConf
    .createWithDefaultString("1; mode=block")

  val UI_X_CONTENT_TYPE_OPTIONS = ConfigBuilder("spark.ui.xContentTypeOptions.enabled")
    .doc("Set to 'true' for setting X-Content-Type-Options HTTP response header to 'nosniff'")
    .version("2.3.0")
    .booleanConf
    .createWithDefault(true)

  val UI_STRICT_TRANSPORT_SECURITY = ConfigBuilder("spark.ui.strictTransportSecurity")
    .doc("Value for HTTP Strict Transport Security Response Header")
    .version("2.3.0")
    .stringConf
    .createOptional

  val UI_REQUEST_HEADER_SIZE = ConfigBuilder("spark.ui.requestHeaderSize")
    .doc("Value for HTTP request header size in bytes.")
    .version("2.2.3")
    .bytesConf(ByteUnit.BYTE)
    .createWithDefaultString("8k")

  val UI_TIMELINE_TASKS_MAXIMUM = ConfigBuilder("spark.ui.timeline.tasks.maximum")
    .version("1.4.0")
    .intConf
    .createWithDefault(1000)

  // Access-control lists: view/modify/admin users and groups.
  val ACLS_ENABLE = ConfigBuilder("spark.acls.enable")
    .version("1.1.0")
    .booleanConf
    .createWithDefault(false)

  val UI_VIEW_ACLS = ConfigBuilder("spark.ui.view.acls")
    .version("1.0.0")
    .stringConf
    .toSequence
    .createWithDefault(Nil)

  val UI_VIEW_ACLS_GROUPS = ConfigBuilder("spark.ui.view.acls.groups")
    .version("2.0.0")
    .stringConf
    .toSequence
    .createWithDefault(Nil)

  val ADMIN_ACLS = ConfigBuilder("spark.admin.acls")
    .version("1.1.0")
    .stringConf
    .toSequence
    .createWithDefault(Nil)

  val ADMIN_ACLS_GROUPS = ConfigBuilder("spark.admin.acls.groups")
    .version("2.0.0")
    .stringConf
    .toSequence
    .createWithDefault(Nil)

  val MODIFY_ACLS = ConfigBuilder("spark.modify.acls")
    .version("1.1.0")
    .stringConf
    .toSequence
    .createWithDefault(Nil)

  val MODIFY_ACLS_GROUPS = ConfigBuilder("spark.modify.acls.groups")
    .version("2.0.0")
    .stringConf
    .toSequence
    .createWithDefault(Nil)

  val USER_GROUPS_MAPPING = ConfigBuilder("spark.user.groups.mapping")
    .version("2.0.0")
    .stringConf
    .createWithDefault("org.apache.spark.security.ShellBasedGroupsMappingProvider")

  val PROXY_REDIRECT_URI = ConfigBuilder("spark.ui.proxyRedirectUri")
    .doc("Proxy address to use when responding with HTTP redirects.")
    .version("3.0.0")
    .stringConf
    .createOptional

  val CUSTOM_EXECUTOR_LOG_URL = ConfigBuilder("spark.ui.custom.executor.log.url")
    .doc("Specifies custom spark executor log url for supporting external log service instead of " +
      "using cluster managers' application log urls in the Spark UI. Spark will support " +
      "some path variables via patterns which can vary on cluster manager. Please check the " +
      "documentation for your cluster manager to see which patterns are supported, if any. " +
      "This configuration replaces original log urls in event log, which will be also effective " +
      "when accessing the application on history server. The new log urls must be permanent, " +
      "otherwise you might have dead link for executor log urls.")
    .version("3.0.0")
    .stringConf
    .createOptional
}
| spark-test/spark | core/src/main/scala/org/apache/spark/internal/config/UI.scala | Scala | apache-2.0 | 7,178 |
package org.opensplice.mobile.dev.leader.event
import org.opensplice.mobile.dev.common.DAbstractionEvent
// Base type for leader-election notifications: `groupId` identifies the
// election group, `epoch` the election round the event belongs to.
abstract class LeaderElectionEvent(val groupId: String, val epoch: Int) extends DAbstractionEvent {
}
package com.arcusys.valamis.slide.service.export
import java.io.{ByteArrayInputStream, File, FileInputStream, InputStream}
import javax.servlet.ServletContext
import com.arcusys.learn.liferay.util.SearchEngineUtilHelper.{SearchContentFileCharset, SearchContentFileName}
import com.arcusys.valamis.content.model.QuestionType.QuestionType
import com.arcusys.valamis.content.model._
import com.arcusys.valamis.content.service.{PlainTextService, QuestionService}
import com.arcusys.valamis.file.service.FileService
import com.arcusys.valamis.lesson.generator.tincan.file.TinCanRevealJSPackageGeneratorContract
import com.arcusys.valamis.lesson.generator.tincan.file.html.TinCanQuestionViewGenerator
import com.arcusys.valamis.lrs.serializer.DateTimeSerializer
import com.arcusys.valamis.slide.model.{SlideElementModel, SlideEntityType, SlideModel}
import com.arcusys.valamis.slide.service.{SlideServiceContract, SlideSetServiceContract}
import com.arcusys.valamis.uri.model.{TincanURI, TincanURIType}
import com.arcusys.valamis.uri.service.TincanURIService
import com.arcusys.valamis.utils.ResourceReader
import com.arcusys.valamis.util.mustache.Mustache
import com.arcusys.valamis.util.serialization.JsonHelper._
import com.escalatesoft.subcut.inject.{BindingModule, Injectable}
import com.liferay.portal.kernel.util.HtmlUtil
import org.json4s.{DefaultFormats, Formats}
import scala.collection.mutable.ListBuffer
trait SlideSetPublisherContract {
  // Builds the TinCan (xAPI) lesson package for the given slide set and
  // returns the generated package file.
  def composeTinCanPackage(servletContext: ServletContext, slideSetId: Long, title: String, description: String): File
}
abstract class SlideSetPublisher(implicit val bindingModule: BindingModule)
extends Injectable
with SlideSetExportUtils
with SlideSetPublisherContract {
  // Injected collaborators: package generator, slide/question/content services.
  private val tinCanRevealJSPackageGenerator = inject[TinCanRevealJSPackageGeneratorContract]
  private lazy val slideService = inject[SlideServiceContract]
  private lazy val slideSetService = inject[SlideSetServiceContract]
  protected lazy val questionService = inject[QuestionService]
  protected lazy val plainTextService = inject[PlainTextService]
  protected lazy val fileService = inject[FileService]
  private val tincanQuestionViewGenerator = new TinCanQuestionViewGenerator
  private lazy val uriService = inject[TincanURIService]
  // Supplied by concrete subclasses; resolves webapp resources.
  def resourceReader: ResourceReader
  // JSON formats used when serializing slides to the package.
  implicit val jf: Formats = DefaultFormats + new SlidePropertiesSerializer + new SlideElementsPropertiesSerializer + DateTimeSerializer
  // Classpath resources (templates, common JS) are loaded via the generator's class loader.
  private val lessonGeneratorClassLoader = classOf[TinCanQuestionViewGenerator].getClassLoader
  private def getResourceInputStream(name: String) = lessonGeneratorClassLoader.getResourceAsStream(name)
private def flat[T](ls: List[T]): List[T] =
ls.flatten {
case ls: List[T] => flat(ls)
case value => List(value)
}
  // Mustache template for the generated lesson index.html (RevealJS shell).
  private lazy val indexTemplate = new Mustache(scala.io.Source.fromInputStream(getResourceInputStream("tincan/revealjs.html")).mkString)
def composeContentForSearchIndex(questions: List[(Question, Seq[Answer])], plaintexts: List[PlainText], slideElements: Seq[SlideElementModel]): String = {
val contentBuilder = new StringBuilder()
questions.foreach { case (q, _) =>
contentBuilder.append(q.text).append(" ")
}
plaintexts.foreach { pt =>
contentBuilder.append(pt.text).append(" ")
}
slideElements.foreach { el =>
contentBuilder.append(el.content).append(" ")
}
HtmlUtil.extractText(contentBuilder.toString())
}
  /** Builds the complete TinCan lesson package for a slide set: loads the
    * slides, resolves statement verb/category URIs, inlines the lesson-summary
    * template, renders the RevealJS index.html from the Mustache template, and
    * bundles it with all required JS/CSS/font/resource files.
    */
  override def composeTinCanPackage(servletContext: ServletContext, slideSetId: Long, title: String, description: String): File = {
    // Matches a text element that embeds the lesson-summary placeholder span.
    val lessonSummaryRegexStr = """.*<span.+id="lesson-summary-table".*>.*</span>.*"""
    val scriptRegex = "(?s)(<script>.*?</script>)".r
    val sectionRegex = "(?s)<section>(.*?)</section>".r
    val lessonSummaryTemplate = scala.io.Source.fromInputStream(getResourceInputStream("tincan/summary.html")).mkString
    // Collects plain text elements whose content goes into the search index.
    val slideElementsToIndex = new ListBuffer[SlideElementModel]()
    val slides = slideService.getBySlideSetId(slideSetId, Some(false)).map { slide =>
      // ADL-standard verbs are resolved inline; anything else via the URI service.
      val statementVerbWithName = slide.statementVerb
        .flatMap(x =>
          if (x.startsWith("http://adlnet.gov/expapi/verbs/"))
            Some(TincanURI(x, x, TincanURIType.Verb, x.reverse.takeWhile(_ != '/').reverse))
          else
            uriService.getById(x, TincanURIType.Verb))
        .map(x => x.uri + "/" + x.content)
      val statementCategoryWithName = slide.statementCategoryId
        .flatMap(uriService.getById(_, TincanURIType.Category))
        .map(x => x.uri + "/" + x.content)
      // Body of the summary template's first <section>, spliced into
      // text elements that carry the lesson-summary placeholder.
      val lessonSummaryHTML = sectionRegex
        .findFirstMatchIn(lessonSummaryTemplate)
        .map(_.group(1))
        .getOrElse("")
      val slideElements = slide.slideElements
        .map { slideElement =>
          slideElement.slideEntityType match {
            case SlideEntityType.Text if slideElement.content.matches(lessonSummaryRegexStr) =>
              SlideElementModel(
                slideElement.id,
                slideElement.zIndex,
                slideElement.content.replaceFirst(lessonSummaryRegexStr, lessonSummaryHTML),
                slideElement.slideEntityType,
                slideElement.slideId,
                slideElement.correctLinkedSlideId,
                slideElement.incorrectLinkedSlideId,
                slideElement.notifyCorrectAnswer,
                slideElement.properties
              )
            case SlideEntityType.Text =>
              slideElementsToIndex += slideElement
              slideElement
            case _ => slideElement
          }
        }
      // Rebuild the slide with resolved verb/category names and patched elements.
      SlideModel(slide.id,
        slide.title,
        slide.bgColor,
        slide.bgImage,
        slide.font,
        slide.questionFont,
        slide.answerFont,
        slide.answerBg,
        slide.duration,
        slide.leftSlideId,
        slide.topSlideId,
        slideElements,
        slide.slideSetId,
        statementVerbWithName,
        slide.statementObject,
        statementCategoryWithName,
        slide.isTemplate,
        slide.isLessonSummary,
        slide.playerTitle,
        slide.properties)
    }
    val lessonSummarySlidesCount = slides.filter(_.isLessonSummary)
    // Extra vendor JS bundles are only included for element types that need them.
    val slideTypes = slides.map(_.slideElements).flatMap(x => x.map(_.slideEntityType)).distinct
    val additionalJSFileNames = slideTypes.collect {
      case SlideEntityType.Video => PublisherFileLists.videoVendorJSFileNames
      case SlideEntityType.Math => PublisherFileLists.mathVendorJSFileNames
      case SlideEntityType.Webgl => PublisherFileLists.webglVendorJSFileNames
    } toList
    // NOTE(review): `URI` is never read below — presumably kept for the
    // side effect of registering a course URI via createRandom; confirm.
    val URI = {
      val uriContent = Option(Map("title" -> title, "description" -> description).toJson)
      uriService.createRandom(TincanURIType.Course, uriContent)
    }
    val slideSet = slideSetService.getById(slideSetId)
    // Pause/resume is only offered when no random questions are present.
    val isSelectedContinuity =
      !slides.flatMap(_.slideElements)
        .map(_.slideEntityType)
        .contains("randomquestion") &&
        slideSet
        .exists(_.isSelectedContinuity)
    val (questionsMap, questions, plaintexts) = getQuestionsInfo(slides)
    val contentToIndex = composeContentForSearchIndex(questions, plaintexts, slideElementsToIndex.toList)
    // Model rendered into the Mustache index template.
    val indexPageModel = Map(
      "title" -> title,
      "slidesJson" -> slides.toJson,
      "isSlideJsonAvailable" -> true,
      "includeVendorFiles" -> flat(additionalJSFileNames :: PublisherFileLists.vendorJSFileNames).map(fileName => "js/" + fileName),
      "includeCommonFiles" -> PublisherFileLists.commonJSFileNames.map(fileName => "js/" + fileName),
      "includeFiles" -> PublisherFileLists.slideSetJSFileNames.map(fileName => "js/" + fileName),
      "includeCSS" -> PublisherFileLists.slideSetCSSFileNames.map(fileName => "css/" + fileName),
      "includeFonts" -> PublisherFileLists.fontsFileNames.map(fileName => "fonts/" + fileName),
      "rootActivityId" -> slideSet.get.activityId,
      "scoreLimit" -> slideSet.get.scoreLimit.getOrElse(0.7),
      "canPause" -> isSelectedContinuity,
      "duration" -> slideSet.get.duration.getOrElse(0L),
      "playerTitle" -> slideSet.get.playerTitle,
      "version" -> slideSet.get.version,
      "oneAnswerAttempt" -> slideSet.get.oneAnswerAttempt,
      "modifiedDate" -> slideSet.get.modifiedDate
    ) ++ questionsMap
    val index = new ByteArrayInputStream(indexTemplate.render(
      if (lessonSummarySlidesCount.nonEmpty)
        indexPageModel ++ Map(
          "lessonSummaryScript" -> scriptRegex
            .findFirstMatchIn(lessonSummaryTemplate)
            .map(_.group(1))
            .getOrElse(""))
      else indexPageModel
    ).getBytes)
    // Everything the package contains: search-index file, index.html,
    // slide media, vendor/common JS, pdf resources, fonts and CSS.
    val filesToAdd: List[(String, InputStream)] =
      (SearchContentFileName -> new ByteArrayInputStream(contentToIndex.getBytes(SearchContentFileCharset))) ::
      ("index.html" -> index) ::
      getRequiredFiles(slides) :::
      flat(additionalJSFileNames ::: PublisherFileLists.vendorJSFileNames).map(fileName => "js/" + fileName -> resourceReader.getResourceAsStream(servletContext, "js2.0/vendor/" + fileName)) :::
      PublisherFileLists.commonJSFileNames.map(fileName => "js/" + fileName -> getResourceInputStream("common/" + fileName)) :::
      PublisherFileLists.previewResourceFiles.map(fileName => "pdf/" + fileName -> resourceReader.getResourceAsStream(servletContext, "preview-resources/pdf/" + fileName)) :::
      PublisherFileLists.fontsFileNames.map(fileName => "fonts/" + fileName -> resourceReader.getResourceAsStream(servletContext, "fonts/" + fileName)) :::
      PublisherFileLists.slideSetJSFileNames.map(fileName => "js/" + fileName -> resourceReader.getResourceAsStream(servletContext, "js2.0/" + fileName)) :::
      PublisherFileLists.slideSetCSSFileNames.map(fileName => "css/" + fileName -> resourceReader.getResourceAsStream(servletContext, "css2.0/" + fileName))
    tinCanRevealJSPackageGenerator.composePackage(omitFileDuplicates(filesToAdd), slideSet.get.activityId, title, description)
  }
  /** Gathers all question / plain-text data needed by the slide player.
    *
    * Walks slide elements of type "question"/"plaintext" (content holds the entity id)
    * and "randomquestion" (content holds a pool of candidate ids), rendering each into
    * a JSON view model, an extracted inline <script>, and a <script type='text/html'>
    * markup template whose id encodes the entity id and the slide-element id.
    *
    * @param slides slides whose elements reference questions/plain texts by id
    * @return (template model map with JSON payloads, scripts and markup templates;
    *         the questions with answers used by the slides; the plain texts used)
    */
  private def getQuestionsInfo(slides: List[SlideModel]): (Map[String, Any], List[(Question, Seq[Answer])], List[PlainText]) = {
    // View-model accumulators; getQuestionHTML/getPlainTextHTML append to them as a
    // side effect, so the traversal order below defines the order in the JSON output.
    val questionsList = new ListBuffer[Map[String, Any]]()
    val plaintextsList = new ListBuffer[Map[String, Any]]()
    val randomQuestionsList = new ListBuffer[Map[String, Any]]()
    val randomPlainTextList = new ListBuffer[Map[String, Any]]()
    val questions = getRequiredQuestions(slides)
    val plaintexts = getRequiredPlainTexts(slides)
    val randomQuestion = getRandomQuestions(slides)
    val randomPlainText = getRandomPlainText(slides)
    // Elements with a fixed question/plain-text reference.
    val slidesQuestions = slides.flatMap { slide =>
      slide.slideElements.filter { e => e.slideEntityType == "question" || e.slideEntityType == "plaintext" }
    }
    // Elements that draw from a random pool (content lists the candidates).
    val slidesRandomQuestions = slides.flatMap { slide =>
      slide.slideElements.filter(e => e.slideEntityType == "randomquestion" && e.content.nonEmpty)
    }
    val questionScripts = new ListBuffer[Option[String]]()
    val questionMarkupTemplates = new ListBuffer[Option[String]]()
    slidesQuestions.filter(_.content.nonEmpty).foreach { slideQuestion =>
      if (slideQuestion.slideEntityType == "plaintext") {
        // content stores the referenced plain-text id as a string
        plaintexts.find(_.id.contains(slideQuestion.content.toLong)).foreach { plainText =>
          val questionHTML = getPlainTextHTML(plainText, slideQuestion, plaintextsList)
          questionScripts += getQuestionScript(questionHTML)
          val questionMarkup = getQuestionSection(questionHTML).getOrElse("")
          questionMarkupTemplates +=
            Some("<script type='text/html' id='" +
              "PlainTextTemplate" + plainText.id.get + "_" + slideQuestion.id.get + "'>" +
              questionMarkup + "</script>")
        }
      } else {
        questions.find(_._1.id.contains(slideQuestion.content.toLong)).foreach { item =>
          val (question, answers) = item
          val questionHTML = getQuestionHTML(question, answers, slideQuestion, questionsList)
          questionScripts += getQuestionScript(questionHTML)
          val questionMarkup = getQuestionSection(questionHTML).getOrElse("")
          questionMarkupTemplates +=
            Some("<script type='text/html' id='" +
              getQuestionTypeString(question.questionType) + "Template" + question.id.get + "_" + slideQuestion.id.get + "'>" +
              questionMarkup + "</script>")
        }
      }
    }
    // Random elements get a template for EVERY pool candidate ("...TemplateRandom..."
    // ids), since the actual question is only chosen at play time.
    slidesRandomQuestions.foreach { slide =>
      randomQuestion.foreach { item =>
        val (question, answers) = item
        val questionHTML = getQuestionHTML(question, answers, slide, randomQuestionsList)
        questionScripts += getQuestionScript(questionHTML)
        val questionMarkup = getQuestionSection(questionHTML).getOrElse("")
        questionMarkupTemplates +=
          Some("<script type='text/html' id='" +
            getQuestionTypeString(question.questionType) + "TemplateRandom" + question.id.get + "_" + slide.id.get + "'>" +
            questionMarkup + "</script>")
      }
      randomPlainText.foreach { item =>
        val questionHTML = getPlainTextHTML(item, slide, randomPlainTextList)
        questionScripts += getQuestionScript(questionHTML)
        val questionMarkup = getQuestionSection(questionHTML).getOrElse("")
        questionMarkupTemplates +=
          Some("<script type='text/html' id='" +
            "PlainTextTemplateRandom" + item.id.get + "_" + slide.id.get + "'>" +
            questionMarkup + "</script>")
      }
    }
    (Map(
      "questionsJson" -> questionsList.toList.toJson,
      "plaintextsJson" -> plaintextsList.toList.toJson,
      "randomQuestionJson" -> randomQuestionsList.toList.toJson,
      "randomPlaintextJson" -> randomPlainTextList.toList.toJson,
      "questionScripts" -> questionScripts.toList,
      "questionMarkupTemplates" -> questionMarkupTemplates.toList
    ), questions, plaintexts)
  }
private def getQuestionScript(questionHTML: String): Option[String] = {
val scriptRegex = "(?s)(<script.*?>.*?</script>)".r
scriptRegex.findFirstMatchIn(questionHTML).map(_.group(1))
}
private def getQuestionSection(questionHTML: String): Option[String] = {
val sectionRegex = "(?s)<section.*?>(.*?)</section>".r
sectionRegex.findFirstMatchIn(questionHTML).map(_.group(1))
}
private def getQuestionHTML(question: Question,
answers: Seq[Answer],
slide: SlideElementModel,
questionsList: ListBuffer[Map[String, Any]]): String = {
val autoShowAnswer = slide.notifyCorrectAnswer.getOrElse(false)
questionsList +=
tincanQuestionViewGenerator.getViewModelFromQuestion(
question,
answers,
autoShowAnswer,
slide.id.get
) + ("questionType" -> question.questionType.id)
tincanQuestionViewGenerator.getHTMLByQuestionId(
question,
answers,
autoShowAnswer,
slide.id.get)
}
private def getPlainTextHTML(plainText: PlainText,
slide: SlideElementModel,
plainTextList: ListBuffer[Map[String, Any]]): String = {
val model = tincanQuestionViewGenerator.getViewModelFromPlainText(
plainText,
slide.id.get
) + ("questionType" -> 8)
plainTextList += model
tincanQuestionViewGenerator.getHTMLForPlainText(model)
}
//TODO: remove comments with template files
private def getQuestionTypeString(questionType: QuestionType) =
questionType match {
case QuestionType.Choice => "ChoiceQuestion"
case QuestionType.Text => "ShortAnswerQuestion"
case QuestionType.Numeric => "NumericQuestion"
case QuestionType.Positioning => "PositioningQuestion"
case QuestionType.Matching => "MatchingQuestion"
case QuestionType.Essay => "EssayQuestion"
//case 6 => "EmbeddedAnswerQuestion"
case QuestionType.Categorization => "CategorizationQuestion"
//case 8 => "PlainText"
//case 9 => "PurePlainText"
case _ => ""
}
private def getRandomQuestions(slides: List[SlideModel]): List[(Question, Seq[Answer])] = {
getRandomQuestionIds(slides)
.filter(_ startsWith "q_")
.map(x => questionService.getWithAnswers(getRandomQuestionId(x)))
}
private def getRandomPlainText(slides: List[SlideModel]): List[PlainText] = {
getRandomQuestionIds(slides)
.filter(_ startsWith "t_")
.map(x => plainTextService.getById(getRandomQuestionId(x)))
}
private def getRandomQuestionIds(slides: List[SlideModel]): List[String] = {
slides.flatMap(slide =>
slide.slideElements
.filter(_.slideEntityType == com.arcusys.valamis.slide.model.SlideEntityType.RandomQuestion)
.filter(_.content != "")
.flatMap(x => x.content.split(",").map(_.trim))
).distinct
}
private def getRandomQuestionId(id: String): Long = {
val index = id.indexOf("_") + 1
id.substring(index).toLong
}
private def filesFromDirectory(dirPaths: List[String], dirName: Option[String] = None, isRecursive: Boolean = false): List[(String, FileInputStream)] = {
var fileList: List[(String, FileInputStream)] = Nil
dirPaths.foreach { dirPath =>
val fileName = new File(dirPath).getName
fileList = listFilesForFolder(dirName.getOrElse(fileName), new File(dirPath), isRecursive) ++ fileList
}
fileList
}
private def listFilesForFolder(prefix: String, folder: File, isRecursive: Boolean): List[(String, FileInputStream)] = {
var fileList: List[(String, FileInputStream)] = Nil
folder.listFiles.foreach { fileEntry =>
if (isRecursive) {
if (fileEntry.isDirectory)
fileList = listFilesForFolder(prefix + "/" + fileEntry.getName, fileEntry, isRecursive) ++ fileList
else fileList = ((prefix + "/" + fileEntry.getName) -> new FileInputStream(fileEntry)) :: fileList
} else if (!fileEntry.isDirectory) fileList = ((prefix + fileEntry.getName) -> new FileInputStream(fileEntry)) :: fileList
}
fileList
}
} | igor-borisov/JSCORM | valamis-slide/src/main/scala/com/arcusys/valamis/slide/service/export/SlideSetPublisher.scala | Scala | gpl-3.0 | 18,029 |
package com.seanshubin.utility.filesystem
import org.scalatest.FunSuite
import scala.collection.mutable.ArrayBuffer
/** Unit tests for the Branches/Tree pair: construction helpers, `add` semantics
  * (shallow, deep, branching, idempotent extension), depth-first `traverse`
  * ordering, and `remove` of a nested branch. */
class BranchesTest extends FunSuite {
  import FileSystemIntegrationFake._
  // -- Tree.createBranches ---------------------------------------------------
  test("create with one named branch") {
    val actual = Tree.createBranches("foo")
    val expected = Branches(List(Tree("foo", Branches.Empty)))
    assert(expected === actual)
  }
  test("create with many named branches named branch") {
    val actual = Tree.createBranches("foo", "bar", "baz")
    val expected = Branches(List(Tree("foo", Branches.Empty), Tree("bar", Branches.Empty), Tree("baz", Branches.Empty)))
    assert(expected === actual)
  }
  // -- Branches.add: each varargs list is one path from the root --------------
  test("add nothing") {
    val expected = List()
    val actual = Branches.Empty.add().trees
    assert(actual === expected)
  }
  test("simple add") {
    val expected = List(Tree("aaa", Branches.Empty))
    val actual = Branches.Empty.add("aaa").trees
    assert(actual === expected)
  }
  test("add deep") {
    val expected = List(Tree("aaa", Branches(List(Tree("bbb", Branches.Empty)))))
    val actual = Branches.Empty.add("aaa", "bbb").trees
    assert(actual === expected)
  }
  // adding a prefix first then the full path must yield the same tree as one deep add
  test("add deep in 2 steps") {
    val expected = List(Tree("aaa", Branches(List(Tree("bbb", Branches.Empty)))))
    val actual = Branches.Empty.add("aaa").add("aaa", "bbb").trees
    assert(actual === expected)
  }
  test("add shallow") {
    val expected = List(Tree("aaa", Branches.Empty), Tree("bbb", Branches.Empty))
    val actual = Branches.Empty.add("aaa").add("bbb").trees
    assert(actual === expected)
  }
  test("branch out") {
    val expected = List(Tree("aaa", Branches(List(Tree("bbb", Branches.Empty), Tree("ccc", Branches.Empty)))))
    val actual = Branches.Empty.add("aaa", "bbb").add("aaa", "ccc").trees
    assert(actual === expected)
  }
  test("branch out three ways") {
    val expected = List(Tree("aaa", Branches(List(Tree("bbb", Branches.Empty), Tree("ccc", Branches.Empty), Tree("ddd", Branches.Empty)))))
    val actual = Branches.Empty.add("aaa", "bbb").add("aaa", "ccc").add("aaa", "ddd").trees
    assert(actual === expected)
  }
  test("add two levels deep after something exists") {
    val aTree = Tree("aaa", Branches.Empty)
    val iTree = Tree("iii", Tree.createBranches("jjj"))
    val expected = List(aTree, iTree)
    val actual = Branches.Empty.
      add("aaa").
      add("iii", "jjj").trees
    assert(actual === expected)
  }
  test("one level deep") {
    val aTree = Tree("aaa", Tree.createBranches("bbb", "ccc", "ddd"))
    val eTree = Tree("eee", Tree.createBranches("fff", "ggg", "hhh"))
    val iTree = Tree("iii", Tree.createBranches("jjj", "kkk", "lll"))
    val expected = List(aTree, eTree, iTree)
    val actual = Branches.Empty.
      add("aaa", "bbb").
      add("aaa", "ccc").
      add("aaa", "ddd").
      add("eee", "fff").
      add("eee", "ggg").
      add("eee", "hhh").
      add("iii", "jjj").
      add("iii", "kkk").
      add("iii", "lll").trees
    assert(actual === expected)
  }
  // -- traverse: depth-first, before/after callbacks bracket every node --------
  test("traverse") {
    val aTree = Tree("aaa", Tree.createBranches("bbb", "ccc"))
    val eTree = Tree("eee", Tree.createBranches("fff", "ggg"))
    val branches = Branches(List(aTree, eTree))
    val expected = Seq(
      "before aaa",
      "before aaa bbb",
      "after aaa bbb",
      "before aaa ccc",
      "after aaa ccc",
      "after aaa",
      "before eee",
      "before eee fff",
      "after eee fff",
      "before eee ggg",
      "after eee ggg",
      "after eee"
    )
    val actual: ArrayBuffer[String] = new ArrayBuffer()
    // The visitor keeps the current path as a stack (most-recent name first)
    // so each callback can record the full space-separated path.
    branches.traverse(new TreeVisitor {
      private var pathParts: List[String] = Nil
      private def path: String = pathParts.reverse.mkString(" ")
      override def before(name: String): Unit = {
        pathParts = name :: pathParts
        actual.append(s"before $path")
      }
      override def after(name: String): Unit = {
        actual.append(s"after $path")
        pathParts = pathParts.tail
      }
    })
    assert(actual === expected)
  }
  // -- remove: deleting one leaf path leaves sibling trees untouched -----------
  test("remove") {
    val aTree = Tree("aaa", Tree.createBranches("bbb", "ccc", "ddd"))
    val eTreeBefore = Tree("eee", Tree.createBranches("fff", "ggg", "hhh"))
    val eTreeAfter = Tree("eee", Tree.createBranches("fff", "hhh"))
    val iTree = Tree("iii", Tree.createBranches("jjj", "kkk", "lll"))
    val branches: Branches = Branches(List(aTree, eTreeBefore, iTree))
    val actual = branches.remove(Seq("eee", "ggg"))
    val expected = Branches(List(aTree, eTreeAfter, iTree))
    assert(actual === expected)
  }
}
| SeanShubin/utility | file-system/src/test/scala/com/seanshubin/utility/filesystem/BranchesTest.scala | Scala | unlicense | 4,540 |
/**
* Copyright (c) 2002-2012 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.mutation
import org.neo4j.cypher.internal.symbols.{CypherType, SymbolTable}
import org.neo4j.cypher.internal.pipes.{QueryState, ExecutionContext}
import org.neo4j.helpers.ThisShouldNotHappenError
import org.neo4j.cypher.internal.commands.{Mutator, StartItem}
import org.neo4j.cypher.UniquePathNotUniqueException
import org.neo4j.graphdb.{Lock, PropertyContainer}
import org.neo4j.cypher.internal.commands.expressions.Expression
/** CREATE UNIQUE: resolves the given unique links against the graph, binding
  * pattern identifiers to existing elements where possible and creating the
  * missing parts (under locks) where not, while guaranteeing the resulting
  * path is unique. */
case class CreateUniqueAction(incomingLinks: UniqueLink*) extends StartItem("noooes") with Mutator with UpdateAction {
  // Loop until every link has been resolved: Traverse results bind existing
  // elements, Update results create missing ones after locking, and a round
  // where no link can advance at all is an unbound pattern (error).
  def exec(context: ExecutionContext, state: QueryState): Traversable[ExecutionContext] = {
    var linksToDo: Seq[UniqueLink] = links
    var ctx = context
    while (linksToDo.nonEmpty) {
      val results: Seq[(UniqueLink, CreateUniqueResult)] = executeAllRemainingPatterns(linksToDo, ctx, state)
      linksToDo = results.map(_._1)
      val updateCommands = extractUpdateCommands(results)
      val traversals = extractTraversals(results)
      if (results.isEmpty) {
        // NOTE(review): this Stream(ctx) value is discarded inside the while loop;
        // the loop actually terminates because linksToDo became empty above.
        Stream(ctx) //We're done
      } else if (canNotAdvanced(results)) {
        throw new Exception("Unbound pattern!") //None of the patterns can advance. Fail.
      } else if (traversals.nonEmpty) {
        ctx = traverseNextStep(traversals, ctx) //We've found some way to move forward. Let's use it
      } else if (updateCommands.nonEmpty) {
        val locks = updateCommands.flatMap(_.lock()) //Failed to find a way forward - lock stuff up, and check again
        try {
          ctx = tryAgain(linksToDo, ctx, state)
        } finally {
          // always release locks, even if the retry threw
          locks.foreach(_.release())
        }
      } else {
        throw new ThisShouldNotHappenError("Andres", "There was something in that result list I don't know how to handle.")
      }
    }
    Stream(ctx)
  }
  /**
   * Here we take the incoming links and prepare them to be used, by making sure that
   * no named expectations contradict each other
   */
  val links:Seq[UniqueLink] = {
    val nodesWithProperties: Seq[NamedExpectation] = incomingLinks.flatMap(_.nodesWProps)
    nodesWithProperties.foldLeft(incomingLinks) {
      case (bunchOfLinks, nodeExpectation) => bunchOfLinks.map(link => link.expect(nodeExpectation))
    }
  }
  // Re-check the remaining links while holding the locks: another transaction may
  // have created the missing elements in the meantime (then we traverse), otherwise
  // we run the update commands ourselves.
  private def tryAgain(linksToDo: Seq[UniqueLink], context: ExecutionContext, state: QueryState): ExecutionContext = {
    val results: Seq[(UniqueLink, CreateUniqueResult)] = executeAllRemainingPatterns(linksToDo, context, state)
    val updateCommands = extractUpdateCommands(results)
    val traversals = extractTraversals(results)
    if (results.isEmpty) {
      throw new ThisShouldNotHappenError("Andres", "Second check should never return empty result set")
    } else if (canNotAdvanced(results)) {
      throw new ThisShouldNotHappenError("Andres", "Second check should never fail to move forward")
    } else if (traversals.nonEmpty) {
      traverseNextStep(traversals, context) //Ah, so this time we did find a traversal way forward. Great!
    } else if (updateCommands.nonEmpty) {
      runUpdateCommands(updateCommands.flatMap(_.cmds), context, state) //If we still can't find a way forward,
    } else {                                                            // let's build one
      throw new ThisShouldNotHappenError("Andres", "There was something in that result list I don't know how to handle.")
    }
  }
  // One graph element found for an identifier by one particular link.
  case class TraverseResult(identifier: String, element: PropertyContainer, link: UniqueLink)
  // Binds the found elements into the context; if two links found DIFFERENT
  // elements for the same identifier the path is not unique, so we fail.
  private def traverseNextStep(nextSteps: Seq[TraverseResult], oldContext: ExecutionContext): ExecutionContext = {
    val uniqueKVPs = nextSteps.map(x => x.identifier -> x.element).distinct
    val uniqueKeys = uniqueKVPs.toMap
    if (uniqueKeys.size != uniqueKVPs.size) {
      fail(nextSteps)
    } else {
      oldContext.newWith(uniqueKeys)
    }
  }
  private def fail(nextSteps: Seq[TraverseResult]): Nothing = {
    //We can only go forward following a unique path. Fail.
    val problemResultsByIdentifier: Map[String, Seq[TraverseResult]] = nextSteps.groupBy(_.identifier).
      filter(_._2.size > 1)
    val message = problemResultsByIdentifier.map {
      case (identifier, links: Seq[TraverseResult]) =>
        val hits = links.map(result => "%s found by : %s".format(result.element, result.link))
        "Nodes for identifier: `%s` were found with differing values by these pattern relationships: %s".format(identifier, hits.mkString("\\n  ", "\\n  ", "\\n"))
    }
    throw new UniquePathNotUniqueException(message.mkString("CREATE UNIQUE error\\n", "\\n", "\\n"))
  }
  // Runs the creation commands in dependency order: a command may only run once
  // the identifiers it needs are bound; each identifier is created at most once.
  private def runUpdateCommands(cmds: Seq[UpdateWrapper], oldContext: ExecutionContext, state: QueryState): ExecutionContext = {
    var context = oldContext
    var todo = cmds.distinct
    var done = Seq[String]()
    while (todo.nonEmpty) {
      val (unfiltered, temp) = todo.partition(_.canRun(context))
      todo = temp
      val current = unfiltered.filterNot(cmd => done.contains(cmd.cmd.identifierName))
      done = done ++ current.map(_.cmd.identifierName)
      context = current.foldLeft(context) {
        case (currentContext, updateCommand) => {
          val result = updateCommand.cmd.exec(currentContext, state)
          if (result.size != 1) {
            throw new UniquePathNotUniqueException("The pattern " + this + " produced multiple possible paths, and that is not allowed")
          } else {
            result.head
          }
        }
      }
    }
    context
  }
  private def extractUpdateCommands(results: scala.Seq[(UniqueLink, CreateUniqueResult)]): Seq[Update] =
    results.flatMap {
      case (_, u: Update) => Some(u)
      case _ => None
    }
  private def extractTraversals(results: scala.Seq[(UniqueLink, CreateUniqueResult)]): Seq[TraverseResult] =
    results.flatMap {
      case (link, Traverse(ctx@_*)) => ctx.map {
        case (key, element) => TraverseResult(key, element, link)
      }
      case _ => None
    }
  // Ask every unresolved link whether it can advance under the current bindings.
  private def executeAllRemainingPatterns(linksToDo: Seq[UniqueLink], ctx: ExecutionContext, state: QueryState): Seq[(UniqueLink, CreateUniqueResult)] = linksToDo.flatMap(link => link.exec(ctx, state))
  // True when no link made any progress this round.
  private def canNotAdvanced(results: scala.Seq[(UniqueLink, CreateUniqueResult)]) = results.forall(_._2 == CanNotAdvance())
  def filter(f: (Expression) => Boolean): Seq[Expression] = links.flatMap(_.filter(f)).distinct
  def identifiers: Seq[(String,CypherType)] = links.flatMap(_.identifier2).distinct
  def rewrite(f: (Expression) => Expression): UpdateAction = CreateUniqueAction(links.map(_.rewrite(f)): _*)
  def assertTypes(symbols: SymbolTable) {links.foreach(l=>l.assertTypes(symbols))}
  def symbolTableDependencies = links.flatMap(_.symbolTableDependencies).toSet
}
// Outcome of attempting to advance a single UniqueLink.
sealed abstract class CreateUniqueResult
// The link cannot move forward with the identifiers bound so far.
case class CanNotAdvance() extends CreateUniqueResult
// The link matched existing graph elements: bind them to the given identifiers.
case class Traverse(result: (String, PropertyContainer)*) extends CreateUniqueResult
// Nothing matched: the wrapped commands can create the missing elements once
// the locks obtained via `locker` are held.
case class Update(cmds: Seq[UpdateWrapper], locker: () => Seq[Lock]) extends CreateUniqueResult {
  def lock(): Seq[Lock] = locker()
}
/** A creation command plus the identifiers that must already be bound before it may run. */
case class UpdateWrapper(needs: Seq[String], cmd: StartItem with UpdateAction) {
  def canRun(context: ExecutionContext) = {
    // keySet stays lazy: it is never computed when `needs` is empty
    lazy val boundIdentifiers = context.keySet
    needs.forall(boundIdentifiers.contains)
  }
}
| dksaputra/community | cypher/src/main/scala/org/neo4j/cypher/internal/mutation/CreateUniqueAction.scala | Scala | gpl-3.0 | 8,111 |
package com.github.luzhuomi.regex.deriv.diagnosis
import scala.sys._
import scala.collection.Map._
import com.github.luzhuomi.regex.deriv.RE._
import com.github.luzhuomi.regex.deriv.Common._
import com.github.luzhuomi.regex.deriv.Parse._
object Ambiguity
{
  // Parse trees (match witnesses) for regular expressions.
  sealed trait U
  // placeholder witness (no match)
  case object NilU extends U
  // witness of an empty-word match (Eps)
  case object EmptyU extends U
  // witness of a single-letter match
  case class LetterU(c:Char) extends U
  // witness of the i-th alternative of a choice
  case class AltU(i:Int,u:U) extends U
  // witness of a sequence match (left part, right part)
  case class PairU(u:U,v:U) extends U
  // witness of a Kleene-star match: one sub-witness per iteration
  case class ListU(us:List[U]) extends U
def flatU(u:U):String = u match
{
case NilU => ""
case EmptyU => ""
case LetterU(c) => c.toString
case AltU(i,u) => flatU(u)
case PairU(u1,u2) => flatU(u1)+flatU(u2)
case ListU(us) => us.map(flatU).foldLeft("")( (s,t) => s ++ t)
}
  // r is nullable iff it accepts the empty word (delegated to the PosEps type class).
  def nullable(r:RE)(implicit m:PosEps[RE]):Boolean = m.posEps(r)
  // r is phi iff it accepts no word at all (delegated to the IsPhi type class).
  def isPhi(r:RE)(implicit m:IsPhi[RE]):Boolean = m.isPhi(r)
  // derivative operation, the additional boolean reports whether the rule A2 arises
  // (A2: deriving Seq(r1,r2) where the nullable r1 has several empty parse trees
  // makes the result ambiguous - see testAmbigCase1).
  def deriv2(r:RE,l:Char):(RE,Boolean) = r match
  {
    case Phi => (Phi,false)
    case Eps => (Phi,false)
    case L(c) if l == c => (Eps,false)
    case L(c) => (Phi,false)
    case Any => (Eps,false)
    case Not(cs) if !cs.contains(l) => (Eps, false)
    case Not(cs) => (Phi, false)
    case Choice(rs,gf) =>
    {
      // derive every alternative; A2 arises if it arises in any of them
      val rbs = rs map (r => deriv2(r,l))
      val (rs1, bs) = rbs.unzip
      (Choice(rs1,gf), bs.exists(b => b))
    }
    case Seq(r1,r2) if nullable(r1) =>
    {
      // l may be consumed inside r1, or (since r1 matches empty) inside r2
      val (r1p, b1) = deriv2(r1,l)
      val (r2p, b2) = deriv2(r2,l)
      (Choice(List(Seq(r1p,r2),r2p),Greedy), b1 || b2 || testAmbigCase1(r1)) // where A2 possibly arises
    }
    case Seq(r1,r2) =>
    {
      val (r1p, b1) = deriv2(r1,l)
      (Seq(r1p,r2), b1)
    }
    case Star(r,gf) =>
    {
      // unroll one iteration: d(r*) = d(r) r*
      val (rp,b) = deriv2(r,l)
      (Seq(rp,Star(r,gf)),b)
    }
  }
  // plain derivative: deriv2 with the ambiguity flag dropped
  def deriv(r:RE,l:Char):RE = deriv2(r,l)._1
  // ambiguity case A1: r is nullable AND has more than one empty parse tree
  def testAmbigCase1(r:RE):Boolean = nullable(r) && (mkEmptyUs(r).length > 1)
// For a nullable expression, compute all empty parse trees.
def mkEmptyUs(r:RE):List[U] = r match
{
case Phi => List()
case Eps => List(EmptyU)
case Any => List()
case Not(_) => List()
case L(_) => List()
case Choice(rs,gf) =>
{
val idxed_rs = (0 to rs.length).toList zip rs
for { (idx,r) <- idxed_rs
; u <- mkEmptyUs(r)
; if nullable(r)
} yield AltU(idx,u)
}
case Seq(r1,r2) => for {u1 <- mkEmptyUs(r1); u2 <- mkEmptyUs(r2)} yield PairU(u1,u2)
case Star(_,_) => List(ListU(List()))
}
  // Injection to obtain r's parse trees from the parse tree of the derivative.
  // Note that the derivatives (d) can be only in shapes of (r,r), r+r, or Epsilon,
  // hence the parse tree u can only be in shapes of Pair, LeftU, RightU or EmptyU
  def injDs(r:RE, d:RE, l:Char, u:U):List[U] = (r,d,u) match {
    // star: the consumed letter belongs to the first unrolled iteration
    case (Star(r,gf), Seq(rd,_), PairU(u,ListU(us))) => for
      {
        u1 <- injDs(r,rd,l,u)
      } yield ListU(u1::us)
    // nullable-prefix sequence, left branch: l was consumed inside r1
    case (Seq(r1,r2),Choice(Seq(rd1,_)::_,gf),AltU(0,u)) =>
    { // choice must be binary b/c of deriv2
      val PairU(up,upp) = u
      for { us1 <- injDs(r1,rd1,l,up) } yield PairU(us1,upp)
    }
    // nullable-prefix sequence, right branch: r1 matched empty, l went into r2
    case (Seq(r1,r2),Choice(_::rd2::Nil,gf),AltU(1,u)) => for
      { // choice must be binary b/c of deriv2
        us1 <- mkEmptyUs(r1);
        us2 <- injDs(r2,rd2,l,u)
      } yield PairU(us1,us2)
    case (Seq(r1,r2),Choice(Nil,_),_) => error ("not possible, parse tree and regex out of sync!")
    case (Seq(r1,r2),Seq(rd1,_),PairU(up,upp)) => for
      {
        us <- injDs(r1,rd1,l,up)
      } yield PairU(us,upp)
    // choice: walk to the n-th alternative, injecting there
    case (Choice(r::rs,_), Choice(rd::rds,_), AltU(0,u)) => for
      {
        us <- injDs(r,rd,l,u)
      } yield AltU(0,us)
    case (Choice(r::rs,gf), Choice(rd::rds,gf2), AltU(n,u)) => for
      {
        AltU(np,us) <- injDs(Choice(rs,gf),Choice(rds,gf2),l,AltU(n-1,u))
      } yield AltU(np+1,us)
    // single letters / wildcards: the derivative Eps witnesses the letter itself
    case (L(c), Eps, EmptyU) if (c == l) => List(LetterU(l))
    case (L(c), Eps, EmptyU) => error("impossible")
    case (Any, Eps, EmptyU) => List(LetterU(l))
    case (Not(cs), Eps, EmptyU) if !cs.contains(l) => List(LetterU(l))
    case (Not(cs), Eps, EmptyU) => error("impossible")
  }
  // NOTE(review): apparent misspelled duplicate of testAmbigCase1 above; kept
  // for source compatibility - prefer testAmbigCase1.
  def testAmigCase1(r:RE):Boolean = nullable(r) && (mkEmptyUs(r).length > 1)
  // simplified expression only
  def simp(r:RE):RE = simp3(r)._1
  // whether simplification detected ambiguity (e.g. the Idemp rule firing)
  def simpAmbig(r:RE):Boolean = simp3(r)._3
  // full simplification to a fixpoint: returns the simplified RE, a transformer
  // mapping parse trees of the simplified RE back to parse trees of r, and the
  // ambiguity flag
  def simp3(r:RE):(RE,U=>List[U], Boolean) = fixs3(simpStep)(r)
  // fix point combinators working for different type signatures
  // fixs3: iterate `trans` until the RE stops changing, composing the parse-tree
  // transformers from the inside out (later iterations' g runs first, then f)
  // and OR-ing the ambiguity flags of all iterations.
  def fixs3(trans:RE => (RE, U=>List[U], Boolean)): RE => (RE, U=>List[U], Boolean) = (r:RE) =>
  {
    trans(r) match
    {
      case (rp, f, b) if (r == rp) => (rp,f,b)
      case (rp, f, b) => fixs3(trans)(rp) match
      {
        case (rpp, g, b2) => (rpp, (u:U) =>
          {
            (for { up <- g(u)
                 ; upp <- f(up)
                 } yield upp).distinct
          },
          b || b2)
      }
    }
  }
  // fix2: same iteration for a single-result transformer (g runs first, then f).
  def fix2(trans:RE => (RE, U=>U)): RE => (RE, U => U) = (r:RE) =>
  {
    trans(r) match
    {
      case (rp,f) if (r == rp) => (rp,f)
      case (rp,f) => fix2(trans)(rp) match
      {
        case (rpp,g) => (rpp, f compose g)
      }
    }
  }
  // fixs2: same iteration for a multi-result transformer without ambiguity flag.
  def fixs2(trans:RE => (RE, U=>List[U])): RE => (RE, U => List[U]) = (r:RE) =>
  {
    trans(r) match
    {
      case (rp,f) if (r == rp) => (rp,f)
      case (rp,f) => fixs2(trans)(rp) match
      {
        case (rpp,g) => (rpp, composeT(f,g))
      }
    }
  }
// parse tree transformer composition
def composeT(f:U=>List[U], g:U=>List[U]): U => List[U] = (u:U) =>
{
g(u).flatMap(v => f(v))
}
  // One simplification pass. Returns the (partially) simplified RE, a transformer
  // mapping its parse trees back to parse trees of r, and whether the ambiguity-
  // revealing Idemp rule fired anywhere (via nubChoice).
  def simpStep(r:RE):(RE, U => List[U], Boolean) = r match
  {
    // Eps . t ~~> t ; parse trees of t get the dropped EmptyU re-attached
    case Seq(Eps, t) => simpStep(t) match
    {
      case (rp, f, b) => (rp, (u:U) => (for { v <- f(u)} yield PairU(EmptyU,v)).distinct, b)
    }
    // Phi . t ~~> Phi (no parse trees exist, hence the undefined transformer)
    case Seq(Phi, t) => (Phi, u => error("undefined"), false)
    // singleton choice ~~> its only alternative
    case Choice(List(r),gf) => (r, (u:U) => List(AltU(0,u)), false)
    case Choice(rs, gf) =>
    {
      // simplify the alternatives, then drop Phi branches, flatten nested
      // choices and remove duplicates (Idemp) to a fixpoint
      val rfbs = rs.map(simpStep)
      val (rs1,fs1,bs1) = rfbs.unzip3
      def f1(u:U):List[U] = u match
      {
        case AltU(n,v) => for { up <- (fs1.drop(n).head)(v) } yield AltU(n,up)
        case _ => List(u)
      }
      val b1 = bs1.exists(x=>x)
      val (r2,f2) = rmAltPhi(Choice(rs1,gf))
      val (r3,f3) = flat(r2)
      val (r4,f4,b4) = fixs3(nubChoice)(r3)
      (r4, composeT(f1,composeT(f2,composeT(f3,f4))), b1 || b4)
    }
    case Seq(r1,r2) =>
    {
      // simplify both halves independently
      val (r1p, f1, b1) = simpStep(r1)
      val (r2p, f2, b2) = simpStep(r2)
      def f(u:U):List[U] = u match
      {
        case PairU(u1,u2) => for
          { u1p <- f1(u1)
          ; u2p <- f2(u2)
          } yield PairU(u1p,u2p)
        case _ => error ("simpStep " + Seq(r1,r2).toString )
      }
      (Seq(r1p,r2p), f, b1||b2)
    }
    case _ => (r, (u:U) => List(u), false)
  }
  // remove Phi from alternatives / choice
  def rmAltPhi(r:RE):(RE, U => List[U]) = r match
  {
    case Choice(List(rp),gf) => (r, u=>List(u))
    case Choice(rs,gf) =>
    {
      // rmAltPhiN yields, per surviving alternative, a re-tagging function that
      // restores the alternative's ORIGINAL index in the unfiltered choice.
      val (fs, rsp) = rmAltPhiN(0,rs).unzip
      def g(u:U):List[U] = u match
      {
        case AltU(n,v) => List((fs.drop(n).head)(u))
      }
      (Choice(rsp,gf),g)
    }
    case _ => (r, u=>List(u))
  }
  // n counts how many Phi alternatives were skipped so far; each kept
  // alternative's witness index must be shifted back up by that amount.
  def rmAltPhiN(n:Int,rs:List[RE]):List[(U=>U, RE)] = rs match
  {
    case Nil => Nil
    case (r::rsp) if isPhi(r) => rmAltPhiN(n+1,rsp)
    case (r::rsp) => (((u:U) => u match { case AltU(m,v) => AltU(n+m,v)}, r)::rmAltPhiN(n,rsp))
  }
  // flatten the nest choice at all level in the RE
  def flat(r:RE):(RE, U => List[U]) = fixs2(flatStep)(r)
  // One flattening pass: hoists directly nested choices one level up.
  // NOTE(review): the match covers only Seq and Choice - other RE shapes would
  // throw a MatchError; confirm flat is only ever applied to those shapes.
  def flatStep(r:RE):(RE, U => List[U]) = r match
  {
    case Seq(r1,r2) =>
    {
      val (r1p, f1) = flatStep(r1)
      val (r2p, f2) = flatStep(r2)
      def f(u:U):List[U] = u match
      {
        case PairU(u1,u2) => for { u1p <- f1(u1); u2p <- f2(u2) } yield PairU(u1p,u2p)
      }
      (Seq(r1p,r2p),f)
    }
    case Choice(rs,gf) => flatChoice(r)
  }
  // Flattens the top level of a choice: (r1+...+rk) + rest ~~> r1+...+rk+rest,
  // adjusting alternative indices in the parse trees accordingly.
  def flatChoice(r:RE):(RE, U => List[U]) = r match
  {
    case Choice(List(),gf) => (r, (u:U)=>List(u))
    case Choice(r@Choice(rsI,_)::rs, gf) =>
    {
      val (Choice(rsp,_), f) = flatChoice(Choice(rs,gf))
      val l = rsI.length
      def g(u:U):List[U] = u match
      {
        // indices below l belong to the inlined inner choice
        case AltU(n,v) if n < l => List(AltU(0,AltU(n,v)))
        // otherwise strip the first l tags, recurse, and re-add one right tag
        case AltU(n,v) => for { w <- f(rep(l,unRight,u)) } yield right(w)
      }
      (Choice(rsI++rsp,gf), g)
    }
    case Choice(r::rs,gf) =>
    {
      val (Choice(rsp,_), f) = flatChoice(Choice(rs,gf))
      def g(u:U):List[U] = u match
      {
        case AltU(0,v) => List(AltU(0,v))
        case AltU(n,v) => for { w <- f(unRight(u))} yield right(w)
      }
      (Choice(r::rsp,gf), g)
    }
  }
// repeatively apply op to v for i times
def rep(i:Int,op:U=>U, v:U):U = i match
{
case 0 => v
case n => rep(n-1, op, op(v))
}
// add a right tag
def right(u:U):U = u match
{
case AltU(x,u) => AltU(x+1,u)
case u => u
}
// remove a right tight
def unRight(u:U):U = u match
{
case AltU(0,v) => error(" unRight is applied to a Left value.")
case AltU(x,v) => AltU(x-1,v)
case _ => u
}
  // remove duplicate in a choice (apply the Idemp similarity rule)
  // Boolean denotes whether idemp rule is applied
  def nubChoice(r:RE):(RE,U=>List[U],Boolean) = r match
  {
    // binary special case: r+r ~~> r, and one witness maps back to BOTH alternatives
    case Choice(List(r1,r2), gf) if r1 == r2 => (r1, (u:U) => List(AltU(0,u),AltU(1,u)), !isPhi(r1))
    case Choice(_,_) =>
    {
      val (rp, f, m, idx, b) = nubChoiceWith(r,0, empty)
      (rp, f, b)
    }
    case _ => (r, (u:U) => List(u), false) // todo: check why this is needed
  }
  // Deduplicates the alternatives of a choice. `m` maps each alternative already
  // kept to ALL positions it occupied, so the parse-tree transformer can expand a
  // witness of the kept copy into witnesses of every duplicate position.
  def nubChoiceWith(r:RE, idx:Int, m:Map[RE,List[Int]]):(RE, U=>List[U], Map[RE,List[Int]], Int, Boolean) = r match
  {
    case Choice(r1::rs, gf) => m.get(r1) match
    {
      case Some(idxs) =>
      // r1 \\in M    M |- r2...rN --> r2'...rM'
      // -----------------------------
      // M |- r1 + r2...rN --> r2'...rM'
      {
        // r1 is a duplicate: drop it, remember its position
        val mp = m.updated(r1, idxs++List(idx))
        val (Choice(rsp,_), g, mpp, idxp, b) = nubChoiceWith(Choice(rs,gf),idx+1, mp)
        def f(u:U) : List[U] = for { v <- g(unRight(u)) } yield right(v)
        (Choice(rsp,gf), f, mpp, idxp, !isPhi(r1)) // not isPhi is required, if r1 is Phi does not implies it is ambiguous
      }
      case None =>
      // r1 \\not \\in M     M U {r1} |- r2...rN --> r2'...rM'
      // ---------------------------------------
      // M |- r1 + r2 --> r1 + r2'...rM'
      {
        // first occurrence: keep r1 and record its position
        val mp = m.+(r1 -> List(idx))
        val (Choice(rsp,_),g,mpp,idxp,b) = nubChoiceWith(Choice(rs,gf),idx+1,mp)
        val idxs = mpp.get(r1) match
        {
          case None => Nil
          case Some(idxsp) => idxsp
        }
        def f(u:U):List[U] = u match
        {
          // a witness of the kept copy expands to all duplicate positions
          case AltU(0,v) => idxs.map(i => mkCons(i-idx,v))
          case AltU(n,v) => for { w <- g(unRight(u)) } yield right(w)
        }
        (Choice(r1::rsp,gf),f,mpp,idxp,b)
      }
    }
    case (Choice(Nil,gf)) => (Choice(Nil,gf), (u:U)=>List(u), m, idx, false)
    case r => (r,(u:U) => List(u), m, idx, false) // todo: check why this is needed
  }
def mkCons(n:Int,u:U):U = if (n <= 0) { AltU(0,u) } else { AltU(n,u) }
  // build a finite state trans
  // A finite-state transducer over RE derivatives: states are REs, each transition
  // carries a parse-tree injection mapping target-state witnesses back to
  // source-state witnesses. ambig1/2/3 record where the three ambiguity sources
  // fire (A1: state with several empty parse trees; A2: flagged by deriv2;
  // A3: flagged by simp3).
  case class FSX( start: RE
                , finals: List[RE]
                , states: List[RE]
                , transitions: List[(RE,Char,RE,U=>List[U])]
                , ambig1 : List[RE]
                , ambig2 : List[(RE,Char,RE)]
                , ambig3 : List[(RE,Char,RE)]
                )
  /** Builds the derivative automaton of r over its alphabet sigma(r),
    * accumulating states, transitions and the ambiguity annotations. */
  def buildFSX(r:RE):FSX =
  {
    val sig = sigma(r)
    // one transition per (state, letter) whose simplified derivative is non-phi;
    // the attached function injects target parse trees back through simp3 and injDs
    def mkTransitions(r:RE,l:Char) : List[(RE, Char, RE, U=>List[U])] =
    {
      val d = deriv(r,l)
      val (rpp,fSimp, _) = simp3(d)
      if (isPhi(rpp)) { List() }
      else {
        List((r,l,rpp, (u:U)=> {
            fSimp(u).flatMap(up => injDs(r,d,l,up)).distinct
          }
        ))
      }
    }
    // worklist loop: rs = all states seen, curr_rs = frontier discovered last round
    def go(rs:List[RE],fsx:FSX, curr_rs:List[RE]) : FSX =
    {
      val new_ts = (for { r <- curr_rs
                        ; l <- sig
                        } yield (l,r)).flatMap( (lr:(Char,RE))=> mkTransitions(lr._2,lr._1) )
      val new_rs = (for { (_,_,r,_) <- new_ts
                        ; if !isPhi(r) && !rs.contains(r)} yield r).distinct
      val new_ambig1 = rs.filter( r => testAmbigCase1(r))
      /*
      val new_ambig2 = ( for { r <- rs
                             ; l <- sig
                             ; val (rd,bd) = deriv2(r,l)
                             ; val (rs,fs,bs) = simp3 (rd) } yield ((r,l,rs), bd)
                       ).filter( x=> x._2 && !(isPhi(x._1._3))).map(_._1)
      val new_ambig3 = ( for { r <- rs
                             ; l <- sig
                             ; val (rd,bd) = deriv2(r,l)
                             ; val (rs,fs,bs) = simp3 (rd) } yield ((r,l,rs), bs)
                       ).filter( x=> x._2 && !(isPhi(x._1._3))).map(_._1)
      */
      // optimized
      // single pass collecting both the deriv2 (A2) and simp3 (A3) flags
      val trans_flags = (for { r <- rs
                             ; l <- sig
                             ; val (rd,bd) = deriv2(r,l)
                             ; val (rs,fs,bs) = simp3 (rd)
                             } yield ((r,l,rs), bd, bs)).filter(x => !(isPhi(x._1._3)))
      val new_ambig2 = trans_flags.filter( x=> x._2 ).map(_._1)
      val new_ambig3 = trans_flags.filter( x=> x._3 ).map(_._1)
      val new_fsx = fsx match
      {
        case FSX(start,finals,states,transitions,ambig1,ambig2,ambig3) =>
        {
          FSX(start,finals ++ new_rs.filter(nullable), states ++ new_rs,
              transitions ++ new_ts, (ambig1 ++ new_ambig1).distinct,
              (ambig2 ++ new_ambig2).distinct, (ambig3 ++ new_ambig3).distinct)
        }
      }
      if (new_rs.length == 0)
      {
        // no new states discovered: the automaton is complete
        new_fsx
      } else {
        go (rs ++ new_rs, new_fsx, new_rs)
      }
    }
    val fsx = FSX(r, (if (nullable(r)) { List(r) } else List()), List(r), List(), List(), List(), List() )
    go(List(r),fsx,List(r))
  }
  // An ambiguous transition found while exploring the FSX, tagged by which
  // ambiguity source triggered it (A1/A2/A3, matching FSX.ambig1/2/3).
  // `prefix` is the reversed path of (state, letter, injection) taken from the
  // start state to reach the ambiguous transition.
  sealed trait AmbigTrans
  case class A1(s:RE, l:Char, t:RE, f:U=>List[U], prefix:List[(RE,Char,U=>List[U])]) extends AmbigTrans
  case class A2(s:RE, l:Char, t:RE, f:U=>List[U], prefix:List[(RE,Char,U=>List[U])]) extends AmbigTrans
  case class A3(s:RE, l:Char, t:RE, f:U=>List[U], prefix:List[(RE,Char,U=>List[U])]) extends AmbigTrans
  /** Searches the FSX breadth-first for the nearest ambiguous transition and,
    * if one exists, replays the stored injections along the prefix path to
    * produce the distinct parse trees witnessing the ambiguity (empty list
    * when the automaton is unambiguous). */
  def findMinCounterEx(fsx:FSX):List[U] =
  {
    val FSX(start, finals, states, transitions, ambig1, ambig2, ambig3) = fsx
    def findNextTrans(r:RE): List[(RE, Char, RE, U=> List[U])] = transitions.filter (_._1 == r)
    // dedupe 5-tuples by their (source, letter, target) components
    def nub123[A,B,C,D,E](l:List[(A,B,C,D,E)]):List[(A,B,C,D,E)] = nubBy(l,(x:(A,B,C,D,E))=>(x._1,x._2,x._3))
    // BFS frontier: (state, reversed prefix path); trans_sofar avoids revisiting edges
    def goUntilAmbig(curr_states_prefices:List[(RE,List[(RE,Char,U=>List[U])])], trans_sofar:List[(RE, Char, RE)]):Option[AmbigTrans] =
    {
      val next_trans_prefices = nub123(curr_states_prefices.flatMap( r_prefix =>
      {
        val r = r_prefix._1
        val prefix = r_prefix._2
        findNextTrans(r).map((sltf:(RE, Char, RE, U=> List[U]))=>(sltf._1,sltf._2,sltf._3,sltf._4,prefix))
      })).filter (sltfp => !trans_sofar.contains((sltfp._1,sltfp._2,sltfp._3)))
      val ambigs1 = next_trans_prefices.filter(sltfp => ambig1.contains(sltfp._1))
      val ambigs2 = next_trans_prefices.filter(sltfp => ambig2.contains((sltfp._1,sltfp._2,sltfp._3)))
      val ambigs3 = next_trans_prefices.filter(sltfp => ambig3.contains((sltfp._1,sltfp._2,sltfp._3)))
      if (next_trans_prefices.nonEmpty)
      {
        // A1 takes priority over A2 over A3 when several sources fire at once
        (ambigs1,ambigs2,ambigs3) match
        {
          case ((trans::_),_,_) => Some(A1(trans._1,trans._2,trans._3,trans._4,trans._5))
          case (Nil,(trans::_),_) => Some(A2(trans._1,trans._2,trans._3,trans._4,trans._5))
          case (Nil,Nil,(trans::_)) => Some(A3(trans._1,trans._2,trans._3,trans._4,trans._5))
          case (Nil,Nil,Nil) =>
          { // no ambiguity found so far
            val next_stats_prefices = next_trans_prefices.map(rltfp =>
            {
              val (r,l,t,f,p) = rltfp
              (t,(r,l,f)::p)
            })
            val next_trans_sofar = trans_sofar ++ next_trans_prefices.map(rltfp =>
            {
              val (r,l,t,f,p) = rltfp
              (r,l,t)
            })
            goUntilAmbig(next_stats_prefices,next_trans_sofar)
          }
        }
      } else {
        None
      }
    }
    // NOTE(review): the three Some(...) branches below are identical except for the
    // constructor matched; they generate a witness for the target state (genV, defined
    // further down) and fold it back through the prefix injections to the start state.
    goUntilAmbig(List((start,List())),List()) match
    {
      case None => List()
      case Some(A1(r,l,t,f,pf)) =>
      {
        val ut = genV(t)
        val urs = f(ut)
        val (s,us) = pf.foldLeft((r,urs))((tus, rlf) =>
        {
          val (t,us) = tus
          val (r,l,f) = rlf
          (r, us.flatMap(u=>f(u)))
        })
        us
      }
      case Some(A2(r,l,t,f,pf)) =>
      {
        val ut = genV(t)
        val urs = f(ut)
        val (s,us) = pf.foldLeft((r,urs))((tus, rlf) =>
        {
          val (t,us) = tus
          val (r,l,f) = rlf
          (r, us.flatMap(u=>f(u)))
        })
        us
      }
      case Some(A3(r,l,t,f,pf)) =>
      {
        val ut = genV(t)
        val urs = f(ut)
        val (s,us) = pf.foldLeft((r,urs))((tus, rlf) =>
        {
          val (t,us) = tus
          val (r,l,f) = rlf
          (r, us.flatMap(u=>f(u)))
        })
        us
      }
    }
  }
// Generates a minimal parse tree (witness value) inhabiting the given RE.
// Preconditions (MatchError / NoSuchElementException otherwise): r is not Phi,
// Not(cs) excludes fewer than all 256 characters, Choice alternatives handled
// explicitly below.
def genV(r:RE):U = r match
{
  case Eps => EmptyU
  case L(c) => LetterU(c)
  case Any => LetterU('a')
  // .head throws if cs covers every char in 0..255 — see precondition above.
  case Not(cs) => LetterU((0 to 255).toList.map(_.toChar).filter((c:Char)=>(!(cs.contains(c)))).head)
  case Seq(r1,r2) => PairU(genV(r1),genV(r2))
  case Choice(Nil,_) => error("genV is applied to an empty choice") // fixed message typo "ot" -> "to"
  case Choice(rs,_) => AltU(0,genV(rs.head)) // rs is non-empty here: Nil matched above
  case Star(r,_) => ListU(List()) // minimal iteration count: zero repetitions
}
// Compute alphabet of a regular expression: the distinct literal characters
// occurring in r.
// NOTE(review): Any and Not contribute no characters — presumably wildcard /
// negated classes are handled by separate transition logic; confirm callers
// do not rely on sigma covering them.
def sigma(r:RE):List[Char] = r match {
  case Phi => List()
  case Eps => List()
  case L(c) => List(c)
  case Any => List()
  case Not(cs) => List()
  case Seq(r1,r2) => (sigma(r1)++sigma(r2)).distinct
  case Choice(rs,_) => rs.flatMap(sigma(_)).distinct
  case Star(r,_) => sigma(r)
}
// Parses the regex string and, on success, returns the minimal ambiguity
// counter-example parse trees (empty list means the regex is unambiguous).
def diagnoseU(regex:String):Either[String,List[U]] = parse(regex) match
{
  case None => Left("Parsing failed. The input is not a regex.")
  case Some(r) =>
  {
    val fsx = buildFSX(r)
    Right(findMinCounterEx(fsx))
  }
}
// String-level wrapper of diagnoseU: flattens each witness parse tree back
// into the input string it represents.
def diagnose(regex:String):Either[String,List[String]] = diagnoseU(regex) match
{
  case Left(s) => Left(s)
  case Right(us) => Right( us.map(flatU) )
}
// Same as diagnoseU but starting from an already-parsed RE.
def diagnoseRE(r:RE):List[U] = {
  val fsx = buildFSX(r)
  findMinCounterEx(fsx)
}
// Parses the regex and reports whether its FSX exhibits any ambiguity,
// i.e. whether any of the three collected ambiguity lists is non-empty.
def isAmbiguous(regex:String):Either[String,Boolean] = parse(regex) match
{
  case None => Left("Parsing failed. The input is not a regex.")
  case Some(r) =>
  {
    val FSX(_, _, _, _, ambig1, ambig2, ambig3) = buildFSX(r)
    Right(ambig1.nonEmpty || ambig2.nonEmpty || ambig3.nonEmpty)
  }
}
// Small ad-hoc examples for manual experimentation in the REPL.
val a = L('a')
def star(x:RE):RE = Star(x,Greedy)
// Eps followed by (a* a*) — a classically ambiguous pattern.
val e1 = Seq(Eps, Seq(star(a),star(a)))
// running bigger expression requires increase of JAVA heap memory, for sbt -mem 2048
// or edit /usr/local/etc/sbtopts
}
| luzhuomi/scala-deriv | src/main/scala/com/github/luzhuomi/regex/deriv/diagnosis/Ambiguity.scala | Scala | apache-2.0 | 17,915 |
package name.brian_gordon.securitydemo
import java.util.concurrent.atomic.AtomicInteger
import scala.concurrent.Promise
import org.joda.time.DateTime
import org.joda.time.Period
import org.joda.time.format.PeriodFormat
import org.joda.time.format.PeriodFormatterBuilder
import dispatch.Http
import grizzled.slf4j.Logging
/**
* @author Brian Gordon
*/
// Drives a pool of ExploitTask workers until one of them completes
// `resultPromise` with the recovered secret, then logs attempt count,
// elapsed time and the secret.
// NOTE(review): `conf` and `executor` are referenced but not defined or
// imported in this file — presumably supplied by a package object; confirm.
object ExploitTaskRunner extends App with Logging {
  val startTime = DateTime.now()
  val numberOfWorkers = conf.getInt("number_of_workers");
  // Completed (at most once) by whichever worker succeeds first.
  val resultPromise = Promise[String]()
  // Shared across workers; tracks total attempts made.
  val attemptCounter = new AtomicInteger()
  // Follow redirects and accept any TLS certificate (demo target only).
  val http = Http.configure(builder => {
    builder.setFollowRedirect(true)
    builder.setAcceptAnyCertificate(true)
  })
  // Start the workers.
  for (workerNum <- 1 to numberOfWorkers) {
    info(s"Starting worker $workerNum...")
    executor.execute(new ExploitTask(http, resultPromise, attemptCounter))
  }
  // Log the result once a worker fulfils the promise.
  // NOTE(review): `foreach` on a Future does not block; presumably non-daemon
  // worker threads keep the JVM alive until completion — confirm.
  for (secret <- resultPromise.future) {
    val period = new Period(startTime, DateTime.now())
    info(s"Exploit successful after $attemptCounter attempts.")
    info(s"${period.toString(PeriodFormat.getDefault)} elapsed.")
    info(s"The secret is $secret.")
  }
}
| briangordon/securitydemo | src/main/scala/name/brian_gordon/securitydemo/ExploitTaskRunner.scala | Scala | mit | 1,269 |
package basic
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import io.gatling.jdbc.Predef._
import io.gatling.http.Headers.Names._
import scala.concurrent.duration._
import bootstrap._
import assertions._
// Gatling load test against the demo banking app: each virtual user logs in,
// browses account operations in a loop, then logs out.
class BasicExampleSimulation extends Simulation {
  // Shared HTTP defaults. Redirects are disabled so the 302 responses after
  // login/logout can be asserted explicitly.
  val httpConf = httpConfig
    .baseURL("http://excilys-bank-web.cloudfoundry.com")
    .acceptCharsetHeader("ISO-8859-1,utf-8;q=0.7,*;q=0.7")
    .acceptHeader("text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
    .acceptEncodingHeader("gzip, deflate")
    .acceptLanguageHeader("fr,fr-fr;q=0.8,en-us;q=0.5,en;q=0.3")
    .disableFollowRedirect
  // Per-request header sets: plain page loads, form posts, and AJAX calls.
  val headers_1 = Map(
    "Keep-Alive" -> "115")
  val headers_3 = Map(
    "Keep-Alive" -> "115",
    "Content-Type" -> "application/x-www-form-urlencoded")
  val headers_6 = Map(
    "Accept" -> "application/json, text/javascript, */*; q=0.01",
    "Keep-Alive" -> "115",
    "X-Requested-With" -> "XMLHttpRequest")
  // User journey: login (credentials fed from CSV), 5 rounds of account
  // browsing, then logout.
  val scn = scenario("Scenario name")
    .group("Login") {
      exec(
        http("request_1")
          .get("/")
          .headers(headers_1)
          .check(status.is(302)))
        .pause(0 milliseconds, 100 milliseconds)
        .exec(
          http("request_2")
            .get("/public/login.html")
            .headers(headers_1))
        .pause(12, 13)
        .feed(csv("user_information.csv"))
        .exec(
          http("request_3")
            .post("/login")
            .param("username", "${username}")
            .param("password", "${password}")
            .headers(headers_3)
            .check(status.is(302)))
    }
    .pause(0 milliseconds, 100 milliseconds)
    .repeat(5) {
      exec(
        http("request_4")
          .get("/private/bank/accounts.html")
          .headers(headers_1))
        .pause(7, 8)
        .exec(
          http("request_5")
            .get("/private/bank/account/ACC${account_id}/operations.html")
            .headers(headers_1))
        .pause(100 milliseconds, 200 milliseconds)
        .exec(
          http("request_6")
            .get("/private/bank/account/ACC${account_id}/year/2011/month/12/page/0/operations.json")
            .headers(headers_6))
        .pause(4, 5)
        .exec(
          http("request_7")
            .get("/private/bank/account/ACC${account_id}/year/2011/month/11/operations.html")
            .headers(headers_1))
        .pause(100 milliseconds, 200 milliseconds)
        .exec(
          http("request_8")
            .get("/private/bank/account/ACC${account_id}/year/2011/month/11/page/0/operations.json")
            .headers(headers_6))
        .pause(6, 7)
    }.exec(
      http("request_9")
        .get("/logout")
        .headers(headers_1)
        .check(status.is(302)))
    .pause(0 milliseconds, 100 milliseconds)
    .exec(
      http("request_10")
        .get("/public/login.html")
        .headers(headers_1))
  // Ramp 3 users over 10s; assert overall success rate and latency bounds.
  setUp(scn.inject(ramp(3 users) over (10 seconds)).protocolConfig(httpConf))
  assertThat(global.successfulRequests.percent.is(100), details("Login" / "request_2").responseTime.max.lessThan(2000))
  assertThat(details("request_9").requestsPerSec.greaterThan(10))
}
| ksmpartners/ernie | ernie-gatling/gatling/simulations/basic/BasicExampleSimulation.scala | Scala | apache-2.0 | 3,162 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.connector.catalog
import java.util
import java.util.Collections
import scala.collection.JavaConverters._
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.analysis.{NamedRelation, NoSuchDatabaseException, NoSuchNamespaceException, NoSuchTableException, UnresolvedV2Relation}
import org.apache.spark.sql.catalyst.plans.logical.{AlterTable, CreateTableAsSelectStatement, CreateTableStatement, ReplaceTableAsSelectStatement, ReplaceTableStatement, SerdeInfo}
import org.apache.spark.sql.connector.catalog.TableChange._
import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation
import org.apache.spark.sql.types.{ArrayType, DataType, MapType, NullType, StructField, StructType}
import org.apache.spark.sql.util.CaseInsensitiveStringMap
import org.apache.spark.util.Utils
private[sql] object CatalogV2Util {
  import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
  /**
   * The list of reserved table properties, which can not be removed or changed directly by
   * the syntax:
   * {{
   *   ALTER TABLE ... SET TBLPROPERTIES ...
   * }}
   *
   * They need specific syntax to modify
   */
  val TABLE_RESERVED_PROPERTIES =
    Seq(TableCatalog.PROP_COMMENT,
      TableCatalog.PROP_LOCATION,
      TableCatalog.PROP_PROVIDER,
      TableCatalog.PROP_OWNER)
  /**
   * The list of reserved namespace properties, which can not be removed or changed directly by
   * the syntax:
   * {{
   *   ALTER NAMESPACE ... SET PROPERTIES ...
   * }}
   *
   * They need specific syntax to modify
   */
  val NAMESPACE_RESERVED_PROPERTIES =
    Seq(SupportsNamespaces.PROP_COMMENT,
      SupportsNamespaces.PROP_LOCATION,
      SupportsNamespaces.PROP_OWNER)
  /**
   * Apply properties changes to a map and return the result.
   */
  def applyNamespaceChanges(
      properties: Map[String, String],
      changes: Seq[NamespaceChange]): Map[String, String] = {
    applyNamespaceChanges(properties.asJava, changes).asScala.toMap
  }
  /**
   * Apply properties changes to a Java map and return the result.
   */
  def applyNamespaceChanges(
      properties: util.Map[String, String],
      changes: Seq[NamespaceChange]): util.Map[String, String] = {
    val newProperties = new util.HashMap[String, String](properties)
    changes.foreach {
      case set: NamespaceChange.SetProperty =>
        newProperties.put(set.property, set.value)
      case unset: NamespaceChange.RemoveProperty =>
        newProperties.remove(unset.property)
      case _ =>
      // ignore non-property changes
    }
    Collections.unmodifiableMap(newProperties)
  }
  /**
   * Apply properties changes to a map and return the result.
   */
  def applyPropertiesChanges(
      properties: Map[String, String],
      changes: Seq[TableChange]): Map[String, String] = {
    applyPropertiesChanges(properties.asJava, changes).asScala.toMap
  }
  /**
   * Apply properties changes to a Java map and return the result.
   */
  def applyPropertiesChanges(
      properties: util.Map[String, String],
      changes: Seq[TableChange]): util.Map[String, String] = {
    val newProperties = new util.HashMap[String, String](properties)
    changes.foreach {
      case set: SetProperty =>
        newProperties.put(set.property, set.value)
      case unset: RemoveProperty =>
        newProperties.remove(unset.property)
      case _ =>
      // ignore non-property changes
    }
    Collections.unmodifiableMap(newProperties)
  }
  /**
   * Apply schema changes to a schema and return the result.
   *
   * Changes are applied in order; nested fields are addressed by descending
   * through struct fields via each change's fieldNames path.
   */
  def applySchemaChanges(schema: StructType, changes: Seq[TableChange]): StructType = {
    changes.foldLeft(schema) { (schema, change) =>
      change match {
        case add: AddColumn =>
          add.fieldNames match {
            case Array(name) =>
              // Top-level column: add directly at the requested position.
              val field = StructField(name, add.dataType, nullable = add.isNullable)
              val newField = Option(add.comment).map(field.withComment).getOrElse(field)
              addField(schema, newField, add.position())
            case names =>
              // Nested column: navigate to the parent struct and add there.
              replace(schema, names.init, parent => parent.dataType match {
                case parentType: StructType =>
                  val field = StructField(names.last, add.dataType, nullable = add.isNullable)
                  val newField = Option(add.comment).map(field.withComment).getOrElse(field)
                  Some(parent.copy(dataType = addField(parentType, newField, add.position())))
                case _ =>
                  throw new IllegalArgumentException(s"Not a struct: ${names.init.last}")
              })
          }
        case rename: RenameColumn =>
          replace(schema, rename.fieldNames, field =>
            Some(StructField(rename.newName, field.dataType, field.nullable, field.metadata)))
        case update: UpdateColumnType =>
          replace(schema, update.fieldNames, field => {
            Some(field.copy(dataType = update.newDataType))
          })
        case update: UpdateColumnNullability =>
          replace(schema, update.fieldNames, field => {
            Some(field.copy(nullable = update.nullable))
          })
        case update: UpdateColumnComment =>
          replace(schema, update.fieldNames, field =>
            Some(field.withComment(update.newComment)))
        case update: UpdateColumnPosition =>
          // Remove the field, then re-insert it at the requested position.
          def updateFieldPos(struct: StructType, name: String): StructType = {
            val oldField = struct.fields.find(_.name == name).getOrElse {
              throw new IllegalArgumentException("Field not found: " + name)
            }
            val withFieldRemoved = StructType(struct.fields.filter(_ != oldField))
            addField(withFieldRemoved, oldField, update.position())
          }
          update.fieldNames() match {
            case Array(name) =>
              updateFieldPos(schema, name)
            case names =>
              replace(schema, names.init, parent => parent.dataType match {
                case parentType: StructType =>
                  Some(parent.copy(dataType = updateFieldPos(parentType, names.last)))
                case _ =>
                  throw new IllegalArgumentException(s"Not a struct: ${names.init.last}")
              })
          }
        case delete: DeleteColumn =>
          replace(schema, delete.fieldNames, _ => None)
        case _ =>
          // ignore non-schema changes
          schema
      }
    }
  }
  // Inserts `field` into `schema` at the requested position: appended when
  // `position` is null, first for First, or directly after the named column
  // for After (IllegalArgumentException if that column is missing).
  private def addField(
      schema: StructType,
      field: StructField,
      position: ColumnPosition): StructType = {
    if (position == null) {
      schema.add(field)
    } else if (position.isInstanceOf[First]) {
      StructType(field +: schema.fields)
    } else {
      val afterCol = position.asInstanceOf[After].column()
      val fieldIndex = schema.fields.indexWhere(_.name == afterCol)
      if (fieldIndex == -1) {
        throw new IllegalArgumentException("AFTER column not found: " + afterCol)
      }
      val (before, after) = schema.fields.splitAt(fieldIndex + 1)
      StructType(before ++ (field +: after))
    }
  }
  // Rewrites the field addressed by `fieldNames` using `update`, descending
  // through nested structs and through map/array containers via the
  // "key" / "value" / "element" pseudo-names. Returning None from `update`
  // deletes the field (rejected for map keys, map values and array elements).
  private def replace(
      struct: StructType,
      fieldNames: Seq[String],
      update: StructField => Option[StructField]): StructType = {
    val pos = struct.getFieldIndex(fieldNames.head)
        .getOrElse(throw new IllegalArgumentException(s"Cannot find field: ${fieldNames.head}"))
    val field = struct.fields(pos)
    val replacement: Option[StructField] = (fieldNames.tail, field.dataType) match {
      case (Seq(), _) =>
        update(field)
      case (names, struct: StructType) =>
        val updatedType: StructType = replace(struct, names, update)
        Some(StructField(field.name, updatedType, field.nullable, field.metadata))
      case (Seq("key"), map @ MapType(keyType, _, _)) =>
        val updated = update(StructField("key", keyType, nullable = false))
            .getOrElse(throw new IllegalArgumentException(s"Cannot delete map key"))
        Some(field.copy(dataType = map.copy(keyType = updated.dataType)))
      case (Seq("key", names @ _*), map @ MapType(keyStruct: StructType, _, _)) =>
        Some(field.copy(dataType = map.copy(keyType = replace(keyStruct, names, update))))
      case (Seq("value"), map @ MapType(_, mapValueType, isNullable)) =>
        val updated = update(StructField("value", mapValueType, nullable = isNullable))
            .getOrElse(throw new IllegalArgumentException(s"Cannot delete map value"))
        Some(field.copy(dataType = map.copy(
          valueType = updated.dataType,
          valueContainsNull = updated.nullable)))
      case (Seq("value", names @ _*), map @ MapType(_, valueStruct: StructType, _)) =>
        Some(field.copy(dataType = map.copy(valueType = replace(valueStruct, names, update))))
      case (Seq("element"), array @ ArrayType(elementType, isNullable)) =>
        val updated = update(StructField("element", elementType, nullable = isNullable))
            .getOrElse(throw new IllegalArgumentException(s"Cannot delete array element"))
        Some(field.copy(dataType = array.copy(
          elementType = updated.dataType,
          containsNull = updated.nullable)))
      case (Seq("element", names @ _*), array @ ArrayType(elementStruct: StructType, _)) =>
        Some(field.copy(dataType = array.copy(elementType = replace(elementStruct, names, update))))
      case (names, dataType) =>
        throw new IllegalArgumentException(
          s"Cannot find field: ${names.head} in ${dataType.simpleString}")
    }
    val newFields = struct.fields.zipWithIndex.flatMap {
      case (_, index) if pos == index =>
        replacement
      case (other, _) =>
        Some(other)
    }
    new StructType(newFields)
  }
  // Loads a table from the catalog, mapping all "not found" exceptions to None.
  def loadTable(catalog: CatalogPlugin, ident: Identifier): Option[Table] =
    try {
      Option(catalog.asTableCatalog.loadTable(ident))
    } catch {
      case _: NoSuchTableException => None
      case _: NoSuchDatabaseException => None
      case _: NoSuchNamespaceException => None
    }
  // Loads a table and wraps it in a DataSourceV2Relation, if it exists.
  def loadRelation(catalog: CatalogPlugin, ident: Identifier): Option[NamedRelation] = {
    loadTable(catalog, ident).map(DataSourceV2Relation.create(_, Some(catalog), Some(ident)))
  }
  // True iff this catalog is the special session catalog (name comparison only).
  def isSessionCatalog(catalog: CatalogPlugin): Boolean = {
    catalog.name().equalsIgnoreCase(CatalogManager.SESSION_CATALOG_NAME)
  }
  // Flattens the clauses of each CREATE/REPLACE TABLE statement variant into
  // a single table-properties map (see the private overload below).
  def convertTableProperties(c: CreateTableStatement): Map[String, String] = {
    convertTableProperties(
      c.properties, c.options, c.serde, c.location, c.comment, c.provider, c.external)
  }
  def convertTableProperties(c: CreateTableAsSelectStatement): Map[String, String] = {
    convertTableProperties(
      c.properties, c.options, c.serde, c.location, c.comment, c.provider, c.external)
  }
  def convertTableProperties(r: ReplaceTableStatement): Map[String, String] = {
    convertTableProperties(r.properties, r.options, r.serde, r.location, r.comment, r.provider)
  }
  def convertTableProperties(r: ReplaceTableAsSelectStatement): Map[String, String] = {
    convertTableProperties(r.properties, r.options, r.serde, r.location, r.comment, r.provider)
  }
  // Later entries win on key collisions: options override properties, and the
  // reserved provider/comment/location properties override both.
  private def convertTableProperties(
      properties: Map[String, String],
      options: Map[String, String],
      serdeInfo: Option[SerdeInfo],
      location: Option[String],
      comment: Option[String],
      provider: Option[String],
      external: Boolean = false): Map[String, String] = {
    properties ++
      options ++ // to make the transition to the "option." prefix easier, add both
      options.map { case (key, value) => TableCatalog.OPTION_PREFIX + key -> value } ++
      convertToProperties(serdeInfo) ++
      (if (external) Some(TableCatalog.PROP_EXTERNAL -> "true") else None) ++
      provider.map(TableCatalog.PROP_PROVIDER -> _) ++
      comment.map(TableCatalog.PROP_COMMENT -> _) ++
      location.map(TableCatalog.PROP_LOCATION -> _)
  }
  /**
   * Converts Hive Serde info to table properties. The mapped property keys are:
   *  - INPUTFORMAT/OUTPUTFORMAT: hive.input/output-format
   *  - STORED AS: hive.stored-as
   *  - ROW FORMAT SERDE: hive.serde
   *  - SERDEPROPERTIES: add "option." prefix
   */
  private def convertToProperties(serdeInfo: Option[SerdeInfo]): Map[String, String] = {
    serdeInfo match {
      case Some(s) =>
        s.formatClasses.map { f =>
          Map("hive.input-format" -> f.input, "hive.output-format" -> f.output)
        }.getOrElse(Map.empty) ++
        s.storedAs.map("hive.stored-as" -> _) ++
        s.serde.map("hive.serde" -> _) ++
        s.serdeProperties.map {
          case (key, value) => TableCatalog.OPTION_PREFIX + key -> value
        }
      case None =>
        Map.empty
    }
  }
  // Sets the current user as the owner property (overriding any provided
  // owner value: the map on the right of ++ wins).
  def withDefaultOwnership(properties: Map[String, String]): Map[String, String] = {
    properties ++ Map(TableCatalog.PROP_OWNER -> Utils.getCurrentUserName())
  }
  // Builds an AlterTable plan node whose relation is still unresolved; the
  // analyzer resolves it later.
  def createAlterTable(
      originalNameParts: Seq[String],
      catalog: CatalogPlugin,
      tableName: Seq[String],
      changes: Seq[TableChange]): AlterTable = {
    val tableCatalog = catalog.asTableCatalog
    val ident = tableName.asIdentifier
    val unresolved = UnresolvedV2Relation(originalNameParts, tableCatalog, ident)
    AlterTable(tableCatalog, ident, unresolved, changes)
  }
  // Resolves the catalog named by the provider's options, falling back to the
  // v2 session catalog when the provider names none.
  def getTableProviderCatalog(
      provider: SupportsCatalogOptions,
      catalogManager: CatalogManager,
      options: CaseInsensitiveStringMap): TableCatalog = {
    Option(provider.extractCatalog(options))
      .map(catalogManager.catalog)
      .getOrElse(catalogManager.v2SessionCatalog)
      .asTableCatalog
  }
  // Rejects data types containing NullType anywhere (top level or nested in
  // arrays, maps or structs) with an AnalysisException.
  def failNullType(dt: DataType): Unit = {
    def containsNullType(dt: DataType): Boolean = dt match {
      case ArrayType(et, _) => containsNullType(et)
      case MapType(kt, vt, _) => containsNullType(kt) || containsNullType(vt)
      case StructType(fields) => fields.exists(f => containsNullType(f.dataType))
      case _ => dt.isInstanceOf[NullType]
    }
    if (containsNullType(dt)) {
      throw new AnalysisException(
        s"Cannot create tables with ${NullType.simpleString} type.")
    }
  }
  // Applies failNullType to every top-level field of the schema.
  def assertNoNullTypeInSchema(schema: StructType): Unit = {
    schema.foreach { f =>
      failNullType(f.dataType)
    }
  }
}
| witgo/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Util.scala | Scala | apache-2.0 | 15,063 |
package app
import net.liftweb.common.Logger
object Control {
  // Runs f, logging (and otherwise discarding) anything it throws.
  // NOTE(review): catches Throwable, so fatal errors (OOM, InterruptedException)
  // are swallowed too — consider scala.util.control.NonFatal; confirm intent.
  def logAndSuppressExceptions(logger: Logger)(f: ⇒ Unit) { try { f } catch { case e: Throwable ⇒ logger.error(e) } }
  // Runs f; any Throwable is treated as fatal: it is logged, printed, and the
  // JVM is terminated with exit code 1 (Throwable is caught deliberately here
  // since the process exits regardless).
  def fatalOnFailure(f: ⇒ Unit)(implicit logger: Logger) {
    try {
      f
    } catch {
      case e: Throwable ⇒
        logger.error("A Fatal Error has occurred. The system will be shutdown immediately.", e)
        e.printStackTrace()
        System.exit(1)
    }
  }
} | alltonp/reprobate | src/main/scala/app/Control.scala | Scala | apache-2.0 | 466 |
package play.boilerplate.api.server.dsl
import play.api.http.Writeable
import play.api.mvc.AnyContent
// Play compatibility helpers: convenient accessors over AnyContent request
// bodies, plus a PrintableContent instance derived from any Writeable.
object Compat extends AbstractCompat {
  implicit class AnyContentOps(val body: AnyContent) extends AnyVal {
    // First value of the multipart/form-data data part named `key`, if any.
    def dataPart(key: String): Option[String] = {
      body.asMultipartFormData.flatMap(_.dataParts.get(key)).flatMap(_.headOption)
    }
    // Underlying temporary file of the multipart file part named `key`, if any.
    def file(key: String): Option[java.io.File] = {
      body.asMultipartFormData.flatMap(_.file(key)).map(_.ref.file)
    }
    // First value of the application/x-www-form-urlencoded field `key`, if any.
    def formValue(key: String): Option[String] = {
      body.asFormUrlEncoded.flatMap(_.get(key)).flatMap(_.headOption)
    }
  }
  // Renders content by serialising it through the Writeable and decoding UTF-8.
  implicit def contentOps[C](implicit wr: Writeable[C]): PrintableContent[C] =
    PrintableContent(content => wr.transform(content).utf8String)
}
| Romastyi/sbt-play-boilerplate | api-server/play25/src/main/scala/play/boilerplate/api/server/dsl/Compat.scala | Scala | apache-2.0 | 763 |
package com.airbnb.aerosolve.training
import java.util
import com.airbnb.aerosolve.core.models.BoostedStumpsModel
import com.airbnb.aerosolve.core.models.DecisionTreeModel
import com.airbnb.aerosolve.core.Example
import com.airbnb.aerosolve.core.ModelRecord
import com.airbnb.aerosolve.core.util.Util
import com.typesafe.config.Config
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.slf4j.{Logger, LoggerFactory}
import scala.util.Random
import scala.util.Try
import scala.collection.JavaConversions._
// Types of split criteria
// Broad family of each criterion (binary classification, regression, or
// multiclass); used to dispatch between the evaluate*Split / makeLeaf paths.
object SplitCriteriaTypes extends Enumeration {
  val Classification, Regression, Multiclass = Value
}
// Split criteria instances
object SplitCriteria extends Enumeration {
  val Gini, InformationGain, Hellinger, Variance, MulticlassHellinger, MulticlassGini = Value
  // Maps a criterion to its broad family, used to pick the evaluation code path.
  def getCriteriaType(criteria : Value) : SplitCriteriaTypes.Value = {
    criteria match {
      case Gini => SplitCriteriaTypes.Classification
      case InformationGain => SplitCriteriaTypes.Classification
      case Hellinger => SplitCriteriaTypes.Classification
      case Variance => SplitCriteriaTypes.Regression
      case MulticlassHellinger => SplitCriteriaTypes.Multiclass
      case MulticlassGini => SplitCriteriaTypes.Multiclass
    }
  }
  // Parses the config-level criterion name.
  // Previously an unrecognized name failed with an opaque MatchError; raise
  // an informative IllegalArgumentException instead.
  def splitCriteriaFromName(name : String): SplitCriteria.Value = {
    name match {
      case "gini" => Gini
      case "information_gain" => InformationGain
      case "hellinger" => Hellinger
      case "variance" => Variance
      case "multiclass_hellinger" => MulticlassHellinger
      case "multiclass_gini" => MulticlassGini
      case other => throw new IllegalArgumentException("Unknown split criteria: " + other)
    }
  }
}
// The decision tree is meant to be a prior for the spline model / linear model
object DecisionTreeTrainer {
  // Shared logger for training diagnostics.
  private final val log: Logger = LoggerFactory.getLogger("DecisionTreeTrainer")
  // Trains a single decision tree on up to `num_candidates` flattened
  // examples drawn (via take) from `input`.
  // Config keys under `key`: num_candidates, rank_key, rank_threshold,
  // max_depth, min_leaf_items, num_tries, split_criteria (default "gini").
  def train(
    sc : SparkContext,
    input : RDD[Example],
    config : Config,
    key : String) : DecisionTreeModel = {
    val candidateSize : Int = config.getInt(key + ".num_candidates")
    val rankKey : String = config.getString(key + ".rank_key")
    val rankThreshold : Double = config.getDouble(key + ".rank_threshold")
    val maxDepth : Int = config.getInt(key + ".max_depth")
    val minLeafCount : Int = config.getInt(key + ".min_leaf_items")
    val numTries : Int = config.getInt(key + ".num_tries")
    val splitCriteriaName : String = Try(config.getString(key + ".split_criteria"))
      .getOrElse("gini")
    // Keep only examples carrying the label feature family (rankKey).
    val examples = LinearRankerUtils
        .makePointwiseFloat(input, config, key)
        .map(x => Util.flattenFeature(x.example(0)))
        .filter(x => x.contains(rankKey))
        .take(candidateSize)
    // Index 0 is the root; buildTree fills the stump list in place.
    val stumps = new util.ArrayList[ModelRecord]()
    stumps.append(new ModelRecord)
    buildTree(
      stumps,
      examples,
      0,
      0,
      maxDepth,
      rankKey,
      rankThreshold,
      numTries,
      minLeafCount,
      SplitCriteria.splitCriteriaFromName(splitCriteriaName)
    )
    val model = new DecisionTreeModel()
    model.setStumps(stumps)
    model
  }
  // Recursively grows the tree: overwrites stumps(currIdx) with either a leaf
  // (when maxDepth is reached or no acceptable split exists) or a split node,
  // appending and populating the two child slots.
  // Note: mutates `stumps` in place; recursion depth is bounded by maxDepth.
  def buildTree(
    stumps : util.ArrayList[ModelRecord],
    examples : Array[util.Map[java.lang.String, util.Map[java.lang.String, java.lang.Double]]],
    currIdx : Int,
    currDepth : Int,
    maxDepth : Int,
    rankKey : String,
    rankThreshold : Double,
    numTries : Int,
    minLeafCount : Int,
    splitCriteria : SplitCriteria.Value) : Unit = {
    if (currDepth >= maxDepth) {
      stumps(currIdx) = makeLeaf(examples, rankKey, rankThreshold, splitCriteria)
      return
    }
    val split = getBestSplit(
      examples,
      rankKey,
      rankThreshold,
      numTries,
      minLeafCount,
      splitCriteria
    )
    if (split.isEmpty) {
      stumps(currIdx) = makeLeaf(examples, rankKey, rankThreshold, splitCriteria)
      return
    }
    // This is a split node.
    stumps(currIdx) = split.get
    val left = stumps.size
    stumps.append(new ModelRecord())
    val right = stumps.size
    stumps.append(new ModelRecord())
    stumps(currIdx).setLeftChild(left)
    stumps(currIdx).setRightChild(right)
    // partition: examples whose stump response is true go to the right child.
    val (rightExamples, leftExamples) = examples.partition(
      x => BoostedStumpsModel.getStumpResponse(stumps(currIdx), x))
    buildTree(
      stumps,
      leftExamples,
      left,
      currDepth + 1,
      maxDepth,
      rankKey,
      rankThreshold,
      numTries,
      minLeafCount,
      splitCriteria
    )
    buildTree(
      stumps,
      rightExamples,
      right,
      currDepth + 1,
      maxDepth,
      rankKey,
      rankThreshold,
      numTries,
      minLeafCount,
      splitCriteria
    )
  }
  // Builds a leaf ModelRecord summarising `examples`:
  //  - Classification: weight in [-1, 1] = 2 * (fraction positive) - 1
  //  - Regression: weight = mean label value
  //  - Multiclass: normalised label distribution
  def makeLeaf(
    examples : Array[util.Map[java.lang.String, util.Map[java.lang.String, java.lang.Double]]],
    rankKey : String,
    rankThreshold : Double,
    splitCriteria : SplitCriteria.Value) = {
    val rec = new ModelRecord()
    SplitCriteria.getCriteriaType(splitCriteria) match {
      case SplitCriteriaTypes.Classification =>
        var numPos = 0.0
        var numNeg = 0.0
        for (example <- examples) {
          // Label = first value under rankKey compared against the threshold.
          val label = example.get(rankKey).values().iterator().next() > rankThreshold
          if (label) numPos += 1.0 else numNeg += 1.0
        }
        val sum = numPos + numNeg
        if (sum > 0.0) {
          // Convert from percentage positive to the -1 to 1 range
          val frac = numPos / sum
          rec.setFeatureWeight(2.0 * frac - 1.0)
        } else {
          rec.setFeatureWeight(0.0)
        }
      case SplitCriteriaTypes.Regression =>
        var count : Double = 0.0
        var sum : Double = 0.0
        for (example <- examples) {
          val labelValue = example.get(rankKey).values().iterator().next()
          count += 1.0
          sum += labelValue
        }
        // In regression case, leaf is the average of all the associated values
        // NOTE(review): count is 0 for an empty leaf, yielding NaN — confirm
        // callers never reach here with zero examples.
        rec.setFeatureWeight(sum / count)
      case SplitCriteriaTypes.Multiclass =>
        // Accumulate per-label mass across all examples, then normalise to 1.
        val labelDistribution = new java.util.HashMap[java.lang.String, java.lang.Double]()
        rec.setLabelDistribution(labelDistribution)
        var sum = 0.0
        for (example <- examples) {
          for (kv <- example.get(rankKey).entrySet()) {
            val key = kv.getKey
            val value = kv.getValue
            val count = if (labelDistribution.containsKey(key)) {
              labelDistribution.get(key)
            } else {
              new java.lang.Double(0.0)
            }
            sum = sum + value
            labelDistribution.put(key, count + value)
          }
        }
        if (sum > 0.0) {
          val scale = 1.0 / sum
          for (kv <- labelDistribution.entrySet()) {
            val key = kv.getKey
            val value = kv.getValue
            labelDistribution.put(key, scale * value)
          }
        }
    }
    rec
  }
  // Returns the best split if one exists.
  // Samples `numTries` candidate splits from randomly chosen examples and
  // keeps the one whose criterion score is highest (all criteria are set up
  // so that larger is better). Returns None when `examples` is too small or
  // no candidate produces two children of at least `minLeafCount` each.
  def getBestSplit(
    examples : Array[util.Map[java.lang.String, util.Map[java.lang.String, java.lang.Double]]],
    rankKey : String,
    rankThreshold : Double,
    numTries : Int,
    minLeafCount : Int,
    splitCriteria : SplitCriteria.Value) : Option[ModelRecord] = {
    if (examples.length <= minLeafCount) {
      // If we're at or below the minLeafCount, then there's no point in splitting
      None
    } else {
      var bestRecord: Option[ModelRecord] = None
      var bestValue: Double = -1e10
      val rnd = new Random()
      for (i <- 0 until numTries) {
        // Pick an example index randomly
        val idx = rnd.nextInt(examples.length)
        val ex = examples(idx)
        val candidateOpt = getCandidateSplit(ex, rankKey, rnd)
        if (candidateOpt.isDefined) {
          // Dispatch to the evaluation matching the criterion's family.
          val candidateValue = SplitCriteria.getCriteriaType(splitCriteria) match {
            case SplitCriteriaTypes.Classification =>
              evaluateClassificationSplit(
                examples, rankKey,
                rankThreshold,
                minLeafCount,
                splitCriteria, candidateOpt
              )
            case SplitCriteriaTypes.Regression =>
              evaluateRegressionSplit(
                examples, rankKey,
                minLeafCount,
                splitCriteria, candidateOpt
              )
            case SplitCriteriaTypes.Multiclass =>
              evaluateMulticlassSplit(
                examples, rankKey,
                minLeafCount,
                splitCriteria, candidateOpt
              )
          }
          if (candidateValue.isDefined && candidateValue.get > bestValue) {
            bestValue = candidateValue.get
            bestRecord = candidateOpt
          }
        }
      }
      bestRecord
    }
  }
  // Evaluate a classification-type split
  // Scores the candidate split under Gini / InformationGain / Hellinger
  // (larger is better for all three). Returns None when either child would
  // hold fewer than `minLeafCount` examples. Only called with classification
  // criteria — other criteria would hit the non-exhaustive match below.
  def evaluateClassificationSplit(
    examples : Array[util.Map[java.lang.String, util.Map[java.lang.String, java.lang.Double]]],
    rankKey : String,
    rankThreshold : Double,
    minLeafCount : Int,
    splitCriteria : SplitCriteria.Value,
    candidateOpt : Option[ModelRecord]): Option[Double] = {
    var leftPos : Double = 0.0
    var rightPos : Double = 0.0
    var leftNeg : Double = 0.0
    var rightNeg : Double = 0.0
    // Tally positives/negatives per side of the candidate stump.
    for (example <- examples) {
      val response = BoostedStumpsModel.getStumpResponse(candidateOpt.get, example)
      val label = example.get(rankKey).values().iterator().next() > rankThreshold
      if (response) {
        if (label) {
          rightPos += 1.0
        } else {
          rightNeg += 1.0
        }
      } else {
        if (label) {
          leftPos += 1.0
        } else {
          leftNeg += 1.0
        }
      }
    }
    val rightCount = rightPos + rightNeg
    val leftCount = leftPos + leftNeg
    if (rightCount >= minLeafCount && leftCount >= minLeafCount) {
      // p/n: per-side class rates; f: per-side example fraction.
      val p1 = rightPos / rightCount
      val n1 = rightNeg / rightCount
      val f1 = rightCount / (leftCount + rightCount)
      val p2 = leftPos / leftCount
      val n2 = leftNeg / leftCount
      val f2 = leftCount / (leftCount + rightCount)
      splitCriteria match {
        case SplitCriteria.Gini =>
          // Using negative gini since we are maximizing.
          val gini = -(
            f1 * (p1 * (1.0 - p1) + n1 * (1.0 - n1)) +
            f2 * (n2 * (1.0 - n2) + p2 * (1.0 - p2))
          )
          Some(gini)
        case SplitCriteria.InformationGain =>
          // Negative weighted entropy (zero-probability terms skipped).
          var ig = 0.0
          if (p1 > 0) {
            ig += f1 * p1 * scala.math.log(p1)
          }
          if (n1 > 0) {
            ig += f1 * n1 * scala.math.log(n1)
          }
          if (p2 > 0) {
            ig += f2 * p2 * scala.math.log(p2)
          }
          if (n2 > 0) {
            ig += f2 * n2 * scala.math.log(n2)
          }
          Some(ig)
        case SplitCriteria.Hellinger =>
          val scale = 1.0 / (leftCount * rightCount)
          // http://en.wikipedia.org/wiki/Bhattacharyya_distance
          val bhattacharyya =
            math.sqrt(leftPos * rightPos * scale) + math.sqrt(leftNeg * rightNeg * scale)
          // http://en.wikipedia.org/wiki/Hellinger_distance
          val hellinger = math.sqrt(1.0 - bhattacharyya)
          Some(hellinger)
      }
    } else {
      None
    }
  }
def giniImpurity(dist : scala.collection.mutable.Map[String, Double]) : Double = {
val sum = dist.values.sum
val scale = 1.0 / (sum * sum)
var impurity : Double = 0.0
for (kv1 <- dist) {
for (kv2 <- dist) {
if (kv1._1 != kv2._1) {
impurity += kv1._2 * kv2._2
}
}
}
impurity * scale
}
  // Evaluate a multiclass classification-type split
  // Scores the candidate split under MulticlassHellinger / MulticlassGini
  // (larger is better). Returns None when either child is below minLeafCount.
  // NOTE(review): leftCount/rightCount are incremented once per (example,
  // label-entry) pair, not per example — for multi-label examples this
  // inflates the counts used for the minLeafCount test; confirm intent.
  def evaluateMulticlassSplit(
    examples : Array[util.Map[java.lang.String, util.Map[java.lang.String, java.lang.Double]]],
    rankKey : String,
    minLeafCount : Int,
    splitCriteria : SplitCriteria.Value,
    candidateOpt : Option[ModelRecord]): Option[Double] = {
    // Per-side accumulated label mass.
    val leftDist = scala.collection.mutable.HashMap[String, Double]()
    val rightDist = scala.collection.mutable.HashMap[String, Double]()
    var leftCount = 0
    var rightCount = 0
    for (example <- examples) {
      val response = BoostedStumpsModel.getStumpResponse(candidateOpt.get, example)
      for (kv <- example.get(rankKey).entrySet()) {
        val key = kv.getKey
        val value = kv.getValue
        if (response) {
          val v = rightDist.getOrElse(key, 0.0)
          rightDist.put(key, value + v)
          rightCount = rightCount + 1
        } else {
          val v = leftDist.getOrElse(key, 0.0)
          leftDist.put(key, value + v)
          leftCount = leftCount + 1
        }
      }
    }
    if (rightCount >= minLeafCount && leftCount >= minLeafCount) {
      splitCriteria match {
        case SplitCriteria.MulticlassHellinger =>
          val total = rightDist.values.sum * leftDist.values.sum
          val scale = 1.0 / total
          // http://en.wikipedia.org/wiki/Bhattacharyya_distance
          val bhattacharyya = rightDist
            .map(x => math.sqrt(scale * x._2 * leftDist.getOrElse(x._1, 0.0)))
            .sum
          // http://en.wikipedia.org/wiki/Hellinger_distance
          val hellinger = math.sqrt(1.0 - bhattacharyya)
          Some(hellinger)
        case SplitCriteria.MulticlassGini =>
          // Negated so that larger is better, matching the other criteria.
          val impurity = giniImpurity(leftDist) + giniImpurity(rightDist)
          Some(-impurity)
      }
    } else {
      None
    }
  }
  // Evaluate a regression-type split
  // See http://www.stat.cmu.edu/~cshalizi/350-2006/lecture-10.pdf for overview of algorithm used
  //
  // Returns the (negated) total within-leaf sum of squared deviations, so
  // that maximizing the returned value minimizes squared error; None when
  // either side would hold fewer than `minLeafCount` examples.
  def evaluateRegressionSplit(
    examples : Array[util.Map[java.lang.String, util.Map[java.lang.String, java.lang.Double]]],
    rankKey : String,
    minLeafCount : Int,
    splitCriteria : SplitCriteria.Value,
    candidateOpt : Option[ModelRecord]): Option[Double] = {
    // Running count / mean / sum-of-squared-deviations for each side.
    var rightCount : Double = 0.0
    var rightMean : Double = 0.0
    var rightSumSq : Double = 0.0
    var leftCount : Double = 0.0
    var leftMean : Double = 0.0
    var leftSumSq : Double = 0.0
    for (example <- examples) {
      val response = BoostedStumpsModel.getStumpResponse(candidateOpt.get, example)
      // The label is taken to be the single value stored under rankKey.
      val labelValue = example.get(rankKey).values().iterator().next()
      // Using Welford's Method for computing mean and sum-squared errors in numerically stable way;
      // more details can be found in
      // http://jonisalonen.com/2013/deriving-welfords-method-for-computing-variance
      //
      // See unit test for verification that it is consistent with standard, two-pass approach
      // (the two reads of the mean around its update are essential to the method).
      if (response) {
        rightCount += 1
        val delta = labelValue - rightMean
        rightMean += delta / rightCount
        rightSumSq += delta * (labelValue - rightMean)
      } else {
        leftCount += 1
        val delta = labelValue - leftMean
        leftMean += delta / leftCount
        leftSumSq += delta * (labelValue - leftMean)
      }
    }
    if (rightCount >= minLeafCount && leftCount >= minLeafCount) {
      splitCriteria match {
        case SplitCriteria.Variance =>
          // Negated so the caller maximizes; proportional to pooled variance.
          Some(-(leftSumSq + rightSumSq))
        // Intentionally non-exhaustive: other criteria are routed elsewhere.
      }
    } else {
      None
    }
  }
  // Returns a candidate split sampled from an example.
  //
  // Picks a (family, feature, value) triple uniformly at random from the
  // example's features (excluding the label family `rankKey`) and uses the
  // feature's observed value as the split threshold. None when the example
  // has no non-label features.
  def getCandidateSplit(
    ex : util.Map[java.lang.String, util.Map[java.lang.String, java.lang.Double]],
    rankKey : String,
    rnd : Random) : Option[ModelRecord] = {
    // Flatten the features and pick one randomly.
    val features = collection.mutable.ArrayBuffer[(String, String, Double)]()
    for (family <- ex) {
      if (!family._1.equals(rankKey)) {
        for (feature <- family._2) {
          features.append((family._1, feature._1, feature._2))
        }
      }
    }
    if (features.isEmpty) {
      None
    } else {
      val idx = rnd.nextInt(features.size)
      val rec = new ModelRecord()
      rec.setFeatureFamily(features(idx)._1)
      rec.setFeatureName(features(idx)._2)
      // The sampled feature's value becomes the stump threshold.
      rec.setThreshold(features(idx)._3)
      Some(rec)
    }
  }
def trainAndSaveToFile(
sc : SparkContext,
input : RDD[Example],
config : Config,
key : String) = {
val model = train(sc, input, config, key)
TrainingUtils.saveModel(model, config, key + ".model_output")
}
}
| ralic/aerosolve | training/src/main/scala/com/airbnb/aerosolve/training/DecisionTreeTrainer.scala | Scala | apache-2.0 | 16,237 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan
import org.apache.flink.table.typeutils.TypeInfoCheckUtils
/**
* Generic base class for trees that can be transformed and traversed.
*/
abstract class TreeNode[A <: TreeNode[A]] extends Product { self: A =>

  /**
   * List of child nodes that should be considered when doing transformations. Other values
   * in the Product will not be transformed, only handed through.
   */
  private[flink] def children: Seq[A]

  /**
   * Tests for equality by first testing for reference equality.
   */
  private[flink] def fastEquals(other: TreeNode[_]): Boolean = this.eq(other) || this == other

  /**
   * Do tree transformation in post order: children are rewritten first, then
   * the rule is applied to the (possibly copied) node itself.
   */
  private[flink] def postOrderTransform(rule: PartialFunction[A, A]): A = {

    def childrenTransform(rule: PartialFunction[A, A]): A = {
      var changed = false
      val newArgs = productIterator.map {
        // Direct child constructor argument: recurse, and note whether a new
        // instance came back so we know a reflective copy is needed.
        case arg: TreeNode[_] if children.contains(arg) =>
          val newChild = arg.asInstanceOf[A].postOrderTransform(rule)
          if (!(newChild fastEquals arg)) {
            changed = true
            newChild
          } else {
            arg
          }
        // Children nested inside a collection-valued constructor argument.
        case args: Traversable[_] => args.map {
          case arg: TreeNode[_] if children.contains(arg) =>
            val newChild = arg.asInstanceOf[A].postOrderTransform(rule)
            if (!(newChild fastEquals arg)) {
              changed = true
              newChild
            } else {
              arg
            }
          case other => other
        }
        // Non-child constructor arguments are handed through untouched.
        case nonChild: AnyRef => nonChild
        case null => null
      }.toArray
      // Only pay for the reflective copy when some child actually changed.
      if (changed) makeCopy(newArgs) else this
    }

    val afterChildren = childrenTransform(rule)
    if (afterChildren fastEquals this) {
      rule.applyOrElse(this, identity[A])
    } else {
      rule.applyOrElse(afterChildren, identity[A])
    }
  }

  /**
   * Runs the given function first on the node and then recursively on all its children.
   */
  private[flink] def preOrderVisit(f: A => Unit): Unit = {
    f(this)
    children.foreach(_.preOrderVisit(f))
  }

  /**
   * Creates a new copy of this expression with new children. This is used during transformation
   * if children change.
   *
   * Selects a constructor whose parameter types are assignable from the new
   * argument classes; falls back to the widest constructor when any argument
   * is null (class lookup impossible) or no exact match is found.
   */
  private[flink] def makeCopy(newArgs: Array[AnyRef]): A = {
    val ctors = getClass.getConstructors.filter(_.getParameterTypes.length > 0)
    if (ctors.isEmpty) {
      throw new RuntimeException(s"No valid constructor for ${getClass.getSimpleName}")
    }

    val defaultCtor = ctors.find { ctor =>
      if (ctor.getParameterTypes.length != newArgs.length) {
        false
      } else if (newArgs.contains(null)) {
        // Cannot take .getClass of a null argument, so skip signature matching.
        false
      } else {
        val argsClasses: Array[Class[_]] = newArgs.map(_.getClass)
        TypeInfoCheckUtils.isAssignable(argsClasses, ctor.getParameterTypes)
      }
    }.getOrElse(ctors.maxBy(_.getParameterTypes.length))

    try {
      defaultCtor.newInstance(newArgs: _*).asInstanceOf[A]
    } catch {
      case e: Throwable =>
        throw new RuntimeException(
          s"Fail to copy tree node ${getClass.getName}.", e)
    }
  }
}
| shaoxuan-wang/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/plan/TreeNode.scala | Scala | apache-2.0 | 3,917 |
/*
* Copyright (c) 2017 Xavier Defago (Tokyo Institute of Technology)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ocelot
import java.io.PrintStream
import ocelot.kernel.{ Dispatcher, Protocol, Scheduler }
import ocelot.util.SequentialPrintStream
/**
* Created by defago on 2017/03/28.
*/
class OcelotProcessImpl(
  val system: OcelotSystem,
  val pid: PID,
  out0: PrintStream = Console.out,
  val dispatcher: Dispatcher = Dispatcher.withMessage(Console.err)
) extends OcelotProcess
{
  // Registration state: populated via register(...) until start() flips
  // _locked, after which any further registration is a programming error.
  private var _program = Option.empty[ActiveProtocol]
  private var _protocols = Set.empty[ReactiveProtocol]
  private var _locked = false

  // Serializes output so interleaved prints from protocols stay readable.
  val out: PrintStream = new SequentialPrintStream(out0)

  def scheduler: Scheduler = system.scheduler
  def program: Option[ActiveProtocol] = _program

  // Lazily obtained so the network is queried only on first use.
  private lazy val _networkSender = system.network.senderFor(pid)
  def network = _networkSender
  def protocols = _protocols.toSet[Protocol]

  // Lifecycle: preStart all protocols/program, lock registration, then start.
  final def start () =
  {
    _protocols.foreach(_.preStart())
    _program.foreach(_.preStart())
    _locked = true
    _protocols.foreach(_.start() )
    _program.foreach( _.start() )
  }

  // Both report false when no active program was ever registered.
  def isRunning = _program.fold(false)(_.isRunning)
  def hasPendingMessage = _program.fold(false)(_.hasPendingMessage)

  protected[ocelot] def register (proto: ReactiveProtocol) =
  {
    if ( _locked ) throw new IllegalStateException(s"Attempt to add protocol ${proto.id} to process $name after it was locked")
    _protocols += proto
  }

  // At most one active program may be registered per process.
  protected[ocelot] def register (active: ActiveProtocol) =
  {
    if ( _locked ) throw new IllegalStateException(s"Attempt to add program ${active.id} to process $name after it was locked")
    _program.foreach { other =>
      throw new IllegalStateException(s"Attempt to add program ${active.id} to process $name after ${other.id}")
    }
    _program = Some(active)
  }
}
| xdefago/ocelot | src/main/scala/ocelot/OcelotProcessImpl.scala | Scala | apache-2.0 | 2,420 |
/*
* Copyright 2020 Precog Data
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.impl.datasources
import quasar.impl.storage.mapdb.MapDbPrefixStore
import scala.Predef.classOf
import cats.Eq
import cats.effect.{Blocker, IO, Resource}
import cats.implicits._
import org.mapdb.{DBMaker, Serializer}
import java.lang.Integer
import scala.concurrent.ExecutionContext.Implicits.global
import shapeless._
import PrefixByteStoresSpec._
final class PrefixByteStoresSpec extends ByteStoresSpec[IO, Integer] {

  // Backs the ByteStores under test with an in-memory MapDB instance whose
  // lifetime is tied to the Resource (the DB is closed when released).
  val byteStores =
    Resource.make(IO(DBMaker.memoryDB().make()))(db => IO(db.close())) evalMap { db =>
      val prefixStore =
        MapDbPrefixStore[IO](
          "prefix-bytestores-spec",
          db,
          Serializer.INTEGER :: Serializer.STRING :: HNil,
          Serializer.BYTE_ARRAY,
          Blocker.liftExecutionContext(global))

      prefixStore.map(PrefixByteStores(_))
    }

  // Two distinct prefix keys exercised by the inherited spec.
  // NOTE(review): `new Integer(...)` is deprecated on modern JDKs;
  // Integer.valueOf would be preferred — confirm target JDK before changing.
  val k1 = new Integer(3)
  val k2 = new Integer(7)
}
object PrefixByteStoresSpec {
  // Structural equality for the boxed java.lang.Integer keys used above.
  implicit val jIntegerEq: Eq[Integer] =
    Eq.instance((x, y) => x.intValue == y.intValue)
}
| djspiewak/quasar | impl/src/test/scala/quasar/impl/datasources/PrefixByteStoresSpec.scala | Scala | apache-2.0 | 1,590 |
package core.material
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.all._
import scala.scalajs.js
/**
* Created by Janos on 12/9/2015.
*/
object MaterialComponent {

  // Wrapper component that renders the supplied tag and runs MDL's
  // componentHandler upgrade after mount/update so Material Design Lite
  // attaches its JS behaviour to the freshly rendered DOM.
  val rc = ReactComponentB[(ReactTag, Boolean)]("MaterialComponent")
    .renderP(($, p) => {
      p._1
    })
    .componentDidMount(afterMount)
    .componentDidUpdate(afterUpdate)
    .build

  def apply(props: (ReactTag, Boolean)): ReactComponentU[(ReactTag, Boolean), Unit, Unit, TopNode] = {
    rc(props)
  }

  // Upgrades the mounted DOM node (and, when props._2 is true, its direct
  // children) via MDL's componentHandler.
  //
  // Fix: the side effects are now suspended inside the returned Callback
  // rather than executed eagerly during Callback construction (the original
  // ran the upgrades immediately and returned Callback.empty, which defeats
  // scalajs-react's deferred-effect model).
  private def upgrade(scope: CompScope.DuringCallbackM[(ReactTag, Boolean), Unit, Unit, TopNode]): Callback = Callback {
    js.Dynamic.global.window.componentHandler.upgradeElement(scope.getDOMNode())
    if (scope.props._2) {
      val children = scope.getDOMNode().children
      (0 until children.length).foreach(i => {
        js.Dynamic.global.window.componentHandler.upgradeElement(children(i))
      })
    }
  }

  def afterMount(scope: CompScope.DuringCallbackM[(ReactTag, Boolean), Unit, Unit, TopNode]): Callback = {
    upgrade(scope)
  }

  def afterUpdate(scope: ComponentDidUpdate[(ReactTag, Boolean), Unit, Unit, TopNode]): Callback = {
    upgrade(scope.$)
  }
}
| b0c1/scalajs-play-core-react | client/src/main/scala/core/material/MaterialComponent.scala | Scala | apache-2.0 | 1,210 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala.actors
package scheduler
import java.lang.ref.{Reference, WeakReference, ReferenceQueue}
import scala.collection.mutable.HashSet
/**
* ActorGC keeps track of the number of live actors being managed by a
* a scheduler so that it can shutdown when all of the actors it manages have
* either been explicitly terminated or garbage collected.
*
* When an actor is started, it is registered with the ActorGC via the
* <code>newActor</code> method, and when an actor is knowingly terminated
* (e.g. act method finishes, exit explicitly called, an exception is thrown),
* the ActorGC is informed via the <code>terminated</code> method.
*/
trait ActorGC extends TerminationMonitor {
  self: IScheduler =>

  /** Actors are added to refQ in newActor. */
  private val refQ = new ReferenceQueue[TrackedReactor]

  /**
   * This is a set of references to all the actors registered with
   * this ActorGC. It is maintained so that the WeakReferences will not be GC'd
   * before the actors to which they point.
   */
  private val refSet = new HashSet[Reference[t] forSome { type t <: TrackedReactor }]

  /** newActor is invoked whenever a new actor is started.
   *  Synchronized: refSet and activeActors are shared mutable state.
   */
  override def newActor(a: TrackedReactor) = synchronized {
    // registers a reference to the actor with the ReferenceQueue
    val wr = new WeakReference[TrackedReactor](a, refQ)
    refSet += wr
    activeActors += 1
  }

  /** Checks for actors that have become garbage: drains the reference queue
   *  of collected actors, decrementing the live count for each.
   */
  protected override def gc() = synchronized {
    // check for unreachable actors
    def drainRefQ() {
      val wr = refQ.poll
      if (wr != null) {
        activeActors -= 1
        refSet -= wr
        // continue draining
        drainRefQ()
      }
    }
    drainRefQ()
  }

  /** Prints some status information on currently managed actors. */
  protected def status() {
    println(this+": size of refSet: "+refSet.size)
  }

  /** Checks whether all actors have terminated. */
  override private[actors] def allActorsTerminated: Boolean = synchronized {
    activeActors <= 0
  }

  override def onTerminate(a: TrackedReactor)(f: => Unit): Unit = synchronized {
    terminationHandlers += (a -> (() => f))
  }

  /** Explicit termination: removes the actor's weak reference so the later
   *  queue drain does not double-decrement the live count.
   */
  override def terminated(a: TrackedReactor) = {
    super.terminated(a)

    synchronized {
      // find the weak reference that points to the terminated actor, if any
      refSet.find((ref: Reference[t] forSome { type t <: TrackedReactor }) => ref.get() == a) match {
        case Some(r) =>
          // invoking clear will not cause r to be enqueued
          r.clear()
          refSet -= r.asInstanceOf[Reference[t] forSome { type t <: TrackedReactor }]
        case None =>
          // do nothing
      }
    }
  }

  private[actors] def getPendingCount = synchronized {
    activeActors
  }

  private[actors] def setPendingCount(cnt: Int) = synchronized {
    activeActors = cnt
  }
}
}
| cran/rkafkajars | java/scala/actors/scheduler/ActorGC.scala | Scala | apache-2.0 | 3,406 |
package pl.touk.nussknacker.openapi.http.backend
import org.asynchttpclient.{AsyncHttpClient, DefaultAsyncHttpClient}
import pl.touk.nussknacker.engine.api.MetaData
import pl.touk.nussknacker.engine.api.runtimecontext.EngineRuntimeContext
import pl.touk.nussknacker.engine.util.sharedservice.{SharedService, SharedServiceHolder}
import sttp.client.SttpBackend
import sttp.client.asynchttpclient.future.AsyncHttpClientFutureBackend
import scala.concurrent.{ExecutionContext, Future}
class SharedHttpClientBackendProvider(httpClientConfig: HttpClientConfig) extends HttpBackendProvider {

  // Shared client handle: assigned in open(), released in close().
  // NOTE(review): assumes open() is invoked once before any
  // httpBackendForEc call, per the component lifecycle — confirm with callers.
  private var httpClient: SharedHttpClient = _

  // Acquires (or reuses) the shared client keyed by the config, scoped to
  // the job's metadata.
  override def open(context: EngineRuntimeContext): Unit = {
    httpClient = SharedHttpClientBackendProvider.retrieveService(httpClientConfig)(context.jobData.metaData)
  }

  // Wraps the shared async-http-client in an sttp backend for the given EC.
  override def httpBackendForEc(implicit ec: ExecutionContext): SttpBackend[Future, Nothing, Nothing] =
    AsyncHttpClientFutureBackend.usingClient(httpClient.httpClient)

  // Safe when open() was never called (httpClient still null).
  override def close(): Unit = Option(httpClient).foreach(_.close())

}
object SharedHttpClientBackendProvider extends SharedServiceHolder[HttpClientConfig, SharedHttpClient] {

  // Builds a fresh shared client; the holder caches one per HttpClientConfig,
  // tagging the underlying async-http-client with the process id when present.
  override protected def createService(config: HttpClientConfig, metaData: MetaData): SharedHttpClient = {
    val clientConfigBuilder = config.toAsyncHttpClientConfig(Option(metaData.id))
    val underlyingClient = new DefaultAsyncHttpClient(clientConfigBuilder.build())
    new SharedHttpClient(underlyingClient, config)
  }

}
class SharedHttpClient(val httpClient: AsyncHttpClient, config: HttpClientConfig) extends SharedService[HttpClientConfig] {

  // The config acts as the sharing key inside SharedServiceHolder.
  override def creationData: HttpClientConfig = config

  override protected def sharedServiceHolder: SharedHttpClientBackendProvider.type = SharedHttpClientBackendProvider

  // Invoked by the holder once the last reference has been released.
  override def internalClose(): Unit = httpClient.close()
}
}
| TouK/nussknacker | components/openapi/src/main/scala/pl/touk/nussknacker/openapi/http/backend/SharedHttpClient.scala | Scala | apache-2.0 | 1,806 |
package ru.izebit.dao
import java.util
import java.util.function.Consumer
import scala.collection.JavaConversions.{asScalaBuffer, asScalaSet}
import org.bson.Document
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.data.mongodb.core.MongoTemplate
import org.springframework.data.mongodb.core.query.{Criteria, Query}
import org.springframework.stereotype.{Component, Repository}
import ru.izebit.model.Account
import scala.collection.mutable
@Component
class AccountDao {
  // Mongo collection holding the serialized accounts.
  private val userTableName = "accounts"

  @Autowired
  private var mongoTemplate: MongoTemplate = _

  // Drops every collection in the database. Intended for tests/resets.
  def dropAll(): Unit = {
    mongoTemplate.getCollectionNames.foreach(mongoTemplate.dropCollection)
  }

  // Returns the "_id" of every document in the given collection.
  def getAllFrom(tableName: String): List[String] = {
    mongoTemplate.findAll(classOf[Document], tableName).map(document => document.getString("_id")).toList
  }

  // Loads an account by id, or null when absent.
  // NOTE(review): null return kept for API compatibility; callers must
  // null-check (an Option would be safer).
  def getAccount(id: String): Account = {
    val document = mongoTemplate
      .findById(id, classOf[Document], userTableName)

    if (document == null)
      null
    else
      document.get("account", classOf[Account])
  }

  // Upserts the account document keyed by account.id.
  def insertAccount(account: Account) =
    mongoTemplate.save(
      new Document()
        .append("_id", account.id)
        .append("account", account), userTableName)

  def removeFromSearchTable(tableName: String, id: String) =
    mongoTemplate.remove(new Query().addCriteria(Criteria.where("_id").is(id)), tableName)

  def insertToSearchTable(tableName: String, id: String) =
    mongoTemplate.insert(new Document("_id", id), tableName)

  // Returns up to `count` candidate ids starting at `offset`, wrapping back
  // to the start of the collection when the end is reached, together with
  // the next offset to continue from.
  // NOTE(review): asInstanceOf narrows the Long document count to Int —
  // overflows past 2^31 documents; acceptable only for small collections.
  def getFromSearchTable(tableName: String, offset: Int, count: Int): (mutable.Set[String], Int) = {
    val tableSize = mongoTemplate.count(new Query(), classOf[Document], tableName).asInstanceOf[Int]

    if (tableSize < count)
      (getCandidates(tableName, 0, tableSize), tableSize)
    else if (count + offset <= tableSize)
      (getCandidates(tableName, offset, count), offset + count)
    else {
      // Wrap-around: take the tail of the collection plus a slice from the head.
      val size = offset + count - tableSize
      (getCandidates(tableName, offset, tableSize) ++= getCandidates(tableName, 0, size), size)
    }
  }

  // Fetches the ids of `count` documents starting at `offset`.
  private def getCandidates(tableName: String, offset: Int, count: Int): mutable.Set[String] = {
    val query = new Query().skip(offset).limit(count)
    val result = new util.HashSet[Document](mongoTemplate.find(query, classOf[Document], tableName))
    result.map(document => document.getString("_id"))
  }
}
| android-group/night-meet | src/main/scala/ru/izebit/dao/AccountDao.scala | Scala | apache-2.0 | 2,439 |
package cc.factorie.util.namejuggler
import java.util.regex.Pattern
/**
* Shamelessly yoinked from edu.umass.cs.iesl.scalacommons
*/
object StringUtils {
  // Core implicit: trims the input, then wraps nonblank text in
  // Some(NonemptyString); blank input maps to None.
  implicit def toOptionNonempty(s: String): Option[NonemptyString] = if (s.trim.isEmpty) None else Some(new NonemptyString(s.trim))

  // don't make this implicit; that would mask implicit conversions in Predef, providing String.size, String.nonEmpty, etc.
  //def toSingletonSetNonempty(s: String): Set[NonemptyString] = toOptionNonempty(s).toSet

  // Lifts String collections to NonemptyString collections, dropping blanks.
  implicit def toSetNonempty[T <: Set[String]](ss: T): Set[NonemptyString] = ss.flatMap(toOptionNonempty)
  implicit def toSeqNonempty[T <: Seq[String]](ss: T): Seq[NonemptyString] = ss.flatMap(toOptionNonempty)

  //** need to understand CanBuildFrom etc. to make this work right
  //implicit def toTraversableNonempty[T <: Traversable[String]](ss: T): T[NonemptyString] = ss.flatMap(toOptionNonempty)
  //def toTraversableNonempty2[B, That, Repr](ss: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {}
  // def flatMap[B, That](f: A => TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {}

  // this interferes with toOptionNonempty
  //implicit def stringToOptionInt(s: String): Option[Int] = if (s.trim.isEmpty) None else Some(s.toInt)

  // Enrichment: gives every String the RichString helper methods.
  implicit def enrichString(s: String): RichString = new RichString(s)

  //** just use NonemptyString.unapply
  implicit def unwrapNonemptyString(n: NonemptyString): String = n.s

  // this is bad because they confound map, size, etc. operations from String and Option
  //implicit def unwrapNonemptyString(n: Option[NonemptyString]): String = n.map(unwrapNonemptyString).getOrElse("")

  // but this should be OK, because it requires an explicit "unwrap" or whatever
  implicit def enrichNonemptyString(n: Option[NonemptyString]): OptionNonemptyString = new OptionNonemptyString(n)

  //implicit def wrapNonemptyString(s: String) = NonemptyString(s)
}
class OptionNonemptyString(val o: Option[NonemptyString]) {
  // Collapses an optional nonempty string to a plain String, "" for None.
  def unwrap: String = o.fold("")(_.s)
}
object RichString {
  // Matches the combining diacritical marks produced by NFD normalization;
  // used by RichString.deAccent to strip accents after decomposition.
  final private val deAccentPattern = Pattern.compile("\\\\p{InCombiningDiacriticalMarks}+")

  // Captures the text between leading and trailing punctuation runs.
  // The anchors plus the lazy ".*?" make this match any input, so the
  // pattern-val extraction in RichString.trimPunctuation cannot fail.
  final private val trimPunctuationRE = "^\\\\p{Punct}*(.*?)\\\\p{Punct}*$".r
}
class RichString(val s: String) {

  import java.text.Normalizer

  import RichString._

  /** Replaces each run of CR/LF characters with a single space. */
  def maskNewlines: String = s.replaceAll("[\\\\n\\\\r]+", " ")

  /** Replaces each run of CR/LF/tab characters with a single space. */
  def maskNewlinesAndTabs: String = s.replaceAll("[\\\\n\\\\r\\\\t]+", " ")

  /** Removes all whitespace characters. */
  def stripWhitespace: String = s.replaceAll("\\\\s", "")

  /** Replaces each punctuation run with a single space. */
  def maskPunctuation: String = s.replaceAll("\\\\p{Punct}+", " ")

  /** Removes all punctuation characters. */
  def stripPunctuation: String = s.replaceAll("\\\\p{Punct}+", "")

  /** Trims leading and trailing punctuation, keeping the interior intact. */
  def trimPunctuation: String = {
    val trimPunctuationRE(core) = s
    core
  }

  /** Replaces each run of non-word, non-space characters with a space. */
  def maskAllButWord: String = s.replaceAll("[^\\\\w\\\\s]+", " ")

  /** Removes ASCII vowels in both cases. */
  def stripVowels: String = s.replaceAll("[AEIOUaeiou]", "")

  /** Collapses whitespace runs to single spaces. */
  def collapseWhitespace: String = s.replaceAll("\\\\s+", " ")

  /** Some(trimmed NonemptyString) unless the string is blank. */
  def opt: Option[NonemptyString] = StringUtils.toOptionNonempty(s)

  /** Trims and wraps; throws when the trimmed result is empty. */
  def n: NonemptyString = new NonemptyString(s.trim)

  /** Empty set for blank input, otherwise a singleton set. */
  def just: Set[NonemptyString] = opt.toSet

  /** The first `len` characters (the whole string when it is shorter). */
  def limit(len: Int): String = {
    val end = math.min(s.length, len)
    s.substring(0, end)
  }

  /** Text up to (excluding) the first newline; the whole string if none. */
  def firstLine = {
    val newlineAt = s.indexOf("\\n")
    if (newlineAt < 0) s else limit(newlineAt)
  }

  /**
   * Truncates near `len` at a whitespace boundary and appends
   * `suffixIfLimited`; returns the string unchanged when it already fits.
   */
  def limitAtWhitespace(len: Int, suffixIfLimited: String) = {
    // Keep one extra character so a space sitting exactly at the boundary
    // is still visible to lastIndexOf below.
    val clipped = limit(len + 1)
    if (clipped.length >= s.length) {
      s
    } else {
      val lastSpace = clipped.lastIndexOf(" ")
      val kept = if (lastSpace >= 0) clipped.substring(0, lastSpace) else clipped
      kept + suffixIfLimited
    }
  }

  // http://stackoverflow.com/questions/1008802/converting-symbols-accent-letters-to-english-alphabet
  // See also icu4j Transliterator -- more thorough, but a 7 MB jar.
  // Note this does not catch all interesting Unicode characters (e.g. Norwegian O-slash):
  // http://stackoverflow.com/questions/8043935/normalizing-unaccenting-text-in-java
  /** Accent-stripped form: NFD-decompose, then drop the combining marks. */
  lazy val deAccent: String = {
    val decomposed = Normalizer.normalize(s, Normalizer.Form.NFD)
    deAccentPattern.matcher(decomposed).replaceAll("")
  }

  /** True iff the de-accented string contains a lowercase letter. */
  def containsLowerCase: Boolean = deAccent.exists(_.isLower)

  /** True iff the de-accented string contains an uppercase letter. */
  def containsUpperCase: Boolean = deAccent.exists(_.isUpper)

  def isAllUpperCase: Boolean = containsUpperCase && !containsLowerCase

  def isAllLowerCase: Boolean = containsLowerCase && !containsUpperCase

  def isMixedCase: Boolean = containsLowerCase && containsUpperCase
}
/**
 * A String wrapper whose invariant is that the wrapped value is nonempty
 * (enforced at construction). Prints, equates and orders exactly like the
 * underlying String.
 */
case class NonemptyString(s: String) extends Ordered[NonemptyString] {
  require(s.nonEmpty, "Expected non-empty String")

  /** Renders as the bare underlying string, without the case-class wrapper. */
  override def toString = s

  // Equality is by the wrapped string — same semantics as the case-class
  // default, spelled out explicitly.
  override def equals(other: Any): Boolean = other match {
    case NonemptyString(that) => that == s
    case _                    => false
  }

  override def hashCode: Int = s.hashCode

  //def +(that:NonemptyString) = new NonemptyString(s + that.s)

  /** Delegates ordering to the natural String ordering. */
  def compare(that: NonemptyString) = s compare that.s
}
| hlin117/factorie | src/main/scala/cc/factorie/util/namejuggler/StringUtils.scala | Scala | apache-2.0 | 5,211 |
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js Test Suite **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-js.org/ **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\\* */
package org.scalajs.testsuite.javalib.util
import java.{util => ju, lang => jl}
import org.scalajs.jasminetest.JasmineTest
import org.scalajs.testsuite.utils.ExpectExceptions
import scala.collection.JavaConversions._
/**
 * Shared Jasmine test mixin exercising the java.util.Collection contract
 * against an arbitrary implementation supplied via a CollectionFactory.
 */
trait CollectionTest extends JasmineTest with ExpectExceptions {

  def testCollectionApi(factory: CollectionFactory): Unit = {

    it("should store strings") {
      val coll = factory.empty[String]

      expect(coll.size()).toEqual(0)
      coll.add("one")
      expect(coll.size()).toEqual(1)
      coll.clear()
      expect(coll.size()).toEqual(0)
      expect(coll.addAll(Seq.empty[String])).toBeFalsy
      expect(coll.size()).toEqual(0)
      expect(coll.addAll(Seq("one"))).toBeTruthy
      expect(coll.size()).toEqual(1)
      coll.clear()
      expect(coll.addAll(Seq("one", "two", "one"))).toBeTruthy
      // Sets may deduplicate, so only a lower bound on size is asserted.
      expect(coll.size() >= 1).toBeTruthy
    }

    it("should store integers") {
      val coll = factory.empty[Int]

      expect(coll.size()).toEqual(0)
      coll.add(1)
      expect(coll.size()).toEqual(1)
      coll.clear()
      expect(coll.size()).toEqual(0)
      expect(coll.addAll(Seq.empty[Int])).toBeFalsy
      expect(coll.size()).toEqual(0)
      expect(coll.addAll(Seq(1))).toBeTruthy
      expect(coll.size()).toEqual(1)
      coll.clear()
      expect(coll.addAll(Seq(1, 2, 1))).toBeTruthy
      expect(coll.size() >= 1).toBeTruthy
    }

    it("should store doubles") {
      val coll = factory.empty[Double]

      expect(coll.size()).toEqual(0)
      coll.add(1.234)
      expect(coll.size()).toEqual(1)
      coll.clear()
      expect(coll.size()).toEqual(0)
      expect(coll.addAll(Seq.empty[Double])).toBeFalsy
      expect(coll.size()).toEqual(0)
      expect(coll.addAll(Seq(1.234))).toBeTruthy
      expect(coll.size()).toEqual(1)
      coll.clear()
      expect(coll.addAll(Seq(1.234, 2.345, 1.234))).toBeTruthy
      expect(coll.size() >= 1).toBeTruthy

      // java.util collections follow Double.equals semantics:
      // -0.0 and +0.0 are distinct, and NaN equals NaN.
      coll.clear()
      coll.add(+0.0)
      expect(coll.contains(+0.0)).toBeTruthy
      expect(coll.contains(-0.0)).toBeFalsy

      coll.clear()
      coll.add(-0.0)
      expect(coll.contains(+0.0)).toBeFalsy
      expect(coll.contains(-0.0)).toBeTruthy

      coll.clear()
      coll.add(Double.NaN)
      expect(coll.size()).toEqual(1)
      expect(coll.contains(Double.NaN)).toBeTruthy
    }

    it("should store custom objects") {
      // NOTE(review): compareTo is reversed (o.num compared to num), i.e. the
      // natural order is descending — presumably intentional for the suite.
      case class TestObj(num: Int) extends jl.Comparable[TestObj] {
        def compareTo(o: TestObj): Int =
          o.num.compareTo(num)
      }

      val coll = factory.empty[TestObj]

      coll.add(TestObj(100))
      expect(coll.size()).toEqual(1)
      expect(coll.contains(TestObj(100))).toBeTruthy
      expect(coll.contains(TestObj(200))).toBeFalsy
    }

    it("should remove stored elements") {
      val coll = factory.empty[String]

      coll.add("one")
      coll.add("two")
      coll.add("three")
      coll.add("two")

      // Sizes are relative so the test works for sets and bags alike.
      val initialSize = coll.size()
      expect(coll.remove("four")).toBeFalsy
      expect(coll.size()).toEqual(initialSize)
      expect(coll.remove("two")).toBeTruthy
      expect(coll.size()).toEqual(initialSize - 1)
      expect(coll.remove("one")).toBeTruthy
      expect(coll.size()).toEqual(initialSize - 2)
    }

    it("should remove stored elements on double corner cases") {
      val coll = factory.empty[Double]

      coll.add(1.234)
      coll.add(2.345)
      coll.add(Double.NaN)
      coll.add(+0.0)
      coll.add(-0.0)

      // coll == ArrayCollection(1.234, 2.345, NaN, +0.0, -0.0)
      expect(coll.remove(Double.NaN)).toBeTruthy
      // coll == ArrayCollection(1.234, 2.345, +0.0, -0.0)
      expect(coll.size()).toEqual(4)
      expect(coll.remove(2.345)).toBeTruthy
      // coll == ArrayCollection(1.234, +0.0, -0.0)
      expect(coll.size()).toEqual(3)
      expect(coll.remove(1.234)).toBeTruthy
      // coll == ArrayCollection(+0.0, -0.0)
      expect(coll.size()).toEqual(2)
      expect(coll.remove(-0.0)).toBeTruthy
      // coll == ArrayCollection(+0.0)
      expect(coll.size()).toEqual(1)

      coll.clear()

      expect(coll.isEmpty).toBeTruthy
    }

    it("should be cleared with one operation") {
      val coll = factory.empty[String]

      coll.add("one")
      coll.add("two")
      expect(coll.size).toEqual(2)
      coll.clear()
      expect(coll.size).toEqual(0)
    }

    it("should check contained presence") {
      val coll = factory.empty[String]

      coll.add("one")
      expect(coll.contains("one")).toBeTruthy
      expect(coll.contains("two")).toBeFalsy
      // Some implementations throw on contains(null); the factory declares which.
      if (factory.allowsNullElementQuery)
        expect(coll.contains(null)).toBeFalsy
      else
        expectThrows[Exception](coll.contains(null))
    }

    it("should check contained presence for double corner cases") {
      val coll = factory.empty[Double]

      coll.add(-0.0)
      expect(coll.contains(-0.0)).toBeTruthy
      expect(coll.contains(+0.0)).toBeFalsy

      coll.clear()

      coll.add(+0.0)
      expect(coll.contains(-0.0)).toBeFalsy
      expect(coll.contains(+0.0)).toBeTruthy
    }

    it("should give proper iterator over elements") {
      val coll = factory.empty[String]
      coll.add("one")
      coll.add("two")
      coll.add("three")
      coll.add("three")
      coll.add("three")

      // Compared as a set: iteration order and duplicates are unspecified.
      expect(Set("one", "two", "three") == coll.iterator().toSet).toBeTruthy
    }
  }
}
object CollectionFactory {
  // Aggregates every concrete factory so suites can iterate all implementations.
  def allFactories: Iterator[CollectionFactory] =
    ListFactory.allFactories ++ SetFactory.allFactories ++ DequeFactory.allFactories
}
trait CollectionFactory {
  // Human-readable name of the implementation under test.
  def implementationName: String

  // A fresh, empty instance of the collection under test.
  def empty[E]: ju.Collection[E]

  // NOTE(review): name suggests support for Iterator.remove-style mutation;
  // confirm against the suites that consult this flag.
  def allowsMutationThroughIterator: Boolean = true

  // Whether contains(null) is answered (false) rather than throwing.
  def allowsNullElementQuery: Boolean = true
}
| jasonchaffee/scala-js | test-suite/js/src/test/scala/org/scalajs/testsuite/javalib/util/CollectionTest.scala | Scala | bsd-3-clause | 6,234 |
/* Copyright 2017-19, Emmanouil Antonios Platanios. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.platanios.tensorflow
import org.platanios.tensorflow.api.core.Shape
import org.platanios.tensorflow.api.core.types.DataType
import org.platanios.tensorflow.api.implicits.helpers._
import org.platanios.tensorflow.api.ops.Output
import org.platanios.tensorflow.api.ops.rnn.cell.LSTMState
/**
* @author Emmanouil Antonios Platanios
*/
package object examples {
  // Implicit helpers for Scala 2.11.
  // These vals pre-materialize derived type-class instances that 2.11's
  // implicit search cannot reliably derive at the use sites in the examples.
  val evOutputStructureFloatLong : OutputStructure[(Output[Float], Output[Long])] = OutputStructure[(Output[Float], Output[Long])]
  val evOutputStructureIntInt : OutputStructure[(Output[Int], Output[Int])] = OutputStructure[(Output[Int], Output[Int])]
  val evOutputStructureLSTMStateFloat: OutputStructure[LSTMState[Float]] = OutputStructure[LSTMState[Float]]

  val evOutputToDataTypeFloatLong : OutputToDataType.Aux[(Output[Float], Output[Long]), (DataType[Float], DataType[Long])] = OutputToDataType[(Output[Float], Output[Long])]
  val evOutputToDataTypeIntInt : OutputToDataType.Aux[(Output[Int], Output[Int]), (DataType[Int], DataType[Int])] = OutputToDataType[(Output[Int], Output[Int])]
  val evOutputToDataTypeLSTMStateFloat: OutputToDataType.Aux[LSTMState[Float], (DataType[Float], DataType[Float])] = OutputToDataType[LSTMState[Float]]

  val evOutputToShapeFloatLong : OutputToShape.Aux[(Output[Float], Output[Long]), (Shape, Shape)] = OutputToShape[(Output[Float], Output[Long])]
  val evOutputToShapeIntInt : OutputToShape.Aux[(Output[Int], Output[Int]), (Shape, Shape)] = OutputToShape[(Output[Int], Output[Int])]
  val evOutputToShapeLSTMStateFloat: OutputToShape.Aux[LSTMState[Float], (Shape, Shape)] = OutputToShape[LSTMState[Float]]

  // Zero-state evidence for LSTM cells used by the RNN examples.
  val evZeroLSTMStateFloat: Zero.Aux[LSTMState[Float], (Shape, Shape)] = Zero[LSTMState[Float]]
}
| eaplatanios/tensorflow_scala | modules/examples/src/main/scala/org/platanios/tensorflow/examples/package.scala | Scala | apache-2.0 | 2,486 |
// Negative compilation test: Scala 3 does not allow a colon after the
// extension parameter clause, so the line below must be rejected by the
// compiler (the trailing `// error` marker anchors the expected diagnostic).
extension (x: Int): // error
  def foo = x
| dotty-staging/dotty | tests/neg/extension-colon.scala | Scala | apache-2.0 | 43 |
package net.shift
package engine
import org.scalatest._
import net.shift.engine.http.MultipartParser
import net.shift.engine.http.BinReader
import net.shift.engine.http.TextPart
import net.shift.engine.http.BinaryPart
import net.shift.io.LocalFileSystem
import net.shift.common.Path
import net.shift.io.IO
import net.shift.io.LocalFileSystem
/** Ad-hoc driver that exercises the multipart/form-data parser against a few
  * hand-crafted request bodies. The methods print the parsed parts to the
  * console for manual inspection instead of asserting on them.
  */
class HttpTest extends FlatSpec with Matchers {

  /** Parses a WebKit-generated body mixing a text field with a binary file part. */
  def test1 = {
    val binPayload = Array[Byte](1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)
    val body = "------WebKitFormBoundaryePkpFF7tjBAqx29L\\r\\n\\r\\n------WebKitForm\\r\\n------WebKitFormBoundaryePkpFF7tjBAqx29L\\r\\nContent-Disposition: form-data; name=\\"MAX_FILE_SIZE\\"\\r\\n\\r\\n<span style=\\"color:rgb(0,0,255);\\">Cea mai tare <b style=\\"color:rgb(0,0,255);\\">soseta</b> din Ardeal.</span><p></p>\\r\\n------WebKitFormBoundaryePkpFF7tjBAqx29L\\r\\nContent-Disposition: form-data; name=\\"uploadedfile\\"; filename=\\"hello.o\\"\\r\\nContent-Type: application/x-object\\r\\n\\r\\n".getBytes("UTF-8") ++
      binPayload ++ "\\r\\n------WebKitFormBoundaryePkpFF7tjBAqx29L--".getBytes("UTF-8")
    val parser = new MultipartParser("----WebKitFormBoundaryePkpFF7tjBAqx29L".getBytes("UTF-8"))
    parser.multiParser(BinReader(body, 0)) map { parsed =>
      println(parsed)
      parsed.parts.map {
        case BinaryPart(_, content) =>
          // Render the raw bytes so they can be eyeballed on the console.
          val rendered = content.map(String.valueOf(_)).mkString
          println(rendered)
        case TextPart(_, content) =>
          println(content)
        case _ =>
      }
    }
  }

  /** Parses a Gecko-style body consisting of text fields only. */
  def test2 = {
    val body = "-----------------------------902056036781473372360087476\\r\\nContent-Disposition: form-data; name=\\"create_title\\"\\r\\n\\r\\nSosetuta\\r\\n-----------------------------902056036781473372360087476\\r\\nContent-Disposition: form-data; name=\\"create_price\\"\\r\\n\\r\\n44\\r\\n-----------------------------902056036781473372360087476\\r\\nContent-Disposition: form-data; name=\\"create_categories\\"\\r\\n\\r\\n5479c53fe4b04cb784a98b8f\\r\\n-----------------------------902056036781473372360087476\\r\\nContent-Disposition: form-data; name=\\"create_keywords\\"\\r\\n\\r\\ns\\r\\n-----------------------------902056036781473372360087476\\r\\nContent-Disposition: form-data; name=\\"create_description\\"\\r\\n\\r\\n<span style=\\"color:rgb(0,0,255);\\">Cea mai tare <b style=\\"color:rgb(0,0,255);\\">soseta</b> din Ardeal.</span><p></p>\\r\\n-----------------------------902056036781473372360087476--"
    val parser = new MultipartParser("---------------------------902056036781473372360087476".getBytes("UTF-8"))
    parser.multiParser(BinReader(body.getBytes("utf-8"), 0)) map { parsed =>
      parsed.parts.map {
        case BinaryPart(_, content) =>
          val rendered = content.map(String.valueOf(_)).mkString
          println(rendered)
        case TextPart(_, content) =>
          println(content)
        case _ =>
      }
    }
  }

  /** Reads a previously captured upload from disk and prints its parsed parts. */
  def test3 = {
    val result = for {
      producer <- LocalFileSystem.reader(Path("c:/work/upload-1442129190413.bin"))
      bytes <- IO.producerToArray(producer)
    } yield {
      val parser = new MultipartParser("---------------------------3357733724543".getBytes("UTF-8"))
      parser.multiParser(BinReader(bytes)) map { parsed =>
        parsed.parts.map {
          case part @ BinaryPart(headers, content) =>
            println("Binary : " + headers + " ... " + content.length)
            part
          case part @ TextPart(_, content) =>
            println("Text : " + content)
            part
          case _ =>
        }
      }
    }
    println(result)
  }
}
/** Ad-hoc entry point for manually exercising the multipart parser (runs test2 only). */
object Run {
  // Explicit main method instead of extending the App trait: App relies on
  // DelayedInit, which has initialization-order pitfalls and is deprecated
  // in Scala 3. Behaviour is unchanged: run test2 and discard its result.
  def main(args: Array[String]): Unit = {
    new HttpTest().test2
    ()
  }
}
| mariusdanciu/shift | shift-engine/src/test/scala/net/shift/engine/HttpTest.scala | Scala | apache-2.0 | 3,684 |
package net.zzorn.appearance
import org.scalaprops.Bean
import simplex3d.math.float.functions._
import simplex3d.math.float._
import net.zzorn.utils.MathUtils
import util.Random
import net.zzorn.Settings
/**
*
*/
/** Configurable parameters of a procedurally generated surface, plus the
  * height function derived from them.
  *
  * Each property is registered on the underlying bean under a symbol key and
  * gets a slider editor with the given value range.
  */
class SurfaceSettings extends Settings {

  val convexity = p('convexity, 6f).editor(makeSlider(0, 20))
  val offset = p('offset, 2f).editor(makeSlider(0, 20))
  val amplitude = p('amplitude, 6f).editor(makeSlider(0, 20))
  // BUG FIX: this property was registered under the symbol 'amplitude,
  // colliding with the amplitude property above; it now uses its own key.
  val scale = p('scale, 1.5f).editor(makeSlider(0, 5))
  val detailAmplitude = p('detailAmplitude, 1f).editor(makeSlider(0, 10))
  val detailScale = p('detailScale, 3f).editor(makeSlider(0, 10))
  val material = p('material, new MaterialSettings )
  val radial = p('radial, false)
  val convexityCutoff = p('convexityCutoff, 0.6f).editor(makeSlider(0, MathUtils.Tau * 2))

  /** Builds the height function of this surface.
    *
    * @param seed noise seed; a fixed offset of it seeds the detail noise so
    *             base and detail layers are decorrelated.
    * @return a function (side, distance) => height, where `side` is the
    *         angular/side coordinate and `distance` the radial coordinate.
    */
  def surfaceFunction(seed: Float): (Float, Float) => Float = { (side: Float, distance: Float) =>
    val sc = scale()
    val dsc = detailScale()
    val detailSeed = seed + 342.1234f

    // Convexity term: cos output is rescaled from -1..1 to 0..1 so the bulge
    // fades out towards the convexityCutoff distance.
    val c = convexity() * (math.cos(distance * math.Pi * convexityCutoff()).toFloat + 1f) * 0.5f

    if (radial()) {
      // Radial mode: sample horizontally seamless noise so the surface tiles
      // around the side coordinate without a visible seam.
      offset() + c +
        MathUtils.horizontallySeamlessNoise2(side * sc, distance * sc, seed, sc) * amplitude() +
        MathUtils.horizontallySeamlessNoise2(side * dsc, distance * dsc, detailSeed, dsc) * detailAmplitude()
    }
    else {
      // Cartesian mode: convert (distance, angle) to cartesian coordinates and
      // sample 3D simplex noise (seed acts as the third axis).
      offset() + c +
        noise1(Vec3(seed, MathUtils.polarToCartesian(distance, side * MathUtils.Tau) * sc )) * amplitude() +
        noise1(Vec3(detailSeed, MathUtils.polarToCartesian(distance, side * MathUtils.Tau) * dsc)) * detailAmplitude()
    }
  }
}
} | zzorn/ludumdare20 | src/main/scala/net/zzorn/appearance/SurfaceSettings.scala | Scala | gpl-2.0 | 1,692 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package k.grid.service
import akka.actor.Actor
import akka.actor.Actor.Receive
import akka.pattern.ask
import com.typesafe.scalalogging.LazyLogging
import k.grid.{GridJvm, Grid}
import k.grid.service.messages._
import scala.concurrent.duration._
import cmwell.util.concurrent._
import scala.util.Random
import scala.concurrent.ExecutionContext.Implicits.global
/**
* Created by michael on 2/9/16.
*/
object ServiceCoordinator {
  // Name under which the coordinator singleton actor is registered in the grid.
  val name = "ServiceCoordinator"

  // Creates the coordinator as a grid-wide singleton, so exactly one
  // instance exists across all JVMs of the grid.
  def init = Grid.createSingleton(classOf[ServiceCoordinator], ServiceCoordinator.name, None)
}
/** Grid-wide singleton actor that supervises grid services.
  *
  * Every 30 seconds it polls each JVM's LocalServiceManager for the services
  * that JVM can host, then for every service makes sure exactly one instance
  * runs (starting a missing one, stopping duplicates, and relocating an
  * instance to its preferred JVM when one is declared and reachable).
  * Afterwards it broadcasts the resulting service -> JVM mapping to all
  * grid members.
  */
class ServiceCoordinator extends Actor with LazyLogging {
  // Timeout for the ask (?) requests sent to the LocalServiceManager actors.
  implicit val timeout = akka.util.Timeout(15.seconds)
  // Internal self-messages.
  // Records on which JVM (if any) the named service is believed to run.
  private[this] case class UpdateServiceMapping(name : String, ogm : Option[GridJvm])
  // Triggers a registration poll of all JVMs.
  private[this] case object SendRegistrations
  // Pushes the current service mapping to all grid members.
  private[this] case object BroadcastServiceMapping
  // Current knowledge: service name -> JVM running it (None = not running yet).
  private[this] var mapping = Map.empty[String, Option[GridJvm]]
  @throws[Exception](classOf[Exception])
  override def preStart(): Unit = {
    // Start polling immediately and repeat every 30 seconds.
    Grid.system.scheduler.schedule(0.seconds, 30.seconds, self, SendRegistrations)
  }
  override def receive: Receive = {
    case UpdateServiceMapping(name, ogm) => mapping = mapping.updated(name, ogm)
    case BroadcastServiceMapping =>
      logger.debug(s"[BroadcastServiceMapping] current mapping: $mapping")
      val jvms = Grid.jvmsAll
      // Push the mapping to every member's LocalServiceManager.
      jvms.foreach {
        jvm =>
          Grid.selectActor(LocalServiceManager.name, jvm) ! ServiceMapping(mapping)
      }
    case SendRegistrations => {
      val jvms = Grid.jvmsAll
      logger.debug(s"[SendRegistrations] currentJvms: $jvms")
      // Ask every JVM which services it can host / is running.
      val futures = jvms.map {
        jvm =>
          (Grid.selectActor(LocalServiceManager.name, jvm) ? RegisterServices(Grid.thisMember)).mapTo[ServiceInstantiationRequest]
      }
      // Keep only the members that answered in time; failed asks are dropped.
      val future = successes(futures)
      future.foreach {
        stat =>
          // Flatten to (service status, member) pairs across all responders.
          val candidatesSet = stat.flatMap {
            cc => cc.s.map(_ -> cc.member)
          }
          // Group the candidate members by service name.
          val candidatesMap = candidatesSet.groupBy(_._1.name).map(t => t._1 -> t._2)
          candidatesMap.foreach {
            case (serviceName, ssg) if ssg.count(_._1.isRunning) == 0 => // No one is running the service. Will choose one candidate to run it.
              logger.warn(s"[ServiceInstantiation] No one is running the service $serviceName. Will choose one candidate to run it")
              // Prefer a declared preferred JVM if it responded to the poll;
              // otherwise pick a random candidate.
              val winner = ssg.find(_._1.preferredJVM.isDefined) match {
                case Some((ServiceStatus(_, _, Some(preferredJvm)), _)) if(stat.exists(_.member == preferredJvm)) =>
                  logger info s"[ServiceInstantiation] choosing preferred JVM as target for service"
                  preferredJvm
                case _ =>
                  logger info s"[ServiceInstantiation] choosing random JVM as target for service "
                  val vec = ssg.toVector
                  val candIndex = Random.nextInt(vec.size)
                  vec(candIndex)._2
              }
              logger.info(s"[ServiceInstantiation] will run $serviceName on $winner")
              Grid.selectActor(LocalServiceManager.name, winner) ! RunService(serviceName)
              // We will update that currently no one is running the service; we will know if it runs only in the next sample.
              self ! UpdateServiceMapping(serviceName, None)
            case (serviceName, ssg) if ssg.count(_._1.isRunning) > 1 => // We have more than one instance of the service for some reason. We will keep only one.
              logger.warn(s"[ServiceInstantiation] We have more then one $serviceName service for some reason. We will keep only one.")
              ssg.collect{case e@(ServiceStatus(_, true, Some(pJVM)), rJVM) if pJVM == rJVM => e} match {
                // if one of them is running on its preferred JVM, keep it and stop the rest of them
                case runningOnPref if runningOnPref.size > 0 =>
                  val keep = runningOnPref.head
                  val running = ssg.filter(_._1.isRunning)
                  val stop = running - keep
                  logger info s"[ServiceInstantiation] keeping preferred JVM and stopping the rest"
                  stop.foreach{ m => Grid.selectActor(LocalServiceManager.name, m._2) ! StopService(serviceName) }
                  self ! UpdateServiceMapping(serviceName, Some(keep._2))
                // otherwise, keep one and stop all the rest
                case _ =>
                  logger info s"[ServiceInstantiation] keeping the first JVM and stopping the rest"
                  ssg.filter(_._1.isRunning).tail.foreach(m => Grid.selectActor(LocalServiceManager.name, m._2) ! StopService(serviceName))
                  self ! UpdateServiceMapping(serviceName, Some(ssg.head._2))
              }
            case (serviceName, ssg) if ssg.count(_._1.isRunning) == 1 => // All is good!
              logger.debug(s"[ServiceInstantiation] All is good! There is one instance of $serviceName")
              // we know that the get will succeed (the guard guarantees one runner)
              val runner = ssg.find(_._1.isRunning).get
              // if the runner is not on a preferred JVM while some candidate declares one,
              // stop it here; the "count == 0" branch relocates it on the next poll.
              if(runner._1.preferredJVM.isEmpty && candidatesMap(serviceName).exists(_._1.preferredJVM.isDefined)){
                val preferredJvm = candidatesMap(serviceName).collect{case (ServiceStatus(_, _, Some(preferred)), _) => preferred}.head
                logger warn s"we've found that service: ${runner._1.name} is not running on its preferred JVM stopping it. "
                Grid.selectActor(LocalServiceManager.name, runner._2) ! StopService(serviceName)
                // We will update that currently no one is running the service; we will know if it runs only in the next sample.
                self ! UpdateServiceMapping(serviceName, None)
              } else {
                // Only record a mapping change when the runner actually moved.
                if (mapping.get(runner._1.name).flatten != Some(runner._2))
                  self ! UpdateServiceMapping(serviceName, ssg.find(_._1.isRunning).map(_._2))
              }
          }
          // will broadcast the current service mapping to the rest of the members.
          self ! BroadcastServiceMapping
      }
    }
  }
}
| nruppin/CM-Well | server/cmwell-grid/src/main/scala/k/grid/service/ServiceCoordinator.scala | Scala | apache-2.0 | 6,778 |
/**
* Copyright 2011-2016 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.stats.message
import io.gatling.commons.stats.{ KO, OK, Status }
import org.scalacheck.Gen.alphaStr
import io.gatling.BaseSpec
/** Verifies that Status.apply accepts exactly the strings "OK" and "KO"
  * and rejects everything else.
  */
class StatusSpec extends BaseSpec {

  "Status.apply" should "return OK when passing 'OK'" in {
    Status("OK") shouldBe OK
  }

  // Fixed test description: it previously read "return OK when passing 'KO'"
  // although the assertion (correctly) expects KO.
  it should "return KO when passing 'KO'" in {
    Status("KO") shouldBe KO
  }

  it should "throw an IllegalArgumentException on any other string" in {
    // Property-based: any alphabetic string other than "OK"/"KO" must be rejected.
    forAll(alphaStr.suchThat(s => s != "OK" && s != "KO")) { string =>
      an[IllegalArgumentException] should be thrownBy Status(string)
    }
  }
}
| thkluge/gatling | gatling-core/src/test/scala/io/gatling/core/stats/message/StatusSpec.scala | Scala | apache-2.0 | 1,209 |
/*
* Copyright (C) 2014 - 2017 Contributors as noted in the AUTHORS.md file
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.wegtam.tensei.agent
import com.wegtam.tensei.adt._
class SortTransformationMappingsFunctionsTest
extends DefaultSpec
with SortTransformationMappingsFunctions {
describe("SortTransformationMappingsFunctions") {
    // Behaviour of findFirstId: locate, within the sorted target element
    // vector ("haystack"), the index of the first element referenced by a
    // mapping's element list.
    describe("findFirstId") {
      describe("given empty lists") {
        // Searching with an empty element list must fail fast.
        it("should throw an exception") {
          an[NoSuchElementException] should be thrownBy findFirstId(Vector.empty[ElementReference],
                                                                    List.empty[ElementReference])
        }
      }
      describe("given an empty haystack") {
        // Nothing can be found in an empty haystack, signalled via -1.
        it("should return -1") {
          val elements =
            List(ElementReference("T", "A"), ElementReference("T", "B"), ElementReference("T", "C"))
          findFirstId(Vector.empty[ElementReference], elements) should be(-1L)
        }
      }
      describe("given an empty element list") {
        // An empty element list is rejected even for a non-empty haystack.
        it("should throw an exception") {
          val haystack = Vector(ElementReference("T", "A"),
                                ElementReference("T", "B"),
                                ElementReference("T", "C"))
          an[NoSuchElementException] should be thrownBy findFirstId(haystack,
                                                                    List.empty[ElementReference])
        }
      }
      describe("given valid parameters") {
        describe("containing the first haystack element") {
          it("should return 0") {
            val elements = List(ElementReference("T", "B"),
                                ElementReference("T", "C"),
                                ElementReference("T", "A"))
            val haystack = Vector(ElementReference("T", "A"),
                                  ElementReference("T", "B"),
                                  ElementReference("T", "C"))
            findFirstId(haystack, elements) should be(0)
          }
        }
        describe("containing the last haystack element") {
          it("should return the last index") {
            val elements = List(ElementReference("T", "C"))
            val haystack = Vector(ElementReference("T", "A"),
                                  ElementReference("T", "B"),
                                  ElementReference("T", "C"))
            findFirstId(haystack, elements) should be(haystack.length - 1)
          }
        }
        describe("containing only an element from within the haystack") {
          it("should return the index of that element") {
            val elements = List(ElementReference("T", "B"))
            val haystack = Vector(ElementReference("T", "A"),
                                  ElementReference("T", "B"),
                                  ElementReference("T", "C"))
            findFirstId(haystack, elements) should be(haystack.indexOf(elements.head))
          }
        }
      }
    }
    // Behaviour of sortAllToAllMappingPairs: for MapAllToAll mappings only
    // the target element list is re-ordered; the source list stays put.
    describe("sortAllToAllMappingPairs") {
      describe("given an empty mapping") {
        // With an empty source list there is nothing to sort.
        it("should return the original mapping") {
          val m = MappingTransformation(List(), List(ElementReference("T", "B")))
          val sortedElements = Vector(ElementReference("T", "A"), ElementReference("T", "B"))
          sortAllToAllMappingPairs(m)(sortedElements) should be(m)
        }
      }
      describe("given an empty sorted element list") {
        // Without a sort order the mapping must be returned unchanged.
        it("should return the original mapping") {
          val m =
            MappingTransformation(List(ElementReference("S", "1"), ElementReference("S", "2")),
                                  List(ElementReference("T", "B"), ElementReference("T", "A")))
          val sortedElements = Vector.empty[ElementReference]
          sortAllToAllMappingPairs(m)(sortedElements) should be(m)
        }
      }
      describe("given a proper mapping and sorted element list") {
        // Targets (B, C, A) are re-ordered to (A, B, C); sources keep their order.
        it("should sort the mapping pairs correctly") {
          val m = MappingTransformation(
            List(ElementReference("S", "1"),
                 ElementReference("S", "2"),
                 ElementReference("S", "3")),
            List(ElementReference("T", "B"), ElementReference("T", "C"), ElementReference("T", "A"))
          )
          val sortedElements = Vector(ElementReference("T", "A"),
                                      ElementReference("T", "B"),
                                      ElementReference("T", "C"))
          val expectedM = MappingTransformation(
            List(ElementReference("S", "1"),
                 ElementReference("S", "2"),
                 ElementReference("S", "3")),
            List(ElementReference("T", "A"), ElementReference("T", "B"), ElementReference("T", "C"))
          )
          sortAllToAllMappingPairs(m)(sortedElements) should be(expectedM)
        }
      }
    }
    // Behaviour of sortOneToOneMappingPairs: for MapOneToOne mappings the
    // source and target lists are re-ordered together, keeping each
    // source/target pair intact.
    describe("sortOneToOneMappingPairs") {
      describe("given an empty mapping") {
        // One-to-one mode requires matching list sizes, so an empty source
        // list paired with a non-empty target list is rejected.
        it("should throw an exception") {
          val m = MappingTransformation(List(), List(ElementReference("T", "B")))
          val sortedElements = Vector(ElementReference("T", "A"), ElementReference("T", "B"))
          an[IllegalArgumentException] should be thrownBy sortOneToOneMappingPairs(m)(
            sortedElements
          )
        }
      }
      describe("given an empty sorted element list") {
        // Without a sort order the mapping must be returned unchanged.
        it("should return the original mapping") {
          val m =
            MappingTransformation(List(ElementReference("S", "1"), ElementReference("S", "2")),
                                  List(ElementReference("T", "B"), ElementReference("T", "A")))
          val sortedElements = Vector.empty[ElementReference]
          sortOneToOneMappingPairs(m)(sortedElements) should be(m)
        }
      }
      describe("given a proper mapping and sorted element list") {
        // Targets (B, C, A) become (A, B, C) and the paired sources
        // (1, 2, 3) follow them to (3, 1, 2).
        it("should sort the mapping pairs correctly") {
          val m = MappingTransformation(
            List(ElementReference("S", "1"),
                 ElementReference("S", "2"),
                 ElementReference("S", "3")),
            List(ElementReference("T", "B"), ElementReference("T", "C"), ElementReference("T", "A"))
          )
          val sortedElements = Vector(ElementReference("T", "A"),
                                      ElementReference("T", "B"),
                                      ElementReference("T", "C"))
          val expectedM = MappingTransformation(
            List(ElementReference("S", "3"),
                 ElementReference("S", "1"),
                 ElementReference("S", "2")),
            List(ElementReference("T", "A"), ElementReference("T", "B"), ElementReference("T", "C"))
          )
          sortOneToOneMappingPairs(m)(sortedElements) should be(expectedM)
        }
      }
    }
    // Behaviour of sortMappings: re-orders a recipe's mapping list by the
    // position of each mapping's first target element within the sorted
    // element vector.
    describe("sortMappings") {
      describe("given an empty recipe") {
        it("should return the original recipe") {
          val recipe = Recipe(
            id = "RECIPE",
            mode = Recipe.MapAllToAll,
            mappings = List.empty[MappingTransformation]
          )
          val sortedElements = Vector(ElementReference("T", "A"),
                                      ElementReference("T", "B"),
                                      ElementReference("T", "C"))
          sortMappings(recipe)(sortedElements) should be(recipe)
        }
      }
      describe("given an empty sorted element list") {
        // Without a sort order the recipe must be returned unchanged.
        it("should return the original recipe") {
          val recipe = Recipe(
            id = "RECIPE",
            mode = Recipe.MapAllToAll,
            mappings = List(
              MappingTransformation(
                List(ElementReference("S", "1"), ElementReference("S", "2")),
                List(ElementReference("T", "A"))
              ),
              MappingTransformation(
                List(ElementReference("S", "3")),
                List(ElementReference("T", "B"), ElementReference("T", "C"))
              )
            )
          )
          val sortedElements = Vector.empty[ElementReference]
          sortMappings(recipe)(sortedElements) should be(recipe)
        }
      }
      describe("given a proper recipe and sorted element list") {
        // Mappings with first targets C, B, A are re-ordered to A, B, C.
        it("should sort the mapping list correctly") {
          val recipe = Recipe(
            id = "RECIPE",
            mode = Recipe.MapAllToAll,
            mappings = List(
              MappingTransformation(
                List(ElementReference("S", "1"), ElementReference("S", "2")),
                List(ElementReference("T", "C"))
              ),
              MappingTransformation(
                List(ElementReference("S", "3")),
                List(ElementReference("T", "B"), ElementReference("T", "C"))
              ),
              MappingTransformation(
                List(ElementReference("S", "2")),
                List(ElementReference("T", "A"))
              )
            )
          )
          val sortedElements = Vector(ElementReference("T", "A"),
                                      ElementReference("T", "B"),
                                      ElementReference("T", "C"))
          val expectedRecipe = Recipe(
            id = "RECIPE",
            mode = Recipe.MapAllToAll,
            mappings = List(
              MappingTransformation(
                List(ElementReference("S", "2")),
                List(ElementReference("T", "A"))
              ),
              MappingTransformation(
                List(ElementReference("S", "3")),
                List(ElementReference("T", "B"), ElementReference("T", "C"))
              ),
              MappingTransformation(
                List(ElementReference("S", "1"), ElementReference("S", "2")),
                List(ElementReference("T", "C"))
              )
            )
          )
          sortMappings(recipe)(sortedElements) should be(expectedRecipe)
        }
      }
    }
describe("sortRecipes") {
describe("given an empty cookbook") {
it("should return the original cookbook") {
val cookbook = Cookbook(
id = "COOKBOOK",
sources = List.empty[DFASDL],
target = None,
recipes = List.empty[Recipe]
)
sortRecipes(cookbook) should be(cookbook)
}
}
describe("given a cookbook with an empty target DFASDL") {
it("should return the original cookbook") {
val sourceDfasdl = DFASDL(
id = "S",
content =
"""
|<dfasdl xmlns="http://www.dfasdl.org/DFASDL" default-encoding="utf-8" semantic="niem">
| <elem id="sources">
| <str id="1" stop-sign=","/>
| <str id="2" stop-sign=","/>
| <str id="3"/>
| </elem>
|</dfasdl>
| """.stripMargin
)
val recipes = List(
Recipe(
id = "RECIPE-01",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "C"))
),
MappingTransformation(
List(ElementReference("S", "3")),
List(ElementReference("T", "B"), ElementReference("T", "C"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "B"))
)
)
),
Recipe(
id = "RECIPE-02",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "C"), ElementReference("T", "A"))
),
MappingTransformation(
List(ElementReference("S", "3"), ElementReference("S", "2")),
List(ElementReference("T", "C"), ElementReference("T", "B"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "A"))
)
)
)
)
val cookbook = Cookbook(
id = "COOKBOOK",
sources = List(sourceDfasdl),
target = None,
recipes = recipes
)
sortRecipes(cookbook) should be(cookbook)
}
}
describe("given a proper cookbook") {
it("should sort the cookbook recipes and mappings correctly") {
val sourceDfasdl = DFASDL(
id = "S",
content =
"""
|<dfasdl xmlns="http://www.dfasdl.org/DFASDL" default-encoding="utf-8" semantic="niem">
| <elem id="sources">
| <str id="1" stop-sign=","/>
| <str id="2" stop-sign=","/>
| <str id="3"/>
| </elem>
|</dfasdl>
| """.stripMargin
)
val targetDfasdl = DFASDL(
id = "T",
content =
"""
|<dfasdl xmlns="http://www.dfasdl.org/DFASDL" default-encoding="utf-8" semantic="niem">
| <elem id="targets">
| <str id="A" stop-sign=","/>
| <str id="B" stop-sign=","/>
| <str id="C"/>
| </elem>
|</dfasdl>
| """.stripMargin
)
val recipes = List(
Recipe(
id = "RECIPE-01",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "C"))
),
MappingTransformation(
List(ElementReference("S", "3")),
List(ElementReference("T", "B"), ElementReference("T", "C"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "B"))
)
)
),
Recipe(
id = "RECIPE-02",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "C"), ElementReference("T", "A"))
),
MappingTransformation(
List(ElementReference("S", "3"), ElementReference("S", "2")),
List(ElementReference("T", "C"), ElementReference("T", "B"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "A"))
)
)
)
)
val sortedRecipes = List(
Recipe(
id = "RECIPE-02",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "2"), ElementReference("S", "1")),
List(ElementReference("T", "A"), ElementReference("T", "C"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "A"))
),
MappingTransformation(
List(ElementReference("S", "2"), ElementReference("S", "3")),
List(ElementReference("T", "B"), ElementReference("T", "C"))
)
)
),
Recipe(
id = "RECIPE-01",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
List(ElementReference("S", "3")),
List(ElementReference("T", "B"), ElementReference("T", "C"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "B"))
),
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "C"))
)
)
)
)
val cookbook = Cookbook(
id = "COOKBOOK",
sources = List(sourceDfasdl),
target = Option(targetDfasdl),
recipes = recipes
)
val sortedCookbook = cookbook.copy(recipes = sortedRecipes)
sortRecipes(cookbook) should be(sortedCookbook)
}
}
describe("given a proper cookbook using foreign keys") {
describe("with one foreign key") {
it("should sort the cookbook recipes and mappings correctly") {
val sourceDfasdl = DFASDL(
id = "S",
content =
"""
|<dfasdl xmlns="http://www.dfasdl.org/DFASDL" default-encoding="utf-8" semantic="niem">
| <seq id="sources">
| <elem id="source">
| <num id="1" stop-sign=","/>
| <str id="2" stop-sign=","/>
| <str id="3"/>
| </elem>
| </seq>
|</dfasdl>
| """.stripMargin
)
val targetDfasdl =
DFASDL(
id = "T",
content =
"""
|<dfasdl xmlns="http://www.dfasdl.org/DFASDL" default-encoding="utf-8" semantic="niem">
| <seq id="target1">
| <elem id="target1-row">
| <num id="A" db-column-name="id"/>
| <str id="B" db-column-name="name"/>
| </elem>
| </seq>
| <seq id="target2">
| <elem id="target2-row">
| <num id="C" db-column-name="id" db-auto-inc="true"/>
| <str id="D" db-column-name="firstname"/>
| <num id="E" db-column-name="my_name" db-foreign-key="F"/>
| </elem>
| </seq>
| <seq id="target3">
| <elem id="target3-row">
| <num id="F" db-column-name="id"/>
| <str id="G" db-column-name="name"/>
| </elem>
| </seq>
|</dfasdl>
| """.stripMargin
)
val recipes = List(
Recipe(
id = "RECIPE-01",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "B"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "A"))
)
)
),
Recipe(
id = "RECIPE-02",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "D"), ElementReference("T", "E"))
),
MappingTransformation(
List(ElementReference("S", "3"), ElementReference("S", "2")),
List(ElementReference("T", "C"), ElementReference("T", "E"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "D"))
)
)
),
Recipe(
id = "RECIPE-03",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "G"))
),
MappingTransformation(
List(ElementReference("S", "3")),
List(ElementReference("T", "F"))
)
)
)
)
val sortedRecipes =
List(
Recipe(
id = "RECIPE-01",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "A"))
),
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "B"))
)
)
),
Recipe(
id = "RECIPE-03",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "3")),
List(ElementReference("T", "F"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "G"))
)
)
),
Recipe(
id = "RECIPE-02",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "3"), ElementReference("S", "2")),
List(ElementReference("T", "C"), ElementReference("T", "E"))
),
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "D"), ElementReference("T", "E"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "D"))
)
)
)
)
val cookbook = Cookbook(
id = "COOKBOOK",
sources = List(sourceDfasdl),
target = Option(targetDfasdl),
recipes = recipes
)
val sortedCookbook = cookbook.copy(recipes = sortedRecipes)
sortRecipes(cookbook) should be(sortedCookbook)
}
}
describe("with multiple foreign keys") {
it("should sort the cookbook recipes and mappings correctly ignoring unused targets") {
val sourceDfasdl = DFASDL(
id = "S",
content =
"""
|<dfasdl xmlns="http://www.dfasdl.org/DFASDL" default-encoding="utf-8" semantic="niem">
| <seq id="sources">
| <elem id="source">
| <num id="1" stop-sign=","/>
| <str id="2" stop-sign=","/>
| <str id="3"/>
| </elem>
| </seq>
|</dfasdl>
| """.stripMargin
)
val targetDfasdl =
DFASDL(
id = "T",
content =
"""
|<dfasdl xmlns="http://www.dfasdl.org/DFASDL" default-encoding="utf-8" semantic="niem">
| <seq id="target1">
| <elem id="target1-row">
| <num id="A" db-column-name="id"/>
| <str id="B" db-column-name="name"/>
| </elem>
| </seq>
| <seq id="target2">
| <elem id="target2-row">
| <num id="C" db-column-name="id" db-auto-inc="true"/>
| <str id="D" db-column-name="firstname" db-foreign-key="A"/>
| <num id="E" db-column-name="my_name" db-foreign-key="F"/>
| </elem>
| </seq>
| <seq id="target3">
| <elem id="target3-row">
| <num id="F" db-column-name="id"/>
| <str id="G" db-column-name="name"/>
| </elem>
| </seq>
| <seq id="target4">
| <elem id="target4-row">
| <num id="H" db-column-name="id"/>
| <str id="I" db-column-name="name"/>
| <num id="J" db-column-name="another_id" db-foreign-key="K"/>
| </elem>
| </seq>
| <seq id="target5">
| <elem id="target5-row">
| <num id="K" db-column-name="id"/>
| <str id="L" db-column-name="name"/>
| </elem>
| </seq>
|</dfasdl>
| """.stripMargin
)
val recipes = List(
Recipe(
id = "RECIPE-01",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "B"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "A"))
)
)
),
Recipe(
id = "RECIPE-02",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "D"), ElementReference("T", "E"))
),
MappingTransformation(
List(ElementReference("S", "3"), ElementReference("S", "2")),
List(ElementReference("T", "C"), ElementReference("T", "E"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "D"))
)
)
),
Recipe(
id = "RECIPE-03",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "G"))
),
MappingTransformation(
List(ElementReference("S", "3")),
List(ElementReference("T", "F"))
)
)
)
)
val sortedRecipes =
List(
Recipe(
id = "RECIPE-01",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "A"))
),
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "B"))
)
)
),
Recipe(
id = "RECIPE-03",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "3")),
List(ElementReference("T", "F"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "G"))
)
)
),
Recipe(
id = "RECIPE-02",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "3"), ElementReference("S", "2")),
List(ElementReference("T", "C"), ElementReference("T", "E"))
),
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "D"), ElementReference("T", "E"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "D"))
)
)
)
)
val cookbook = Cookbook(
id = "COOKBOOK",
sources = List(sourceDfasdl),
target = Option(targetDfasdl),
recipes = recipes
)
val sortedCookbook = cookbook.copy(recipes = sortedRecipes)
sortRecipes(cookbook) should be(sortedCookbook)
}
}
describe("with multiple foreign keys") {
it("should sort the cookbook recipes and mappings correctly") {
val sourceDfasdl = DFASDL(
id = "S",
content =
"""
|<dfasdl xmlns="http://www.dfasdl.org/DFASDL" default-encoding="utf-8" semantic="niem">
| <seq id="sources">
| <elem id="source">
| <num id="1" stop-sign=","/>
| <str id="2" stop-sign=","/>
| <str id="3"/>
| </elem>
| </seq>
|</dfasdl>
| """.stripMargin
)
val targetDfasdl =
DFASDL(
id = "T",
content =
"""
|<dfasdl xmlns="http://www.dfasdl.org/DFASDL" default-encoding="utf-8" semantic="niem">
| <seq id="target1">
| <elem id="target1-row">
| <num id="A" db-column-name="id"/>
| <str id="B" db-column-name="name"/>
| </elem>
| </seq>
| <seq id="target2">
| <elem id="target2-row">
| <num id="C" db-column-name="id" db-auto-inc="true"/>
| <str id="D" db-column-name="firstname" db-foreign-key="A"/>
| <num id="E" db-column-name="my_name" db-foreign-key="F"/>
| </elem>
| </seq>
| <seq id="target3">
| <elem id="target3-row">
| <num id="F" db-column-name="id"/>
| <str id="G" db-column-name="name"/>
| </elem>
| </seq>
| <seq id="target4">
| <elem id="target4-row">
| <num id="H" db-column-name="id"/>
| <str id="I" db-column-name="name"/>
| <num id="J" db-column-name="another_id" db-foreign-key="K"/>
| </elem>
| </seq>
| <seq id="target5">
| <elem id="target5-row">
| <num id="K" db-column-name="id"/>
| <str id="L" db-column-name="name"/>
| </elem>
| </seq>
|</dfasdl>
| """.stripMargin
)
val recipes = List(
Recipe(
id = "RECIPE-01",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "B"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "A"))
)
)
),
Recipe(
id = "RECIPE-02",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "D"), ElementReference("T", "E"))
),
MappingTransformation(
List(ElementReference("S", "3"), ElementReference("S", "2")),
List(ElementReference("T", "C"), ElementReference("T", "E"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "D"))
)
)
),
Recipe(
id = "RECIPE-03",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "G"))
),
MappingTransformation(
List(ElementReference("S", "3")),
List(ElementReference("T", "F"))
)
)
),
Recipe(
id = "RECIPE-04",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1"),
ElementReference("S", "2"),
ElementReference("S", "3")),
List(ElementReference("T", "H"),
ElementReference("T", "I"),
ElementReference("T", "J"))
)
)
),
Recipe(
id = "RECIPE-05",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1")),
List(ElementReference("T", "L"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "K"))
)
)
)
)
val sortedRecipes =
List(
Recipe(
id = "RECIPE-01",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "A"))
),
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "B"))
)
)
),
Recipe(
id = "RECIPE-03",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "3")),
List(ElementReference("T", "F"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "G"))
)
)
),
Recipe(
id = "RECIPE-02",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "3"), ElementReference("S", "2")),
List(ElementReference("T", "C"), ElementReference("T", "E"))
),
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "D"), ElementReference("T", "E"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "D"))
)
)
),
Recipe(
id = "RECIPE-05",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "K"))
),
MappingTransformation(
List(ElementReference("S", "1")),
List(ElementReference("T", "L"))
)
)
),
Recipe(
id = "RECIPE-04",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1"),
ElementReference("S", "2"),
ElementReference("S", "3")),
List(ElementReference("T", "H"),
ElementReference("T", "I"),
ElementReference("T", "J"))
)
)
)
)
val cookbook = Cookbook(
id = "COOKBOOK",
sources = List(sourceDfasdl),
target = Option(targetDfasdl),
recipes = recipes
)
val sortedCookbook = cookbook.copy(recipes = sortedRecipes)
sortRecipes(cookbook) should be(sortedCookbook)
}
}
describe("with multiple foreign keys using cross references") {
it("should sort the cookbook recipes and mappings correctly") {
val sourceDfasdl = DFASDL(
id = "S",
content =
"""
|<dfasdl xmlns="http://www.dfasdl.org/DFASDL" default-encoding="utf-8" semantic="niem">
| <seq id="sources">
| <elem id="source">
| <num id="1" stop-sign=","/>
| <str id="2" stop-sign=","/>
| <str id="3"/>
| </elem>
| </seq>
|</dfasdl>
| """.stripMargin
)
val targetDfasdl =
DFASDL(
id = "T",
content =
"""
|<dfasdl xmlns="http://www.dfasdl.org/DFASDL" default-encoding="utf-8" semantic="niem">
| <seq id="target1">
| <elem id="target1-row">
| <num id="A" db-column-name="id"/>
| <str id="B" db-column-name="name"/>
| </elem>
| </seq>
| <seq id="target2">
| <elem id="target2-row">
| <num id="C" db-column-name="id" db-auto-inc="true"/>
| <str id="D" db-column-name="firstname" db-foreign-key="I"/>
| <num id="E" db-column-name="my_name" db-foreign-key="F"/>
| </elem>
| </seq>
| <seq id="target3">
| <elem id="target3-row">
| <num id="F" db-column-name="id"/>
| <str id="G" db-column-name="name"/>
| </elem>
| </seq>
| <seq id="target4">
| <elem id="target4-row">
| <num id="H" db-column-name="id"/>
| <str id="I" db-column-name="name"/>
| <num id="J" db-column-name="another_id" db-foreign-key="K"/>
| </elem>
| </seq>
| <seq id="target5">
| <elem id="target5-row">
| <num id="K" db-column-name="id"/>
| <str id="L" db-column-name="name"/>
| </elem>
| </seq>
|</dfasdl>
| """.stripMargin
)
val recipes = List(
Recipe(
id = "RECIPE-01",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "B"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "A"))
)
)
),
Recipe(
id = "RECIPE-02",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "D"), ElementReference("T", "E"))
),
MappingTransformation(
List(ElementReference("S", "3"), ElementReference("S", "2")),
List(ElementReference("T", "C"), ElementReference("T", "E"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "D"))
)
)
),
Recipe(
id = "RECIPE-03",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "G"))
),
MappingTransformation(
List(ElementReference("S", "3")),
List(ElementReference("T", "F"))
)
)
),
Recipe(
id = "RECIPE-04",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1"),
ElementReference("S", "2"),
ElementReference("S", "3")),
List(ElementReference("T", "H"),
ElementReference("T", "I"),
ElementReference("T", "J"))
)
)
),
Recipe(
id = "RECIPE-05",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1")),
List(ElementReference("T", "L"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "K"))
)
)
)
)
val sortedRecipes =
List(
Recipe(
id = "RECIPE-01",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "A"))
),
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "B"))
)
)
),
Recipe(
id = "RECIPE-05",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "K"))
),
MappingTransformation(
List(ElementReference("S", "1")),
List(ElementReference("T", "L"))
)
)
),
Recipe(
id = "RECIPE-04",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1"),
ElementReference("S", "2"),
ElementReference("S", "3")),
List(ElementReference("T", "H"),
ElementReference("T", "I"),
ElementReference("T", "J"))
)
)
),
Recipe(
id = "RECIPE-03",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "3")),
List(ElementReference("T", "F"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "G"))
)
)
),
Recipe(
id = "RECIPE-02",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "3"), ElementReference("S", "2")),
List(ElementReference("T", "C"), ElementReference("T", "E"))
),
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "D"), ElementReference("T", "E"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "D"))
)
)
)
)
val cookbook = Cookbook(
id = "COOKBOOK",
sources = List(sourceDfasdl),
target = Option(targetDfasdl),
recipes = recipes
)
val sortedCookbook = cookbook.copy(recipes = sortedRecipes)
sortRecipes(cookbook) should be(sortedCookbook)
}
}
describe("with multiple foreign keys using even more cross references") {
it("should sort the cookbook recipes and mappings correctly") {
val sourceDfasdl = DFASDL(
id = "S",
content =
"""
|<dfasdl xmlns="http://www.dfasdl.org/DFASDL" default-encoding="utf-8" semantic="niem">
| <seq id="sources">
| <elem id="source">
| <num id="1" stop-sign=","/>
| <str id="2" stop-sign=","/>
| <str id="3"/>
| </elem>
| </seq>
|</dfasdl>
| """.stripMargin
)
val targetDfasdl =
DFASDL(
id = "T",
content =
"""
|<dfasdl xmlns="http://www.dfasdl.org/DFASDL" default-encoding="utf-8" semantic="niem">
| <seq id="target1">
| <elem id="target1-row">
| <num id="A" db-column-name="id"/>
| <str id="B" db-column-name="name"/>
| </elem>
| </seq>
| <seq id="target2">
| <elem id="target2-row">
| <num id="C" db-column-name="id" db-auto-inc="true"/>
| <str id="D" db-column-name="firstname" db-foreign-key="I"/>
| <num id="E" db-column-name="my_name" db-foreign-key="F"/>
| </elem>
| </seq>
| <seq id="target3">
| <elem id="target3-row">
| <num id="F" db-column-name="id"/>
| <str id="G" db-column-name="name" db-foreign-key="L"/>
| </elem>
| </seq>
| <seq id="target4">
| <elem id="target4-row">
| <num id="H" db-column-name="id"/>
| <str id="I" db-column-name="name"/>
| <num id="J" db-column-name="another_id" db-foreign-key="K"/>
| <str id="J2" db-column-name="yet_another_foreigner" db-foreign-key="G"/>
| </elem>
| </seq>
| <seq id="target5">
| <elem id="target5-row">
| <num id="K" db-column-name="id"/>
| <str id="L" db-column-name="name"/>
| </elem>
| </seq>
|</dfasdl>
| """.stripMargin
)
val recipes = List(
Recipe(
id = "RECIPE-01",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "B"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "A"))
)
)
),
Recipe(
id = "RECIPE-02",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "D"), ElementReference("T", "E"))
),
MappingTransformation(
List(ElementReference("S", "3"), ElementReference("S", "2")),
List(ElementReference("T", "C"), ElementReference("T", "E"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "D"))
)
)
),
Recipe(
id = "RECIPE-03",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "G"))
),
MappingTransformation(
List(ElementReference("S", "3")),
List(ElementReference("T", "F"))
)
)
),
Recipe(
id = "RECIPE-04",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1"),
ElementReference("S", "2"),
ElementReference("S", "3")),
List(ElementReference("T", "H"),
ElementReference("T", "I"),
ElementReference("T", "J"),
ElementReference("T", "J2"))
)
)
),
Recipe(
id = "RECIPE-05",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1")),
List(ElementReference("T", "L"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "K"))
)
)
)
)
val sortedRecipes =
List(
Recipe(
id = "RECIPE-01",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "A"))
),
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "B"))
)
)
),
Recipe(
id = "RECIPE-05",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "K"))
),
MappingTransformation(
List(ElementReference("S", "1")),
List(ElementReference("T", "L"))
)
)
),
Recipe(
id = "RECIPE-03",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "3")),
List(ElementReference("T", "F"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "G"))
)
)
),
Recipe(
id = "RECIPE-04",
mode = Recipe.MapAllToAll,
mappings = List(
MappingTransformation(
List(ElementReference("S", "1"),
ElementReference("S", "2"),
ElementReference("S", "3")),
List(ElementReference("T", "H"),
ElementReference("T", "I"),
ElementReference("T", "J"),
ElementReference("T", "J2"))
)
)
),
Recipe(
id = "RECIPE-02",
mode = Recipe.MapOneToOne,
mappings = List(
MappingTransformation(
List(ElementReference("S", "3"), ElementReference("S", "2")),
List(ElementReference("T", "C"), ElementReference("T", "E"))
),
MappingTransformation(
List(ElementReference("S", "1"), ElementReference("S", "2")),
List(ElementReference("T", "D"), ElementReference("T", "E"))
),
MappingTransformation(
List(ElementReference("S", "2")),
List(ElementReference("T", "D"))
)
)
)
)
val cookbook = Cookbook(
id = "COOKBOOK",
sources = List(sourceDfasdl),
target = Option(targetDfasdl),
recipes = recipes
)
val sortedCookbook = cookbook.copy(recipes = sortedRecipes)
sortRecipes(cookbook) should be(sortedCookbook)
}
}
}
}
}
}
| Tensei-Data/tensei-agent | src/test/scala/com/wegtam/tensei/agent/SortTransformationMappingsFunctionsTest.scala | Scala | agpl-3.0 | 57,974 |
/*
* Copyright (C) 2015 Vladimir Konstantinov, Yuriy Gintsyak
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package io.cafebabe.util.protocol.jsonrpc
import org.json4s._
/**
* TODO: Add description.
*
* @author Vladimir Konstantinov
*/
/** A JSON-RPC response message carrying a successful result together with the
  * id of the request it answers.
  *
  * @param result the payload returned by the remote method
  * @param id     the id of the originating request
  */
case class JsonRpcResult(result: JValue, id: Int) extends JsonRpcMessage {
  // Serialized form: {"result": <result>, "id": <id>}
  override val toJson = JObject(List(JField("result", result), JField("id", JInt(id))))
}
/**
* TODO: Add description.
*
* @author Vladimir Konstantinov
*/
/** Companion providing extraction of a [[JsonRpcResult]] from raw JSON. */
object JsonRpcResult {

  /** Returns a [[JsonRpcResult]] when `json` is an object that contains both a
    * "result" field and an integer "id" field; `None` otherwise.
    * If a field occurs more than once, the first occurrence wins.
    */
  def from(json: JValue): Option[JsonRpcResult] = json match {
    case JObject(fields) =>
      for {
        result <- fields.collectFirst { case ("result", value) => value }
        id     <- fields.collectFirst { case ("id", JInt(value)) => value.toInt }
      } yield JsonRpcResult(result, id)
    case _ => None
  }
}
| oxy-development/util | src/main/scala/io/cafebabe/util/protocol/jsonrpc/JsonRpcResult.scala | Scala | lgpl-3.0 | 1,386 |
package org.openapitools.client.api
import argonaut._
import argonaut.EncodeJson._
import argonaut.DecodeJson._
import org.http4s.{EntityDecoder, EntityEncoder}
import org.http4s.argonaut._
import org.joda.time.DateTime
import GithubRepositorieslinks._
// Generated model for the "_links" section of a Jenkins GitHub-repositories API payload.
// NOTE(review): field semantics inferred from names — presumably `self` is a hyperlink
// to this resource and `class` is the Jenkins payload class name; confirm against the spec.
case class GithubRepositorieslinks (
  self: Option[Link],
  `class`: Option[String]) // backticks: "class" is a Scala keyword
object GithubRepositorieslinks {
  import DateTimeCodecs._
  // Argonaut JSON codec derived from the case-class fields.
  implicit val GithubRepositorieslinksCodecJson: CodecJson[GithubRepositorieslinks] = CodecJson.derive[GithubRepositorieslinks]
  // http4s entity decoder/encoder so the model can be read from / written to HTTP bodies as JSON.
  implicit val GithubRepositorieslinksDecoder: EntityDecoder[GithubRepositorieslinks] = jsonOf[GithubRepositorieslinks]
  implicit val GithubRepositorieslinksEncoder: EntityEncoder[GithubRepositorieslinks] = jsonEncoderOf[GithubRepositorieslinks]
}
| cliffano/swaggy-jenkins | clients/scalaz/generated/src/main/scala/org/openapitools/client/api/GithubRepositorieslinks.scala | Scala | mit | 780 |
/*
* Copyright 2013 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.storehaus.algebra
import com.twitter.algebird.{SummingQueue, Semigroup, Monoid}
import com.twitter.storehaus.StoreProperties
import org.scalacheck.Properties
/** ScalaCheck properties verifying that a buffering (summing-queue backed)
  * store still obeys the generic store laws for two value types.
  */
object BufferingStoreProperties extends Properties("BufferingStore") {
  import StoreProperties.sparseStoreTest
  import MergeableStoreProperties._
  import MergeableStore.enrich

  /**
   * Summer constructor shared by the properties below: buffers up to 10
   * key/value maps in a `SummingQueue` before flushing to the backing store.
   * A `def` so each property gets a fresh (stateful) queue, exactly as the
   * previous inlined anonymous instances did.
   */
  private def queueSummer: SummerConstructor[String] =
    new SummerConstructor[String] {
      def apply[V](sg: Semigroup[V]): SummingQueue[Map[String, V]] = {
        implicit val semi = sg
        SummingQueue[Map[String, V]](10)
      }
    }

  // The sparse-store test treats monoid-zero values as absent, so normalize
  // such values to the zero map before comparison.
  property("BufferingStore [Map[Int,String]] obeys the store properties") =
    sparseStoreTest { opt: Option[Map[Int, String]] =>
      opt.filter(Monoid.isNonZero(_)).orElse(Some(Monoid.zero[Map[Int, String]]))
    } {
      newSparseStore[String, Map[Int, String]].withSummer(queueSummer)
    }

  property("BufferingStore [Map[Int,Int]] obeys the store properties") =
    sparseStoreTest { opt: Option[Map[Int, Int]] =>
      opt.filter(Monoid.isNonZero(_)).orElse(Some(Monoid.zero[Map[Int, Int]]))
    } {
      newSparseStore[String, Map[Int, Int]].withSummer(queueSummer)
    }
}
| twitter/storehaus | storehaus-algebra/src/test/scala/com/twitter/storehaus/algebra/BufferingStoreProperties.scala | Scala | apache-2.0 | 1,925 |
package skeleton.persistence
import scala.slick.driver.PostgresDriver.simple._
import scala.slick.model.ForeignKeyAction.Cascade
case class Book(id: Option[Long], title: String, storeId: Long)
// Slick table mapping for the "books" table.
class Books(tag: Tag) extends Table[Book](tag, "books") {
  def id = column[Long]("id", O.AutoInc) // auto-incremented surrogate key
  def title = column[String]("title", O.DBType("VARCHAR(255)"), O.NotNull)
  def storeId = column[Long]("store_id", O.NotNull)
  def pk = primaryKey("books_id_pk", id)
  // Deleting a store cascades to its books.
  def fk_storeId = foreignKey("books_store_id_fk", storeId, Stores)(_.id, onDelete = Cascade)
  // Default projection; id is optional (None before insert).
  def * = (id.?, title, storeId) <>(Book.tupled, Book.unapply)
}
object Books extends TableQuery(new Books(_)) {
  // Inserts the book and returns the database-generated id.
  def insertWithGenId(book: Book)(implicit session: Session): Long = Books returning Books.map(_.id) insert book
  // Pre-compiled query: all books belonging to the given store.
  val byStoreIdCompiled = Compiled {
    (storeId: Column[Long]) => Books filter (_.storeId === storeId)
  }
}
| constantingerstberger/spray-slick-blueprint | src/main/scala/skeleton/persistence/Books.scala | Scala | mit | 903 |
package server
import org.scalatestplus.play.{OneServerPerTest, PlaySpec}
import play.api.test.{DefaultAwaitTimeout, FutureAwaits}
import play.mvc.Http.Status.NOT_FOUND
/** Black-box HTTP tests for the packs controller, run against a fresh test server per test. */
class PacksControllerTests extends PlaySpec with OneServerPerTest with FutureAwaits with DefaultAwaitTimeout {

  "The view pack page" should {
    "return not found if the pack doesn't exist" in {
      // No packs have been created, so id 1 cannot resolve.
      val result = await(wsUrl("/packs/1").get())
      result.status mustBe NOT_FOUND
    }
  }

  "The create and add item to pack action" should {
    "return not found if the pack doesn't exist" in {
      val result = await(wsUrl("/packs/1/items/1").post(""))
      result.status mustBe NOT_FOUND
    }
  }
}
| notclive/backpack | test/server/PacksControllerTests.scala | Scala | mit | 692 |
package codecheck.github.events
import org.json4s.JValue
import codecheck.github.models.AbstractJson
import codecheck.github.models.PullRequest
import codecheck.github.models.PullRequestAction
// GitHub "pull_request" webhook event payload.
// NOTE(review): field access goes through AbstractJson.get — presumably a string
// lookup on the wrapped JSON; confirm against AbstractJson.
case class PullRequestEvent(name: String, value: JValue) extends AbstractJson(value) with GitHubEvent {
  // Pull request number within the repository.
  def number = get("number").toLong
  // Lazy: parsed only if a consumer actually needs these fields.
  lazy val action = PullRequestAction.fromString(get("action"))
  lazy val pull_request = PullRequest(value \ "pull_request")
}
| code-check/github-api-scala | src/main/scala/codecheck/github/events/PullRequestEvent.scala | Scala | mit | 464 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.build
import com.github.tkawachi.doctest.DoctestPlugin
import coursier.CoursierPlugin
import net.virtualvoid.sbt.graph.DependencyGraphPlugin
import sbt.Keys._
import sbt._
import scala.concurrent._
import scala.util.{Failure, Success, Try}
/** sbt AutoPlugin with CM-Well build helpers: custom task/setting keys, fetching of
  * ZooKeeper/Kafka binaries (mirror -> archive -> build-from-source fallback), coursier-based
  * Maven artifact resolution, and shared project settings.
  */
object CMWellBuild extends AutoPlugin {

  // Curried-style alias: a partial function over a pair of arguments.
  type PartialFunction2[-T1,-T2,+R] = PartialFunction[Tuple2[T1,T2],R]

  // Keys exposed to builds that enable this plugin (sbt auto-imports this object).
  object autoImport {
//    val configSettingsResource = TaskKey[Seq[sbt.File]]("config-settings-resource", "gets the .conf resource")
    val dependenciesManager = settingKey[PartialFunction2[String, String, ModuleID]]("a setting containing versions for dependencies. if we only use it to declare dependencies, we can avoid a lot of version collisions.")
    val iTestsLightMode = settingKey[Boolean]("a flag, which if turns on, does not take cm-well down after integration tests, but rather purge everything in it, so it will be ready for next time. on startup, it checks if there is an instance running, and if so, does not re-install cm-well.")
    val peScript = TaskKey[sbt.File]("pe-script", "returns the script that executes cmwell in PE mode.")
    val uploadInitContent = TaskKey[sbt.File]("upload-init-content", "returns the script that uploads the initial content in to PE Cm-Well.")
    val packageCMWell = TaskKey[Seq[java.io.File]]("package-cmwell", "get components from dependencies and sibling projects.")
    val getLib = TaskKey[java.io.File]("get-lib", "creates a lib directory in cmwell-cons/app/ that has all cm-wll jars and their dependency jars.")
    val getCons = TaskKey[java.io.File]("get-cons", "get cons from cons project.")
    val getWs = TaskKey[java.io.File]("get-ws", "get ws from ws project.")
    val getTlog = TaskKey[java.io.File]("get-tlog", "get tlog tool tlog project.")
    val getBg = TaskKey[java.io.File]("get-bg", "get bg from batch project.")
    val getCtrl = TaskKey[java.io.File]("get-ctrl", "get ctrl from ctrl project.")
    val getDc = TaskKey[java.io.File]("get-dc", "get dc from dc project.")
    val getGremlin = TaskKey[java.io.File]("get-gremlin", "get gremlin plugin into cons.")
    val install = TaskKey[Map[Artifact, File]]("install", "build + test, much like 'mvn install'")
    val dataFolder = TaskKey[File]("data-folder", "returns the directory of static data to be uploaded")
    val printDate = TaskKey[Unit]("print-date", "prints the date")
    val fullTest = TaskKey[Unit]("full-test", "executes all tests in project in parallel (with respect to dependent tests)")
    val getData = TaskKey[Seq[java.io.File]]("get-data", "get data to upload to cm-well")
    val getExternalComponents = TaskKey[Iterable[File]]("get-external-components", "get external dependencies binaries")
  }

  import autoImport._
  import DoctestPlugin.autoImport._
  import CoursierPlugin.autoImport._

  // Picks the closer Apache mirror region ("us" or "eu") from the JVM's default time zone.
  lazy val apacheMirror = {
    val zoneID = java.util.TimeZone
      .getDefault()
      .getID
    if(zoneID.startsWith("America") || zoneID.startsWith("Pacific") || zoneID.startsWith("Etc")) "us"
    else "eu"
  }

  // Downloads the ZooKeeper tarball from the regional Apache mirror.
  def fetchZookeeperApacheMirror(version: String): Future[File] = {
    val ext = "tar.gz"
    val url = s"http://www-$apacheMirror.apache.org/dist/zookeeper/zookeeper-$version/zookeeper-$version.$ext"
    fetchArtifact(url, ext)
  }

  // Fallback: downloads the ZooKeeper tarball from the Apache archive (holds old releases).
  def fetchZookeeperApacheArchive(version: String): Future[File] = {
    val ext = "tar.gz"
    val url = s"https://archive.apache.org/dist/zookeeper/zookeeper-$version/zookeeper-$version.$ext"
    fetchArtifact(url, ext)
  }

  // Last resort: downloads ZooKeeper *sources* from GitHub (no checksum validation available there).
  def fetchZookeeperSourcesFromGithub(version: String, ext: String): Future[File] = {
    require(ext == "zip" || ext == "tar.gz", s"invalid sources extension [$ext]")
    alternateUnvalidatedFetchArtifact(s"https://github.com/apache/zookeeper/archive/release-$version.$ext", ext)
  }

  /** Fetches ZooKeeper: mirror first, then archive, and finally — if `buildFromSources`
    * is supplied — downloads the sources and invokes the given build function on them.
    * Failures of the first two attempts are combined into the cause of the final error.
    */
  def fetchZookeeper(version: String, buildFromSources: Option[(String,File => Future[File])] = None) = {
    import scala.concurrent.ExecutionContext.Implicits.global
    import CMWellCommon.combineThrowablesAsCauseAsync
    fetchZookeeperApacheMirror(version).recoverWith {
      case err1: Throwable => fetchZookeeperApacheArchive(version).recoverWith {
        case err2: Throwable => {
          buildFromSources.fold(combineThrowablesAsCauseAsync[File](err1, err2){ cause =>
            new Exception("was unable to fetch zookeeper binaries, and build from sources function isn't supplied", cause)
          }) {
            case (ext, build) => fetchZookeeperSourcesFromGithub(version, ext).flatMap(build)
          }
        }
      }
    }
  }

  // Downloads the Kafka tarball (named by Scala binary version) from the regional Apache mirror.
  def fetchKafkaApacheMirror(scalaVersion: String, version: String): Future[File] = {
    val ext = "tgz"
    val url = s"http://www-$apacheMirror.apache.org/dist/kafka/$version/kafka_$scalaVersion-$version.$ext"
    fetchArtifact(url, ext)
  }

  // Fallback: downloads the Kafka tarball from the Apache archive.
  def fetchKafkaApacheArchive(scalaVersion: String, version: String): Future[File] = {
    val ext = "tgz"
    val url = s"https://archive.apache.org/dist/kafka/$version/kafka_$scalaVersion-$version.$ext"
    fetchArtifact(url, ext)
  }

  // Last resort: downloads Kafka sources from GitHub (unvalidated).
  def fetchKafkaSourcesFromGithub(version: String, ext: String): Future[File] = {
    require(ext == "zip" || ext == "tar.gz", s"invalid sources extension [$ext]")
    alternateUnvalidatedFetchArtifact(s"https://github.com/apache/kafka/archive/$version.$ext", ext)
  }

  /** Fetches Kafka with the same mirror -> archive -> build-from-sources cascade
    * as [[fetchZookeeper]].
    */
  def fetchKafka(scalaVersion: String, version: String, buildFromSources: Option[(String,File => Future[File])] = None) = {
    import scala.concurrent.ExecutionContext.Implicits.global
    import CMWellCommon.combineThrowablesAsCauseAsync
    fetchKafkaApacheMirror(scalaVersion, version).recoverWith {
      case err1: Throwable => fetchKafkaApacheArchive(scalaVersion, version).recoverWith {
        case err2: Throwable => {
          buildFromSources.fold(combineThrowablesAsCauseAsync[File](err1, err2) { cause =>
            new Exception("was unable to fetch kafka binaries, and build from sources function isn't supplied", cause)
          }) {
            case (ext, build) => fetchKafkaSourcesFromGithub(version, ext).flatMap(build)
          }
        }
      }
    }
  }

  /** Resolves a Maven artifact (and its artifacts per dependency) from Maven Central via
    * coursier and downloads the files through the coursier cache.
    * Resolution errors are logged and the offending task is skipped (yields no files).
    */
  def fetchMvnArtifact(moduleID: ModuleID, scalaVersion: String, scalaBinaryVersion: String, logger: Logger): Future[Seq[java.io.File]] = {
    import coursier._
    import java.io.File
    import scala.concurrent.Future
    import scala.concurrent.ExecutionContext.Implicits.global
    import scalaz.EitherT
    import scalaz.concurrent.Task

    val (module, version) = FromSbt.moduleVersion(moduleID, scalaVersion, scalaBinaryVersion)
    val repositories: Seq[coursier.Repository] = Seq(MavenRepository("https://repo1.maven.org/maven2"))
    val fetch = Cache.fetch()
    val tasks = FromSbt.dependencies(moduleID, scalaVersion, scalaBinaryVersion).map {
      case (_, dep) =>
        coursier.Fetch.find(repositories, module, version, fetch)
          .fold[Seq[Artifact]](
            { errorString =>
              // Resolution failure is tolerated: log and continue with an empty artifact list.
              logger.error(s"coursier fetch mvn artifact delegation encountered an error (skipping task): $errorString")
              Seq.empty[Artifact]
            }, { case (src, p) => src.artifacts(dep, p, None) })
    }
    // "farts" = fetched artifacts: download each artifact via the cache, keeping the
    // first successful alternative per task and aggregating all errors otherwise.
    val farts: Future[Seq[List[File]]] = Future.traverse(tasks) { task =>
      CMWellCommon.scalazTaskAsScalaFuture(task.flatMap { arts =>
        val x = arts.map { art =>
          Cache.file(art).bimap(e => List(e), f => List(f))
        }
        if(x.isEmpty) Task[Try[List[File]]](Failure(new IllegalStateException("empty sequence")))
        else {
          // Combine: succeed if any download succeeds (concatenating successes), else keep errors.
          val y = x.reduce[EitherT[Task, List[FileError], List[File]]] {
            case (a, b) =>
              a.flatMap(files => b.map(_ ::: files).orElse(a)).orElse(b)
          }
          y.fold({ errs =>
            Failure[List[File]](new Exception(errs.map(err => err.message + ": " + err.describe).mkString("[\n\t", ",\n\t", "\n]")))
          }, Success.apply)
        }
      })
    }
    farts.map(_.flatten)
  }

  /** Downloads a single URL through the coursier cache, validating against the
    * published .md5/.sha1 checksums and attaching the .asc signature artifact.
    */
  def fetchArtifact(url: String, ext: String) = {
    import coursier.core.{Artifact, Attributes}
    val sig = Artifact(
      url + ".asc",
      Map.empty,
      Map.empty,
      Attributes("asc", ""),
      changing = false,
      authentication = None
    )
    val art = Artifact(
      url,
      Map(
        "MD5" -> (url + ".md5"),
        "SHA-1" -> (url + ".sha1")),
      Map("sig" -> sig),
      Attributes(ext, ""),
      changing = false,
      None)
    val task = coursier.Cache.file(art).fold({ err =>
      Failure[java.io.File](new Exception(err.message + ": " + err.describe))
    },Success.apply)
    CMWellCommon.scalazTaskAsScalaFuture(task)
  }

  /** Like [[fetchArtifact]] but with no checksum/signature metadata — used for
    * sources hosted where no checksums are published (e.g. GitHub archives).
    */
  def alternateUnvalidatedFetchArtifact(url: String, ext: String) = {
    import coursier.core.{Artifact, Attributes}
    val art = Artifact(
      url,
      Map.empty,
      Map.empty,
      Attributes(ext, ""),
      changing = false,
      None)
    val task = coursier.Cache.file(art).fold({ err =>
      Failure[java.io.File](new Exception(err.message + ": " + err.describe))
    },Success.apply)
    CMWellCommon.scalazTaskAsScalaFuture(task)
  }

  // Activating this plugin also activates coursier, doctest and dependency-graph.
  override def requires = CoursierPlugin && DoctestPlugin && DependencyGraphPlugin

  // Settings applied to every project enabling this plugin.
  override def projectSettings = Seq(
    coursierMaxIterations := 200,
    Keys.fork in Test := true, // run tests in a forked JVM
//    doctestWithDependencies := false,
    libraryDependencies ++= {
      val dm = dependenciesManager.value
      Seq(
        dm("org.scalatest","scalatest") % "test",
        dm("org.scalacheck","scalacheck") % "test")
    },
    testListeners := Seq(new sbt.JUnitXmlTestsListener(target.value.getAbsolutePath)),
    doctestTestFramework := DoctestTestFramework.ScalaTest,
    exportJars := true,
    shellPrompt := { s => Project.extract(s).currentProject.id + " > " },
    fullTest := {},
    install in Compile := {
      fullTest.value
      packagedArtifacts.value
    },
    // Serialize access to shared external resources across concurrently running tests.
    concurrentRestrictions in ThisBuild ++= Seq(
      Tags.limit(CMWellCommon.Tags.ES, 1),
      Tags.limit(CMWellCommon.Tags.Cassandra, 1),
      Tags.limit(CMWellCommon.Tags.Kafka, 1),
      Tags.limit(CMWellCommon.Tags.Grid, 1),
      Tags.exclusive(CMWellCommon.Tags.IntegrationTests)
    )
  )
}
| nruppin/CM-Well | server/project/cmwell-build-plugin/src/main/scala/cmwell/build/CMWellBuild.scala | Scala | apache-2.0 | 10,235 |
package scala.quoted
/** Control-flow throwable used to stop code generation after an error has been reported. */
class StopQuotedContext extends Throwable
| som-snytt/dotty | library/src/scala/quoted/StopQuotedContext.scala | Scala | apache-2.0 | 125 |
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v2
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtInteger}
/** CT600 (v2) box B1 — total turnover from trade or profession, as a whole-pound integer. */
case class B1(value: Int) extends CtBoxIdentifier(name = "Total turnover from trade or profession") with CtInteger
| ahudspith-equalexperts/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600/v2/B1.scala | Scala | apache-2.0 | 809 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.batch.sql
import org.apache.flink.api.scala._
import org.apache.flink.table.api.scala._
import org.apache.flink.table.runtime.utils.JavaUserDefinedTableFunctions.JavaVarsArgTableFunc0
import org.apache.flink.table.utils.TableTestUtil._
import org.apache.flink.table.utils.{HierarchyTableFunction, PojoTableFunc, TableFunc2, _}
import org.junit.Test
/** Plan tests for batch SQL queries using table functions via LATERAL TABLE.
  * Each test translates a SQL query and checks the resulting logical plan
  * (a DataSetCorrelate wrapped in a DataSetCalc) against an expected node tree.
  * NOTE: the expected-plan strings are compared verbatim by verifySql.
  */
class CorrelateTest extends TableTestBase {
  // Basic cross join with a table function, plus an overloaded-call variant.
  @Test
  def testCrossJoin(): Unit = {
    val util = batchTestUtil()
    val func1 = new TableFunc1
    util.addTable[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
    util.addFunction("func1", func1)
    val sqlQuery = "SELECT c, s FROM MyTable, LATERAL TABLE(func1(c)) AS T(s)"
    val expected = unaryNode(
      "DataSetCalc",
      unaryNode(
        "DataSetCorrelate",
        batchTableNode(0),
        term("invocation", "func1($cor0.c)"),
        term("correlate", s"table(func1($$cor0.c))"),
        term("select", "a", "b", "c", "f0"),
        term("rowType",
          "RecordType(INTEGER a, BIGINT b, VARCHAR(65536) c, VARCHAR(65536) f0)"),
        term("joinType", "INNER")
      ),
      term("select", "c", "f0 AS s")
    )
    util.verifySql(sqlQuery, expected)
    // test overloading
    val sqlQuery2 = "SELECT c, s FROM MyTable, LATERAL TABLE(func1(c, '$')) AS T(s)"
    val expected2 = unaryNode(
      "DataSetCalc",
      unaryNode(
        "DataSetCorrelate",
        batchTableNode(0),
        term("invocation", "func1($cor0.c, '$')"),
        term("correlate", s"table(func1($$cor0.c, '$$'))"),
        term("select", "a", "b", "c", "f0"),
        term("rowType",
          "RecordType(INTEGER a, BIGINT b, VARCHAR(65536) c, VARCHAR(65536) f0)"),
        term("joinType", "INNER")
      ),
      term("select", "c", "f0 AS s")
    )
    util.verifySql(sqlQuery2, expected2)
  }
  // LEFT JOIN LATERAL ... ON TRUE must produce a LEFT correlate join.
  @Test
  def testLeftOuterJoinWithLiteralTrue(): Unit = {
    val util = batchTestUtil()
    val func1 = new TableFunc1
    util.addTable[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
    util.addFunction("func1", func1)
    val sqlQuery = "SELECT c, s FROM MyTable LEFT JOIN LATERAL TABLE(func1(c)) AS T(s) ON TRUE"
    val expected = unaryNode(
      "DataSetCalc",
      unaryNode(
        "DataSetCorrelate",
        batchTableNode(0),
        term("invocation", "func1($cor0.c)"),
        term("correlate", s"table(func1($$cor0.c))"),
        term("select", "a", "b", "c", "f0"),
        term("rowType",
          "RecordType(INTEGER a, BIGINT b, VARCHAR(65536) c, VARCHAR(65536) f0)"),
        term("joinType", "LEFT")
      ),
      term("select", "c", "f0 AS s")
    )
    util.verifySql(sqlQuery, expected)
  }
  // A correlate inside a sub-query that is itself the right side of an outer join.
  @Test
  def testLeftOuterJoinAsSubQuery(): Unit = {
    val util = batchTestUtil()
    val func1 = new TableFunc1
    util.addTable[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
    util.addTable[(Int, Long, String)]("MyTable2", 'a2, 'b2, 'c2)
    util.addFunction("func1", func1)
    val sqlQuery =
      """
        | SELECT *
        | FROM MyTable2 LEFT OUTER JOIN
        |  (SELECT c, s
        |   FROM MyTable LEFT OUTER JOIN LATERAL TABLE(func1(c)) AS T(s) on true)
        | ON c2 = s """.stripMargin
    val expected = binaryNode(
      "DataSetJoin",
      batchTableNode(1),
      unaryNode(
        "DataSetCalc",
        unaryNode(
          "DataSetCorrelate",
          batchTableNode(0),
          term("invocation", "func1($cor0.c)"),
          term("correlate", "table(func1($cor0.c))"),
          term("select", "a", "b", "c", "f0"),
          term("rowType", "RecordType(INTEGER a, BIGINT b, VARCHAR(65536) c, VARCHAR(65536) f0)"),
          term("joinType","LEFT")
        ),
        term("select", "c", "f0 AS s")
      ),
      term("where", "=(c2, s)"),
      term("join", "a2", "b2", "c2", "c", "s"),
      term("joinType", "LeftOuterJoin")
    )
    util.verifySql(sqlQuery, expected)
  }
  // Table function emitting a composite (name, len) row type.
  @Test
  def testCustomType(): Unit = {
    val util = batchTestUtil()
    val func2 = new TableFunc2
    util.addTable[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
    util.addFunction("func2", func2)
    val sqlQuery = "SELECT c, name, len FROM MyTable, LATERAL TABLE(func2(c)) AS T(name, len)"
    val expected = unaryNode(
      "DataSetCalc",
      unaryNode(
        "DataSetCorrelate",
        batchTableNode(0),
        term("invocation", "func2($cor0.c)"),
        term("correlate", s"table(func2($$cor0.c))"),
        term("select", "a", "b", "c", "f0", "f1"),
        term("rowType",
          "RecordType(INTEGER a, BIGINT b, VARCHAR(65536) c, " +
            "VARCHAR(65536) f0, INTEGER f1)"),
        term("joinType", "INNER")
      ),
      term("select", "c", "f0 AS name", "f1 AS len")
    )
    util.verifySql(sqlQuery, expected)
  }
  // Table function whose result type is a class hierarchy (three fields).
  @Test
  def testHierarchyType(): Unit = {
    val util = batchTestUtil()
    util.addTable[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
    val function = new HierarchyTableFunction
    util.addFunction("hierarchy", function)
    val sqlQuery = "SELECT c, T.* FROM MyTable, LATERAL TABLE(hierarchy(c)) AS T(name, adult, len)"
    val expected = unaryNode(
      "DataSetCalc",
      unaryNode(
        "DataSetCorrelate",
        batchTableNode(0),
        term("invocation", "hierarchy($cor0.c)"),
        term("correlate", s"table(hierarchy($$cor0.c))"),
        term("select", "a", "b", "c", "f0", "f1", "f2"),
        term("rowType",
          "RecordType(INTEGER a, BIGINT b, VARCHAR(65536) c," +
            " VARCHAR(65536) f0, BOOLEAN f1, INTEGER f2)"),
        term("joinType", "INNER")
      ),
      term("select", "c", "f0 AS name", "f1 AS adult", "f2 AS len")
    )
    util.verifySql(sqlQuery, expected)
  }
  // POJO result type: fields are addressed by name (age, name), not positionally.
  @Test
  def testPojoType(): Unit = {
    val util = batchTestUtil()
    util.addTable[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
    val function = new PojoTableFunc
    util.addFunction("pojo", function)
    val sqlQuery = "SELECT c, name, age FROM MyTable, LATERAL TABLE(pojo(c))"
    val expected = unaryNode(
      "DataSetCalc",
      unaryNode(
        "DataSetCorrelate",
        batchTableNode(0),
        term("invocation", "pojo($cor0.c)"),
        term("correlate", s"table(pojo($$cor0.c))"),
        term("select", "a", "b", "c", "age", "name"),
        term("rowType",
          "RecordType(INTEGER a, BIGINT b, VARCHAR(65536) c," +
            " INTEGER age, VARCHAR(65536) name)"),
        term("joinType", "INNER")
      ),
      term("select", "c", "name", "age")
    )
    util.verifySql(sqlQuery, expected)
  }
  // A WHERE predicate on the function output is pushed into the correlate as a condition.
  @Test
  def testFilter(): Unit = {
    val util = batchTestUtil()
    val func2 = new TableFunc2
    util.addTable[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
    util.addFunction("func2", func2)
    val sqlQuery = "SELECT c, name, len FROM MyTable, LATERAL TABLE(func2(c)) AS T(name, len) " +
      "WHERE len > 2"
    val expected = unaryNode(
      "DataSetCalc",
      unaryNode(
        "DataSetCorrelate",
        batchTableNode(0),
        term("invocation", "func2($cor0.c)"),
        term("correlate", s"table(func2($$cor0.c))"),
        term("select", "a", "b", "c", "f0", "f1"),
        term("rowType",
          "RecordType(INTEGER a, BIGINT b, VARCHAR(65536) c, " +
            "VARCHAR(65536) f0, INTEGER f1)"),
        term("joinType", "INNER"),
        term("condition", ">($1, 2)")
      ),
      term("select", "c", "f0 AS name", "f1 AS len")
    )
    util.verifySql(sqlQuery, expected)
  }
  // A scalar function call (SUBSTRING) nested inside the table-function invocation.
  @Test
  def testScalarFunction(): Unit = {
    val util = batchTestUtil()
    val func1 = new TableFunc1
    util.addTable[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
    util.addFunction("func1", func1)
    val sqlQuery = "SELECT c, s FROM MyTable, LATERAL TABLE(func1(SUBSTRING(c, 2))) AS T(s)"
    val expected = unaryNode(
      "DataSetCalc",
      unaryNode(
        "DataSetCorrelate",
        batchTableNode(0),
        term("invocation", "func1(SUBSTRING($cor0.c, 2))"),
        term("correlate", s"table(func1(SUBSTRING($$cor0.c, 2)))"),
        term("select", "a", "b", "c", "f0"),
        term("rowType",
          "RecordType(INTEGER a, BIGINT b, VARCHAR(65536) c, VARCHAR(65536) f0)"),
        term("joinType", "INNER")
      ),
      term("select", "c", "f0 AS s")
    )
    util.verifySql(sqlQuery, expected)
  }
  // Var-arg table functions, both the Java and the Scala variety.
  @Test
  def testTableFunctionWithVariableArguments(): Unit = {
    val util = batchTestUtil()
    val func1 = new JavaVarsArgTableFunc0
    util.addTable[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
    util.addFunction("func1", func1)
    var sqlQuery = "SELECT c, s FROM MyTable, LATERAL TABLE(func1('hello', 'world', c)) AS T(s)"
    var expected = unaryNode(
      "DataSetCalc",
      unaryNode(
        "DataSetCorrelate",
        batchTableNode(0),
        term("invocation", "func1('hello', 'world', $cor0.c)"),
        term("correlate", s"table(func1('hello', 'world', $$cor0.c))"),
        term("select", "a", "b", "c", "f0"),
        term("rowType",
          "RecordType(INTEGER a, BIGINT b, VARCHAR(65536) c, VARCHAR(65536) f0)"),
        term("joinType", "INNER")
      ),
      term("select", "c", "f0 AS s")
    )
    util.verifySql(sqlQuery, expected)
    // test scala var arg function
    val func2 = new VarArgsFunc0
    util.addFunction("func2", func2)
    sqlQuery = "SELECT c, s FROM MyTable, LATERAL TABLE(func2('hello', 'world', c)) AS T(s)"
    expected = unaryNode(
      "DataSetCalc",
      unaryNode(
        "DataSetCorrelate",
        batchTableNode(0),
        term("invocation", "func2('hello', 'world', $cor0.c)"),
        term("correlate", s"table(func2('hello', 'world', $$cor0.c))"),
        term("select", "a", "b", "c", "f0"),
        term("rowType",
          "RecordType(INTEGER a, BIGINT b, VARCHAR(65536) c, VARCHAR(65536) f0)"),
        term("joinType", "INNER")
      ),
      term("select", "c", "f0 AS s")
    )
    util.verifySql(sqlQuery, expected)
  }
}
| ueshin/apache-flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/batch/sql/CorrelateTest.scala | Scala | apache-2.0 | 10,704 |
package cn.changhong.core.thrift.impl
import cn.changhong.core.thrift.{User, AccountService}
import com.twitter.util.Future
import scala.collection.Map
/** In-memory stub implementation of the Thrift AccountService, for testing. */
class AccountServiceImpl extends AccountService.FutureIface {

  /** Pretends the user was created; always reports success. */
  override def create(user: User): Future[Boolean] = Future.value(true)

  /** Returns a stub user carrying the requested id. */
  override def get(id: Long): Future[User] =
    Future.value(new User.Immutable(id, "username", "iphone", "email", "passwd"))

  /** Returns ten stub users with ids 1..10. */
  override def list(): Future[Seq[User]] =
    Future.value((1 to 10).map(i => new User.Immutable(i, "username", "iphone", "email", "passwd")))

  /** Returns ten stub users keyed by their id rendered as a string. */
  override def map(): Future[Map[String, User]] = {
    val entries = (1 to 10).map { i =>
      i.toString -> new User.Immutable(i, "username", "iphone", "email", "passwd")
    }
    // Keep the mutable map type of the original implementation.
    Future.value(scala.collection.mutable.Map(entries: _*))
  }

  /** Reports success for even user ids and failure for odd ones; logs the flags. */
  override def creates(users: Seq[User]): Future[Seq[Boolean]] = {
    val flags = users.map(_.id % 2 == 0)
    println(flags.map(_.toString).mkString(","))
    Future.value(flags)
  }
}
| guoyang2011/myfinagle | finagle-thrift-zipkin-cluster/src/main/scala/cn/changhong/core/thrift/impl/AccountServiceImpl.scala | Scala | apache-2.0 | 1,240 |
package sangria.parser
import org.parboiled2.ParserInput
import sangria.ast.AstLocation
/** Set of functions that convert a [[AstLocation GraphQL source code location]] to human-readable
  * strings.
  *
  * When rendering the results of a GraphQL document parse, it's helpful to describe where parsing
  * failed. This is the interface to that facility.
  */
trait SourceMapper {

  /** Identifier for the GraphQL document being parsed. Should be unique. */
  def id: String

  /** The GraphQL source code mapped by this object. */
  def source: String

  /** Return a description of the given location (typically "(line N, column M)"). */
  def renderLocation(location: AstLocation): String

  /** Return an indication of the line position of the given location.
    *
    * Useful for pointing to the location of a parsing error: implementations return
    * the source line followed by a caret line marking the column.
    *
    * @param prefix
    *   prefix to attach to the returned string
    */
  def renderLinePosition(location: AstLocation, prefix: String = ""): String
}
/** [[SourceMapper]] for a single GraphQL document backed by a parboiled2 input. */
class DefaultSourceMapper(val id: String, val parserInput: ParserInput) extends SourceMapper {

  // The full source text, materialized lazily from the parser input.
  override lazy val source: String = parserInput.sliceString(0, parserInput.length)

  override def renderLocation(location: AstLocation): String =
    s"(line ${location.line}, column ${location.column})"

  override def renderLinePosition(location: AstLocation, prefix: String = ""): String = {
    // Strip carriage returns so the caret aligns on Windows-style line endings.
    val lineText = parserInput.getLine(location.line).replace("\r", "")
    val caret = " " * (location.column - 1)
    s"$lineText\n$prefix$caret^"
  }
}
/** [[SourceMapper]] for potentially multiple GraphQL documents.
  *
  * Sometimes it's necessary to compose a GraphQL document from multiple component documents; this
  * class provides the corresponding `SourceMapper` to support that.
  *
  * @param id
  *   Identifier for the combined document.
  * @param delegates
  *   The component documents.
  */
class AggregateSourceMapper(val id: String, val delegates: Vector[SourceMapper])
    extends SourceMapper {

  // Index of the component mappers by their document id, for fast dispatch.
  lazy val delegateById: Map[String, SourceMapper] =
    delegates.map(d => d.id -> d).toMap

  // The combined source is the trimmed component sources joined by blank lines.
  override lazy val source: String =
    delegates.map(_.source.trim).mkString("\n\n")

  override def renderLocation(location: AstLocation): String =
    delegateById.get(location.sourceId).map(_.renderLocation(location)).getOrElse("")

  override def renderLinePosition(location: AstLocation, prefix: String = ""): String =
    delegateById.get(location.sourceId).map(_.renderLinePosition(location, prefix)).getOrElse("")
}
}
object AggregateSourceMapper {
  /** Recursively flattens nested aggregate mappers down to their leaf delegates. */
  private[this] def expand(mapper: SourceMapper): Vector[SourceMapper] =
    mapper match {
      case aggregate: AggregateSourceMapper => aggregate.delegates.flatMap(expand)
      case leaf => Vector(leaf)
    }

  /** Combines several mappers into one flat aggregate with the id "merged". */
  def merge(mappers: Vector[SourceMapper]): AggregateSourceMapper =
    new AggregateSourceMapper("merged", mappers.flatMap(expand))
}
| OlegIlyenko/sangria | modules/core/src/main/scala/sangria/parser/SourceMapper.scala | Scala | apache-2.0 | 2,854 |
package mesosphere.marathon.integration.setup
import java.io.File
import java.util.concurrent.{ Executors, TimeUnit }
import com.google.common.util.concurrent.{ AbstractIdleService, Service }
import com.google.inject.Guice
import mesosphere.chaos.http.{ HttpConf, HttpModule, HttpService }
import mesosphere.chaos.metrics.MetricsModule
import org.apache.commons.io.FileUtils
import org.apache.log4j.Logger
import org.rogach.scallop.ScallopConf
import scala.concurrent.duration._
import scala.concurrent.{ Await, ExecutionContext, Future, Promise }
import scala.sys.ShutdownHookThread
import scala.sys.process._
import scala.util.control.NonFatal
import scala.util.{ Failure, Success, Try }
/**
* Book Keeper for processes and services.
* During integration tests, several services and processes have to be launched.
* The ProcessKeeper knows about them and can handle their lifecycle.
*/
object ProcessKeeper {
  private[this] val log = Logger.getLogger(getClass.getName)
  // All external processes started so far (most recent first); torn down in stopAllProcesses.
  private[this] var processes = List.empty[Process]
  // All in-JVM services started so far; torn down in stopAllServices.
  private[this] var services = List.empty[Service]
  // Environment variable telling Mesos where to place its working files.
  private[this] val ENV_MESOS_WORK_DIR: String = "MESOS_WORK_DIR"
  // Starts an embedded HTTP service (used to serve test assets) via Guice.
  def startHttpService(port: Int, assetPath: String) = {
    startService {
      log.info(s"Start Http Service on port $port")
      val conf = new ScallopConf(Array("--http_port", port.toString, "--assets_path", assetPath)) with HttpConf
      conf.afterInit()
      val injector = Guice.createInjector(new MetricsModule, new HttpModule(conf), new IntegrationTestModule)
      injector.getInstance(classOf[HttpService])
    }
  }
  // Launches a ZooKeeper server in a fresh working directory and waits until it binds its port.
  def startZooKeeper(port: Int, workDir: String) {
    val args = "org.apache.zookeeper.server.ZooKeeperServerMain" :: port.toString :: workDir :: Nil
    val workDirFile = new File(workDir)
    // Start from a clean work directory every time.
    FileUtils.deleteDirectory(workDirFile)
    FileUtils.forceMkdir(workDirFile)
    startJavaProcess("zookeeper", heapInMegs = 256, args, new File("."), sys.env, _.contains("binding to port"))
  }
  // Launches a local single-node Mesos cluster and waits until the slave registers.
  def startMesosLocal(): Process = {
    val mesosWorkDirForMesos: String = "/tmp/marathon-itest-mesos"
    val mesosWorkDirFile: File = new File(mesosWorkDirForMesos)
    // Start from a clean work directory every time.
    FileUtils.deleteDirectory(mesosWorkDirFile)
    FileUtils.forceMkdir(mesosWorkDirFile)
    startProcess(
      "mesos",
      Process(Seq("mesos-local", "--ip=127.0.0.1"), cwd = None, ENV_MESOS_WORK_DIR -> mesosWorkDirForMesos),
      upWhen = _.toLowerCase.contains("registered with master"))
  }
  // Launches a Marathon instance as a forked JVM and waits for its HTTP server banner.
  def startMarathon(cwd: File, env: Map[String, String], arguments: List[String],
    mainClass: String = "mesosphere.marathon.Main",
    startupLine: String = "Started ServerConnector"): Process = {
    val argsWithMain = mainClass :: arguments
    val mesosWorkDir: String = "/tmp/marathon-itest-marathon"
    val mesosWorkDirFile: File = new File(mesosWorkDir)
    // Start from a clean work directory every time.
    FileUtils.deleteDirectory(mesosWorkDirFile)
    FileUtils.forceMkdir(mesosWorkDirFile)
    startJavaProcess(
      "marathon", heapInMegs = 512, argsWithMain, cwd,
      env + (ENV_MESOS_WORK_DIR -> mesosWorkDir),
      upWhen = _.contains(startupLine))
  }
  // Forks a JVM with the current classpath and waits until `upWhen` matches an output line.
  def startJavaProcess(name: String, heapInMegs: Int, arguments: List[String],
    cwd: File = new File("."), env: Map[String, String] = Map.empty, upWhen: String => Boolean): Process = {
    log.info(s"Start java process $name with args: $arguments")
    // Use the same JVM binary as the current process when java.home is set.
    val javaExecutable = sys.props.get("java.home").fold("java")(_ + "/bin/java")
    val classPath = sys.props.getOrElse("java.class.path", "target/classes")
    val memSettings = s"-Xmx${heapInMegs}m"
    val builder = Process(javaExecutable :: memSettings :: "-classpath" :: classPath :: arguments, cwd, env.toList: _*)
    val process = startProcess(name, builder, upWhen)
    log.info(s"Java process $name up and running!")
    process
  }
  // Starts a process, scanning its stdout/stderr until `upWhen` matches a line,
  // and fails if the process exits first or does not come up within `timeout`.
  def startProcess(name: String, processBuilder: ProcessBuilder, upWhen: String => Boolean, timeout: Duration = 30.seconds): Process = {
    sealed trait ProcessState
    case object ProcessIsUp extends ProcessState
    case object ProcessExited extends ProcessState
    // Completed once `upWhen` matches a line of the child's output.
    val up = Promise[ProcessIsUp.type]()
    val logger = new ProcessLogger {
      // Log every output line and check whether it signals readiness.
      def checkUp(out: String) = {
        log.info(s"$name: $out")
        if (!up.isCompleted && upWhen(out)) up.trySuccess(ProcessIsUp)
      }
      override def buffer[T](f: => T): T = f
      override def out(s: => String) = checkUp(s)
      override def err(s: => String) = checkUp(s)
    }
    val process = processBuilder.run(logger)
    // Completes when the child exits; used to detect "died before coming up".
    val processExitCode: Future[ProcessExited.type] = Future {
      val exitCode = scala.concurrent.blocking {
        process.exitValue()
      }
      log.info(s"Process $name finished with exit code $exitCode")
      // Sometimes this finishes before the other future finishes parsing the output
      // and we incorrectly report ProcessExited instead of ProcessIsUp as the result of upOrExited.
      Await.result(up.future, 1.second)
      ProcessExited
    }(ExecutionContext.fromExecutor(Executors.newCachedThreadPool()))
    // Race readiness against early exit.
    val upOrExited = Future.firstCompletedOf(Seq(up.future, processExitCode))(ExecutionContext.global)
    Try(Await.result(upOrExited, timeout)) match {
      case Success(result) =>
        // Track the process for later teardown even if it exited early.
        processes = process :: processes
        result match {
          case ProcessExited =>
            throw new IllegalStateException(s"Process $name exited before coming up. Give up. $processBuilder")
          case ProcessIsUp => log.info(s"Process $name is up and running. ${processes.size} processes in total.")
        }
      case Failure(_) =>
        // Timed out waiting for readiness: kill the child and report failure.
        process.destroy()
        throw new IllegalStateException(
          s"Process $name does not came up within time bounds ($timeout). Give up. $processBuilder")
    }
    process
  }
  // Registers an arbitrary cleanup block to run when services are stopped,
  // by wrapping it in a no-op-start service.
  def onStopServices(block: => Unit): Unit = {
    services ::= new AbstractIdleService {
      override def shutDown(): Unit = {
        block
      }
      override def startUp(): Unit = {}
    }
  }
def stopOSProcesses(grep: String): Unit = {
val PIDRE = """\s*(\d+)\s.*""".r
val processes = ("ps -x" #| s"grep $grep").!!.split("\n").map { case PIDRE(pid) => pid }
processes.foreach(p => s"kill -9 $p".!)
}
  // Destroys all tracked processes and waits (best effort, twice) for them to exit.
  def stopAllProcesses(): Unit = {
    def waitForProcessesToFinish(): Unit = {
      processes.foreach(p => Try(p.destroy()))
      // Unfortunately, there seem to be race conditions in Process.exitValue.
      // Thus this ugly workaround.
      val waitForExitInThread = new Thread() {
        override def run(): Unit = {
          processes.foreach(_.exitValue())
        }
      }
      waitForExitInThread.start()
      try {
        // Bound the wait to one second; interrupt the helper thread afterwards.
        waitForExitInThread.join(1000)
      }
      finally {
        waitForExitInThread.interrupt()
      }
    }
    // Retry once on a non-fatal failure before giving up.
    try waitForProcessesToFinish()
    catch {
      case NonFatal(e) =>
        log.error("while waiting for processes to finish", e)
        try waitForProcessesToFinish()
        catch {
          case NonFatal(e) =>
            log.error("giving up waiting for processes to finish", e)
        }
    }
    processes = Nil
  }
  // Starts a Guava service, blocks until it is running, and tracks it for teardown.
  def startService(service: Service): Unit = {
    services ::= service
    service.startAsync().awaitRunning()
  }
  // Stops all tracked services, waiting up to 5s each (in parallel).
  def stopAllServices(): Unit = {
    services.foreach(_.stopAsync())
    services.par.foreach(_.awaitTerminated(5, TimeUnit.SECONDS))
    services = Nil
  }
  // Tears down all processes and services.
  def shutdown(): Unit = {
    stopAllProcesses()
    stopAllServices()
  }
  // Ensure everything is cleaned up when the JVM exits.
  val shutDownHook: ShutdownHookThread = sys.addShutdownHook {
    shutdown()
  }
  // Ad-hoc manual smoke test entry point.
  def main(args: Array[String]) {
    //startMarathon(new File("."), Map("MESOS_NATIVE_LIBRARY" -> "/usr/local/lib/libmesos.dylib"), List("--master", "local", "--event_subscriber", "http_callback"))
    startZooKeeper(2183, "/tmp/foo")
    Thread.sleep(10000)
    stopAllProcesses()
    //startHttpService(11211, ".")
  }
}
| MrMarvin/marathon | src/test/scala/mesosphere/marathon/integration/setup/ProcessKeeper.scala | Scala | apache-2.0 | 7,858 |
package views.vrm_assign
/** Element identifiers used by the fulfil-failure view template. */
object FulfilFailure {
  // HTML id of the exit link on the page.
  final val ExitId = "exit"
}
package sp.system
import akka.actor._
/**
 * Holds the shared Akka actor system and global settings used by all parts of SP.
 * Created by Kristofer on 2014-06-06.
 */
object SPActorSystem {
  // The actor system used by all parts of SP. Maybe we will allow remote actors in the future
  implicit val system = ActorSystem("SP")
//  val eventHandler = system.actorOf(Props[Dummy], "eventHandler")
//  val modelHandler = system.actorOf(Props[Dummy])
//  val serviceHandler = system.actorOf(Props[Dummy])
//  val runtimeHandler = system.actorOf(Props[Dummy])
//  val userHandler = system.actorOf(Props[Dummy])
//
//  // TODO: Send this to all handlers instead of during construction
//  val handlers = SPHandlers(modelHandler, serviceHandler, eventHandler)
  // Global settings derived from the actor system's configuration.
  val settings = SPSettings(system)
}
}
// some extra actors to help migrating to cluster. To be removed at a later stage
/** Publishes every message it receives on the given distributed pub/sub topic,
 *  preserving the original sender via `tell`. */
class PubActor(topic: String) extends Actor {
  import akka.cluster.pubsub.DistributedPubSub
  import akka.cluster.pubsub.DistributedPubSubMediator.{ Publish }
  // The cluster-wide pub/sub mediator of this actor system.
  val mediator = DistributedPubSub(context.system).mediator
  def receive = {
    case x => mediator.tell(Publish(topic, x), sender())
  }
}
object PubActor {
  // Props factory for PubActor.
  def props(topic: String) = Props(classOf[PubActor], topic)
}
| kristoferB/SP | sp1/src/main/scala/sp/system/SPActorSystem.scala | Scala | mit | 1,209 |
// Compiler regression test: a partially-applied type alias (`Alias[X, Y] = Foo[X]`)
// must be accepted where a binary type constructor `M[_,_]` is expected, both when
// supplied explicitly (`foo[Alias](x)`) and when inferred (`foo(x)`).
class Foo[A]
object Test {
  def foo[M[_,_]](x: M[Int,Int]) = x
  type Alias[X,Y] = Foo[X]
  val x: Alias[Int,Int] = new Foo[Int]
  foo[Alias](x) // ok
  foo(x)
}
| som-snytt/dotty | tests/pos/i1181b.scala | Scala | apache-2.0 | 166 |
package mesosphere.marathon
package core.launchqueue.impl
import akka.actor.{Actor, ActorRef, Cancellable, Props}
import akka.event.LoggingReceive
import com.typesafe.scalalogging.StrictLogging
import mesosphere.marathon.core.launchqueue.impl.RateLimiterActor._
import mesosphere.marathon.core.leadership.LeaderDeferrable
import mesosphere.marathon.state.{RunSpec, RunSpecConfigRef}
import scala.concurrent.duration._
private[launchqueue] object RateLimiterActor {
  // Props factory for the actor.
  def props(
    rateLimiter: RateLimiter): Props =
    Props(new RateLimiterActor(rateLimiter))
  // Message: add/increase the launch delay for the given run spec.
  private[impl] case class AddDelay(runSpec: RunSpec)
  // Message: decrease the delay (currently ignored by the actor).
  private[impl] case class DecreaseDelay(runSpec: RunSpec)
  // Message: advance the delay to its next back-off step.
  private[impl] case class AdvanceDelay(runSpec: RunSpec)
  // Message: clear the delay entirely.
  private[impl] case class ResetDelay(runSpec: RunSpec)
  // Message: query the current delay for a run spec configuration.
  private[impl] case class GetDelay(ref: RunSpecConfigRef)
  // Message: start receiving DelayUpdate notifications (deferred until leadership).
  @LeaderDeferrable private[launchqueue] case object Subscribe
  // Message: stop receiving DelayUpdate notifications.
  private[launchqueue] case object Unsubscribe
  // Internal tick triggering periodic cleanup of expired delays.
  private case object CleanupOverdueDelays
}
// Actor owning a RateLimiter: it mutates delays on request and pushes
// DelayUpdate notifications to its subscribers.
private class RateLimiterActor private (rateLimiter: RateLimiter) extends Actor with StrictLogging {
  // Handle of the periodic cleanup timer, cancelled in postStop.
  var cleanup: Cancellable = _
  // Actors that want to be notified about delay changes.
  var subscribers: Set[ActorRef] = Set.empty
  override def preStart(): Unit = {
    import context.dispatcher
    // Periodically purge delays that have expired (see receiveCleanup).
    val overdueDelayCleanupInterval = 10.seconds
    cleanup = context.system.scheduler.schedule(
      overdueDelayCleanupInterval, overdueDelayCleanupInterval, self, CleanupOverdueDelays)
    logger.info("started RateLimiterActor")
  }
  override def postStop(): Unit = {
    cleanup.cancel()
  }
  // Combine the partial handlers into one Receive.
  override def receive: Receive = LoggingReceive {
    Seq[Receive](
      receiveCleanup,
      receiveDelayOps
    ).reduceLeft(_.orElse[Any, Unit](_))
  }
  private[this] def receiveCleanup: Receive = {
    case CleanupOverdueDelays =>
      // If a run spec gets removed or updated, the delay should be reset.
      // In addition to that we remove overdue delays to ensure there are no leaks,
      // by calling this periodically.
      rateLimiter.cleanUpOverdueDelays().foreach { configRef =>
        notify(RateLimiter.DelayUpdate(configRef, None))
      }
  }
  private[this] def receiveDelayOps: Receive = {
    case Subscribe =>
      // On first subscription, replay all current delays to the new subscriber.
      if (!subscribers.contains(sender)) {
        subscribers += sender
        rateLimiter.currentDelays.foreach { delay: RateLimiter.DelayUpdate =>
          sender ! delay
        }
      }
    case Unsubscribe =>
      subscribers -= sender
    case GetDelay(ref) =>
      sender() ! RateLimiter.DelayUpdate(ref, rateLimiter.getDelay(ref))
    case AddDelay(runSpec) =>
      rateLimiter.addDelay(runSpec)
      notify(RateLimiter.DelayUpdate(runSpec.configRef, rateLimiter.getDelay(runSpec.configRef)))
    case DecreaseDelay(_) => // ignore for now
    case AdvanceDelay(runSpec) =>
      rateLimiter.advanceDelay(runSpec)
      notify(RateLimiter.DelayUpdate(runSpec.configRef, rateLimiter.getDelay(runSpec.configRef)))
    case ResetDelay(runSpec) =>
      rateLimiter.resetDelay(runSpec)
      notify(RateLimiter.DelayUpdate(runSpec.configRef, None))
  }
  // Broadcast a delay change to every subscriber.
  private def notify(update: RateLimiter.DelayUpdate): Unit = {
    // Have launchQueue subscribe the same way?
    subscribers.foreach { _ ! update }
  }
}
| gsantovena/marathon | src/main/scala/mesosphere/marathon/core/launchqueue/impl/RateLimiterActor.scala | Scala | apache-2.0 | 3,226 |
/*
* Copyright (c) 2015,
* Ilya Sergey, Christopher Earl, Matthew Might and David Van Horn
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the project "Reachability" nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.ucombinator.scheme.cfa.kcfa
import org.ucombinator.scheme.syntax._
import tools.nsc.io.Directory
import org.ucombinator.util._
import org.ucombinator.scheme.transform.ANormalizer
import org.ucombinator.scheme.cfa.SchemeCFARunner
import org.ucombinator.cfa.CFAStatistics
/**
 * Runner for the k-CFA analysis: evaluates a normalized Scheme program with the
 * pointer-based CESK machinery, reports statistics and optionally dumps the
 * state transition graph in Graphviz format.
 *
 * @author ilya
 */
class KCFAAnalysisRunner(opts: CFAOptions) extends SchemeCFARunner(opts) with PointerCESKMachinery with KCFAGarbageCollector
  with FancyOutput {
  // Runs the analysis on the A-normalized program, prints timing/statistics
  // and (optionally) dumps the transition graph.
  def runKCFA(opts: CFAOptions, anast: Exp) {
    val sizeExp = ANormalizer.size(anast)
    // Wall-clock timing of the fixpoint computation.
    val firstTime = (new java.util.Date()).getTime
    val (resultEdges, resultConfs): (Set[(Conf, Conf)], Set[Conf]) =
      evaluateKCFA(anast)
    val secondTime = (new java.util.Date()).getTime
    val delta = (secondTime - firstTime)
    println()
    println("The analysis has taken " + (
      if (delta / 1000 < 1) "less than one second."
      else if (delta / 1000 == 1) "1 second."
      else delta / 1000 + " seconds."))
    if (opts.verbose) {
      println()
      println("Finished. Computed states: " + resultConfs.size)
    }
    // Sanity check: the reachable set should contain a final state.
    if (!opts.simplifyGraph &&
      resultConfs.toString().contains("Final")) {
      if (opts.verbose) {
        println("Has final state.\\n")
      }
    } else if (!opts.simplifyGraph) {
      println("Warning: no final state!\\n")
    }
    println()
    println("Computing statistics")
    println()
    val controlStates: Set[ControlState] = resultConfs.map(_._1)
    // Number every control state so the graph can be reported over integers.
    var stateCounter = 0
    val map: Map[ControlState, Int] = controlStates.map(s => {
      stateCounter = stateCounter + 1
      (s, stateCounter)
    }).toMap.asInstanceOf[Map[ControlState, Int]]
    val intNodes: Set[Int] = map.values.toSet
    // Project configuration edges down to numbered control-state edges.
    val intEdges: Set[(Int, Int)] = resultEdges.flatMap[(Int, Int), Set[(Int, Int)]]{
      case (c, c1) => if (map.isDefinedAt(c._1) && map.isDefinedAt(c1._1)){
        Set((map.apply(c._1), map.apply(c1._1)))
      } else Set.empty
    }
    val (allVars, singletons) = computeSingletons(controlStates, anast)
    val interrupted = opts.interrupt && resultConfs.size > opts.interruptAfter
    dumpStatistics(opts, CFAStatistics(delta, sizeExp, allVars.size,
      singletons.size, intNodes.size, intEdges.size, interrupted))
    if (interrupt) {
      println ("Interrupted after " + resultConfs.size + " states visited")
    }
    if (opts.dumpGraph) {
      println()
      println("Writing State Transition Graph")
      println()
      val path = dumpTransitionGraph(opts, resultConfs, resultEdges)
      println("Transition Graph dumped into " + path)
    }
  }
  /**
   * Prints transition graph according to the passed parameters
   */
  def prettyPrintTransitions(states: Set[Conf], edges: Set[(Conf, Conf)]): String = {
    val controlStates: Set[ControlState] = states.map(x => x._1)
    // Same state numbering scheme as in runKCFA, for stable node labels.
    var stateCounter = 0
    val map: Map[ControlState, Int] = controlStates.map(s => {
      stateCounter = stateCounter + 1
      (s, stateCounter)
    }).toMap.asInstanceOf[Map[ControlState, Int]]
    println("Control states: " + controlStates.size)
    val buffer = new StringBuffer
    buffer.append("digraph BST {\\nsize=\\"6,4\\" ; ratio = fill;\\n ")
    var list: List[String] = List()
    // One Graphviz edge per transition between the underlying control states.
    for (edge <- edges: Set[(Conf, Conf)]) {
      val buf = new StringBuffer()
      val (s, _) = edge._1
      val (s1, _) = edge._2
      buf.append("\\"" + prettyPrintState(s, map) + "\\"")
      buf.append(" -> ")
      buf.append("\\"" + prettyPrintState(s1, map) + "\\"")
      buf.append(";\\n")
      list = buf.toString :: list
    }
    // Deduplicate edges collapsed by the control-state projection.
    buffer.append(list.distinct.mkString(""))
    buffer.append("}\\n")
    buffer.toString
  }
  // Writes the Graphviz dump under graphs/<input-file>/, creating directories as needed,
  // and returns the path of the written file.
  def dumpTransitionGraph(opts: CFAOptions, states: Set[Conf], edges: Set[(Conf, Conf)]): String = {
    import java.io._
    val graphs = new Directory(new File(graphsDirName))
    if (!graphs.exists) {
      graphs.createDirectory(force = true)
      graphs.createFile(failIfExists = false)
    }
    val subfolderPath = graphsDirName + File.separator + StringUtils.trimFileName(opts.fileName)
    val subfolder = new Directory(new File(subfolderPath))
    if (!subfolder.exists) {
      subfolder.createDirectory(force = true)
      subfolder.createFile(failIfExists = false)
    }
    val path = subfolderPath + File.separator + getGraphDumpFileName(opts)
    val file = new File(path)
    if (!file.exists()) {
      file.createNewFile()
    }
    val writer = new FileWriter(file)
    writer.write(prettyPrintTransitions(states, edges))
    writer.close()
    path
  }
}
| ilyasergey/reachability | src/org/ucombinator/scheme/cfa/kcfa/KCFAAnalysisRunner.scala | Scala | bsd-3-clause | 6,165 |
package com.phaller.rasync
package test
import java.util.concurrent.CountDownLatch
import com.phaller.rasync.cell.{ Cell, FinalOutcome, NextOutcome, NoOutcome }
import com.phaller.rasync.lattice.{ DefaultKey, Updater }
import com.phaller.rasync.pool.HandlerPool
import com.phaller.rasync.test.lattice.{ IntUpdater, StringIntKey }
import org.scalatest.FunSuite
import scala.concurrent.Await
import scala.concurrent.duration._
class LazySuite extends FunSuite {
  // Lattice updater for Int cells, used implicitly by the pools below.
  implicit val stringIntUpdater: Updater[Int] = new IntUpdater
  // A lazily initialized cell must only run its init function once triggered.
  test("lazy init") {
    val latch = new CountDownLatch(1)
    val pool = new HandlerPool[Int, Null]
    val cell = pool.mkCell(_ => {
      FinalOutcome(1)
    })
    cell.onComplete(_ => latch.countDown())
    // Not complete before the trigger: init must not have run yet.
    assert(!cell.isComplete)
    cell.trigger()
    latch.await()
    assert(cell.isComplete)
    assert(cell.getResult() == 1)
    pool.shutdown()
  }
  // Triggering a cell must transitively trigger the cells it depends on.
  test("trigger dependees") {
    val latch = new CountDownLatch(2)
    val pool = new HandlerPool[Int, Null]
    var cell1: Cell[Int, Null] = null
    var cell2: Cell[Int, Null] = null
    cell1 = pool.mkCell(_ => {
      FinalOutcome(1)
    })
    cell2 = pool.mkCell(_ => {
      // cell2 completes with 3 once cell1 is final.
      cell2.when(cell1)(it => {
        if (it.head._2.get.isInstanceOf[FinalOutcome[_]]) FinalOutcome(3)
        else NoOutcome
      })
      NextOutcome(2)
    })
    cell1.onComplete(_ => latch.countDown())
    cell2.onComplete(_ => latch.countDown())
    assert(!cell1.isComplete)
    assert(!cell2.isComplete)
    // Triggering cell2 must also start cell1 via the dependency.
    cell2.trigger()
    latch.await()
    assert(cell1.isComplete)
    assert(cell1.getResult() == 1)
    assert(cell2.isComplete)
    assert(cell2.getResult() == 3)
    pool.shutdown()
  }
test("do not trigger unneeded cells") {
val latch = new CountDownLatch(1)
val pool = new HandlerPool[Int, Null]
var cell1: Cell[Int, Null] = null
var cell2: Cell[Int, Null] = null
cell1 = pool.mkCell(_ => {
assert(false)
FinalOutcome(-11)
})
cell2 = pool.mkCell(_ => {
FinalOutcome(2)
})
cell2.onComplete(_ => latch.countDown())
cell2.trigger()
latch.await()
assert(!cell1.isComplete)
assert(cell1.getResult() == 0)
pool.shutdown()
}
test("cycle deps") {
val latch1 = new CountDownLatch(2)
val latch2 = new CountDownLatch(2)
val pool = new HandlerPool[Int, Null]
var cell1: Cell[Int, Null] = null
var cell2: Cell[Int, Null] = null
cell1 = pool.mkCell(_ => {
cell1.when(cell2)(it => {
if (it.head._2.get.isInstanceOf[FinalOutcome[_]]) FinalOutcome(3)
else NoOutcome
})
NextOutcome(1)
})
cell2 = pool.mkCell(_ => {
cell2.when(cell1)(it => {
if (it.head._2.get.isInstanceOf[FinalOutcome[_]]) FinalOutcome(3)
else NoOutcome
})
NextOutcome(2)
})
cell1.onNext(_ => latch1.countDown())
cell2.onNext(_ => latch1.countDown())
cell1.onComplete(_ => latch2.countDown())
cell2.onComplete(_ => latch2.countDown())
cell2.trigger()
latch1.await()
val fut = pool.quiescentResolveCell
Await.ready(fut, 2.seconds)
latch2.await()
assert(cell1.isComplete)
assert(cell1.getResult() == 1)
assert(cell2.isComplete)
assert(cell2.getResult() == 2)
pool.shutdown()
}
test("cycle deps with incoming dep") {
val latch1 = new CountDownLatch(2)
val latch2 = new CountDownLatch(3)
val pool = new HandlerPool[Int, Null]
var cell1: Cell[Int, Null] = null
var cell2: Cell[Int, Null] = null
var cell3: Cell[Int, Null] = null
cell1 = pool.mkCell(_ => {
cell1.when(cell2)(_ => NextOutcome(-1))
NextOutcome(101)
})
cell2 = pool.mkCell(_ => {
cell2.when(cell1)(_ => NextOutcome(-1))
NextOutcome(102)
})
cell3 = pool.mkCell(_ => {
cell3.when(cell1)(_ => FinalOutcome(103))
NextOutcome(-1)
})
cell1.onNext(_ => latch1.countDown())
cell2.onNext(_ => latch1.countDown())
cell1.onComplete(_ => latch2.countDown())
cell2.onComplete(_ => latch2.countDown())
cell3.onComplete(_ => latch2.countDown())
assert(!cell1.isComplete)
assert(!cell2.isComplete)
cell3.trigger()
latch1.await()
val fut = pool.quiescentResolveCell
Await.ready(fut, 2.seconds)
latch2.await()
assert(cell3.isComplete)
assert(cell3.getResult() === 103)
pool.shutdown()
}
test("cycle deps with incoming dep, resolve cycle first") {
val theKey = new DefaultKey[Int, Null]()
val latch1 = new CountDownLatch(2)
val latch2 = new CountDownLatch(2)
val latch3 = new CountDownLatch(1)
val pool = new HandlerPool[Int, Null](theKey)
var cell1: Cell[Int, Null] = null
var cell2: Cell[Int, Null] = null
var cell3: Cell[Int, Null] = null
cell1 = pool.mkCell(c => {
c.when(cell2)(_ => {
NextOutcome(-111)
})
NextOutcome(11)
})
cell2 = pool.mkCell(c => {
c.when(cell1)(_ => {
NextOutcome(-222)
})
NextOutcome(22)
})
cell1.onNext(_ => latch1.countDown())
cell2.onNext(_ => latch1.countDown())
cell1.onComplete(_ => latch2.countDown())
cell2.onComplete(_ => latch2.countDown())
cell2.trigger()
latch1.await()
val fut = pool.quiescentResolveCell
Await.ready(fut, 2.seconds)
latch2.await()
cell3 = pool.mkCell(c => {
c.when(cell1)(_ => {
FinalOutcome(333)
})
NextOutcome(-3)
})
cell3.onComplete(_ => latch3.countDown())
cell3.trigger()
latch3.await()
assert(cell3.isComplete)
assert(cell3.getResult() === 333)
pool.shutdown()
}
test("cycle does not get resolved, if not triggered") {
val pool = new HandlerPool[Int, Null]
var c1: Cell[Int, Null] = null
var c2: Cell[Int, Null] = null
c1 = pool.mkCell(_ => {
c1.when(c2)(_ => FinalOutcome(-2))
FinalOutcome(-1)
})
c2 = pool.mkCell(_ => {
c2.when(c1)(_ => FinalOutcome(-2))
FinalOutcome(-1)
})
val fut2 = pool.quiescentResolveCell
Await.ready(fut2, 2.seconds)
assert(c1.getResult() == 0)
assert(!c1.isComplete)
assert(c2.getResult() == 0)
assert(!c2.isComplete)
pool.shutdown()
}
//
test("cell does not get resolved, if not triggered") {
val pool = new HandlerPool[Int, Null]
val c = pool.mkCell(_ => FinalOutcome(-1))
val fut2 = pool.quiescentResolveCell
Await.ready(fut2, 2.seconds)
assert(c.getResult() == 0)
assert(!c.isComplete)
pool.shutdown()
}
//
test("cell gets resolved, if triggered") {
val pool = new HandlerPool[Int, Null](new StringIntKey(""))
val cell = pool.mkCell(_ => {
NextOutcome(-1)
})
cell.trigger()
val fut2 = pool.quiescentResolveCell
Await.ready(fut2, 2.seconds)
assert(cell.isComplete) // cell should be completed with a fallback value
assert(cell.getResult() == 1) // StringIntKey sets cell to fallback value `1`.
pool.shutdown()
}
}
| phaller/reactive-async | core/src/test/scala/com/phaller/rasync/test/LazySuite.scala | Scala | bsd-2-clause | 7,014 |
package spark.broadcast
/**
* An interface for all the broadcast implementations in Spark (to allow
* multiple broadcast implementations). SparkContext uses a user-specified
* BroadcastFactory implementation to instantiate a particular broadcast for the
* entire Spark job.
*/
private[spark] trait BroadcastFactory {
  /** Initializes the factory; `isDriver` indicates whether this process is the driver. */
  def initialize(isDriver: Boolean): Unit
  /** Creates a broadcast for `value`; `id` identifies the broadcast, `isLocal` marks local mode. */
  def newBroadcast[T](value: T, isLocal: Boolean, id: Long): Broadcast[T]
  /** Shuts the factory down, releasing any resources it holds. */
  def stop(): Unit
}
| koeninger/spark | core/src/main/scala/spark/broadcast/BroadcastFactory.scala | Scala | bsd-3-clause | 461 |
/*
* Copyright 2013 Akiyoshi Sugiki, University of Tsukuba
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kumoi.impl.cache
import kumoi.core._
/**
*
* @author Akiyoshi Sugiki
*/
trait CacheCommon {
  // Timeout for cache operations; default 30 * 1000 (presumably milliseconds — confirm).
  protected val timeout = Config("core.cache.timeout", 30 * 1000)
  // Port of the cache manager service (default 9104).
  protected val port = Config("core.cache.port", 9104)
  // NOTE(review): reads the same key as `port` above — presumably this was meant
  // to be a separate "core.cache.obj.port" setting; confirm before relying on it.
  protected val portObj = Config("core.cache.port", 9104)
  // Registered name of the cache manager.
  protected val name = Symbol(Config("core.cache.name", "cm"))
  // Whether the object cache is enabled (default: disabled).
  protected val enableObj = Config("core.cache.obj.enable", false)
  // Registered name of the object cache manager.
  protected val nameObj = Symbol(Config("core.cache.obj.name", "cm2"))
  // Whether object-cache invalidation is enabled (default: disabled).
  protected val invalidateObj = Config("core.cache.obj.invalidate.enable", false)
}
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.admin
import java.util.Properties
import joptsimple._
import joptsimple.util.EnumConverter
import kafka.security.auth._
import kafka.server.KafkaConfig
import kafka.utils._
import org.apache.kafka.clients.admin.{AdminClientConfig, AdminClient => JAdminClient}
import org.apache.kafka.common.acl._
import org.apache.kafka.common.resource.{PatternType, ResourcePattern, ResourcePatternFilter, Resource => JResource, ResourceType => JResourceType}
import org.apache.kafka.common.security.JaasUtils
import org.apache.kafka.common.security.auth.KafkaPrincipal
import org.apache.kafka.common.utils.{SecurityUtils, Utils}
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.io.StdIn
object AclCommand extends Logging {
  // Filter that matches exactly the (literal) cluster resource.
  val ClusterResourceFilter = new ResourcePatternFilter(JResourceType.CLUSTER, JResource.CLUSTER_NAME, PatternType.LITERAL)
  // Platform line separator, used when formatting multi-line ACL listings.
  private val Newline = scala.util.Properties.lineSeparator
def main(args: Array[String]) {
val opts = new AclCommandOptions(args)
CommandLineUtils.printHelpAndExitIfNeeded(opts, "This tool helps to manage acls on kafka.")
opts.checkArgs()
val aclCommandService = {
if (opts.options.has(opts.bootstrapServerOpt)) {
new AdminClientService(opts)
} else {
new AuthorizerService(opts)
}
}
try {
if (opts.options.has(opts.addOpt))
aclCommandService.addAcls()
else if (opts.options.has(opts.removeOpt))
aclCommandService.removeAcls()
else if (opts.options.has(opts.listOpt))
aclCommandService.listAcls()
} catch {
case e: Throwable =>
println(s"Error while executing ACL command: ${e.getMessage}")
println(Utils.stackTrace(e))
Exit.exit(1)
}
}
  /** Common interface of the two ACL management backends (admin client vs. direct authorizer). */
  sealed trait AclCommandService {
    /** Adds the ACLs described by the command-line options. */
    def addAcls(): Unit
    /** Removes the ACLs described by the command-line options. */
    def removeAcls(): Unit
    /** Prints the ACLs matching the command-line options. */
    def listAcls(): Unit
  }
  /** ACL management backed by the Kafka AdminClient API (used with --bootstrap-server). */
  class AdminClientService(val opts: AclCommandOptions) extends AclCommandService with Logging {
    // Builds client properties from --command-config (if given) plus the
    // bootstrap servers, runs `f` with a fresh admin client, and always closes it.
    private def withAdminClient(opts: AclCommandOptions)(f: JAdminClient => Unit) {
      val props = if (opts.options.has(opts.commandConfigOpt))
        Utils.loadProps(opts.options.valueOf(opts.commandConfigOpt))
      else
        new Properties()
      props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, opts.options.valueOf(opts.bootstrapServerOpt))
      val adminClient = JAdminClient.create(props)
      try {
        f(adminClient)
      } finally {
        adminClient.close()
      }
    }
    // Adds every ACL derived from the CLI options, then prints the resulting state.
    def addAcls(): Unit = {
      val resourceToAcl = getResourceToAcls(opts)
      withAdminClient(opts) { adminClient =>
        for ((resource, acls) <- resourceToAcl) {
          val resourcePattern = resource.toPattern
          println(s"Adding ACLs for resource `$resourcePattern`: $Newline ${acls.map("\\t" + _).mkString(Newline)} $Newline")
          val aclBindings = acls.map(acl => new AclBinding(resourcePattern, getAccessControlEntry(acl))).asJavaCollection
          adminClient.createAcls(aclBindings).all().get()
        }
        listAcls()
      }
    }
    // Removes matching ACLs after prompting for confirmation (unless --force),
    // then prints the resulting state.
    def removeAcls(): Unit = {
      withAdminClient(opts) { adminClient =>
        val filterToAcl = getResourceFilterToAcls(opts)
        for ((filter, acls) <- filterToAcl) {
          if (acls.isEmpty) {
            if (confirmAction(opts, s"Are you sure you want to delete all ACLs for resource filter `$filter`? (y/n)"))
              removeAcls(adminClient, acls, filter)
          } else {
            if (confirmAction(opts, s"Are you sure you want to remove ACLs: $Newline ${acls.map("\\t" + _).mkString(Newline)} $Newline from resource filter `$filter`? (y/n)"))
              removeAcls(adminClient, acls, filter)
          }
        }
        listAcls()
      }
    }
    // Lists ACLs matching the resource filters; if --principal was given,
    // the output is additionally restricted to those principals.
    def listAcls(): Unit = {
      withAdminClient(opts) { adminClient =>
        val filters = getResourceFilter(opts, dieIfNoResourceFound = false)
        val listPrincipals = getPrincipals(opts, opts.listPrincipalsOpt)
        val resourceToAcls = getAcls(adminClient, filters)
        if (listPrincipals.isEmpty) {
          for ((resource, acls) <- resourceToAcls)
            println(s"Current ACLs for resource `$resource`: $Newline ${acls.map("\\t" + _).mkString(Newline)} $Newline")
        } else {
          listPrincipals.foreach(principal => {
            println(s"ACLs for principal `$principal`")
            val filteredResourceToAcls = resourceToAcls.mapValues(acls =>
              acls.filter(acl => principal.toString.equals(acl.principal))).filter(entry => entry._2.nonEmpty)
            for ((resource, acls) <- filteredResourceToAcls)
              println(s"Current ACLs for resource `$resource`: $Newline ${acls.map("\\t" + _).mkString(Newline)} $Newline")
          })
        }
      }
    }
    // Converts the internal Acl representation to the AdminClient entry type.
    private def getAccessControlEntry(acl: Acl): AccessControlEntry = {
      new AccessControlEntry(acl.principal.toString, acl.host, acl.operation.toJava, acl.permissionType.toJava)
    }
    // Deletes ACLs via the admin client; an empty `acls` set means "all entries
    // matching the resource filter".
    private def removeAcls(adminClient: JAdminClient, acls: Set[Acl], filter: ResourcePatternFilter): Unit = {
      if (acls.isEmpty)
        adminClient.deleteAcls(List(new AclBindingFilter(filter, AccessControlEntryFilter.ANY)).asJava).all().get()
      else {
        val aclBindingFilters = acls.map(acl => new AclBindingFilter(filter, getAccessControlEntryFilter(acl))).toList.asJava
        adminClient.deleteAcls(aclBindingFilters).all().get()
      }
    }
    // Converts the internal Acl representation to an AdminClient entry filter.
    private def getAccessControlEntryFilter(acl: Acl): AccessControlEntryFilter = {
      new AccessControlEntryFilter(acl.principal.toString, acl.host, acl.operation.toJava, acl.permissionType.toJava)
    }
    // Fetches all ACL bindings matching any of the filters (or everything when
    // no filter was given) and groups them by resource pattern.
    private def getAcls(adminClient: JAdminClient, filters: Set[ResourcePatternFilter]): Map[ResourcePattern, Set[AccessControlEntry]] = {
      val aclBindings =
        if (filters.isEmpty) adminClient.describeAcls(AclBindingFilter.ANY).values().get().asScala.toList
        else {
          val results = for (filter <- filters) yield {
            adminClient.describeAcls(new AclBindingFilter(filter, AccessControlEntryFilter.ANY)).values().get().asScala.toList
          }
          results.reduceLeft(_ ++ _)
        }
      val resourceToAcls = mutable.Map[ResourcePattern, Set[AccessControlEntry]]().withDefaultValue(Set())
      aclBindings.foreach(aclBinding => resourceToAcls(aclBinding.pattern()) = resourceToAcls(aclBinding.pattern()) + aclBinding.entry())
      resourceToAcls.toMap
    }
  }
  /** ACL management that instantiates and talks to an Authorizer directly (ZooKeeper path). */
  class AuthorizerService(val opts: AclCommandOptions) extends AclCommandService with Logging {
    // Instantiates the authorizer class (--authorizer, defaulting to
    // SimpleAclAuthorizer), configures it with --authorizer-properties plus the
    // ZK security default, runs `f`, and always closes the authorizer.
    private def withAuthorizer()(f: Authorizer => Unit) {
      val defaultProps = Map(KafkaConfig.ZkEnableSecureAclsProp -> JaasUtils.isZkSecurityEnabled)
      val authorizerProperties =
        if (opts.options.has(opts.authorizerPropertiesOpt)) {
          val authorizerProperties = opts.options.valuesOf(opts.authorizerPropertiesOpt).asScala
          defaultProps ++ CommandLineUtils.parseKeyValueArgs(authorizerProperties, acceptMissingValue = false).asScala
        } else {
          defaultProps
        }
      val authorizerClass = if (opts.options.has(opts.authorizerOpt))
        opts.options.valueOf(opts.authorizerOpt)
      else
        classOf[SimpleAclAuthorizer].getName
      val authZ = CoreUtils.createObject[Authorizer](authorizerClass)
      try {
        authZ.configure(authorizerProperties.asJava)
        f(authZ)
      }
      finally CoreUtils.swallow(authZ.close(), this)
    }
    // Adds every ACL derived from the CLI options, then prints the resulting state.
    def addAcls(): Unit = {
      val resourceToAcl = getResourceToAcls(opts)
      withAuthorizer() { authorizer =>
        for ((resource, acls) <- resourceToAcl) {
          println(s"Adding ACLs for resource `$resource`: $Newline ${acls.map("\\t" + _).mkString(Newline)} $Newline")
          authorizer.addAcls(acls, resource)
        }
        listAcls()
      }
    }
    // Removes matching ACLs after prompting for confirmation (unless --force),
    // then prints the resulting state.
    def removeAcls(): Unit = {
      withAuthorizer() { authorizer =>
        val filterToAcl = getResourceFilterToAcls(opts)
        for ((filter, acls) <- filterToAcl) {
          if (acls.isEmpty) {
            if (confirmAction(opts, s"Are you sure you want to delete all ACLs for resource filter `$filter`? (y/n)"))
              removeAcls(authorizer, acls, filter)
          } else {
            if (confirmAction(opts, s"Are you sure you want to remove ACLs: $Newline ${acls.map("\\t" + _).mkString(Newline)} $Newline from resource filter `$filter`? (y/n)"))
              removeAcls(authorizer, acls, filter)
          }
        }
        listAcls()
      }
    }
    // Lists ACLs matching the resource filters; if --principal was given,
    // the output is restricted to those principals.
    def listAcls(): Unit = {
      withAuthorizer() { authorizer =>
        val filters = getResourceFilter(opts, dieIfNoResourceFound = false)
        val listPrincipals = getPrincipals(opts, opts.listPrincipalsOpt)
        if (listPrincipals.isEmpty) {
          val resourceToAcls = getFilteredResourceToAcls(authorizer, filters)
          for ((resource, acls) <- resourceToAcls)
            println(s"Current ACLs for resource `$resource`: $Newline ${acls.map("\\t" + _).mkString(Newline)} $Newline")
        } else {
          listPrincipals.foreach(principal => {
            println(s"ACLs for principal `$principal`")
            val resourceToAcls = getFilteredResourceToAcls(authorizer, filters, Some(principal))
            for ((resource, acls) <- resourceToAcls)
              println(s"Current ACLs for resource `$resource`: $Newline ${acls.map("\\t" + _).mkString(Newline)} $Newline")
          })
        }
      }
    }
    // Removes ACLs from every resource matching the filter; an empty `acls`
    // set means "remove all ACLs on that resource".
    private def removeAcls(authorizer: Authorizer, acls: Set[Acl], filter: ResourcePatternFilter) {
      getAcls(authorizer, filter)
        .keys
        .foreach(resource =>
          if (acls.isEmpty) authorizer.removeAcls(resource)
          else authorizer.removeAcls(acls, resource)
        )
    }
    // Collects resource → ACLs pairs matching the filters (all resources when
    // no filter is given), optionally restricted to one principal.
    private def getFilteredResourceToAcls(authorizer: Authorizer, filters: Set[ResourcePatternFilter],
                                          listPrincipal: Option[KafkaPrincipal] = None): Iterable[(Resource, Set[Acl])] = {
      if (filters.isEmpty)
        if (listPrincipal.isEmpty)
          authorizer.getAcls()
        else
          authorizer.getAcls(listPrincipal.get)
      else filters.flatMap(filter => getAcls(authorizer, filter, listPrincipal))
    }
    // Fetches all ACLs (optionally for one principal) and keeps those whose
    // resource pattern matches the given filter.
    private def getAcls(authorizer: Authorizer, filter: ResourcePatternFilter,
                        listPrincipal: Option[KafkaPrincipal] = None): Map[Resource, Set[Acl]] =
      if (listPrincipal.isEmpty)
        authorizer.getAcls().filter { case (resource, acl) => filter.matches(resource.toPattern) }
      else
        authorizer.getAcls(listPrincipal.get).filter { case (resource, acl) => filter.matches(resource.toPattern) }
  }
  /**
   * Converts the CLI options into concrete resource → ACL mappings for --add.
   * Exits with usage help for non-specific pattern types (e.g. ANY/MATCH) and
   * when no allow/deny principals were supplied.
   */
  private def getResourceToAcls(opts: AclCommandOptions): Map[Resource, Set[Acl]] = {
    val patternType: PatternType = opts.options.valueOf(opts.resourcePatternType)
    if (!patternType.isSpecific)
      CommandLineUtils.printUsageAndDie(opts.parser, s"A '--resource-pattern-type' value of '$patternType' is not valid when adding acls.")
    val resourceToAcl = getResourceFilterToAcls(opts).map {
      case (filter, acls) =>
        Resource(ResourceType.fromJava(filter.resourceType()), filter.name(), filter.patternType()) -> acls
    }
    if (resourceToAcl.values.exists(_.isEmpty))
      CommandLineUtils.printUsageAndDie(opts.parser, "You must specify one of: --allow-principal, --deny-principal when trying to add ACLs.")
    resourceToAcl
  }
  /**
   * Builds resource-filter → ACL mappings from the CLI options, merging in the
   * --producer and --consumer convenience roles when requested.
   */
  private def getResourceFilterToAcls(opts: AclCommandOptions): Map[ResourcePatternFilter, Set[Acl]] = {
    var resourceToAcls = Map.empty[ResourcePatternFilter, Set[Acl]]
    //if none of the --producer or --consumer options are specified , just construct ACLs from CLI options.
    if (!opts.options.has(opts.producerOpt) && !opts.options.has(opts.consumerOpt)) {
      resourceToAcls ++= getCliResourceFilterToAcls(opts)
    }
    //users are allowed to specify both --producer and --consumer options in a single command.
    if (opts.options.has(opts.producerOpt))
      resourceToAcls ++= getProducerResourceFilterToAcls(opts)
    if (opts.options.has(opts.consumerOpt))
      resourceToAcls ++= getConsumerResourceFilterToAcls(opts).map { case (k, v) => k -> (v ++ resourceToAcls.getOrElse(k, Set.empty[Acl])) }
    validateOperation(opts, resourceToAcls)
    resourceToAcls
  }
  /** ACLs implied by the --producer convenience role (plus IdempotentWrite for --idempotent). */
  private def getProducerResourceFilterToAcls(opts: AclCommandOptions): Map[ResourcePatternFilter, Set[Acl]] = {
    val filters = getResourceFilter(opts)
    val topics: Set[ResourcePatternFilter] = filters.filter(_.resourceType == JResourceType.TOPIC)
    val transactionalIds: Set[ResourcePatternFilter] = filters.filter(_.resourceType == JResourceType.TRANSACTIONAL_ID)
    val enableIdempotence = opts.options.has(opts.idempotentOpt)
    val topicAcls = getAcl(opts, Set(Write, Describe, Create))
    val transactionalIdAcls = getAcl(opts, Set(Write, Describe))
    //Write, Describe, Create permission on topics, Write, Describe on transactionalIds
    topics.map(_ -> topicAcls).toMap ++
      transactionalIds.map(_ -> transactionalIdAcls).toMap ++
      (if (enableIdempotence)
        Map(ClusterResourceFilter -> getAcl(opts, Set(IdempotentWrite)))
      else
        Map.empty)
  }
  /** ACLs implied by the --consumer convenience role. */
  private def getConsumerResourceFilterToAcls(opts: AclCommandOptions): Map[ResourcePatternFilter, Set[Acl]] = {
    val filters = getResourceFilter(opts)
    val topics: Set[ResourcePatternFilter] = filters.filter(_.resourceType == JResourceType.TOPIC)
    val groups: Set[ResourcePatternFilter] = filters.filter(_.resourceType == JResourceType.GROUP)
    //Read, Describe on topic, Read on consumerGroup
    val acls = getAcl(opts, Set(Read, Describe))
    topics.map(_ -> acls).toMap[ResourcePatternFilter, Set[Acl]] ++
      groups.map(_ -> getAcl(opts, Set(Read))).toMap[ResourcePatternFilter, Set[Acl]]
  }
private def getCliResourceFilterToAcls(opts: AclCommandOptions): Map[ResourcePatternFilter, Set[Acl]] = {
val acls = getAcl(opts)
val filters = getResourceFilter(opts)
filters.map(_ -> acls).toMap
}
private def getAcl(opts: AclCommandOptions, operations: Set[Operation]): Set[Acl] = {
val allowedPrincipals = getPrincipals(opts, opts.allowPrincipalsOpt)
val deniedPrincipals = getPrincipals(opts, opts.denyPrincipalsOpt)
val allowedHosts = getHosts(opts, opts.allowHostsOpt, opts.allowPrincipalsOpt)
val deniedHosts = getHosts(opts, opts.denyHostsOpt, opts.denyPrincipalsOpt)
val acls = new collection.mutable.HashSet[Acl]
if (allowedHosts.nonEmpty && allowedPrincipals.nonEmpty)
acls ++= getAcls(allowedPrincipals, Allow, operations, allowedHosts)
if (deniedHosts.nonEmpty && deniedPrincipals.nonEmpty)
acls ++= getAcls(deniedPrincipals, Deny, operations, deniedHosts)
acls.toSet
}
private def getAcl(opts: AclCommandOptions): Set[Acl] = {
val operations = opts.options.valuesOf(opts.operationsOpt).asScala.map(operation => Operation.fromString(operation.trim)).toSet
getAcl(opts, operations)
}
def getAcls(principals: Set[KafkaPrincipal], permissionType: PermissionType, operations: Set[Operation],
hosts: Set[String]): Set[Acl] = {
for {
principal <- principals
operation <- operations
host <- hosts
} yield new Acl(principal, permissionType, host, operation)
}
private def getHosts(opts: AclCommandOptions, hostOptionSpec: ArgumentAcceptingOptionSpec[String],
principalOptionSpec: ArgumentAcceptingOptionSpec[String]): Set[String] = {
if (opts.options.has(hostOptionSpec))
opts.options.valuesOf(hostOptionSpec).asScala.map(_.trim).toSet
else if (opts.options.has(principalOptionSpec))
Set[String](Acl.WildCardHost)
else
Set.empty[String]
}
private def getPrincipals(opts: AclCommandOptions, principalOptionSpec: ArgumentAcceptingOptionSpec[String]): Set[KafkaPrincipal] = {
if (opts.options.has(principalOptionSpec))
opts.options.valuesOf(principalOptionSpec).asScala.map(s => SecurityUtils.parseKafkaPrincipal(s.trim)).toSet
else
Set.empty[KafkaPrincipal]
}
private def getResourceFilter(opts: AclCommandOptions, dieIfNoResourceFound: Boolean = true): Set[ResourcePatternFilter] = {
val patternType: PatternType = opts.options.valueOf(opts.resourcePatternType)
var resourceFilters = Set.empty[ResourcePatternFilter]
if (opts.options.has(opts.topicOpt))
opts.options.valuesOf(opts.topicOpt).asScala.foreach(topic => resourceFilters += new ResourcePatternFilter(JResourceType.TOPIC, topic.trim, patternType))
if (patternType == PatternType.LITERAL && (opts.options.has(opts.clusterOpt) || opts.options.has(opts.idempotentOpt)))
resourceFilters += ClusterResourceFilter
if (opts.options.has(opts.groupOpt))
opts.options.valuesOf(opts.groupOpt).asScala.foreach(group => resourceFilters += new ResourcePatternFilter(JResourceType.GROUP, group.trim, patternType))
if (opts.options.has(opts.transactionalIdOpt))
opts.options.valuesOf(opts.transactionalIdOpt).asScala.foreach(transactionalId =>
resourceFilters += new ResourcePatternFilter(JResourceType.TRANSACTIONAL_ID, transactionalId, patternType))
if (opts.options.has(opts.delegationTokenOpt))
opts.options.valuesOf(opts.delegationTokenOpt).asScala.foreach(token => resourceFilters += new ResourcePatternFilter(JResourceType.DELEGATION_TOKEN, token.trim, patternType))
if (resourceFilters.isEmpty && dieIfNoResourceFound)
CommandLineUtils.printUsageAndDie(opts.parser, "You must provide at least one resource: --topic <topic> or --cluster or --group <group> or --delegation-token <Delegation Token ID>")
resourceFilters
}
private def confirmAction(opts: AclCommandOptions, msg: String): Boolean = {
if (opts.options.has(opts.forceOpt))
return true
println(msg)
StdIn.readLine().equalsIgnoreCase("y")
}
private def validateOperation(opts: AclCommandOptions, resourceToAcls: Map[ResourcePatternFilter, Set[Acl]]): Unit = {
for ((resource, acls) <- resourceToAcls) {
val validOps = ResourceType.fromJava(resource.resourceType).supportedOperations + All
if ((acls.map(_.operation) -- validOps).nonEmpty)
CommandLineUtils.printUsageAndDie(opts.parser, s"ResourceType ${resource.resourceType} only supports operations ${validOps.mkString(",")}")
}
}
  /** Command-line option definitions and argument validation for the ACL tool. */
  class AclCommandOptions(args: Array[String]) extends CommandDefaultOptions(args) {
    val CommandConfigDoc = "A property file containing configs to be passed to Admin Client."
    val bootstrapServerOpt = parser.accepts("bootstrap-server", "A list of host/port pairs to use for establishing the connection to the Kafka cluster." +
      " This list should be in the form host1:port1,host2:port2,... This config is required for acl management using admin client API.")
      .withRequiredArg
      .describedAs("server to connect to")
      .ofType(classOf[String])
    val commandConfigOpt = parser.accepts("command-config", CommandConfigDoc)
      .withOptionalArg()
      .describedAs("command-config")
      .ofType(classOf[String])
    val authorizerOpt = parser.accepts("authorizer", "Fully qualified class name of the authorizer, defaults to kafka.security.auth.SimpleAclAuthorizer.")
      .withRequiredArg
      .describedAs("authorizer")
      .ofType(classOf[String])
    val authorizerPropertiesOpt = parser.accepts("authorizer-properties", "REQUIRED: properties required to configure an instance of Authorizer. " +
      "These are key=val pairs. For the default authorizer the example values are: zookeeper.connect=localhost:2181")
      .withRequiredArg
      .describedAs("authorizer-properties")
      .ofType(classOf[String])
    val topicOpt = parser.accepts("topic", "topic to which ACLs should be added or removed. " +
      "A value of * indicates ACL should apply to all topics.")
      .withRequiredArg
      .describedAs("topic")
      .ofType(classOf[String])
    val clusterOpt = parser.accepts("cluster", "Add/Remove cluster ACLs.")
    val groupOpt = parser.accepts("group", "Consumer Group to which the ACLs should be added or removed. " +
      "A value of * indicates the ACLs should apply to all groups.")
      .withRequiredArg
      .describedAs("group")
      .ofType(classOf[String])
    val transactionalIdOpt = parser.accepts("transactional-id", "The transactionalId to which ACLs should " +
      "be added or removed. A value of * indicates the ACLs should apply to all transactionalIds.")
      .withRequiredArg
      .describedAs("transactional-id")
      .ofType(classOf[String])
    val idempotentOpt = parser.accepts("idempotent", "Enable idempotence for the producer. This should be " +
      "used in combination with the --producer option. Note that idempotence is enabled automatically if " +
      "the producer is authorized to a particular transactional-id.")
    val delegationTokenOpt = parser.accepts("delegation-token", "Delegation token to which ACLs should be added or removed. " +
      "A value of * indicates ACL should apply to all tokens.")
      .withRequiredArg
      .describedAs("delegation-token")
      .ofType(classOf[String])
    val resourcePatternType = parser.accepts("resource-pattern-type", "The type of the resource pattern or pattern filter. " +
      "When adding acls, this should be a specific pattern type, e.g. 'literal' or 'prefixed'. " +
      "When listing or removing acls, a specific pattern type can be used to list or remove acls from specific resource patterns, " +
      "or use the filter values of 'any' or 'match', where 'any' will match any pattern type, but will match the resource name exactly, " +
      "where as 'match' will perform pattern matching to list or remove all acls that affect the supplied resource(s). " +
      "WARNING: 'match', when used in combination with the '--remove' switch, should be used with care.")
      .withRequiredArg()
      .ofType(classOf[String])
      .withValuesConvertedBy(new PatternTypeConverter())
      .defaultsTo(PatternType.LITERAL)
    val addOpt = parser.accepts("add", "Indicates you are trying to add ACLs.")
    val removeOpt = parser.accepts("remove", "Indicates you are trying to remove ACLs.")
    val listOpt = parser.accepts("list", "List ACLs for the specified resource, use --topic <topic> or --group <group> or --cluster to specify a resource.")
    val operationsOpt = parser.accepts("operation", "Operation that is being allowed or denied. Valid operation names are: " + Newline +
      Operation.values.map("\\t" + _).mkString(Newline) + Newline)
      .withRequiredArg
      .ofType(classOf[String])
      .defaultsTo(All.name)
    val allowPrincipalsOpt = parser.accepts("allow-principal", "principal is in principalType:name format." +
      " Note that principalType must be supported by the Authorizer being used." +
      " For example, User:* is the wild card indicating all users.")
      .withRequiredArg
      .describedAs("allow-principal")
      .ofType(classOf[String])
    val denyPrincipalsOpt = parser.accepts("deny-principal", "principal is in principalType:name format. " +
      "By default anyone not added through --allow-principal is denied access. " +
      "You only need to use this option as negation to already allowed set. " +
      "Note that principalType must be supported by the Authorizer being used. " +
      "For example if you wanted to allow access to all users in the system but not test-user you can define an ACL that " +
      "allows access to User:* and specify --deny-principal=User:test@EXAMPLE.COM. " +
      "AND PLEASE REMEMBER DENY RULES TAKES PRECEDENCE OVER ALLOW RULES.")
      .withRequiredArg
      .describedAs("deny-principal")
      .ofType(classOf[String])
    val listPrincipalsOpt = parser.accepts("principal", "List ACLs for the specified principal. principal is in principalType:name format." +
      " Note that principalType must be supported by the Authorizer being used. Multiple --principal option can be passed.")
      .withOptionalArg()
      .describedAs("principal")
      .ofType(classOf[String])
    val allowHostsOpt = parser.accepts("allow-host", "Host from which principals listed in --allow-principal will have access. " +
      "If you have specified --allow-principal then the default for this option will be set to * which allows access from all hosts.")
      .withRequiredArg
      .describedAs("allow-host")
      .ofType(classOf[String])
    val denyHostsOpt = parser.accepts("deny-host", "Host from which principals listed in --deny-principal will be denied access. " +
      "If you have specified --deny-principal then the default for this option will be set to * which denies access from all hosts.")
      .withRequiredArg
      .describedAs("deny-host")
      .ofType(classOf[String])
    val producerOpt = parser.accepts("producer", "Convenience option to add/remove ACLs for producer role. " +
      "This will generate ACLs that allows WRITE,DESCRIBE and CREATE on topic.")
    val consumerOpt = parser.accepts("consumer", "Convenience option to add/remove ACLs for consumer role. " +
      "This will generate ACLs that allows READ,DESCRIBE on topic and READ on group.")
    val forceOpt = parser.accepts("force", "Assume Yes to all queries and do not prompt.")
    options = parser.parse(args: _*)
    // Validates mutually-exclusive and required option combinations; exits with
    // usage help on any violation.
    def checkArgs() {
      if (options.has(bootstrapServerOpt) && options.has(authorizerOpt))
        CommandLineUtils.printUsageAndDie(parser, "Only one of --bootstrap-server or --authorizer must be specified")
      if (!options.has(bootstrapServerOpt))
        CommandLineUtils.checkRequiredArgs(parser, options, authorizerPropertiesOpt)
      if (options.has(commandConfigOpt) && !options.has(bootstrapServerOpt))
        CommandLineUtils.printUsageAndDie(parser, "The --command-config option can only be used with --bootstrap-server option")
      if (options.has(authorizerPropertiesOpt) && options.has(bootstrapServerOpt))
        CommandLineUtils.printUsageAndDie(parser, "The --authorizer-properties option can only be used with --authorizer option")
      val actions = Seq(addOpt, removeOpt, listOpt).count(options.has)
      if (actions != 1)
        CommandLineUtils.printUsageAndDie(parser, "Command must include exactly one action: --list, --add, --remove. ")
      CommandLineUtils.checkInvalidArgs(parser, options, listOpt, Set(producerOpt, consumerOpt, allowHostsOpt, allowPrincipalsOpt, denyHostsOpt, denyPrincipalsOpt))
      //when --producer or --consumer is specified , user should not specify operations as they are inferred and we also disallow --deny-principals and --deny-hosts.
      CommandLineUtils.checkInvalidArgs(parser, options, producerOpt, Set(operationsOpt, denyPrincipalsOpt, denyHostsOpt))
      CommandLineUtils.checkInvalidArgs(parser, options, consumerOpt, Set(operationsOpt, denyPrincipalsOpt, denyHostsOpt))
      if (options.has(listPrincipalsOpt) && !options.has(listOpt))
        CommandLineUtils.printUsageAndDie(parser, "The --principal option is only available if --list is set")
      if (options.has(producerOpt) && !options.has(topicOpt))
        CommandLineUtils.printUsageAndDie(parser, "With --producer you must specify a --topic")
      if (options.has(idempotentOpt) && !options.has(producerOpt))
        CommandLineUtils.printUsageAndDie(parser, "The --idempotent option is only available if --producer is set")
      if (options.has(consumerOpt) && (!options.has(topicOpt) || !options.has(groupOpt) || (!options.has(producerOpt) && (options.has(clusterOpt) || options.has(transactionalIdOpt)))))
        CommandLineUtils.printUsageAndDie(parser, "With --consumer you must specify a --topic and a --group and no --cluster or --transactional-id option should be specified.")
    }
  }
}
/**
 * joptsimple converter that parses a [[PatternType]] name and rejects any
 * value that resolves to the UNKNOWN sentinel.
 */
class PatternTypeConverter extends EnumConverter[PatternType](classOf[PatternType]) {

  /** Parses the given name, throwing if it maps to `PatternType.UNKNOWN`. */
  override def convert(value: String): PatternType =
    super.convert(value) match {
      case unknown if unknown.isUnknown =>
        throw new ValueConversionException("Unknown resource-pattern-type: " + value)
      case patternType => patternType
    }

  /** Pipe-separated list of the accepted (non-UNKNOWN) pattern types, for help text. */
  override def valuePattern: String =
    PatternType.values
      .filterNot(_ == PatternType.UNKNOWN)
      .mkString("|")
}
| gf53520/kafka | core/src/main/scala/kafka/admin/AclCommand.scala | Scala | apache-2.0 | 29,085 |
package edu.osu.cse.groenkeb.logic.proof.rules.core
import edu.osu.cse.groenkeb.logic._
import edu.osu.cse.groenkeb.logic.proof._
import edu.osu.cse.groenkeb.logic.proof.rules._
/** Negation introduction (~I): derive ~S from a proof of absurdity that assumes S. */
case object NegationIntroduction extends BaseRule {
  // This rule has no major premise.
  def major(sentence: Sentence) = false
  // Only negated sentences can be concluded here.
  def yields(conclusion: Sentence) = PartialFunction.cond(conclusion) { case Not(_) => true }
  // To prove ~S, demand a disproof (proof of Absurdity) that must rely on the assumption S.
  def params(major: Option[Sentence] = None)(implicit context: ProofContext) = (goal, major) match {
    case (Not(sentence), None) =>
      Some(UnaryParams(RelevantProof(Absurdity, Required(Assumption(sentence, bind)))))
    case _ => None
  }
  // Discharge the assumed sentence from the disproof's assumptions and conclude its negation.
  def infer(args: RuleArgs)(implicit context: ProofContext) = (goal, args) match {
    case (Not(sentence), UnaryArgs(disproof@Proof(Absurdity, _, _, assumptions, _))) if disproof uses sentence =>
      Some(Proof(goal, this, args, assumptions.discharge(sentence), bind))
    case _ => None
  }
  override def toString = "~I"
}
/** Conjunction introduction (&amp;I): derive `left &amp; right` from proofs of each conjunct. */
case object AndIntroduction extends BaseRule {
  // This rule has no major premise.
  def major(sentence: Sentence) = false
  // Only conjunctions can be concluded here.
  def yields(conclusion: Sentence) = PartialFunction.cond(conclusion) { case And(_, _) => true }
  // Deriving the conjunction requires an unrestricted proof of each conjunct.
  def params(major: Option[Sentence] = None)(implicit context: ProofContext) = (goal, major) match {
    case (And(left, right), None) => Some(BinaryParams(AnyProof(left), AnyProof(right)))
    case _ => None
  }
  // Merge the premise sets of the two subproofs into a proof of the goal.
  def infer(args: RuleArgs)(implicit context: ProofContext) = args match {
    case BinaryArgs(Proof(_, _, _, pleft, _), Proof(_, _, _, pright, _)) =>
      Some(Proof(goal, this, args, pleft ++ pright))
    case _ => None
  }
  override def toString = "&I"
}
/** Disjunction introduction (vI): derive `left v right` from a proof of either disjunct. */
case object OrIntroduction extends BaseRule {
  // This rule has no major premise.
  def major(sentence: Sentence) = false
  // Only disjunctions can be concluded here.
  def yields(conc: Sentence) = PartialFunction.cond(conc) { case Or(_, _) => true }
  // Either disjunct on its own suffices; offer both alternatives.
  def params(major: Option[Sentence] = None)(implicit context: ProofContext) = (goal, major) match {
    case (Or(left, right), None) =>
      Some(OptionParams(
        UnaryParams(AnyProof(left)),
        UnaryParams(AnyProof(right))))
    case _ => None
  }
  // Accept a proof whose conclusion matches either disjunct, carrying its premises forward.
  def infer(args: RuleArgs)(implicit context: ProofContext) = (goal, args) match {
    case (Or(left, right), UnaryArgs(Proof(c, _, _, prems, _))) if c.matches(left) || c.matches(right) =>
      Some(Proof(goal, this, args, prems))
    case _ => None
  }
  override def toString = "vI"
}
/**
 * Conditional introduction (>I): derive `ante > conseq` either from a proof of
 * the consequent (optionally using the antecedent as an assumption) or
 * vacuously from a disproof of the antecedent.
 */
case object IfIntroduction extends BaseRule {
  // This rule has no major premise.
  def major(sentence: Sentence) = false
  // Only conditionals can be concluded here.
  def yields(conc: Sentence) = conc match {
    case If(_,_) => true
    case _ => false
  }
  // Two strategies: prove the consequent outright, or prove it relevantly while
  // assuming the antecedent (which may also be discharged vacuously).
  def params(major: Option[Sentence] = None)(implicit context: ProofContext) = goal match {
    case If(ante, conseq) if major == None =>
      Some(OptionParams(
        UnaryParams(AnyProof(conseq)),
        UnaryParams(RelevantProof(conseq, Vacuous(Assumption(ante, bind)), Assumption(If(ante, conseq))))))
    case _ => None
  }
  // NOTE: case order matters — the two `conseq` cases overlap; the guarded one
  // (consequent proof that uses the antecedent) must be tried first.
  def infer(args: RuleArgs)(implicit context: ProofContext) = goal match {
    case If(ante, conseq) => args match {
      // Vacuous introduction: a disproof of the antecedent yields the conditional,
      // discharging the antecedent assumption.
      case UnaryArgs(anteDisproof@Proof(Absurdity, _, _, assumptions, _)) if anteDisproof uses ante =>
        Some(Proof(If(ante, conseq), this, args, assumptions.discharge(ante), bind))
      // Consequent proved under the antecedent assumption: discharge it.
      case UnaryArgs(consProof@Proof(`conseq`, _, _, assumptions, _)) if consProof uses ante =>
        Some(Proof(If(ante, conseq), this, args, assumptions.discharge(ante), bind))
      // Consequent proved independently of the antecedent: nothing to discharge.
      case UnaryArgs(Proof(`conseq`, _, _, assumptions, _)) =>
        Some(Proof(If(ante, conseq), this, args, assumptions))
      case _ => None
    }
    case _ => None
  }
  override def toString = ">I"
}
| bgroenks96/AutoMoL | core/src/main/scala/edu/osu/cse/groenkeb/logic/proof/rules/core/introRules.scala | Scala | mit | 3,678 |
package io.circe.tests
import cats.kernel.Eq
import java.util.UUID
import org.scalacheck.{ Arbitrary, Gen }
import org.scalacheck.util.Buildable
import shapeless.{ :+:, ::, AdditiveCollection, CNil, Coproduct, Generic, HList, HNil, Inl, Inr, IsTuple, Nat, Sized }
import shapeless.labelled.{ FieldType, field }
import shapeless.ops.nat.ToInt
/**
 * Cats `Eq` and ScalaCheck `Arbitrary` instances for types that lack them
 * upstream (java.util, shapeless HList/Coproduct/records/Sized, tuples, etc.),
 * used by circe's test suites.
 */
trait MissingInstances {
  // Universal-equality fallbacks for JDK/std types without lawful Eq instances.
  implicit lazy val eqThrowable: Eq[Throwable] = Eq.fromUniversalEquals
  implicit lazy val eqBigDecimal: Eq[BigDecimal] = Eq.fromUniversalEquals
  implicit lazy val eqUUID: Eq[UUID] = Eq.fromUniversalEquals
  // Arrays compare by reference; compare their contents via Vector instead.
  implicit def eqRefArray[A <: AnyRef: Eq]: Eq[Array[A]] =
    Eq.by((value: Array[A]) => Predef.wrapRefArray(value).toVector)(
      cats.kernel.instances.vector.catsKernelStdEqForVector[A]
    )
  implicit def eqSeq[A: Eq]: Eq[Seq[A]] = Eq.by((_: Seq[A]).toVector)(
    cats.kernel.instances.vector.catsKernelStdEqForVector[A]
  )
  implicit def arbitraryTuple1[A](implicit A: Arbitrary[A]): Arbitrary[Tuple1[A]] =
    Arbitrary(A.arbitrary.map(Tuple1(_)))
  // Some/None get their own instances so tests can target the subtypes directly.
  implicit def arbitrarySome[A](implicit A: Arbitrary[A]): Arbitrary[Some[A]] = Arbitrary(A.arbitrary.map(Some(_)))
  implicit lazy val arbitraryNone: Arbitrary[None.type] = Arbitrary(Gen.const(None))
  implicit def eqSome[A](implicit A: Eq[A]): Eq[Some[A]] = Eq.by(_.get)
  implicit lazy val eqNone: Eq[None.type] = Eq.instance((_, _) => true)
  implicit lazy val arbitrarySymbol: Arbitrary[Symbol] = Arbitrary(Arbitrary.arbitrary[String].map(Symbol(_)))
  // HNil is a singleton (always equal); CNil is uninhabited (never equal).
  implicit lazy val eqHNil: Eq[HNil] = Eq.instance((_, _) => true)
  implicit lazy val eqCNil: Eq[CNil] = Eq.instance((_, _) => false)
  // Structural equality for HLists and Coproducts, derived element-wise.
  implicit def eqHCons[H, T <: HList](implicit eqH: Eq[H], eqT: Eq[T]): Eq[H :: T] =
    Eq.instance[H :: T] {
      case (h1 :: t1, h2 :: t2) => eqH.eqv(h1, h2) && eqT.eqv(t1, t2)
    }
  implicit def eqCCons[L, R <: Coproduct](implicit eqL: Eq[L], eqR: Eq[R]): Eq[L :+: R] =
    Eq.instance[L :+: R] {
      case (Inl(l1), Inl(l2)) => eqL.eqv(l1, l2)
      case (Inr(r1), Inr(r2)) => eqR.eqv(r1, r2)
      case (_, _) => false
    }
  // Tuples compare via their generic HList representation.
  implicit def eqTuple[P: IsTuple, L <: HList](implicit
    gen: Generic.Aux[P, L],
    eqL: Eq[L]
  ): Eq[P] = Eq.by(gen.to)(eqL)
  implicit lazy val arbitraryHNil: Arbitrary[HNil] = Arbitrary(Gen.const(HNil))
  implicit def arbitraryHCons[H, T <: HList](implicit H: Arbitrary[H], T: Arbitrary[T]): Arbitrary[H :: T] =
    Arbitrary(
      for {
        h <- H.arbitrary
        t <- T.arbitrary
      } yield h :: t
    )
  implicit def arbitrarySingletonCoproduct[L](implicit L: Arbitrary[L]): Arbitrary[L :+: CNil] =
    Arbitrary(L.arbitrary.map(Inl(_)))
  // Pick a branch by generating an Either and injecting left/right accordingly.
  implicit def arbitraryCoproduct[L, R <: Coproduct](implicit
    L: Arbitrary[L],
    R: Arbitrary[R]
  ): Arbitrary[L :+: R] = Arbitrary(
    Arbitrary.arbitrary[Either[L, R]].map {
      case Left(l) => Inl(l)
      case Right(r) => Inr(r)
    }
  )
  // Record fields delegate to the value; the key is a phantom type.
  implicit def eqFieldType[K, V](implicit V: Eq[V]): Eq[FieldType[K, V]] =
    Eq.by[FieldType[K, V], V](identity)
  implicit def arbitraryFieldType[K, V](implicit V: Arbitrary[V]): Arbitrary[FieldType[K, V]] =
    Arbitrary(V.arbitrary.map(field[K](_)))
  implicit def eqSized[L <: Nat, C[_], A](implicit CA: Eq[C[A]]): Eq[Sized[C[A], L]] =
    Eq.by[Sized[C[A], L], C[A]](_.unsized)
  // Generate exactly toInt() elements; the filter guards against containers
  // (e.g. Sets) that collapse duplicates below the required size.
  implicit def arbitrarySized[L <: Nat, C[_], A](implicit
    A: Arbitrary[A],
    additive: AdditiveCollection[C[A]],
    buildable: Buildable[A, C[A]],
    ev: C[A] => Traversable[A],
    toInt: ToInt[L]
  ): Arbitrary[Sized[C[A], L]] =
    Arbitrary(
      Gen.containerOfN[C, A](toInt(), A.arbitrary).filter(ca => ev(ca).size == toInt()).map(Sized.wrap[C[A], L])
    )
}
| travisbrown/circe | modules/tests/shared/src/main/scala/io/circe/tests/MissingInstances.scala | Scala | apache-2.0 | 3,664 |
package com.cleawing.akka.consul
import java.util.UUID
import akka.actor.{ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider}
import akka.pattern._
import akka.util.Timeout
import scala.concurrent.Future
import scala.concurrent.duration._
/** Akka extension id: `Consul(system)` returns the per-system [[ConsulExt]] instance. */
object Consul
  extends ExtensionId[ConsulExt]
  with ExtensionIdProvider {
  override def lookup() = Consul
  override def createExtension(system: ExtendedActorSystem) = new ConsulExt(system)
}
/**
 * Akka extension exposing a typed facade over the Consul HTTP API
 * (agent, checks, services, status endpoints), backed by a guardian actor.
 * All calls are asynchronous and return `Future`s of the decoded responses.
 */
class ConsulExt(system: ExtendedActorSystem) extends Extension {
  import api.ConsulGuardian
  import api.Response._
  // Reads the "consul" section of the actor system's configuration (host/port).
  val config = system.settings.config.getConfig("consul")
  // Identity under which this node registers itself with Consul.
  val nodeId = UUID.randomUUID()
  val serviceId = s"akka-$nodeId"
  val serviceName = s"akka-${system.name}"
  // Guardian actor that owns the HTTP connection to the local Consul agent.
  private val guardian = system.systemActorOf(
    ConsulGuardian.props(config.getString("host"), config.getInt("port")),
    "consul"
  )
  import system.dispatcher
  // Keep TTL-based health checks alive by pinging the guardian every second.
  system.scheduler.schedule(0.second, 1.seconds, guardian, ConsulGuardian.UpdateTTL)
  object agent {
    import api.Request.Agent._
    def checks() : Future[CheckDescriptors] = request(Get.Checks).mapTo[CheckDescriptors]
    def services() : Future[ServiceDescriptors] = request(Get.Services).mapTo[ServiceDescriptors]
    def members() : Future[Members] = request(Get.Members).mapTo[Members]
    def self() : Future[Self] = request(Get.Self).mapTo[Self]
    def maintenance(enable: Boolean, reason: Option[String] = None) : Future[Boolean] =
      request(Put.Maintenance(enable, reason)).mapTo[Boolean]
    // wan=true joins over the WAN pool instead of the LAN pool.
    def join(address: String,
             wan: Boolean = false) = request(Get.Join(address, wan)).mapTo[Boolean]
    def forceLeave(node: String) : Future[Unit] = request(Get.ForceLeave(node)).mapTo[Unit]
    object check {
      // Exactly one of script/http/ttl should define the check type — TODO confirm against Consul API.
      def register(name: String,
                   id: Option[String] = None,
                   notes: Option[String] = None,
                   script: Option[String] = None,
                   http: Option[String] = None,
                   interval: Option[String] = None,
                   ttl: Option[String] = None,
                   serviceId: Option[String] = None) : Future[Boolean] =
        request(Check.Put.RegisterCheck(name, id, notes, script, http, interval, ttl, serviceId)).mapTo[Boolean]
      def deRegister(checkId: String) : Future[Boolean] = request(Check.Get.DeRegisterCheck(checkId)).mapTo[Boolean]
      // Mark a TTL check as passing / warning / failing.
      def pass(checkId: String, note: Option[String] = None) : Future[Boolean] = request(Check.Get.Pass(checkId, note)).mapTo[Boolean]
      def warn(checkId: String, note: Option[String] = None) : Future[Boolean] = request(Check.Get.Warn(checkId, note)).mapTo[Boolean]
      def fail(checkId: String, note: Option[String] = None) : Future[Boolean] = request(Check.Get.Fail(checkId, note)).mapTo[Boolean]
    }
    object health {
    }
    object service {
      def register(name: String,
                   id: Option[String] = None,
                   tags: Option[Seq[String]] = None,
                   address: Option[String] = None,
                   port: Option[Int] = None,
                   scriptCheck : Option[String] = None,
                   httpCheck : Option[String] = None,
                   ttlCheck: Option[String] = None,
                   interval: Option[String] = None) : Future[Boolean] = {
        // Only attach a health check if at least one check variant was supplied.
        val check = if (scriptCheck.isDefined || httpCheck.isDefined || ttlCheck.isDefined)
          Some(Service.Put.ServiceCheck(scriptCheck, httpCheck, interval, ttlCheck))
        else None
        request(Service.Put.RegisterService(name, id, tags, address, port, check)).mapTo[Boolean]
      }
      def deRegister(serviceId: String) : Future[Boolean] = request(Service.Get.DeRegisterService(serviceId)).mapTo[Boolean]
      def maintenance(serviceId: String, enable: Boolean, reason: Option[String] = None) : Future[Boolean] =
        request(Service.Put.Maintenance(serviceId, enable, reason)).mapTo[Boolean]
    }
  }
  object status {
    import api.Request.Status._
    def leader() : Future[String] = request(Get.Leader).mapTo[String]
    def peers() : Future[Seq[String]] = request(Get.Peers).mapTo[Seq[String]]
  }
  // TODO. Grab timeout from config
  protected implicit val timeout = Timeout(5.seconds)
  // All public API calls funnel through a single ask on the guardian actor.
  private def request(req: com.cleawing.akka.consul.api.Request) : Future[_] = {
    guardian.ask(req)
  }
}
| Cleawing/united | akka-extensions/src/main/scala/com/cleawing/akka/consul/Consul.scala | Scala | apache-2.0 | 4,330 |
package reactivemongo.core.actors
import scala.concurrent.Promise
import reactivemongo.io.netty.channel.ChannelId
import reactivemongo.core.protocol.{ Request, Response }
/**
 * Tracks one in-flight MongoDB request: the request itself, the channel it was
 * written to, and the promise to complete when the matching response arrives.
 * Implements Product/Serializable and equals/hashCode by hand because it was
 * formerly a case class (see the deprecated companion below).
 */
private[actors] class AwaitingResponse(
  val request: Request,
  val channelID: ChannelId,
  val promise: Promise[Response],
  val isGetLastError: Boolean,
  val isMongo26WriteOp: Boolean,
  val pinnedNode: Option[String]) extends Product with Serializable {

  @deprecated("Use the complete constructor", "0.18.5")
  def this(
    request: Request,
    channelID: ChannelId,
    promise: Promise[Response],
    isGetLastError: Boolean,
    isMongo26WriteOp: Boolean) = this(
    request, channelID, promise, isGetLastError, isMongo26WriteOp, None)

  @inline def requestID: Int = request.requestID

  // Number of times this request has been re-dispatched to another channel.
  private var _retry = 0 // TODO: Refactor as property

  // TODO: Refactor as Property
  // Optional getLastError follow-up request attached to this write.
  var _writeConcern: Option[Request] = None
  def withWriteConcern(wc: Request): AwaitingResponse = {
    _writeConcern = Some(wc)
    this
  }
  def getWriteConcern: Option[Request] = _writeConcern

  /**
   * If this is not already completed and,
   * if the current retry count is less then the maximum.
   */
  def retriable(max: Int): Option[ChannelId => AwaitingResponse] =
    if (!promise.isCompleted && _retry >= max) None else Some({ id: ChannelId =>
      // Re-target the request at the new channel, carrying over retry count and
      // write concern (which copy() does not preserve by itself).
      val req = copy(this.request, channelID = id)

      req._retry = _retry + 1
      req._writeConcern = _writeConcern

      req
    })

  // NOTE(review): this legacy overload drops pinnedNode (always None) — kept for
  // binary compatibility; scheduled for removal.
  def copy( // TODO: Remove
    request: Request,
    channelID: ChannelId,
    promise: Promise[Response],
    isGetLastError: Boolean,
    isMongo26WriteOp: Boolean): AwaitingResponse =
    new AwaitingResponse(request, channelID, promise,
      isGetLastError, isMongo26WriteOp, None)

  def copy(
    request: Request = this.request,
    channelID: ChannelId = this.channelID,
    promise: Promise[Response] = this.promise,
    isGetLastError: Boolean = this.isGetLastError,
    isMongo26WriteOp: Boolean = this.isMongo26WriteOp,
    pinnedNode: Option[String] = this.pinnedNode): AwaitingResponse =
    new AwaitingResponse(request, channelID, promise,
      isGetLastError, isMongo26WriteOp, pinnedNode)

  def canEqual(that: Any): Boolean = that match {
    case _: AwaitingResponse => true
    case _ => false
  }

  // Equality/hashing/product are all delegated to the tuple of the six
  // constructor fields (mutable state is deliberately excluded).
  override def equals(that: Any): Boolean = that match {
    case other: AwaitingResponse =>
      tupled == other.tupled

    case _ =>
      false
  }

  lazy val productArity: Int = tupled.productArity

  @inline def productElement(n: Int): Any = tupled.productElement(n)

  override lazy val hashCode: Int = tupled.hashCode

  private lazy val tupled = Tuple6(request, this.channelID, promise,
    isGetLastError, isMongo26WriteOp, pinnedNode)
}
/** Legacy companion retained for source compatibility with the old case class. */
@deprecated("No longer a case class", "0.18.5")
private[actors] object AwaitingResponse extends scala.runtime.AbstractFunction5[Request, ChannelId, Promise[Response], Boolean, Boolean, AwaitingResponse] {

  /** Builds an instance with no pinned node (legacy five-argument shape). */
  def apply(
    request: Request,
    channelID: ChannelId,
    promise: Promise[Response],
    isGetLastError: Boolean,
    isMongo26WriteOp: Boolean): AwaitingResponse =
    new AwaitingResponse(
      request,
      channelID,
      promise,
      isGetLastError,
      isMongo26WriteOp)

  /** Extractor mirroring the legacy case-class pattern (pinnedNode excluded). */
  def unapply(req: AwaitingResponse): Option[(Request, ChannelId, Promise[Response], Boolean, Boolean)] = {
    val fields = (
      req.request,
      req.channelID,
      req.promise,
      req.isGetLastError,
      req.isMongo26WriteOp)

    Some(fields)
  }
}
| ornicar/ReactiveMongo | driver/src/main/scala/core/actors/AwaitingResponse.scala | Scala | apache-2.0 | 3,436 |
package com.socrata.pg.server
/**
 * End-to-end tests for the SoQL `url` datatype: selection, text-to-url
 * coercion, sub-property access (url.url / url.description), null handling,
 * and the url(...) constructor. Each test compares query output against a
 * stored JSON fixture.
 */
class SoQLUrlTest extends SoQLTest {
  test("select url") {
    compareSoqlResult("""select code, url where code='LOCATION'""", "select-url.json")
  }

  // A full anchor tag coerces to a url value with both url and description.
  test("text to url and equality check") {
    compareSoqlResult(
      """SELECT code, url WHERE url = '<a href="http://www.socrata.com">Home Site</a>'""",
      "select-url.json")
  }

  // JSON literal with only a description (url part absent).
  test("text to url description only and equality check") {
    compareSoqlResult("""SELECT code, url WHERE url = '{ "description": "Home Site"}'""", "select-url-description.json")
  }

  // Bare URL string coerces to a url value with no description.
  test("text to url url only and equality check") {
    compareSoqlResult(
      "SELECT code, url WHERE url = 'http://www.socrata.com'",
      "select-url-url.json")
  }

  // Relaxed-JSON literal (unquoted keys) with both parts.
  test("text to url (json) and equality check") {
    compareSoqlResult(
      """SELECT code, url WHERE url = '{ url: "http://www.socrata.com", description : "Home Site" }'""",
      "select-url.json")
  }

  // Sub-property null checks: a non-null url may still have a null part.
  test("url description is null") {
    compareSoqlResult(
      "SELECT code, url, url.url as url_url WHERE url is not null and url.description is null",
      "where-url_description-isnull.json")
  }

  test("url url is null") {
    compareSoqlResult(
      "SELECT code, url, url.description as url_description WHERE url is not null and url.url is null",
      "where-url_url-isnull.json")
  }

  // Filtering on sub-properties.
  test("url.description") {
    compareSoqlResult(
      "select code, url where url.description = 'Home Site' and code='LOCATION'",
      "select-url.json")
  }

  test("url.url") {
    compareSoqlResult(
      "select code, url where url.url = 'http://www.socrata.com' and code='LOCATION'",
      "select-url.json")
  }

  // Constructing a url value from its two components.
  test("url constructor") {
    compareSoqlResult(
      "SELECT code, url('http://www.socrata.com', 'Home Site') as url WHERE code = 'LOCATION'",
      "select-url.json")
  }
}
| socrata-platform/soql-postgres-adapter | soql-server-pg/src/test/scala/com/socrata/pg/server/SoQLUrlTest.scala | Scala | apache-2.0 | 1,836 |
package text.search
/**
* @author ynupc
* Created on 2016/08/21
*/
/**
 * Boyer–Moore–Horspool substring search over generic arrays.
 *
 * Previously this object was an unimplemented stub (always -1 / empty); it now
 * implements the algorithm: a bad-character shift table built from the pattern
 * allows the search window to skip ahead by more than one position on a
 * mismatch. Overlapping matches are reported by `indicesOf`.
 */
object Horspool extends Search {

  /**
   * Builds the Horspool shift table: for each element occurring in
   * `target(0 .. m-2)`, the distance from its rightmost such occurrence to the
   * end of the pattern. Elements not in the table shift by the full length m.
   */
  private def shiftTable[T](target: Array[T]): scala.collection.mutable.HashMap[T, Int] = {
    val m = target.length
    val shifts = scala.collection.mutable.HashMap.empty[T, Int]
    var i = 0
    while (i < m - 1) {
      shifts(target(i)) = m - 1 - i
      i += 1
    }
    shifts
  }

  /**
   * Index of the first occurrence of `target` in `source`, or -1 if absent.
   * An empty target matches at index 0 (consistent with String.indexOf).
   */
  override def indexOf[T](source: Array[T], target: Array[T]): Int = {
    val n = source.length
    val m = target.length
    if (m == 0) 0
    else if (m > n) -1
    else {
      val shifts = shiftTable(target)
      var pos = 0
      var found = -1
      while (found < 0 && pos <= n - m) {
        // Compare right-to-left within the current window.
        var j = m - 1
        while (j >= 0 && source(pos + j) == target(j)) j -= 1
        if (j < 0) found = pos
        else pos += shifts.getOrElse(source(pos + m - 1), m)
      }
      found
    }
  }

  /**
   * All (possibly overlapping) match positions of `target` in `source`, in
   * ascending order. Empty when the target is empty or longer than the source.
   */
  override def indicesOf[T](source: Array[T], target: Array[T]): Array[Int] = {
    val n = source.length
    val m = target.length
    if (m == 0 || m > n) Array()
    else {
      val shifts = shiftTable(target)
      val matches = scala.collection.mutable.ArrayBuffer.empty[Int]
      var pos = 0
      while (pos <= n - m) {
        var j = m - 1
        while (j >= 0 && source(pos + j) == target(j)) j -= 1
        if (j < 0) matches += pos
        // The bad-character shift is safe after a full match too: it can never
        // skip past the next possible (overlapping) occurrence.
        pos += shifts.getOrElse(source(pos + m - 1), m)
      }
      matches.toArray
    }
  }
}
| ynupc/scalastringcourseday6 | src/main/scala/text/search/Horspool.scala | Scala | apache-2.0 | 296 |
//: ----------------------------------------------------------------------------
//: Copyright (C) 2014 Verizon. All Rights Reserved.
//:
//: Licensed under the Apache License, Version 2.0 (the "License");
//: you may not use this file except in compliance with the License.
//: You may obtain a copy of the License at
//:
//: http://www.apache.org/licenses/LICENSE-2.0
//:
//: Unless required by applicable law or agreed to in writing, software
//: distributed under the License is distributed on an "AS IS" BASIS,
//: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//: See the License for the specific language governing permissions and
//: limitations under the License.
//:
//: ----------------------------------------------------------------------------
package remotely
import scala.reflect.runtime.universe.TypeTag
import scalaz.Monad
/** A server-side value or function, applied to an untyped argument list to produce a Response. */
trait Value {
  def apply(args: Any*): Response[Any]
}
object Value {
  /**
   * Wraps an untyped value into a [[Value]] by dispatching on argument count:
   * arity 0 treats `a` as an already-computed `Response`, arities 1-6 cast `a`
   * to the corresponding `FunctionN` returning a `Response`. The casts are
   * unchecked — callers must supply a value of the matching function arity.
   */
  def fromValue(a: Any) = new Value {
    def apply(args: Any*) = args.length match {
      case 0 => a.asInstanceOf[Response[Any]]
      case 1 => a.asInstanceOf[Any => Response[Any]](args(0))
      case 2 => a.asInstanceOf[(Any,Any) => Response[Any]](args(0), args(1))
      case 3 => a.asInstanceOf[(Any,Any,Any) => Response[Any]](args(0), args(1), args(2))
      case 4 => a.asInstanceOf[(Any, Any, Any, Any) => Response[Any]](args(0), args(1), args(2), args(3))
      case 5 => a.asInstanceOf[(Any, Any, Any, Any, Any) => Response[Any]](args(0), args(1), args(2), args(3), args(4))
      case 6 => a.asInstanceOf[(Any, Any, Any, Any, Any, Any) => Response[Any]](args(0), args(1), args(2), args(3), args(4), args(5))
      // Arities beyond 6 are not supported; fail at call time rather than silently.
      case n => Response.fail(new Exception("functions of arity " + n + " not supported"))
    }
  }
}
/**
 * Immutable registry of remotely-callable values, keyed by a type-qualified
 * name (see `Remote.nameToTag`). Each `declare` overload registers a function
 * of a given arity; re-declaring an existing tag is a programming error.
 */
case class Values(values: Map[String,Value]) {

  /**
   * Declare the value for the given name in this `Environment`,
   * or throw an error if the type-qualified name is already bound.
   */
  // def declareStrict[A:TypeTag](name: String, a: A): Values = {
  //   val tag = Remote.nameToTag[A](name)
  //   if (values.contains(tag)) sys.error("Environment already has declaration for: "+tag)
  //   else this.copy(values = values + (tag -> Value.fromValue(Response.now(a))))
  // }

  // Arity 0: a constant Response.
  def declare[A:TypeTag](name: String, a: Response[A]): Values = {
    val tag = Remote.nameToTag[A](name)
    if (values.contains(tag)) sys.error("Environment already has declaration for: "+tag)
    else this.copy(values = values + (tag -> Value.fromValue(a)))
  }

  // Arity 1 through 6: functions whose tag encodes the full function type.
  def declare[A:TypeTag,B:TypeTag](name: String, f: A => Response[B]): Values = {
    val tag = Remote.nameToTag[A => B](name)
    if (values.contains(tag)) sys.error("Environment already has declaration for: "+tag)
    else this.copy(values = values + (tag -> Value.fromValue(f)))
  }

  def declare[A:TypeTag,B:TypeTag,C:TypeTag](name: String, f: (A,B) => Response[C]): Values = {
    val tag = Remote.nameToTag[(A,B) => C](name)
    if (values.contains(tag)) sys.error("Environment already has declaration for: "+tag)
    else this.copy(values = values + (tag -> Value.fromValue(f)))
  }

  def declare[A:TypeTag,B:TypeTag,C:TypeTag,D:TypeTag](name: String, f: (A,B,C) => Response[D]): Values = {
    val tag = Remote.nameToTag[(A,B,C) => D](name)
    if (values.contains(tag)) sys.error("Environment already has declaration for: "+tag)
    else this.copy(values = values + (tag -> Value.fromValue(f)))
  }

  def declare[A:TypeTag,B:TypeTag,C:TypeTag,D:TypeTag,E:TypeTag](name: String, f: (A,B,C,D) => Response[E]): Values = {
    val tag = Remote.nameToTag[(A,B,C,D) => E](name)
    if (values.contains(tag)) sys.error("Environment already has declaration for: "+tag)
    else this.copy(values = values + (tag -> Value.fromValue(f)))
  }

  def declare[A:TypeTag,B:TypeTag,C:TypeTag,D:TypeTag,E:TypeTag,F:TypeTag](name: String, f: (A,B,C,D,E) => Response[F]): Values = {
    val tag = Remote.nameToTag[(A,B,C,D,E) => F](name)
    if (values.contains(tag)) sys.error("Environment already has declaration for: "+tag)
    else this.copy(values = values + (tag -> Value.fromValue(f)))
  }

  def declare[A:TypeTag,B:TypeTag,C:TypeTag,D:TypeTag,E:TypeTag,F:TypeTag,G:TypeTag](name: String, f: (A,B,C,D,E,F) => Response[G]): Values = {
    val tag = Remote.nameToTag[(A,B,C,D,E,F) => G](name)
    if (values.contains(tag)) sys.error("Environment already has declaration for: "+tag)
    else this.copy(values = values + (tag -> Value.fromValue(f)))
  }

  // All declared type-qualified names.
  def keySet = values.keySet
}
object Values {
  /** An environment with no declarations. */
  val empty: Values = Values(Map.empty)
}
| ShengC/remotely | core/src/main/scala/Values.scala | Scala | apache-2.0 | 4,568 |
import org.opencv.core.{Core, CvType, Mat, Scalar, Size}
import org.opencv.highgui.VideoCapture
/**
 * Convenience layer over OpenCV's Java bindings: scoped resource management
 * for `Mat`/`VideoCapture` (native memory must be released explicitly),
 * operator-style arithmetic on `Mat`, and frame-averaging helpers.
 */
package object opencv {
  // Run `f` with the given Mat(s), always releasing their native memory afterwards.
  def using[A](m1: Mat)(f: Mat => A): A = {
    try {
      f(m1)
    } finally {
      m1.release()
    }
  }
  def using[A](m1: Mat, m2: Mat)(f: (Mat, Mat) => A): A = {
    try {
      f(m1, m2)
    } finally {
      m1.release()
      m2.release()
    }
  }
  def using[A](m1: Mat, m2: Mat, m3: Mat)(f: (Mat, Mat, Mat) => A): A = {
    try {
      f(m1, m2, m3)
    } finally {
      m1.release()
      m2.release()
      m3.release()
    }
  }
  def using[A](m1: Mat, m2: Mat, m3: Mat, m4: Mat)(f: (Mat, Mat, Mat, Mat) => A): A = {
    try {
      f(m1, m2, m3, m4)
    } finally {
      m1.release()
      m2.release()
      m3.release()
      m4.release()
    }
  }
  def using[A](cap: VideoCapture)(f: VideoCapture => A): A = {
    try {
      f(cap)
    } finally {
      cap.release()
    }
  }
  // Thin forwarders to Mat.zeros.
  def zeros(rows: Int, cols: Int, typ: Int) = Mat.zeros(rows, cols, typ)
  def zeros(size: Size, typ: Int) = Mat.zeros(size, typ)
  /** Operator syntax for Mat. Binary operators allocate a fresh result Mat; the
    * `op=` variants mutate `self` in place. `consume` runs `f` then releases `self`. */
  implicit class RichMat(val self: Mat) extends AnyVal {
    def consume[A](f: Mat => A): A = using(self)(f)
    def +(other: Mat): Mat = {
      val dst = new Mat
      Core.add(self, other, dst)
      dst
    }
    def +=(other: Mat): Mat = {
      Core.add(self, other, self)
      self
    }
    def -(other: Mat): Mat = {
      val dst = new Mat
      Core.subtract(self, other, dst)
      dst
    }
    def -=(other: Mat): Mat = {
      Core.subtract(self, other, self)
      self
    }
    def *(other: Mat): Mat = {
      val dst = new Mat
      Core.multiply(self, other, dst)
      dst
    }
    def *=(other: Mat): Mat = {
      Core.multiply(self, other, self)
      self
    }
    def /(other: Scalar): Mat = {
      val dst = new Mat
      Core.divide(self, other, dst)
      dst
    }
    def /=(other: Scalar): Mat = {
      Core.divide(self, other, self)
      self
    }
    // Element-wise equality: equal shape/type and no differing element in any channel.
    def ===(other: Mat): Boolean = other match {
      case _ if self.empty() && other.empty() => true
      case _ if
        self.rows != other.rows ||
        self.cols != other.cols ||
        self.`type` != other.`type` ||
        self.dims != other.dims => false
      case _ =>
        self.compare(other, Core.CMP_NE).consume { diff =>
          diff.extractChannels().forall(_.consume(_.countNonZero == 0))
        }
    }
    def =/=(other: Mat): Boolean = !(self === other)
    def compare(other: Mat, cmpop: Int): Mat = {
      val dst = new Mat
      Core.compare(self, other, dst, cmpop)
      dst
    }
    def convert(rtype: Int): Mat = {
      val dst = new Mat
      self.convertTo(dst, rtype)
      dst
    }
    def copy(): Mat = {
      val dst = new Mat
      self.copyTo(dst)
      dst
    }
    def copy(mask: Mat): Mat = {
      val dst = new Mat
      self.copyTo(dst, mask)
      dst
    }
    def countNonZero: Int = Core.countNonZero(self)
    def extractChannel(coi: Int): Mat = {
      val dst = new Mat
      Core.extractChannel(self, dst, coi)
      dst
    }
    // Lazily yields one single-channel Mat per channel; each is caller-owned.
    def extractChannels(): Iterator[Mat] =
      (0 until self.channels).toIterator.map(self.extractChannel)
  }
  // Tuple variants of `consume`: release every Mat in the tuple after `f`.
  implicit class RichMatTuple2(val self: (Mat, Mat)) extends AnyVal {
    def consume[A](f: (Mat, Mat) => A): A = using(self._1, self._2)(f)
  }
  implicit class RichMatTuple3(val self: (Mat, Mat, Mat)) extends AnyVal {
    def consume[A](f: (Mat, Mat, Mat) => A): A = using(self._1, self._2, self._3)(f)
  }
  implicit class RichMatTuple4(val self: (Mat, Mat, Mat, Mat)) extends AnyVal {
    def consume[A](f: (Mat, Mat, Mat, Mat) => A): A = using(self._1, self._2, self._3, self._4)(f)
  }
  /** Iterates frames from a capture device as Some(frame) until grab() fails,
    * then yields None forever — hence hasNext is unconditionally true. */
  implicit class VideoCaptureIterator(val cap: VideoCapture) extends Iterator[Option[Mat]] {
    override def hasNext: Boolean = true // always produce Some[Mat] or None
    override def next(): Option[Mat] =
      if (cap.grab()) {
        val image = new Mat
        cap.retrieve(image)
        Some(image)
      } else {
        None
      }
  }
  /** Per-pixel mean of a stream of frames; input Mats are consumed (released).
    * Returns an empty Mat for an empty iterator. */
  def mean(ms: Iterator[Mat]): Mat =
    if (ms.hasNext) {
      // Accumulate in 32-bit signed, 3-channel to avoid 8-bit overflow.
      // NOTE(review): hard-codes 3 channels — confirm inputs are always BGR.
      val imdType = CvType.CV_32SC3 // TODO
      def widen(m: Mat) = m.consume(_.convert(imdType))
      // Pairwise sum carrying a frame count; releases the right-hand Mat.
      def plus(a: (Mat, Int), b: (Mat, Int)) = b._1.consume(a._1 += _) -> (a._2 + b._2)
      val (sum, cnt) = ms.map(widen(_) -> 1).reduce(plus)
      sum.consume(_ /= new Scalar(cnt, cnt, cnt))
    } else {
      new Mat
    }
  /** Like [[mean]], but sums each `batchSize` group of frames in parallel. */
  def meanPar(ms: Iterator[Mat], batchSize: Int): Mat =
    if (ms.hasNext) {
      val imdType = CvType.CV_32SC3 // TODO
      def widen(m: Mat) = m.consume(_.convert(imdType))
      def plus(a: (Mat, Int), b: (Mat, Int)) = b._1.consume(a._1 += _) -> (a._2 + b._2)
      val partitions = ms.grouped(batchSize)
      // Each batch is reduced on the default parallel-collections pool, then
      // the per-batch partial sums are combined sequentially.
      val (sum, cnt) =
        partitions
          .map(_.par.map(widen(_) -> 1).reduce(plus))
          .reduce(plus)
      /*
      import scala.concurrent.{Await, ExecutionContext, Future}
      import scala.concurrent.duration.Duration
      import ExecutionContext.Implicits.global
      val (sum, cnt) = Await.result(
        Future.traverse(partitions) { ms: Iterator[Mat] =>
          Future(ms.map(widen(_) -> 1).reduce(plus))
        }.map { ps: Iterator[(Mat, Int)] =>
          ps.reduce(plus)
        },
        Duration.Inf)
      */
      sum.consume(_ /= new Scalar(cnt, cnt, cnt))
    } else {
      new Mat
    }
}
| okapies/isolator | src/main/scala/opencv/package.scala | Scala | bsd-2-clause | 5,371 |
/*
* Copyright (C) 2016 Vincibean <Andre Bessi>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.vincibean.scala.impatient.chapter10.exercise8
/**
* There are dozens of Scala trait tutorials with silly examples of barking dogs
* or philosophizing frogs. Reading through contrived hierarchies can be
* tedious and not very helpful, but designing your own is very illuminating.
* Make your own silly trait hierarchy example that demonstrates layered
* traits, concrete and abstract methods, and concrete and abstract fields.
*/
/** A tank: armed via [[HasSubmachineGun]], but tracked rather than wheeled. */
class Tank extends HasSubmachineGun with HasWheels {
  // Tanks ride on a pair of continuous tracks, so the wheel count is zero.
  val wheels: Int = 0
  val tracks: Int = 2
  // 100 magazines of 500 rounds each.
  val ammo: Int = 500 * 100
}
| Vincibean/ScalaForTheImpatient-Solutions | src/main/scala/org/vincibean/scala/impatient/chapter10/exercise8/Tank.scala | Scala | gpl-3.0 | 1,393 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters the dataset and retrieves a sample of code snippets that meet specific criteria, giving a quick overview of the dataset's contents without deeper analysis.