code
stringlengths
5
1M
repo_name
stringlengths
5
109
path
stringlengths
6
208
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
5
1M
package play.modules.reactivemongo import javax.inject._ import scala.concurrent.ExecutionContext import play.api._ import play.api.inject.{ ApplicationLifecycle, Binding, Module } /** * MongoDB module. */ @Singleton final class ReactiveMongoModule extends Module { import DefaultReactiveMongoApi.BindingInfo override def bindings( environment: Environment, configuration: Configuration ): Seq[Binding[_]] = apiBindings( DefaultReactiveMongoApi.parseConfiguration(configuration)( ExecutionContext.global ), configuration ) private def apiBindings( info: Seq[(String, BindingInfo)], cf: Configuration ): Seq[Binding[ReactiveMongoApi]] = info.flatMap { case (name, BindingInfo(strict, db, uri)) => val provider = new ReactiveMongoProvider( new DefaultReactiveMongoApi(uri, db, strict, cf, _)(_) ) val bs = List(ReactiveMongoModule.key(name).to(provider)) if (name == "default") { bind[ReactiveMongoApi].to(provider) :: bs } else bs } } object ReactiveMongoModule extends ReactiveMongoModuleCompat /** * Cake pattern components. */ trait ReactiveMongoComponents { def reactiveMongoApi: ReactiveMongoApi } /** * Inject provider for named databases. */ private[reactivemongo] final class ReactiveMongoProvider( factory: (ApplicationLifecycle, ExecutionContext) => ReactiveMongoApi) extends Provider[ReactiveMongoApi] { import com.github.ghik.silencer.silent @silent @Inject private var applicationLifecycle: ApplicationLifecycle = _ @silent @Inject private var executionContext: ExecutionContext = _ lazy val get: ReactiveMongoApi = factory(applicationLifecycle, executionContext) }
ReactiveMongo/Play-ReactiveMongo
src/main/scala/play/modules/reactivemongo/ReactiveMongoModule.scala
Scala
apache-2.0
1,730
/* * Copyright (c) 2014-2018 by The Monix Project Developers. * See the project homepage at: https://monix.io * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monix.reactive.internal.operators import monix.eval.Task import monix.execution.Ack import monix.execution.Ack.{Continue, Stop} import monix.execution.atomic.Atomic import scala.util.control.NonFatal import monix.reactive.Observable.Operator import monix.reactive.observers.Subscriber import monix.reactive.internal.util.Instances.ContinueTask import scala.concurrent.Future import scala.util.Success private[reactive] final class DoOnEarlyStopOperator[A](onStop: Task[Unit]) extends Operator[A,A] { def apply(out: Subscriber[A]): Subscriber[A] = new Subscriber[A] { implicit val scheduler = out.scheduler private[this] val isActive = Atomic(true) def onNext(elem: A): Future[Ack] = { val result = try out.onNext(elem) catch { case ex if NonFatal(ex) => Future.failed(ex) } val task = Task.fromFuture(result) .onErrorHandle { ex => onError(ex); Stop } .flatMap { case Continue => ContinueTask; case Stop => onStop.map(_ => Stop) } val future = task.runToFuture // Execution might be immediate future.value match { case Some(Success(ack)) => ack case _ => future } } def onError(ex: Throwable): Unit = { if (isActive.getAndSet(false)) out.onError(ex) else scheduler.reportFailure(ex) } def onComplete(): Unit = { if (isActive.getAndSet(false)) out.onComplete() } } }
Wogan/monix
monix-reactive/shared/src/main/scala/monix/reactive/internal/operators/DoOnEarlyStopOperator.scala
Scala
apache-2.0
2,179
package com.monsanto.arch.kamon.prometheus.spray import com.monsanto.arch.kamon.prometheus.converter.SnapshotConverter.{KamonCategoryLabel, KamonNameLabel} import com.monsanto.arch.kamon.prometheus.metric._ import kamon.metric.SingleInstrumentEntityRecorder import kamon.util.MilliTimestamp import org.scalactic.Uniformity import org.scalatest.Matchers._ import org.scalatest.{BeforeAndAfterAll, FreeSpec} import spray.http.HttpHeaders.{Accept, `Accept-Encoding`} import spray.http.{HttpEncodings, HttpResponse, MediaType, StatusCodes} import spray.httpx.encoding.Gzip import spray.httpx.unmarshalling.{Deserialized, FromResponseUnmarshaller, Unmarshaller} import spray.testkit.ScalatestRouteTest import scala.collection.immutable.ListMap class SprayEndpointSpec extends FreeSpec with ScalatestRouteTest with BeforeAndAfterAll { val extension = SprayEndpoint(system).asInstanceOf[PrometheusEndpoint] val endpoint = extension.route /** Unmarshaller from the text format. */ val textUnmarshaller = Unmarshaller.delegate[String, Seq[MetricFamily]](PrometheusEndpoint.TextMediaType)(TextFormat.parse) /** Unmarshaller from the protocol buffer format. */ val protoBufUnmarshaller = Unmarshaller.delegate[Array[Byte], Seq[MetricFamily]](PrometheusEndpoint.ProtoBufMediaType)(ProtoBufFormat.parse) /** Unmarshaller that supports both formats. */ val plainUnmarshaller = Unmarshaller.oneOf(textUnmarshaller, protoBufUnmarshaller) /** Unmarshaller that supports both formats, including the gzipped versions. 
*/ implicit val mainUnmarshaller = new FromResponseUnmarshaller[Seq[MetricFamily]] { override def apply(response: HttpResponse): Deserialized[Seq[MetricFamily]] = { response.encoding match { case HttpEncodings.identity ⇒ plainUnmarshaller(response.entity) case HttpEncodings.gzip ⇒ plainUnmarshaller(Gzip.decode(response).entity) } } } "the SprayEndpoint extension" - { "must provide a route" in { endpoint should not be null } "when fulfilling a plain-text request, " - { def doGet() = Get() ~> endpoint "and there is no content, it should return an empty response" in { doGet() ~> check { handled shouldBe true status shouldBe StatusCodes.NoContent } } "and it has content, it should" - { "handle GET requests" in withSampleSnapshot { doGet() ~> check { handled shouldBe true status shouldBe StatusCodes.OK } } "use the correct encoding" in withSampleSnapshot { doGet() ~> check { definedCharset shouldBe defined charset.value shouldBe "UTF-8" } } "use the correct media type" in withSampleSnapshot { doGet() ~> check { mediaType shouldBe MediaType.custom("text", "plain", parameters = Map("version" -> "0.0.4")) } } "is not compressed" in withSampleSnapshot { doGet() ~> check { response.encoding shouldBe HttpEncodings.identity } } "have the correct content" in withSampleSnapshot { doGet() ~> check { val response = responseAs[Seq[MetricFamily]] (response should contain theSameElementsAs sampleSnapshot) (after being normalised) } } } "accepting gzip compression" - { def doGet(): RouteResult = Get() ~> `Accept-Encoding`(HttpEncodings.gzip) ~> endpoint "and there is no content, it should return an empty response" in { doGet() ~> check { handled shouldBe true status shouldBe StatusCodes.NoContent } } "and there is content, it should" - { "handle GET requests" in withSampleSnapshot { doGet() ~> check { handled shouldBe true status shouldBe StatusCodes.OK } } "use the correct encoding" in withSampleSnapshot { doGet() ~> check { definedCharset shouldBe defined charset.value shouldBe 
"UTF-8" } } "use the correct media type" in withSampleSnapshot { doGet() ~> check { mediaType shouldBe MediaType.custom("text", "plain", parameters = Map("version" -> "0.0.4")) } } "be compressed" in withSampleSnapshot { doGet() ~> check { response.encoding shouldBe HttpEncodings.gzip } } "have the correct content" in withSampleSnapshot { doGet() ~> check { val response = responseAs[Seq[MetricFamily]] (response should contain theSameElementsAs sampleSnapshot) (after being normalised) } } } } } "when fulfilling a protocol buffer request" - { def doGet(): RouteResult = Get() ~> Accept(PrometheusEndpoint.ProtoBufMediaType) ~> endpoint "and there is no content, it should return an empty response" in { doGet() ~> check { handled shouldBe true status shouldBe StatusCodes.NoContent } } "and there is content, it should" - { "handle GET requests" in withSampleSnapshot { doGet() ~> check { handled shouldBe true status shouldBe StatusCodes.OK } } "use the correct media type" in withSampleSnapshot { doGet() ~> check { mediaType shouldBe PrometheusEndpoint.ProtoBufMediaType } } "not be compressed" in withSampleSnapshot { doGet() ~> check { response.encoding shouldBe HttpEncodings.identity } } "have the correct content" in withSampleSnapshot { snapshot: Seq[MetricFamily] ⇒ doGet() ~> check { val response = responseAs[Seq[MetricFamily]] (response should contain theSameElementsAs snapshot) (after being normalised) } } } "accepting gzip compression" - { def doGet(): RouteResult = Get() ~> `Accept-Encoding`(HttpEncodings.gzip) ~> Accept(PrometheusEndpoint.ProtoBufMediaType) ~> endpoint "and there is no content, it should return an empty response" in { doGet() ~> check { handled shouldBe true status shouldBe StatusCodes.NoContent } } "and there is content, it should" - { "handle GET requests" in withSampleSnapshot { doGet() ~> check { handled shouldBe true status shouldBe StatusCodes.OK } } "use the correct media type" in withSampleSnapshot { doGet() ~> check { mediaType shouldBe 
PrometheusEndpoint.ProtoBufMediaType } } "be compressed" in withSampleSnapshot { doGet() ~> check { response.encoding shouldBe HttpEncodings.gzip } } "have the correct content" in withSampleSnapshot { snapshot: Seq[MetricFamily] ⇒ doGet() ~> check { val response = responseAs[Seq[MetricFamily]] (response should contain theSameElementsAs snapshot) (after being normalised) } } } } } } /** A sample snapshot useful for testing. */ val sampleSnapshot = { import MetricValue.{Bucket ⇒ B, Histogram ⇒ HG} val now = MilliTimestamp.now val ∞ = Double.PositiveInfinity Seq( MetricFamily("test_counter", PrometheusType.Counter, None, Seq( Metric(MetricValue.Counter(1), now, Map("type" → "a", KamonCategoryLabel → SingleInstrumentEntityRecorder.Counter, KamonNameLabel → "test_counter")), Metric(MetricValue.Counter(2), now, Map("type" → "b", KamonCategoryLabel → SingleInstrumentEntityRecorder.Counter, KamonNameLabel → "test_counter")))), MetricFamily("another_counter", PrometheusType.Counter, None, Seq(Metric(MetricValue.Counter(42), now, Map(KamonCategoryLabel → SingleInstrumentEntityRecorder.Counter, KamonNameLabel → "another_counter")))), MetricFamily("a_histogram", PrometheusType.Histogram, None, Seq( Metric(HG(Seq(B(1, 20), B(4, 23), B(∞, 23)), 23, 32), now, Map("got_label" → "yes", KamonCategoryLabel → SingleInstrumentEntityRecorder.Histogram, KamonNameLabel → "a_histogram")), Metric(HG(Seq(B(3, 2), B(5, 6), B(∞, 6)), 6, 26), now, Map("got_label" → "true", KamonCategoryLabel → SingleInstrumentEntityRecorder.Histogram, KamonNameLabel → "a_histogram")))), MetricFamily("another_histogram", PrometheusType.Histogram, None, Seq(Metric(HG(Seq(B(20, 20), B(∞, 20)), 20, 400), now, Map(KamonCategoryLabel → SingleInstrumentEntityRecorder.Histogram, KamonNameLabel → "another_histogram")))), MetricFamily("a_min_max_counter", PrometheusType.Histogram, None, Seq(Metric(HG(Seq(B(0, 1), B(1, 2), B(3, 3), B(∞, 3)), 3, 4), now, Map( KamonCategoryLabel → 
SingleInstrumentEntityRecorder.MinMaxCounter, KamonNameLabel → "a_min_max_counter"))))) } /** Fixture that sets up the sample snapshot in the Spray endpoint extension. */ def withSampleSnapshot[T](test: ⇒ T): T = { try { extension.snapshot.set(sampleSnapshot) test } finally { extension.snapshot.set(null) } } /** Normalises a metric family by ensuring its metrics are given an order and their timestamps are all given the * same value. */ val normalised = new Uniformity[MetricFamily] { /** Sorts metrics according to their labels. Assumes the labels are sorted. */ def metricSort(a: Metric, b: Metric): Boolean = { (a.labels.headOption, b.labels.headOption) match { case (Some(x), Some(y)) ⇒ if (x._1 < y._1) { true } else if (x._1 == y._1) { x._2 < y._2 } else { false } case (None, Some(_)) ⇒ true case (Some(_), None) ⇒ false case (None, None) ⇒ false } } override def normalizedOrSame(b: Any): Any = b match { case mf: MetricFamily ⇒ normalized(mf) case _ ⇒ b } override def normalizedCanHandle(b: Any): Boolean = b.isInstanceOf[MetricFamily] override def normalized(metricFamily: MetricFamily): MetricFamily = { val normalMetrics = metricFamily.metrics.map { m ⇒ val sortedLabels = ListMap(m.labels.toSeq.sortWith(_._1 < _._2): _*) Metric(m.value, new MilliTimestamp(0), sortedLabels) }.sortWith(metricSort) MetricFamily(metricFamily.name, metricFamily.prometheusType, metricFamily.help, normalMetrics) } } }
MonsantoCo/kamon-prometheus
library/src/test/scala/com/monsanto/arch/kamon/prometheus/spray/SprayEndpointSpec.scala
Scala
bsd-3-clause
11,151
package sbt import java.lang.reflect.{ Array => _, _ } import java.lang.annotation.Annotation import annotation.tailrec import sbt.classfile.ClassFile import xsbti.api import xsbti.SafeLazy import SafeLazy.strict import collection.mutable object ClassToAPI { def apply(c: Seq[Class[_]]): api.SourceAPI = process(c)._1 // (api, public inherited classes) def process(c: Seq[Class[_]]): (api.SourceAPI, Set[Class[_]]) = { val pkgs = packages(c).map(p => new api.Package(p)) val cmap = emptyClassMap val defs = c.filter(isTopLevel).flatMap(toDefinitions(cmap)) val source = new api.SourceAPI(pkgs.toArray, defs.toArray) cmap.lz.foreach(_.get()) // force thunks to ensure all inherited dependencies are recorded val inDeps = cmap.inherited.toSet cmap.clear() (source, inDeps) } // Avoiding implicit allocation. private def arrayMap[T <: AnyRef, U <: AnyRef: reflect.ClassTag](xs: Array[T])(f: T => U): Array[U] = { val len = xs.length var i = 0 val res = new Array[U](len) while (i < len) { res(i) = f(xs(i)) i += 1 } res } def packages(c: Seq[Class[_]]): Set[String] = c.flatMap(packageName).toSet def isTopLevel(c: Class[_]): Boolean = c.getEnclosingClass eq null final class ClassMap private[sbt] (private[sbt] val memo: mutable.Map[String, Seq[api.ClassLike]], private[sbt] val inherited: mutable.Set[Class[_]], private[sbt] val lz: mutable.Buffer[xsbti.api.Lazy[_]]) { def clear(): Unit = { memo.clear() inherited.clear() lz.clear() } } def emptyClassMap: ClassMap = new ClassMap(new mutable.HashMap, new mutable.HashSet, new mutable.ListBuffer) def toDefinitions(cmap: ClassMap)(c: Class[_]): Seq[api.ClassLike] = cmap.memo.getOrElseUpdate(c.getName, toDefinitions0(c, cmap)) def toDefinitions0(c: Class[_], cmap: ClassMap): Seq[api.ClassLike] = { import api.DefinitionType.{ ClassDef, Module, Trait } val enclPkg = packageName(c) val mods = modifiers(c.getModifiers) val acc = access(c.getModifiers, enclPkg) val annots = annotations(c.getAnnotations) val name = c.getName val tpe = if 
(Modifier.isInterface(c.getModifiers)) Trait else ClassDef lazy val (static, instance) = structure(c, enclPkg, cmap) val cls = new api.ClassLike(tpe, strict(Empty), lzy(instance, cmap), emptyStringArray, typeParameters(typeParameterTypes(c)), name, acc, mods, annots) val stat = new api.ClassLike(Module, strict(Empty), lzy(static, cmap), emptyStringArray, emptyTypeParameterArray, name, acc, mods, annots) val defs = cls :: stat :: Nil cmap.memo(c.getName) = defs defs } /** Returns the (static structure, instance structure, inherited classes) for `c`. */ def structure(c: Class[_], enclPkg: Option[String], cmap: ClassMap): (api.Structure, api.Structure) = { lazy val cf = classFileForClass(c) val methods = mergeMap(c, c.getDeclaredMethods, c.getMethods, methodToDef(enclPkg)) val fields = mergeMap(c, c.getDeclaredFields, c.getFields, fieldToDef(c, cf, enclPkg)) val constructors = mergeMap(c, c.getDeclaredConstructors, c.getConstructors, constructorToDef(enclPkg)) val classes = merge[Class[_]](c, c.getDeclaredClasses, c.getClasses, toDefinitions(cmap), (_: Seq[Class[_]]).partition(isStatic), _.getEnclosingClass != c) val all = methods ++ fields ++ constructors ++ classes val parentJavaTypes = allSuperTypes(c) if (!Modifier.isPrivate(c.getModifiers)) cmap.inherited ++= parentJavaTypes.collect { case c: Class[_] => c } val parentTypes = types(parentJavaTypes) val instanceStructure = new api.Structure(lzyS(parentTypes.toArray), lzyS(all.declared.toArray), lzyS(all.inherited.toArray)) val staticStructure = new api.Structure(lzyEmptyTpeArray, lzyS(all.staticDeclared.toArray), lzyS(all.staticInherited.toArray)) (staticStructure, instanceStructure) } /** TODO: over time, ClassToAPI should switch the majority of access to the classfile parser */ private[this] def classFileForClass(c: Class[_]): ClassFile = classfile.Parser.apply(IO.classfileLocation(c)) private[this] def lzyS[T <: AnyRef](t: T): xsbti.api.Lazy[T] = lzy(t) def lzy[T <: AnyRef](t: => T): xsbti.api.Lazy[T] = 
xsbti.SafeLazy(t) private[this] def lzy[T <: AnyRef](t: => T, cmap: ClassMap): xsbti.api.Lazy[T] = { val s = lzy(t) cmap.lz += s s } private val emptyStringArray = new Array[String](0) private val emptyTypeArray = new Array[xsbti.api.Type](0) private val emptyAnnotationArray = new Array[xsbti.api.Annotation](0) private val emptyTypeParameterArray = new Array[xsbti.api.TypeParameter](0) private val emptySimpleTypeArray = new Array[xsbti.api.SimpleType](0) private val lzyEmptyTpeArray = lzyS(emptyTypeArray) private val lzyEmptyDefArray = lzyS(new Array[xsbti.api.Definition](0)) private def allSuperTypes(t: Type): Seq[Type] = { @tailrec def accumulate(t: Type, accum: Seq[Type] = Seq.empty): Seq[Type] = t match { case c: Class[_] => val (parent, interfaces) = (c.getGenericSuperclass, c.getGenericInterfaces) accumulate(parent, (accum :+ parent) ++ flattenAll(interfaces)) case p: ParameterizedType => accumulate(p.getRawType, accum) case _ => accum } @tailrec def flattenAll(interfaces: Seq[Type], accum: Seq[Type] = Seq.empty): Seq[Type] = { if (interfaces.nonEmpty) { val raw = interfaces map { case p: ParameterizedType => p.getRawType; case i => i } val children = raw flatMap { case i: Class[_] => i.getGenericInterfaces; case _ => Seq.empty } flattenAll(children, accum ++ interfaces ++ children) } else accum } accumulate(t).filterNot(_ == null).distinct } @deprecated("No longer used", "0.13.0") def parents(c: Class[_]): Seq[api.Type] = types(allSuperTypes(c)) def types(ts: Seq[Type]): Array[api.Type] = ts filter (_ ne null) map reference toArray; def upperBounds(ts: Array[Type]): api.Type = new api.Structure(lzy(types(ts)), lzyEmptyDefArray, lzyEmptyDefArray) @deprecated("Use fieldToDef[4] instead", "0.13.9") def fieldToDef(enclPkg: Option[String])(f: Field): api.FieldLike = { val c = f.getDeclaringClass() fieldToDef(c, classFileForClass(c), enclPkg)(f) } def fieldToDef(c: Class[_], cf: => ClassFile, enclPkg: Option[String])(f: Field): api.FieldLike = { val name = 
f.getName val accs = access(f.getModifiers, enclPkg) val mods = modifiers(f.getModifiers) val annots = annotations(f.getDeclaredAnnotations) val fieldTpe = reference(returnType(f)) // generate a more specific type for constant fields val specificTpe: Option[api.Type] = if (mods.isFinal) { try { cf.constantValue(name).map(singletonForConstantField(c, f, _)) } catch { case e: Throwable => throw new IllegalStateException( s"Failed to parse $c: this may mean your classfiles are corrupted. Please clean and try again.", e ) } } else { None } val tpe = specificTpe.getOrElse(fieldTpe) if (mods.isFinal) { new api.Val(tpe, name, accs, mods, annots) } else { new api.Var(tpe, name, accs, mods, annots) } } /** * Creates a Singleton type that includes both the type and ConstantValue for the given Field. * * Since java compilers are allowed to inline constant (static final primitive) fields in * downstream classfiles, we generate a type that will cause APIs to match only when both * the type and value of the field match. We include the classname mostly for readability. * * Because this type is purely synthetic, it's fine that the name might contain filename- * banned characters. 
*/ private def singletonForConstantField(c: Class[_], field: Field, constantValue: AnyRef) = new api.Singleton( pathFromStrings( c.getName.split("\\\\.").toSeq :+ (field.getName + "$" + returnType(field) + "$" + constantValue) ) ) def methodToDef(enclPkg: Option[String])(m: Method): api.Def = defLike(m.getName, m.getModifiers, m.getDeclaredAnnotations, typeParameterTypes(m), m.getParameterAnnotations, parameterTypes(m), Option(returnType(m)), exceptionTypes(m), m.isVarArgs, enclPkg) def constructorToDef(enclPkg: Option[String])(c: Constructor[_]): api.Def = defLike("<init>", c.getModifiers, c.getDeclaredAnnotations, typeParameterTypes(c), c.getParameterAnnotations, parameterTypes(c), None, exceptionTypes(c), c.isVarArgs, enclPkg) def defLike[T <: GenericDeclaration](name: String, mods: Int, annots: Array[Annotation], tps: Array[TypeVariable[T]], paramAnnots: Array[Array[Annotation]], paramTypes: Array[Type], retType: Option[Type], exceptions: Array[Type], varArgs: Boolean, enclPkg: Option[String]): api.Def = { val varArgPosition = if (varArgs) paramTypes.length - 1 else -1 val isVarArg = List.tabulate(paramTypes.length)(_ == varArgPosition) val pa = (paramAnnots, paramTypes, isVarArg).zipped map { case (a, p, v) => parameter(a, p, v) } val params = new api.ParameterList(pa, false) val ret = retType match { case Some(rt) => reference(rt); case None => Empty } new api.Def(Array(params), ret, typeParameters(tps), name, access(mods, enclPkg), modifiers(mods), annotations(annots) ++ exceptionAnnotations(exceptions)) } def exceptionAnnotations(exceptions: Array[Type]): Array[api.Annotation] = if (exceptions.length == 0) emptyAnnotationArray else arrayMap(exceptions)(t => new api.Annotation(Throws, Array(new api.AnnotationArgument("value", t.toString)))) def parameter(annots: Array[Annotation], parameter: Type, varArgs: Boolean): api.MethodParameter = new api.MethodParameter("", annotated(reference(parameter), annots), false, if (varArgs) api.ParameterModifier.Repeated 
else api.ParameterModifier.Plain) def annotated(t: api.SimpleType, annots: Array[Annotation]): api.Type = ( if (annots.length == 0) t else new api.Annotated(t, annotations(annots)) ) case class Defs(declared: Seq[api.Definition], inherited: Seq[api.Definition], staticDeclared: Seq[api.Definition], staticInherited: Seq[api.Definition]) { def ++(o: Defs) = Defs(declared ++ o.declared, inherited ++ o.inherited, staticDeclared ++ o.staticDeclared, staticInherited ++ o.staticInherited) } def mergeMap[T <: Member](of: Class[_], self: Seq[T], public: Seq[T], f: T => api.Definition): Defs = merge[T](of, self, public, x => f(x) :: Nil, splitStatic _, _.getDeclaringClass != of) def merge[T](of: Class[_], self: Seq[T], public: Seq[T], f: T => Seq[api.Definition], splitStatic: Seq[T] => (Seq[T], Seq[T]), isInherited: T => Boolean): Defs = { val (selfStatic, selfInstance) = splitStatic(self) val (inheritedStatic, inheritedInstance) = splitStatic(public filter isInherited) Defs(selfInstance flatMap f, inheritedInstance flatMap f, selfStatic flatMap f, inheritedStatic flatMap f) } def splitStatic[T <: Member](defs: Seq[T]): (Seq[T], Seq[T]) = defs partition isStatic def isStatic(c: Class[_]): Boolean = Modifier.isStatic(c.getModifiers) def isStatic(a: Member): Boolean = Modifier.isStatic(a.getModifiers) def typeParameters[T <: GenericDeclaration](tps: Array[TypeVariable[T]]): Array[api.TypeParameter] = if (tps.length == 0) emptyTypeParameterArray else arrayMap(tps)(typeParameter) def typeParameter[T <: GenericDeclaration](tp: TypeVariable[T]): api.TypeParameter = new api.TypeParameter(typeVariable(tp), emptyAnnotationArray, emptyTypeParameterArray, api.Variance.Invariant, NothingRef, upperBounds(tp.getBounds)) // needs to be stable across compilations def typeVariable[T <: GenericDeclaration](tv: TypeVariable[T]): String = name(tv.getGenericDeclaration) + " " + tv.getName def reduceHash(in: Array[Byte]): Int = (0 /: in)((acc, b) => (acc * 43) ^ b) def name(gd: 
GenericDeclaration): String = gd match { case c: Class[_] => c.getName case m: Method => m.getName case c: Constructor[_] => c.getName } def modifiers(i: Int): api.Modifiers = { import Modifier.{ isAbstract, isFinal } new api.Modifiers(isAbstract(i), false, isFinal(i), false, false, false, false) } def access(i: Int, pkg: Option[String]): api.Access = { import Modifier.{ isPublic, isPrivate, isProtected } if (isPublic(i)) Public else if (isPrivate(i)) Private else if (isProtected(i)) Protected else packagePrivate(pkg) } def annotations(a: Array[Annotation]): Array[api.Annotation] = if (a.length == 0) emptyAnnotationArray else arrayMap(a)(annotation) def annotation(a: Annotation): api.Annotation = new api.Annotation(reference(a.annotationType), Array(javaAnnotation(a.toString))) // full information not available from reflection def javaAnnotation(s: String): api.AnnotationArgument = new api.AnnotationArgument("toString", s) def array(tpe: api.Type): api.SimpleType = new api.Parameterized(ArrayRef, Array(tpe)) def reference(c: Class[_]): api.SimpleType = if (c.isArray) array(reference(c.getComponentType)) else if (c.isPrimitive) primitive(c.getName) else reference(c.getName) // does not handle primitives def reference(s: String): api.SimpleType = { val (pkg, cls) = packageAndName(s) pkg match { // translate all primitives? 
case None => new api.Projection(Empty, cls) case Some(p) => new api.Projection(new api.Singleton(pathFromString(p)), cls) } } def referenceP(t: ParameterizedType): api.Parameterized = { val targs = t.getActualTypeArguments val args = if (targs.isEmpty) emptyTypeArray else arrayMap(targs)(t => reference(t): api.Type) val base = reference(t.getRawType) new api.Parameterized(base, args.toArray[api.Type]) } def reference(t: Type): api.SimpleType = t match { case w: WildcardType => reference("_") case tv: TypeVariable[_] => new api.ParameterRef(typeVariable(tv)) case pt: ParameterizedType => referenceP(pt) case gat: GenericArrayType => array(reference(gat.getGenericComponentType)) case c: Class[_] => reference(c) } def pathFromString(s: String): api.Path = pathFromStrings(s.split("\\\\.")) def pathFromStrings(ss: Seq[String]): api.Path = new api.Path((ss.map(new api.Id(_)) :+ ThisRef).toArray) def packageName(c: Class[_]) = packageAndName(c)._1 def packageAndName(c: Class[_]): (Option[String], String) = packageAndName(c.getName) def packageAndName(name: String): (Option[String], String) = { val lastDot = name.lastIndexOf('.') if (lastDot >= 0) (Some(name.substring(0, lastDot)), name.substring(lastDot + 1)) else (None, name) } val Empty = new api.EmptyType val ThisRef = new api.This val Public = new api.Public val Unqualified = new api.Unqualified val Private = new api.Private(Unqualified) val Protected = new api.Protected(Unqualified) def packagePrivate(pkg: Option[String]): api.Access = new api.Private(new api.IdQualifier(pkg getOrElse "")) val ArrayRef = reference("scala.Array") val Throws = reference("scala.throws") val NothingRef = reference("scala.Nothing") private[this] def PrimitiveNames = Seq("boolean", "byte", "char", "short", "int", "long", "float", "double") private[this] def PrimitiveMap = PrimitiveNames.map(j => (j, j.capitalize)) :+ ("void" -> "Unit") private[this] val PrimitiveRefs = PrimitiveMap.map { case (n, sn) => (n, reference("scala." 
+ sn)) }.toMap def primitive(name: String): api.SimpleType = PrimitiveRefs(name) // Workarounds for https://github.com/sbt/sbt/issues/1035 // these catch the GenericSignatureFormatError and return the erased type private[this] def returnType(f: Field): Type = try f.getGenericType catch { case _: GenericSignatureFormatError => f.getType } private[this] def parameterTypes(c: Constructor[_]): Array[Type] = try c.getGenericParameterTypes catch { case _: GenericSignatureFormatError => convert(c.getParameterTypes) } private[this] def exceptionTypes(c: Constructor[_]): Array[Type] = try c.getGenericExceptionTypes catch { case _: GenericSignatureFormatError => convert(c.getExceptionTypes) } private[this] def parameterTypes(m: Method): Array[Type] = try m.getGenericParameterTypes catch { case _: GenericSignatureFormatError => convert(m.getParameterTypes) } private[this] def returnType(m: Method): Type = try m.getGenericReturnType catch { case _: GenericSignatureFormatError => m.getReturnType } private[this] def exceptionTypes(m: Method): Array[Type] = try m.getGenericExceptionTypes catch { case _: GenericSignatureFormatError => convert(m.getExceptionTypes) } private[this] def typeParameterTypes[T](m: Constructor[T]): Array[TypeVariable[Constructor[T]]] = try m.getTypeParameters catch { case _: GenericSignatureFormatError => new Array(0) } private[this] def typeParameterTypes[T](m: Class[T]): Array[TypeVariable[Class[T]]] = try m.getTypeParameters catch { case _: GenericSignatureFormatError => new Array(0) } private[this] def typeParameterTypes(m: Method): Array[TypeVariable[Method]] = try m.getTypeParameters catch { case _: GenericSignatureFormatError => new Array(0) } private[this] def superclassType(c: Class[_]): Type = try c.getGenericSuperclass catch { case _: GenericSignatureFormatError => c.getSuperclass } private[this] def interfaces(c: Class[_]): Array[Type] = try c.getGenericInterfaces catch { case _: GenericSignatureFormatError => convert(c.getInterfaces) } 
private[this] def convert(classes: Array[Class[_]]): Array[Type] = classes.asInstanceOf[Array[Type]] // ok: treat Arrays as read-only }
jasonchaffee/sbt
compile/api/src/main/scala/sbt/ClassToAPI.scala
Scala
bsd-3-clause
18,108
package com.fhuertas.monkey.models import akka.actor.{Actor, ActorRef, Props} import com.fhuertas.monkey.messages._ import com.fhuertas.monkey.models.Directions._ import com.typesafe.scalalogging.LazyLogging class Canyon extends Actor with LazyLogging { override def receive: Receive = empty var starvationActorRef: Option[ActorRef] = None // starvation condition partial function private def starvationCondition: Receive = { case _ if starvationActorRef.isDefined => logger.debug(logMsg(s"At least a monkey has starvation. You should wait")) sender ! CannotCross } // starvation detection partial function private def cannotCrossAndDetectStarvation(direction: Direction): Receive = { case CanICross(newDirection) if newDirection != direction => logger.debug( logMsg(s"You cannot cross to $newDirection because there are monkeys in the other direction")) starvationActorRef = Option(sender) sender ! CannotCross case CanICross(newDirection) => logger.debug( logMsg(s"You cannot cross to $newDirection. ")) sender ! CannotCross } private def empty: Receive = { case CanICross(direction) => logger.debug(logMsg(s"A monkey is trying to cross the canyon to $direction")) context.become(receiveClimbingRobe(direction, incrementMonkeys(Set.empty[ActorRef], sender))) sender ! CanCross case message => logger.debug(logMsg(s"I don't understand you: $message")) } private def receiveClimbingRobe(direction: Direction, monkeys: Set[ActorRef]): Receive = climbingRobeOtherMonkeys(direction, monkeys) orElse lastMonkeyCrossingCanyon(direction) orElse starvationCondition orElse cannotCrossAndDetectStarvation(direction) private def climbingRobeOtherMonkeys(direction: Direction, monkeys: Set[ActorRef]): Receive = { case CrossingCanyon => logger.info(logMsg(s"You are now in the robe to cross to $direction, Be aware. 
Monkeys = ${monkeys.size}")) context.become(receiveCrossing(direction, monkeys)) case CrossedCanyon if monkeys.size > 1 => val monkeysInTheRobe = decrementMonkeys(monkeys, sender) logger.info(logMsg(s"Congratulation. A monkey is in the other side ($direction). " + s"There are monkeys in the robe. Monkeys = ${monkeysInTheRobe.size}")) context.become(receiveClimbingRobe(direction, monkeysInTheRobe)) } private def lastMonkeyCrossingCanyon(direction: Direction): Receive = { case CrossedCanyon => logger.info(logMsg(s"Congratulation. A monkey is in the other side ($direction). The robe is empty")) starvationActorRef.foreach(actorRef => actorRef ! AreYouReady) starvationActorRef = None context.become(empty) } private def receiveCrossing(direction: Direction, numMonkeys: Set[ActorRef]): Receive = crossing(direction, numMonkeys) orElse lastMonkeyCrossingCanyon(direction) orElse starvationCondition orElse cannotCrossAndDetectStarvation(direction) // Crossing partial function private def crossing(direction: Direction, monkeys: Set[ActorRef]): Receive = { case CanICross(newDirection) if direction.equals(newDirection) && starvationActorRef.isEmpty => logger.debug(logMsg(s"A monkey is trying to cross the canyon to $direction")) val monkeysInTheRobe = incrementMonkeys(monkeys, sender) logger.info(logMsg(s"You can cross to $newDirection, but the robe is being used. Be aware. Monkeys = ${monkeysInTheRobe.size}")) context.become(receiveClimbingRobe(direction, monkeysInTheRobe)) sender ! CanCross case CrossedCanyon if monkeys.size > 1 => val monkeysInTheRobe = decrementMonkeys(monkeys, sender) logger.info(logMsg(s"Congratulation. A monkey is in the other side ($direction). " + s"There are monkeys in the robe. 
Monkeys = ${monkeysInTheRobe.size}")) context.become(receiveCrossing(direction, monkeysInTheRobe)) } private def logMsg(msg: String) = s"<canyon>: $msg" private def incrementMonkeys(initialMonkeys: Set[ActorRef], newMonkey: ActorRef) = initialMonkeys + newMonkey private def decrementMonkeys(initialMonkeys: Set[ActorRef], monkey: ActorRef) = initialMonkeys - monkey } object Canyon { val props = Props(classOf[Canyon]) }
fhuertas/monkey
src/main/scala/com/fhuertas/monkey/models/Canyon.scala
Scala
apache-2.0
4,297
package x import scala.annotation._ import scala.quoted._ trait CB[+T] object CBM: def pure[T](t:T):CB[T] = ??? def map[A,B](fa:CB[A])(f: A=>B):CB[B] = ??? def flatMap[A,B](fa:CB[A])(f: A=>CB[B]):CB[B] = ??? def spawn[A](op: =>CB[A]): CB[A] = ??? @compileTimeOnly("await should be inside async block") def await[T](f: CB[T]): T = ??? trait CpsExpr[T:Type](prev: Seq[Expr[?]]): def fLast(using Quotes): Expr[CB[T]] def prependExprs(exprs: Seq[Expr[?]]): CpsExpr[T] def append[A:Type](chunk: CpsExpr[A])(using Quotes): CpsExpr[A] def syncOrigin(using Quotes): Option[Expr[T]] def map[A:Type](f: Expr[T => A])(using Quotes): CpsExpr[A] = MappedCpsExpr[T,A](Seq(),this,f) def flatMap[A:Type](f: Expr[T => CB[A]])(using Quotes): CpsExpr[A] = FlatMappedCpsExpr[T,A](Seq(),this,f) def transformed(using Quotes): Expr[CB[T]] = import quotes.reflect._ Block(prev.toList.map(_.asTerm), fLast.asTerm).asExprOf[CB[T]] case class GenericSyncCpsExpr[T:Type](prev: Seq[Expr[?]],last: Expr[T]) extends CpsExpr[T](prev): override def fLast(using Quotes): Expr[CB[T]] = '{ CBM.pure(${last}:T) } override def prependExprs(exprs: Seq[Expr[?]]): CpsExpr[T] = copy(prev = exprs ++: prev) override def syncOrigin(using Quotes): Option[Expr[T]] = import quotes.reflect._ Some(Block(prev.toList.map(_.asTerm), last.asTerm).asExprOf[T]) override def append[A:Type](e: CpsExpr[A])(using Quotes) = e.prependExprs(Seq(last)).prependExprs(prev) override def map[A:Type](f: Expr[T => A])(using Quotes): CpsExpr[A] = copy(last = '{ $f($last) }) override def flatMap[A:Type](f: Expr[T => CB[A]])(using Quotes): CpsExpr[A] = GenericAsyncCpsExpr[A](prev, '{ CBM.flatMap(CBM.pure($last))($f) } ) abstract class AsyncCpsExpr[T:Type]( prev: Seq[Expr[?]] ) extends CpsExpr[T](prev): override def append[A:Type](e: CpsExpr[A])(using Quotes): CpsExpr[A] = flatMap( '{ (x:T) => ${e.transformed} }) override def syncOrigin(using Quotes): Option[Expr[T]] = None case class GenericAsyncCpsExpr[T:Type]( prev: Seq[Expr[?]], fLastExpr: 
Expr[CB[T]] ) extends AsyncCpsExpr[T](prev): override def fLast(using Quotes): Expr[CB[T]] = fLastExpr override def prependExprs(exprs: Seq[Expr[?]]): CpsExpr[T] = copy(prev = exprs ++: prev) override def map[A:Type](f: Expr[T => A])(using Quotes): CpsExpr[A] = MappedCpsExpr(Seq(),this,f) override def flatMap[A:Type](f: Expr[T => CB[A]])(using Quotes): CpsExpr[A] = FlatMappedCpsExpr(Seq(),this,f) case class MappedCpsExpr[S:Type, T:Type]( prev: Seq[Expr[?]], point: CpsExpr[S], mapping: Expr[S=>T] ) extends AsyncCpsExpr[T](prev): override def fLast(using Quotes): Expr[CB[T]] = '{ CBM.map(${point.transformed})($mapping) } override def prependExprs(exprs: Seq[Expr[?]]): CpsExpr[T] = copy(prev = exprs ++: prev) case class FlatMappedCpsExpr[S:Type, T:Type]( prev: Seq[Expr[?]], point: CpsExpr[S], mapping: Expr[S => CB[T]] ) extends AsyncCpsExpr[T](prev): override def fLast(using Quotes): Expr[CB[T]] = '{ CBM.flatMap(${point.transformed})($mapping) } override def prependExprs(exprs: Seq[Expr[?]]): CpsExpr[T] = copy(prev = exprs ++: prev) class ValRhsFlatMappedCpsExpr[T:Type, V:Type](using thisQuotes: Quotes) ( prev: Seq[Expr[?]], oldValDef: quotes.reflect.ValDef, cpsRhs: CpsExpr[V], next: CpsExpr[T] ) extends AsyncCpsExpr[T](prev) { override def fLast(using Quotes):Expr[CB[T]] = import quotes.reflect._ next.syncOrigin match case Some(nextOrigin) => // owner of this block is incorrect '{ CBM.map(${cpsRhs.transformed})((vx:V) => ${buildAppendBlockExpr('vx, nextOrigin)}) } case None => '{ CBM.flatMap(${cpsRhs.transformed})((v:V)=> ${buildAppendBlockExpr('v, next.transformed)}) } override def prependExprs(exprs: Seq[Expr[?]]): CpsExpr[T] = ValRhsFlatMappedCpsExpr(using thisQuotes)(exprs ++: prev,oldValDef,cpsRhs,next) override def append[A:quoted.Type](e: CpsExpr[A])(using Quotes) = ValRhsFlatMappedCpsExpr(using thisQuotes)(prev,oldValDef,cpsRhs,next.append(e)) private def buildAppendBlock(using Quotes)(rhs:quotes.reflect.Term, exprTerm:quotes.reflect.Term): 
quotes.reflect.Term = import quotes.reflect._ import scala.quoted.Expr val castedOldValDef = oldValDef.asInstanceOf[quotes.reflect.ValDef] val valDef = ValDef(castedOldValDef.symbol, Some(rhs.changeOwner(castedOldValDef.symbol))) exprTerm match case Block(stats,last) => Block(valDef::stats, last) case other => Block(valDef::Nil,other) private def buildAppendBlockExpr[A:Type](using Quotes)(rhs: Expr[V], expr:Expr[A]):Expr[A] = import quotes.reflect._ buildAppendBlock(rhs.asTerm,expr.asTerm).asExprOf[A] } object CpsExpr: def sync[T:Type](f: Expr[T]): CpsExpr[T] = GenericSyncCpsExpr[T](Seq(), f) def async[T:Type](f: Expr[CB[T]]): CpsExpr[T] = GenericAsyncCpsExpr[T](Seq(), f) object Async: transparent inline def transform[T](inline expr: T) = ${ Async.transformImpl[T]('expr) } def transformImpl[T:Type](f: Expr[T])(using Quotes): Expr[CB[T]] = import quotes.reflect._ // println(s"before transformed: ${f.show}") val cpsExpr = rootTransform[T](f) val r = '{ CBM.spawn(${cpsExpr.transformed}) } // println(s"transformed value: ${r.show}") r def rootTransform[T:Type](f: Expr[T])(using Quotes): CpsExpr[T] = { import quotes.reflect._ f match case '{ while ($cond) { $repeat } } => val cpsRepeat = rootTransform(repeat.asExprOf[Unit]) CpsExpr.async('{ def _whilefun():CB[Unit] = if ($cond) { ${cpsRepeat.flatMap('{(x:Unit) => _whilefun()}).transformed} } else { CBM.pure(()) } _whilefun() }.asExprOf[CB[T]]) case _ => val fTree = f.asTerm fTree match { case fun@Apply(fun1@TypeApply(obj2,targs2), args1) => if (obj2.symbol.name == "await") { val awaitArg = args1.head CpsExpr.async(awaitArg.asExprOf[CB[T]]) } else { ??? } case Assign(left,right) => left match case id@Ident(x) => right.tpe.widen.asType match case '[r] => val cpsRight = rootTransform(right.asExprOf[r]) CpsExpr.async( cpsRight.map[T]( '{ (x:r) => ${Assign(left,'x.asTerm).asExprOf[T] } }).transformed ) case _ => ??? 
case Block(prevs,last) => val rPrevs = prevs.map[CpsExpr[?]]{ p => p match case v@ValDef(vName,vtt,optRhs) => optRhs.get.tpe.widen.asType match case '[l] => val cpsRight = rootTransform(optRhs.get.asExprOf[l]) ValRhsFlatMappedCpsExpr(using quotes)(Seq(), v, cpsRight, CpsExpr.sync('{})) case t: Term => // TODO: rootTransform t.asExpr match case '{ $p: tp } => rootTransform(p) case other => printf(other.show) throw RuntimeException(s"can't handle term in block: $other") case other => printf(other.show) throw RuntimeException(s"unknown tree type in block: $other") } val rLast = rootTransform(last.asExprOf[T]) val blockResult = rPrevs.foldRight(rLast)((e,s) => e.append(s)) val retval = CpsExpr.async(blockResult.transformed) retval //BlockTransform(cpsCtx).run(prevs,last) case id@Ident(name) => CpsExpr.sync(id.asExprOf[T]) case tid@Typed(Ident(name), tp) => CpsExpr.sync(tid.asExprOf[T]) case matchTerm@Match(scrutinee, caseDefs) => val nCases = caseDefs.map{ old => CaseDef.copy(old)(old.pattern, old.guard, rootTransform(old.rhs.asExprOf[T]).transformed.asTerm) } CpsExpr.async(Match(scrutinee, nCases).asExprOf[CB[T]]) case inlinedTerm@ Inlined(call,List(),body) => rootTransform(body.asExprOf[T]) case constTerm@Literal(_)=> CpsExpr.sync(constTerm.asExprOf[T]) case _ => throw RuntimeException(s"language construction is not supported: ${fTree}") } }
dotty-staging/dotty
tests/neg-macros/i13809/Macros_1.scala
Scala
apache-2.0
10,548
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.parquet import scala.collection.JavaConversions._ import scala.reflect.ClassTag import scala.reflect.runtime.universe.TypeTag import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.{FileSystem, Path} import org.apache.hadoop.mapreduce.{JobContext, TaskAttemptContext} import org.scalatest.BeforeAndAfterAll import parquet.example.data.simple.SimpleGroup import parquet.example.data.{Group, GroupWriter} import parquet.hadoop.api.WriteSupport import parquet.hadoop.api.WriteSupport.WriteContext import parquet.hadoop.metadata.{CompressionCodecName, FileMetaData, ParquetMetadata} import parquet.hadoop.{Footer, ParquetFileWriter, ParquetOutputCommitter, ParquetWriter} import parquet.io.api.RecordConsumer import parquet.schema.{MessageType, MessageTypeParser} import org.apache.spark.SparkException import org.apache.spark.sql.catalyst.ScalaReflection import org.apache.spark.sql.catalyst.expressions.Row import org.apache.spark.sql.catalyst.util.DateUtils import org.apache.spark.sql.test.TestSQLContext import org.apache.spark.sql.test.TestSQLContext._ import org.apache.spark.sql.test.TestSQLContext.implicits._ import org.apache.spark.sql.types._ import 
org.apache.spark.sql.{DataFrame, QueryTest, SQLConf, SaveMode} // Write support class for nested groups: ParquetWriter initializes GroupWriteSupport // with an empty configuration (it is after all not intended to be used in this way?) // and members are private so we need to make our own in order to pass the schema // to the writer. private[parquet] class TestGroupWriteSupport(schema: MessageType) extends WriteSupport[Group] { var groupWriter: GroupWriter = null override def prepareForWrite(recordConsumer: RecordConsumer): Unit = { groupWriter = new GroupWriter(recordConsumer, schema) } override def init(configuration: Configuration): WriteContext = { new WriteContext(schema, new java.util.HashMap[String, String]()) } override def write(record: Group) { groupWriter.write(record) } } /** * A test suite that tests basic Parquet I/O. */ class ParquetIOSuiteBase extends QueryTest with ParquetTest { val sqlContext = TestSQLContext import sqlContext.implicits.localSeqToDataFrameHolder /** * Writes `data` to a Parquet file, reads it back and check file contents. */ protected def checkParquetFile[T <: Product : ClassTag: TypeTag](data: Seq[T]): Unit = { withParquetDataFrame(data)(r => checkAnswer(r, data.map(Row.fromTuple))) } test("basic data types (without binary)") { val data = (1 to 4).map { i => (i % 2 == 0, i, i.toLong, i.toFloat, i.toDouble) } checkParquetFile(data) } test("raw binary") { val data = (1 to 4).map(i => Tuple1(Array.fill(3)(i.toByte))) withParquetDataFrame(data) { df => assertResult(data.map(_._1.mkString(",")).sorted) { df.collect().map(_.getAs[Array[Byte]](0).mkString(",")).sorted } } } test("string") { val data = (1 to 4).map(i => Tuple1(i.toString)) // Property spark.sql.parquet.binaryAsString shouldn't affect Parquet files written by Spark SQL // as we store Spark SQL schema in the extra metadata. 
withSQLConf(SQLConf.PARQUET_BINARY_AS_STRING -> "false")(checkParquetFile(data)) withSQLConf(SQLConf.PARQUET_BINARY_AS_STRING -> "true")(checkParquetFile(data)) } test("fixed-length decimals") { def makeDecimalRDD(decimal: DecimalType): DataFrame = sparkContext .parallelize(0 to 1000) .map(i => Tuple1(i / 100.0)) .toDF() // Parquet doesn't allow column names with spaces, have to add an alias here .select($"_1" cast decimal as "dec") for ((precision, scale) <- Seq((5, 2), (1, 0), (1, 1), (18, 10), (18, 17))) { withTempPath { dir => val data = makeDecimalRDD(DecimalType(precision, scale)) data.write.parquet(dir.getCanonicalPath) checkAnswer(read.parquet(dir.getCanonicalPath), data.collect().toSeq) } } // Decimals with precision above 18 are not yet supported intercept[Throwable] { withTempPath { dir => makeDecimalRDD(DecimalType(19, 10)).write.parquet(dir.getCanonicalPath) read.parquet(dir.getCanonicalPath).collect() } } // Unlimited-length decimals are not yet supported intercept[Throwable] { withTempPath { dir => makeDecimalRDD(DecimalType.Unlimited).write.parquet(dir.getCanonicalPath) read.parquet(dir.getCanonicalPath).collect() } } } test("date type") { def makeDateRDD(): DataFrame = sparkContext .parallelize(0 to 1000) .map(i => Tuple1(DateUtils.toJavaDate(i))) .toDF() .select($"_1") withTempPath { dir => val data = makeDateRDD() data.write.parquet(dir.getCanonicalPath) checkAnswer(read.parquet(dir.getCanonicalPath), data.collect().toSeq) } } test("map") { val data = (1 to 4).map(i => Tuple1(Map(i -> s"val_$i"))) checkParquetFile(data) } test("array") { val data = (1 to 4).map(i => Tuple1(Seq(i, i + 1))) checkParquetFile(data) } test("struct") { val data = (1 to 4).map(i => Tuple1((i, s"val_$i"))) withParquetDataFrame(data) { df => // Structs are converted to `Row`s checkAnswer(df, data.map { case Tuple1(struct) => Row(Row(struct.productIterator.toSeq: _*)) }) } } test("nested struct with array of array as field") { val data = (1 to 4).map(i => Tuple1((i, 
Seq(Seq(s"val_$i"))))) withParquetDataFrame(data) { df => // Structs are converted to `Row`s checkAnswer(df, data.map { case Tuple1(struct) => Row(Row(struct.productIterator.toSeq: _*)) }) } } test("nested map with struct as value type") { val data = (1 to 4).map(i => Tuple1(Map(i -> (i, s"val_$i")))) withParquetDataFrame(data) { df => checkAnswer(df, data.map { case Tuple1(m) => Row(m.mapValues(struct => Row(struct.productIterator.toSeq: _*))) }) } } test("nulls") { val allNulls = ( null.asInstanceOf[java.lang.Boolean], null.asInstanceOf[Integer], null.asInstanceOf[java.lang.Long], null.asInstanceOf[java.lang.Float], null.asInstanceOf[java.lang.Double]) withParquetDataFrame(allNulls :: Nil) { df => val rows = df.collect() assert(rows.length === 1) assert(rows.head === Row(Seq.fill(5)(null): _*)) } } test("nones") { val allNones = ( None.asInstanceOf[Option[Int]], None.asInstanceOf[Option[Long]], None.asInstanceOf[Option[String]]) withParquetDataFrame(allNones :: Nil) { df => val rows = df.collect() assert(rows.length === 1) assert(rows.head === Row(Seq.fill(3)(null): _*)) } } test("compression codec") { def compressionCodecFor(path: String): String = { val codecs = ParquetTypesConverter .readMetaData(new Path(path), Some(configuration)) .getBlocks .flatMap(_.getColumns) .map(_.getCodec.name()) .distinct assert(codecs.size === 1) codecs.head } val data = (0 until 10).map(i => (i, i.toString)) def checkCompressionCodec(codec: CompressionCodecName): Unit = { withSQLConf(SQLConf.PARQUET_COMPRESSION -> codec.name()) { withParquetFile(data) { path => assertResult(conf.parquetCompressionCodec.toUpperCase) { compressionCodecFor(path) } } } } // Checks default compression codec checkCompressionCodec(CompressionCodecName.fromConf(conf.parquetCompressionCodec)) checkCompressionCodec(CompressionCodecName.UNCOMPRESSED) checkCompressionCodec(CompressionCodecName.GZIP) checkCompressionCodec(CompressionCodecName.SNAPPY) } test("read raw Parquet file") { def 
makeRawParquetFile(path: Path): Unit = { val schema = MessageTypeParser.parseMessageType( """ |message root { | required boolean _1; | required int32 _2; | required int64 _3; | required float _4; | required double _5; |} """.stripMargin) val writeSupport = new TestGroupWriteSupport(schema) val writer = new ParquetWriter[Group](path, writeSupport) (0 until 10).foreach { i => val record = new SimpleGroup(schema) record.add(0, i % 2 == 0) record.add(1, i) record.add(2, i.toLong) record.add(3, i.toFloat) record.add(4, i.toDouble) writer.write(record) } writer.close() } withTempDir { dir => val path = new Path(dir.toURI.toString, "part-r-0.parquet") makeRawParquetFile(path) checkAnswer(read.parquet(path.toString), (0 until 10).map { i => Row(i % 2 == 0, i, i.toLong, i.toFloat, i.toDouble) }) } } test("write metadata") { withTempPath { file => val path = new Path(file.toURI.toString) val fs = FileSystem.getLocal(configuration) val attributes = ScalaReflection.attributesFor[(Int, String)] ParquetTypesConverter.writeMetaData(attributes, path, configuration) assert(fs.exists(new Path(path, ParquetFileWriter.PARQUET_COMMON_METADATA_FILE))) assert(fs.exists(new Path(path, ParquetFileWriter.PARQUET_METADATA_FILE))) val metaData = ParquetTypesConverter.readMetaData(path, Some(configuration)) val actualSchema = metaData.getFileMetaData.getSchema val expectedSchema = ParquetTypesConverter.convertFromAttributes(attributes) actualSchema.checkContains(expectedSchema) expectedSchema.checkContains(actualSchema) } } test("save - overwrite") { withParquetFile((1 to 10).map(i => (i, i.toString))) { file => val newData = (11 to 20).map(i => (i, i.toString)) newData.toDF().write.format("parquet").mode(SaveMode.Overwrite).save(file) checkAnswer(read.parquet(file), newData.map(Row.fromTuple)) } } test("save - ignore") { val data = (1 to 10).map(i => (i, i.toString)) withParquetFile(data) { file => val newData = (11 to 20).map(i => (i, i.toString)) 
newData.toDF().write.format("parquet").mode(SaveMode.Ignore).save(file) checkAnswer(read.parquet(file), data.map(Row.fromTuple)) } } test("save - throw") { val data = (1 to 10).map(i => (i, i.toString)) withParquetFile(data) { file => val newData = (11 to 20).map(i => (i, i.toString)) val errorMessage = intercept[Throwable] { newData.toDF().write.format("parquet").mode(SaveMode.ErrorIfExists).save(file) }.getMessage assert(errorMessage.contains("already exists")) } } test("save - append") { val data = (1 to 10).map(i => (i, i.toString)) withParquetFile(data) { file => val newData = (11 to 20).map(i => (i, i.toString)) newData.toDF().write.format("parquet").mode(SaveMode.Append).save(file) checkAnswer(read.parquet(file), (data ++ newData).map(Row.fromTuple)) } } test("SPARK-6315 regression test") { // Spark 1.1 and prior versions write Spark schema as case class string into Parquet metadata. // This has been deprecated by JSON format since 1.2. Notice that, 1.3 further refactored data // types API, and made StructType.fields an array. This makes the result of StructType.toString // different from prior versions: there's no "Seq" wrapping the fields part in the string now. val sparkSchema = "StructType(Seq(StructField(a,BooleanType,false),StructField(b,IntegerType,false)))" // The Parquet schema is intentionally made different from the Spark schema. Because the new // Parquet data source simply falls back to the Parquet schema once it fails to parse the Spark // schema. By making these two different, we are able to assert the old style case class string // is parsed successfully. 
val parquetSchema = MessageTypeParser.parseMessageType( """message root { | required int32 c; |} """.stripMargin) withTempPath { location => val extraMetadata = Map(RowReadSupport.SPARK_METADATA_KEY -> sparkSchema.toString) val fileMetadata = new FileMetaData(parquetSchema, extraMetadata, "Spark") val path = new Path(location.getCanonicalPath) ParquetFileWriter.writeMetadataFile( sparkContext.hadoopConfiguration, path, new Footer(path, new ParquetMetadata(fileMetadata, Nil)) :: Nil) assertResult(read.parquet(path.toString).schema) { StructType( StructField("a", BooleanType, nullable = false) :: StructField("b", IntegerType, nullable = false) :: Nil) } } } test("SPARK-6352 DirectParquetOutputCommitter") { val clonedConf = new Configuration(configuration) // Write to a parquet file and let it fail. // _temporary should be missing if direct output committer works. try { configuration.set("spark.sql.parquet.output.committer.class", "org.apache.spark.sql.parquet.DirectParquetOutputCommitter") sqlContext.udf.register("div0", (x: Int) => x / 0) withTempPath { dir => intercept[org.apache.spark.SparkException] { sqlContext.sql("select div0(1)").write.parquet(dir.getCanonicalPath) } val path = new Path(dir.getCanonicalPath, "_temporary") val fs = path.getFileSystem(configuration) assert(!fs.exists(path)) } } finally { // Hadoop 1 doesn't have `Configuration.unset` configuration.clear() clonedConf.foreach(entry => configuration.set(entry.getKey, entry.getValue)) } } test("SPARK-8121: spark.sql.parquet.output.committer.class shouldn't be overriden") { withTempPath { dir => val clonedConf = new Configuration(configuration) configuration.set( SQLConf.OUTPUT_COMMITTER_CLASS, classOf[ParquetOutputCommitter].getCanonicalName) configuration.set( "spark.sql.parquet.output.committer.class", classOf[BogusParquetOutputCommitter].getCanonicalName) try { val message = intercept[SparkException] { sqlContext.range(0, 1).write.parquet(dir.getCanonicalPath) }.getCause.getMessage 
assert(message === "Intentional exception for testing purposes") } finally { // Hadoop 1 doesn't have `Configuration.unset` configuration.clear() clonedConf.foreach(entry => configuration.set(entry.getKey, entry.getValue)) } } } } class BogusParquetOutputCommitter(outputPath: Path, context: TaskAttemptContext) extends ParquetOutputCommitter(outputPath, context) { override def commitJob(jobContext: JobContext): Unit = { sys.error("Intentional exception for testing purposes") } } class ParquetDataSourceOnIOSuite extends ParquetIOSuiteBase with BeforeAndAfterAll { val originalConf = sqlContext.conf.parquetUseDataSourceApi override protected def beforeAll(): Unit = { sqlContext.conf.setConf(SQLConf.PARQUET_USE_DATA_SOURCE_API, "true") } override protected def afterAll(): Unit = { sqlContext.setConf(SQLConf.PARQUET_USE_DATA_SOURCE_API, originalConf.toString) } test("SPARK-6330 regression test") { // In 1.3.0, save to fs other than file: without configuring core-site.xml would get: // IllegalArgumentException: Wrong FS: hdfs://..., expected: file:/// intercept[Throwable] { sqlContext.read.parquet("file:///nonexistent") } val errorMessage = intercept[Throwable] { sqlContext.read.parquet("hdfs://nonexistent") }.toString assert(errorMessage.contains("UnknownHostException")) } } class ParquetDataSourceOffIOSuite extends ParquetIOSuiteBase with BeforeAndAfterAll { val originalConf = sqlContext.conf.parquetUseDataSourceApi override protected def beforeAll(): Unit = { sqlContext.conf.setConf(SQLConf.PARQUET_USE_DATA_SOURCE_API, "false") } override protected def afterAll(): Unit = { sqlContext.setConf(SQLConf.PARQUET_USE_DATA_SOURCE_API, originalConf.toString) } }
andrewor14/iolap
sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetIOSuite.scala
Scala
apache-2.0
16,921
// Copyright (c) 2011 Thomas Suckow // are made available under the terms of the Eclipse Public License v1.0 // which accompanies this distribution, and is available at // http://www.eclipse.org/legal/epl-v10.html package net.codingwell.weave.languages.silk import org.parboiled.scala._ import net.codingwell.parboiled.Helpers._ import java.lang.String import org.parboiled.errors.{ErrorUtils, ParsingException} import net.codingwell.parboiled.IncludableInputBuffer import org.parboiled.support.IndexRange import org.parboiled.Context import org.parboiled.common.FileUtils class SilkParser(val buffer: IncludableInputBuffer[String]) extends Parser { def File = { OWhiteSpace ~ zeroOrMore(GlobalStatement) ~~> ast.File ~ EOI } def GlobalStatement = rule { Import | ImportViral | Module } def Import = rule { "import" ~ WhiteSpace ~ PackageSpecification ~~> ast.Import ~ OWhiteSpace ~ SEMI } def ImportViral = rule { "importviral" ~ WhiteSpace ~ PackageSpecification ~~> ast.ImportViral ~ OWhiteSpace ~ SEMI } def PackageSpecification = rule { oneOrMore( Identifier, "." 
) ~~> ast.PackageSpecification } def Identifier = rule { ID ~> ast.Identifier } def Module = rule { "module" ~ WhiteSpace ~ Identifier ~ OWhiteSpace ~ zeroOrMore(ModuleParameter) ~ Scope ~~> ast.Module } def ModuleParameter = rule { Direction ~ WhiteSpace ~ TypeSpecification ~ WhiteSpace ~ Identifier ~ OWhiteSpace ~ SEMI ~~> ast.Parameter } def Direction = rule { ("in" | "out" | "ref" | "ret") ~> ast.Direction } def TypeSpecification = rule { PlainType } def PlainType = rule { Identifier ~~> ast.PlainType } def LiteralType = rule { WhiteSpace }//TODO def NumberType = rule { WhiteSpace }//TODO def ArrayType = rule { WhiteSpace }//TODO def Scope = rule { "{" ~ OWhiteSpace ~ zeroOrMore(Statement) ~ "}" ~~> ast.Scope ~ OWhiteSpace } def Statement:Rule1[ast.Statement] = rule { GlobalStatement | Instantiation | ExpressionStatement | Scope } def ExpressionStatement = rule { ExpressionGroup ~ SEMI ~~> ast.ExpressionStatement } def ExpressionGroup = rule { oneOrMore( Expression, OWhiteSpace ) ~~> ast.ExpressionGroup ~ OWhiteSpace } def Expression:Rule1[ast.Expression] = rule { SimpleExpression ~ ChainExpression ~~> ast.Expression } def SimpleExpression = rule { Identifier | ParenExpression } def ChainExpression:Rule1[Option[ast.ChainExpression]] = rule { optional( ArrayExpression | MemberDereference ) } def ParenExpression = rule { "(" ~ OWhiteSpace ~ ExpressionGroup ~ ")" } def ArrayExpression = rule { "[" ~ OWhiteSpace ~ ExpressionGroup ~ "]" ~ OWhiteSpace ~ ChainExpression ~~> ast.ArrayExpression } def MemberDereference = rule { "." ~ Identifier ~ ChainExpression ~~> ast.MemberDereference } //def ArrayExpression = rule { Expression ~ "[" ~ OWhiteSpace ~ Expression ~ "]" ~~> ast.ArrayExpression } //TODO Inline Array Expression. AKA. {a,b,c,d} //def InlineArrayExpression = rule { } //def MemberDereference = rule { Expression ~ "." 
~ Identifier ~~> ast.MemberDereference } //def ParenExpression = rule { "(" ~ OWhiteSpace ~ ExpressionGroup ~ ")" } def Instantiation = rule { "var" ~ WhiteSpace ~ TypeSpecification ~ WhiteSpace ~ Identifier ~ OWhiteSpace ~ SEMI ~~> ast.Instantiation } def SEMI = rule("';'") { ";" ~ OWhiteSpace } @Deprecated def PackageSpec = rule { oneOrMore( ID, "." ) } def ID = rule { ( Letter ~ IDrest ) | Op } def IDrest = rule { zeroOrMore( !OpEnd ~ ( Letter | Digit ) ) ~ optional( OpEnd ) } //TODO: AST def Number = rule { Decimal | Integer } //Order matters def Decimal = rule { oneOrMore( Digit ) ~ "." ~ oneOrMore( Digit ) } def Integer = rule { oneOrMore( Digit ) } def Letter = rule { "A"-"Z" | "a"-"z" | "$" | "_" } def SpecialChar = rule { anyOf(".,{}[]();") } def OpChar = rule { !SpecialChar ~ !WhiteSpaceChar ~ !Letter ~ !Digit ~ ("!"-"~") } def Op = rule { oneOrMore( OpChar ) } def OpEnd = rule { "_" ~ Op } def Digit = rule { "0"-"9" } def SilkString = rule("String") { "\\"" ~ zeroOrMore( !anyOf("\\r\\n\\"\\\\") ~ ANY ) ~> ((s:String) => s) ~>> withContext( OriginalIndexRange ) ~~> ast.QuotedString ~ "\\"" ~ OWhiteSpace } def WhiteSpaceChar = rule { anyOf(" \\n\\r\\t\\f") } def WhiteSpace: Rule0 = rule("Whitespace") { oneOrMore( WhiteSpaceChar ) } def OWhiteSpace: Rule0 = rule("Whitespace") { optional( WhiteSpace ) } }
codingwell/Weave
lang-silk/src/main/scala/net/codingwell/weave/languages/silk/SilkParser.scala
Scala
epl-1.0
4,378
package au.id.cxd.math.probability.random /** * Parameters used in the GSL RBinom algorithm * These are computed once and then used during the calculation * @param p * @param q * @param np */ case class RBinomParameters(val p:Double, val q:Double, val np:Double ) { val s:Double = p / q // parameters that do not change. val ffm:Double = np + p val fm:Double = Math.round(ffm).toDouble val xm:Double = fm + 0.5 val npq:Double = np * q // From GSL lin 178. /* Compute cumulative area of tri, para, exp tails */ /* p1: radius of triangle region; since height=1, also: area of region */ /* p2: p1 + area of parallelogram region */ /* p3: p2 + area of left tail */ /* p4: p3 + area of right tail */ /* pi/p4: probability of i'th area (i=1,2,3,4) */ /* Note: magic numbers 2.195, 4.6, 0.134, 20.5, 15.3 */ /* These magic numbers are not adjustable...at least not easily! */ val p1:Double = Math.floor(2.195 * Math.sqrt(npq) - 4.6 * q) + 0.5 /* xl, xr: left and right edges of triangle */ val xl:Double = xm - p1 val xr:Double = xm + p1 /* Parameter of exponential tails */ /* Left tail: t(x) = c*exp(-lambda_l*[xl - (x+0.5)]) */ /* Right tail: t(x) = c*exp(-lambda_r*[(x+0.5) - xr]) */ val c:Double = 0.134 + 20.5 / (15.3 + fm) val p2:Double = p1 * (1.0 + c + c) val al:Double = (ffm - xl) / (ffm - xl * p) val lambda_l :Double= al * (1.0 + 0.5 * al) val ar:Double = (xr - ffm) / (xr * q) val lambda_r :Double= ar * (1.0 + 0.5 * ar) val p3:Double = p2 + c / lambda_l val p4:Double = p3 + c / lambda_r } case class RBinomOutputs(val ix:Double, val vari:Double, val accept:Double, val u:Double, val v:Double) {}
cxd/scala-au.id.cxd.math
math/src/main/scala/au/id/cxd/math/probability/random/RBinomParameters.scala
Scala
mit
1,686
/** * Copyright (C) 2010 Orbeon, Inc. * * This program is free software; you can redistribute it and/or modify it under the terms of the * GNU Lesser General Public License as published by the Free Software Foundation; either version * 2.1 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Lesser General Public License for more details. * * The full text of the license is available at http://www.gnu.org/copyleft/lesser.html */ package org.orbeon.oxf.xforms.control import org.orbeon.dom.QName import org.orbeon.oxf.xml.XMLReceiverHelper import org.xml.sax.helpers.AttributesImpl import org.orbeon.xforms.XFormsNames._ trait ControlExtensionAttributesSupport { self: XFormsControl => import ControlExtensionAttributesSupport._ // Optional extension attributes supported by the control private[ControlExtensionAttributesSupport] var _extensionAttributes: Option[Map[QName, String]] = None final def evaluatedExtensionAttributes: Map[QName, String] = _extensionAttributes getOrElse { val result = if (staticControl eq null) Map.empty[QName, String] else if (isRelevant) // NOTE: evaluateAvt can return null if there is no context // WARNING: don't use `mapValues`, which return a view which can't be stored in the back control tree staticControl.extensionAttributes map { case (k, v) => k -> (Option(evaluateAvt(v)) getOrElse "") } else // Don't attempt to evaluate expression when the control is non-relevant staticControl.nonRelevantExtensionAttributes _extensionAttributes = Some(result) result } final def evaluateNonRelevantExtensionAttribute(): Unit = _extensionAttributes = None final def markExtensionAttributesDirty(): Unit = _extensionAttributes = None final def compareExtensionAttributes(other: XFormsControl): Boolean = evaluatedExtensionAttributes == 
other.evaluatedExtensionAttributes // NOTE: Overridden by some tests def extensionAttributeValue(attributeName: QName): Option[String] = evaluatedExtensionAttributes.get(attributeName) // Add all non-null values to the given list of attributes, filtering by namespace URI // NOTE: The `class` attribute is excluded because handled separately. // NOTE: The `accept` attribute is also handled separately by the handler. final def addExtensionAttributesExceptClassAndAcceptForHandler(attributesImpl: AttributesImpl, namespaceURI: String): Unit = for { (name, value) <- evaluatedExtensionAttributes if value ne null if name.namespace.uri == namespaceURI && ! StandardAttributesToFilterOnHandler(name) localName = name.localName } locally { attributesImpl.addAttribute("", localName, localName, XMLReceiverHelper.CDATA, value) } final def addExtensionAttributesExceptClassAndAcceptForAjax( previousControlOpt : Option[XFormsControl], namespaceURI : String)(implicit ch : XMLReceiverHelper ): Unit = for { name <- staticControl.extensionAttributes.keys if name.namespace.uri == namespaceURI && ! StandardAttributesToFilterOnHandler(name) } locally { ControlAjaxSupport.outputAttributeElement( previousControlOpt, this, effectiveId, name.localName, _.extensionAttributeValue(name).orNull )(ch, containingDocument) } } private object ControlExtensionAttributesSupport { val StandardAttributesToFilterOnHandler = Set(CLASS_QNAME, ACCEPT_QNAME) }
orbeon/orbeon-forms
xforms-runtime/shared/src/main/scala/org/orbeon/oxf/xforms/control/ControlExtensionAttributesSupport.scala
Scala
lgpl-2.1
3,745
/** Copyright 2015 TappingStone, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prediction.data.storage /** Elasticsearch implementation of storage traits, supporting meta data only * * @group Implementation */ package object elasticsearch {}
ch33hau/PredictionIO
data/src/main/scala/io/prediction/data/storage/elasticsearch/package.scala
Scala
apache-2.0
795
package net.fwbrasil.zoot.spray import scala.concurrent.Await import scala.concurrent.Future import scala.concurrent.duration.Duration import akka.actor.ActorSystem import akka.io.IO import akka.pattern.ask import akka.util.Timeout import net.fwbrasil.zoot.core.request.Request import net.fwbrasil.zoot.core.response.Response import net.fwbrasil.zoot.spray.request.requestToSpray import net.fwbrasil.zoot.spray.response.responseFromSpray import spray.can.Http import spray.http.HttpResponse case class SprayClient(host: String, port: Int)(implicit system: ActorSystem, timeout: Timeout) extends (Request => Future[Response[Array[Byte]]]) { import system.dispatcher private val Http.HostConnectorInfo(connector, _) = Await.result(IO(Http) ? Http.HostConnectorSetup(host, port), Duration.Inf) def apply(request: Request) = connector.ask(requestToSpray(request)) .mapTo[HttpResponse] .map(responseFromSpray(_)) }
fwbrasil/zoot
zoot-spray/src/main/scala/net/fwbrasil/zoot/spray/SprayClient.scala
Scala
lgpl-2.1
971
/* * Copyright (C) Lightbend Inc. <https://www.lightbend.com> */ package play.api.libs.ws.ahc /** An object for parsing application/x-www-form-urlencoded data */ private[ahc] object FormUrlEncodedParser { /** * Parse the content type "application/x-www-form-urlencoded" which consists of a bunch of & separated key=value * pairs, both of which are URL encoded. * @param data The body content of the request, or whatever needs to be so parsed * @param encoding The character encoding of data * @return A ListMap of keys to the sequence of values for that key */ def parseNotPreservingOrder(data: String, encoding: String = "utf-8"): Map[String, Seq[String]] = { // Generate the pairs of values from the string. parseToPairs(data, encoding) .groupBy(_._1) .map(param => param._1 -> param._2.map(_._2)) } /** * Parse the content type "application/x-www-form-urlencoded" which consists of a bunch of & separated key=value * pairs, both of which are URL encoded. We are careful in this parser to maintain the original order of the * keys by using OrderPreserving.groupBy as some applications depend on the original browser ordering. * @param data The body content of the request, or whatever needs to be so parsed * @param encoding The character encoding of data * @return A ListMap of keys to the sequence of values for that key */ def parse(data: String, encoding: String = "utf-8"): Map[String, Seq[String]] = { // Generate the pairs of values from the string. val pairs: Seq[(String, String)] = parseToPairs(data, encoding) // Group the pairs by the key (first item of the pair) being sure to preserve insertion order OrderPreserving.groupBy(pairs)(_._1) } /** * Parse the content type "application/x-www-form-urlencoded", mapping to a Java compatible format. 
* @param data The body content of the request, or whatever needs to be so parsed * @param encoding The character encoding of data * @return A Map of keys to the sequence of values for that key */ def parseAsJava(data: String, encoding: String): java.util.Map[String, java.util.List[String]] = { import scala.collection.JavaConverters._ parse(data, encoding).map { case (key, values) => key -> values.asJava }.asJava } /** * Parse the content type "application/x-www-form-urlencoded", mapping to a Java compatible format. * @param data The body content of the request, or whatever needs to be so parsed * @param encoding The character encoding of data * @return A Map of keys to the sequence of array values for that key */ def parseAsJavaArrayValues(data: String, encoding: String): java.util.Map[String, Array[String]] = { import scala.collection.JavaConverters._ parse(data, encoding).map { case (key, values) => key -> values.toArray }.asJava } private[this] val parameterDelimiter = "[&;]".r /** * Do the basic parsing into a sequence of key/value pairs * @param data The data to parse * @param encoding The encoding to use for interpreting the data * @return The sequence of key/value pairs */ private def parseToPairs(data: String, encoding: String): Seq[(String, String)] = { if (data.isEmpty) { Seq.empty } else { parameterDelimiter.split(data).toIndexedSeq.map { param => val parts = param.split("=", -1) val key = java.net.URLDecoder.decode(parts(0), encoding) val value = java.net.URLDecoder.decode(parts.lift(1).getOrElse(""), encoding) key -> value } } } }
playframework/play-ws
play-ahc-ws-standalone/src/main/scala/play/api/libs/ws/ahc/FormUrlEncodedParser.scala
Scala
apache-2.0
3,593
package mesosphere.marathon.upgrade import java.util.UUID import akka.actor.{ ActorSystem, Props } import akka.testkit.{ TestActorRef, TestProbe } import akka.util.Timeout import mesosphere.marathon.core.launchqueue.LaunchQueue import mesosphere.marathon.event.MesosStatusUpdateEvent import mesosphere.marathon.health.HealthCheckManager import mesosphere.marathon.io.storage.StorageProvider import mesosphere.marathon.state._ import mesosphere.marathon.tasks.{ MarathonTasks, TaskTracker } import mesosphere.marathon.upgrade.DeploymentManager.{ DeploymentFinished, DeploymentStepInfo } import mesosphere.marathon.{ MarathonSpec, SchedulerActions } import mesosphere.marathon.Protos.MarathonTask import mesosphere.mesos.protos.Implicits._ import mesosphere.mesos.protos.{ SlaveID, TaskID } import org.apache.mesos.Protos.Status import org.apache.mesos.SchedulerDriver import org.mockito.Matchers.{ any, same } import org.mockito.Mockito.{ times, verify, verifyNoMoreInteractions, when } import org.mockito.invocation.InvocationOnMock import org.mockito.stubbing.Answer import org.scalatest.mock.MockitoSugar import org.scalatest.{ BeforeAndAfterAll, Matchers } import scala.concurrent.Future import scala.concurrent.duration._ class DeploymentActorTest extends MarathonSpec with Matchers with BeforeAndAfterAll with MockitoSugar { var tracker: TaskTracker = _ var queue: LaunchQueue = _ var driver: SchedulerDriver = _ var scheduler: SchedulerActions = _ var storage: StorageProvider = _ var hcManager: HealthCheckManager = _ implicit val defaultTimeout: Timeout = 5.seconds before { driver = mock[SchedulerDriver] tracker = mock[TaskTracker] queue = mock[LaunchQueue] scheduler = mock[SchedulerActions] storage = mock[StorageProvider] hcManager = mock[HealthCheckManager] } test("Deploy") { implicit val system = ActorSystem("TestSystem") val managerProbe = TestProbe() val receiverProbe = TestProbe() val app1 = AppDefinition(id = PathId("app1"), cmd = Some("cmd"), instances = 2) val app2 = 
AppDefinition(id = PathId("app2"), cmd = Some("cmd"), instances = 1) val app3 = AppDefinition(id = PathId("app3"), cmd = Some("cmd"), instances = 1) val app4 = AppDefinition(id = PathId("app4"), cmd = Some("cmd")) val origGroup = Group(PathId("/foo/bar"), Set(app1, app2, app4)) val version2 = AppDefinition.VersionInfo.forNewConfig(Timestamp(1000)) val app1New = app1.copy(instances = 1, versionInfo = version2) val app2New = app2.copy(instances = 2, cmd = Some("otherCmd"), versionInfo = version2) val targetGroup = Group(PathId("/foo/bar"), Set(app1New, app2New, app3)) // setting started at to 0 to make sure this survives val slaveId = SlaveID("some slave id") val task1_1 = MarathonTasks.makeTask("task1_1", "", Nil, Nil, app1.version, slaveId).toBuilder.setStartedAt(0).build() val task1_2 = MarathonTasks.makeTask("task1_2", "", Nil, Nil, app1.version, slaveId).toBuilder.setStartedAt(1000).build() val task2_1 = MarathonTasks.makeTask("task2_1", "", Nil, Nil, app2.version, slaveId) val task3_1 = MarathonTasks.makeTask("task3_1", "", Nil, Nil, app3.version, slaveId) val task4_1 = MarathonTasks.makeTask("task4_1", "", Nil, Nil, app4.version, slaveId) val plan = DeploymentPlan(origGroup, targetGroup) when(tracker.get(app1.id)).thenReturn(Set(task1_1, task1_2)) when(tracker.get(app2.id)).thenReturn(Set(task2_1)) when(tracker.get(app3.id)).thenReturn(Set(task3_1)) when(tracker.get(app4.id)).thenReturn(Set(task4_1)) when(driver.killTask(TaskID(task1_2.getId))).thenAnswer(new Answer[Status] { def answer(invocation: InvocationOnMock): Status = { system.eventStream.publish(MesosStatusUpdateEvent("", "task1_2", "TASK_KILLED", "", app1.id, "", Nil, app1New.version.toString)) Status.DRIVER_RUNNING } }) when(driver.killTask(TaskID(task2_1.getId))).thenAnswer(new Answer[Status] { def answer(invocation: InvocationOnMock): Status = { system.eventStream.publish(MesosStatusUpdateEvent("", "task2_1", "TASK_KILLED", "", app2.id, "", Nil, app2.version.toString)) Status.DRIVER_RUNNING } }) 
when(queue.add(same(app2New), any[Int])).thenAnswer(new Answer[Boolean] { def answer(invocation: InvocationOnMock): Boolean = { println(invocation.getArguments.toSeq) for (i <- 0 until invocation.getArguments()(1).asInstanceOf[Int]) system.eventStream.publish(MesosStatusUpdateEvent("", UUID.randomUUID().toString, "TASK_RUNNING", "", app2.id, "", Nil, app2New.version.toString)) true } }) when(scheduler.startApp(driver, app3)).thenAnswer(new Answer[Future[Unit]] { def answer(invocation: InvocationOnMock): Future[Unit] = { // system.eventStream.publish(MesosStatusUpdateEvent("", "task3_1", "TASK_RUNNING", "", app3.id, "", Nil, app3.version.toString)) Future.successful(()) } }) when(scheduler.scale(driver, app3)).thenAnswer(new Answer[Future[Unit]] { def answer(invocation: InvocationOnMock): Future[Unit] = { system.eventStream.publish(MesosStatusUpdateEvent("", "task3_1", "TASK_RUNNING", "", app3.id, "", Nil, app3.version.toString)) Future.successful(()) } }) when(driver.killTask(TaskID(task4_1.getId))).thenAnswer(new Answer[Status] { def answer(invocation: InvocationOnMock): Status = { system.eventStream.publish(MesosStatusUpdateEvent("", "task4_1", "TASK_FINISHED", "", app4.id, "", Nil, app4.version.toString)) Status.DRIVER_RUNNING } }) try { TestActorRef( DeploymentActor.props( managerProbe.ref, receiverProbe.ref, driver, scheduler, plan, tracker, queue, storage, hcManager, system.eventStream ) ) plan.steps.zipWithIndex.foreach { case (step, num) => managerProbe.expectMsg(5.seconds, DeploymentStepInfo(plan, step, num + 1)) } managerProbe.expectMsg(5.seconds, DeploymentFinished(plan)) verify(scheduler).startApp(driver, app3.copy(instances = 0)) verify(driver, times(1)).killTask(TaskID(task1_2.getId)) verify(scheduler).stopApp(driver, app4.copy(instances = 0)) } finally { system.shutdown() } } test("Restart app") { implicit val system = ActorSystem("TestSystem") val managerProbe = TestProbe() val receiverProbe = TestProbe() val app = AppDefinition(id = PathId("app1"), 
cmd = Some("cmd"), instances = 2) val origGroup = Group(PathId("/foo/bar"), Set(app)) val version2 = AppDefinition.VersionInfo.forNewConfig(Timestamp(1000)) val appNew = app.copy(cmd = Some("cmd new"), versionInfo = version2) val targetGroup = Group(PathId("/foo/bar"), Set(appNew)) val slaveId = SlaveID("some slave id") val task1_1 = MarathonTasks.makeTask("task1_1", "", Nil, Nil, app.version, slaveId).toBuilder.setStartedAt(0).build() val task1_2 = MarathonTasks.makeTask("task1_2", "", Nil, Nil, app.version, slaveId).toBuilder.setStartedAt(1000).build() when(tracker.get(app.id)).thenReturn(Set(task1_1, task1_2)) val plan = DeploymentPlan("foo", origGroup, targetGroup, List(DeploymentStep(List(RestartApplication(appNew)))), Timestamp.now()) when(driver.killTask(TaskID(task1_1.getId))).thenAnswer(new Answer[Status] { def answer(invocation: InvocationOnMock): Status = { system.eventStream.publish(MesosStatusUpdateEvent("", "task1_1", "TASK_KILLED", "", app.id, "", Nil, app.version.toString)) Status.DRIVER_RUNNING } }) when(driver.killTask(TaskID(task1_2.getId))).thenAnswer(new Answer[Status] { def answer(invocation: InvocationOnMock): Status = { system.eventStream.publish(MesosStatusUpdateEvent("", "task1_2", "TASK_KILLED", "", app.id, "", Nil, app.version.toString)) Status.DRIVER_RUNNING } }) val taskIDs = Iterator.from(3) when(queue.count(appNew.id)).thenAnswer(new Answer[Int] { override def answer(p1: InvocationOnMock): Int = appNew.instances }) when(queue.add(same(appNew), any[Int])).thenAnswer(new Answer[Boolean] { def answer(invocation: InvocationOnMock): Boolean = { for (i <- 0 until invocation.getArguments()(1).asInstanceOf[Int]) system.eventStream.publish(MesosStatusUpdateEvent("", s"task1_${taskIDs.next()}", "TASK_RUNNING", "", app.id, "", Nil, appNew.version.toString)) true } }) try { TestActorRef( DeploymentActor.props( managerProbe.ref, receiverProbe.ref, driver, scheduler, plan, tracker, queue, storage, hcManager, system.eventStream ) ) 
receiverProbe.expectMsg(DeploymentFinished(plan)) verify(driver).killTask(TaskID(task1_1.getId)) verify(driver).killTask(TaskID(task1_2.getId)) verify(queue).add(appNew, 2) } finally { system.shutdown() } } test("Restart suspended app") { implicit val system = ActorSystem("TestSystem") val managerProbe = TestProbe() val receiverProbe = TestProbe() val app = AppDefinition(id = PathId("app1"), cmd = Some("cmd"), instances = 0) val origGroup = Group(PathId("/foo/bar"), Set(app)) val version2 = AppDefinition.VersionInfo.forNewConfig(Timestamp(1000)) val appNew = app.copy(cmd = Some("cmd new"), versionInfo = version2) val targetGroup = Group(PathId("/foo/bar"), Set(appNew)) val plan = DeploymentPlan("foo", origGroup, targetGroup, List(DeploymentStep(List(RestartApplication(appNew)))), Timestamp.now()) when(tracker.get(app.id)).thenReturn(Set[MarathonTask]()) try { TestActorRef( DeploymentActor.props( managerProbe.ref, receiverProbe.ref, driver, scheduler, plan, tracker, queue, storage, hcManager, system.eventStream ) ) receiverProbe.expectMsg(DeploymentFinished(plan)) } finally { system.shutdown() } } test("Scale with tasksToKill") { implicit val system = ActorSystem("TestSystem") val managerProbe = TestProbe() val receiverProbe = TestProbe() val app1 = AppDefinition(id = PathId("app1"), cmd = Some("cmd"), instances = 3) val origGroup = Group(PathId("/foo/bar"), Set(app1)) val version2 = AppDefinition.VersionInfo.forNewConfig(Timestamp(1000)) val app1New = app1.copy(instances = 2, versionInfo = version2) val targetGroup = Group(PathId("/foo/bar"), Set(app1New)) val slaveId = SlaveID("some slave id") val task1_1 = MarathonTasks.makeTask("task1_1", "", Nil, Nil, app1.version, slaveId).toBuilder.setStartedAt(0).build() val task1_2 = MarathonTasks.makeTask("task1_2", "", Nil, Nil, app1.version, slaveId).toBuilder.setStartedAt(500).build() val task1_3 = MarathonTasks.makeTask("task1_3", "", Nil, Nil, app1.version, slaveId).toBuilder.setStartedAt(1000).build() val plan = 
DeploymentPlan(original = origGroup, target = targetGroup, toKill = Map(app1.id -> Set(task1_2))) when(tracker.get(app1.id)).thenReturn(Set(task1_1, task1_2, task1_3)) when(driver.killTask(TaskID(task1_2.getId))).thenAnswer(new Answer[Status] { def answer(invocation: InvocationOnMock): Status = { system.eventStream.publish(MesosStatusUpdateEvent("", "task1_2", "TASK_KILLED", "", app1.id, "", Nil, app1New.version.toString)) Status.DRIVER_RUNNING } }) try { TestActorRef( DeploymentActor.props( managerProbe.ref, receiverProbe.ref, driver, scheduler, plan, tracker, queue, storage, hcManager, system.eventStream ) ) plan.steps.zipWithIndex.foreach { case (step, num) => managerProbe.expectMsg(5.seconds, DeploymentStepInfo(plan, step, num + 1)) } managerProbe.expectMsg(5.seconds, DeploymentFinished(plan)) verify(driver, times(1)).killTask(TaskID(task1_2.getId)) verifyNoMoreInteractions(driver) } finally { system.shutdown() } } }
hangyan/marathon
src/test/scala/mesosphere/marathon/upgrade/DeploymentActorTest.scala
Scala
apache-2.0
12,395
package com.jayway.textmining import scala.collection.mutable import util.Random /** * Copyright 2012 Amir Moulavi (amir.moulavi@gmail.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * @author Amir Moulavi */ trait RandomSelector { implicit val dMap:mutable.Map[Document, Cluster] = mutable.Map[Document, Cluster]() def selectRandomInitialCluster(K:Int, documents:List[Document])(implicit documentMap:mutable.Map[Document, Cluster]):List[Cluster] = { var docs = documents map identity val r = new Random val seeds = mutable.ListBuffer[Cluster]() for ( i <- 0 until K ) { val selected = docs(r.nextInt(K)) docs = docs.filterNot(_ == selected) val cluster = Cluster(selected) documentMap += (selected -> cluster) seeds += cluster } seeds.toList } def selectRandom[A](K:Int, documents:List[A]):List[A] = { var docs = documents map identity val r = new Random val seeds = mutable.ListBuffer[A]() for ( i <- 0 until K ) { val selected = docs(r.nextInt(K)) docs = docs.filterNot(_ == selected) seeds += selected } seeds.toList } }
amir343/grape
src/main/scala/com/jayway/textmining/RandomSelector.scala
Scala
apache-2.0
1,655
// Instantiate a new mutable Map val m = scala.collection.mutable.Map[String, String]() // Convert an immutable Map to a mutable Map val m = collection.immutable.Map(1->"one", 2->"two") val n = collection.mutable.Map(m.toSeq: _*) // Iterate through a Map val m = Map(1->"one", 2->"two") for((k, v) <- m) println(s"${k}, ${v}") // Test if a key-value pair exists in a Map val m = Map(1->"one", 2->"two") m.exists(_ == 1->"one") // return true m.exists(_ == 1->"two") // return false // Test if a key exists in a Map val m = Map(1->"one", 2->"two") m.contains(1) // return true m.contains(3) // return false // Test if a value exists in a Map val m = Map(1->"one", 2->"two") m.values.exists(_ == "one") // return true m.values.exists(_ == "three") // return false
datability-io/learn-scala
snippet/map.scala
Scala
mit
767
/** * This file is part of the TA Buddy project. * Copyright (c) 2014-2015 Alexey Aksenov ezh@ezh.msk.ru * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU Affero General Global License version 3 * as published by the Free Software Foundation with the addition of the * following permission added to Section 15 as permitted in Section 7(a): * FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED * BY Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS», * Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS» DISCLAIMS * THE WARRANTY OF NON INFRINGEMENT OF THIRD PARTY RIGHTS. * * This program is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Affero General Global License for more details. * You should have received a copy of the GNU Affero General Global License * along with this program; if not, see http://www.gnu.org/licenses or write to * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, * Boston, MA, 02110-1301 USA, or download the license from the following URL: * http://www.gnu.org/licenses/agpl.html * * The interactive user interfaces in modified source and object code versions * of this program must display Appropriate Legal Notices, as required under * Section 5 of the GNU Affero General Global License. * * In accordance with Section 7(b) of the GNU Affero General Global License, * you must retain the producer line in every report, form or document * that is created or manipulated using TA Buddy. * * You can be released from the requirements of the license by purchasing * a commercial license. Buying such a license is mandatory as soon as you * develop commercial activities involving the TA Buddy software without * disclosing the source code of your own applications. 
* These activities include: offering paid services to customers, * serving files in a web or/and network application, * shipping TA Buddy with a closed source product. * * For more information, please contact Digimead Team at this * address: ezh@ezh.msk.ru */ package org.digimead.tabuddy.desktop.core.keyring import java.io.{ BufferedInputStream, InputStream, OutputStream } import org.bouncycastle.bcpg.{ ArmoredOutputStream, BCPGInputStream, CompressionAlgorithmTags } import org.bouncycastle.openpgp.operator.bc.BcKeyFingerprintCalculator import org.bouncycastle.openpgp.{ PGPCompressedData, PGPCompressedDataGenerator, PGPObjectFactory, PGPPublicKey, PGPPublicKeyRing, PGPSecretKeyRing, PGPUtil } import scala.language.reflectiveCalls /** * Key exchange part for keyring implementation. */ trait KeyRingExchange { /** Export PGP public key and close the output stream. */ def export(publicKey: PGPPublicKey, os: OutputStream): Unit = exportPGPData(publicKey, os, true, CompressionAlgorithmTags.UNCOMPRESSED) /** Export PGP public key and close the output stream. */ def export(publicKey: PGPPublicKey, os: OutputStream, armored: Boolean): Unit = exportPGPData(publicKey, os, armored, CompressionAlgorithmTags.UNCOMPRESSED) /** Export PGP public key and close the output stream. */ def export(publicKey: PGPPublicKey, os: OutputStream, armored: Boolean, compressed: Int): Unit = exportPGPData(publicKey, os, armored, compressed) /** Export PGP public key ring and close the output stream. */ def export(publicKeyRing: PGPPublicKeyRing, os: OutputStream): Unit = exportPGPData(publicKeyRing, os, true, CompressionAlgorithmTags.UNCOMPRESSED) /** Export PGP public key ring and close the output stream. */ def export(publicKeyRing: PGPPublicKeyRing, os: OutputStream, armored: Boolean): Unit = exportPGPData(publicKeyRing, os, armored, CompressionAlgorithmTags.UNCOMPRESSED) /** Export PGP public key ring and close the output stream. 
*/ def export(publicKeyRing: PGPPublicKeyRing, os: OutputStream, armored: Boolean, compressed: Int): Unit = exportPGPData(publicKeyRing, os, armored, compressed) /** Export PGP private key ring and close the output stream. */ def export(privateKeyRing: PGPSecretKeyRing, os: OutputStream): Unit = exportPGPData(privateKeyRing, os, true, CompressionAlgorithmTags.UNCOMPRESSED) /** Export PGP private key ring and close the output stream. */ def export(privateKeyRing: PGPSecretKeyRing, os: OutputStream, armored: Boolean): Unit = exportPGPData(privateKeyRing, os, armored, CompressionAlgorithmTags.UNCOMPRESSED) /** Export PGP private key ring and close the output stream. */ def export(privateKeyRing: PGPSecretKeyRing, os: OutputStream, armored: Boolean, compressed: Int): Unit = exportPGPData(privateKeyRing, os, armored, compressed) /** Export PGP data and close the output stream. */ def exportPGPData(data: { def encode(os: OutputStream) }, os: OutputStream, armored: Boolean, compressed: Int) { // Bouncy Castle’s implementations don’t chain calls to close(), // so we have to keep track of the various streams. var outStream = os // Output the data as ASCII, otherwise it will be output'ed as Binary val armoredStream = if (armored) Some(new ArmoredOutputStream(outStream)) else None armoredStream.foreach(outStream = _) // Compress the outgoing data if needed val compressedStream = if (compressed != CompressionAlgorithmTags.UNCOMPRESSED) Some(new PGPCompressedDataGenerator(compressed).open(outStream)) else None compressedStream.foreach(outStream = _) try { data.encode(outStream) compressedStream.foreach { _.flush() } armoredStream.foreach { _.flush() } os.flush() } finally { compressedStream.foreach { _.close() } armoredStream.foreach { _.close() } os.close() } } /** Import PGP data. 
*/ def importPGPData(is: InputStream): Stream[AnyRef] = { val streamWithMarkSupported = if (is.markSupported()) is else new BufferedInputStream(is) streamWithMarkSupported.mark(4096) val PGPFactory = try { val compressed = new PGPCompressedData(new BCPGInputStream(PGPUtil.getDecoderStream(streamWithMarkSupported))) new PGPObjectFactory(PGPUtil.getDecoderStream(compressed.getDataStream()), new BcKeyFingerprintCalculator()) } catch { case e: Throwable ⇒ streamWithMarkSupported.reset() new PGPObjectFactory(PGPUtil.getDecoderStream(streamWithMarkSupported), new BcKeyFingerprintCalculator()) } Option(PGPFactory.nextObject()) match { case Some(element) ⇒ Stream.cons(element, Option(PGPFactory.nextObject()).toStream) case None ⇒ Stream.empty } } /** Import PGP public key and close the input stream. */ def importPGPPublicKey(is: InputStream): PGPPublicKey = try { importPGPData(is).find { element ⇒ element.isInstanceOf[PGPPublicKey] || element.isInstanceOf[PGPPublicKeyRing] } match { case Some(publicKey: PGPPublicKey) ⇒ publicKey case Some(publicKeyRing: PGPPublicKeyRing) ⇒ publicKeyRing.getPublicKey() case _ ⇒ throw new NoSuchElementException("PGP public key not found.") } } finally is.close() /** Import PGP public key ring and close the input stream. */ def importPGPPublicKeyRing(is: InputStream): PGPPublicKeyRing = try { importPGPData(is).find { _.isInstanceOf[PGPPublicKeyRing] } match { case Some(publicKeyRing: PGPPublicKeyRing) ⇒ publicKeyRing case _ ⇒ throw new NoSuchElementException("PGP public keyring not found.") } } finally is.close() /** Import PGP private key ring and close the input stream. */ def importPGPSecretKeyRing(is: InputStream): PGPSecretKeyRing = try { importPGPData(is).find { _.isInstanceOf[PGPSecretKeyRing] } match { case Some(privateKeyRing: PGPSecretKeyRing) ⇒ privateKeyRing case _ ⇒ throw new NoSuchElementException("PGP private keyring not found.") } } finally is.close() }
digimead/digi-TABuddy-desktop
part-core-keyring/src/main/scala/org/digimead/tabuddy/desktop/core/keyring/KeyRingExchange.scala
Scala
agpl-3.0
8,026
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.carbondata.cluster.sdv.generated import org.apache.spark.sql.common.util._ import org.scalatest.BeforeAndAfterAll import org.apache.carbondata.core.constants.CarbonCommonConstants import org.apache.carbondata.core.util.CarbonProperties /** * Test Class for Vector2TestCase to verify all scenerios */ class Vector2TestCase extends QueryTest with BeforeAndAfterAll { //To check select all records with vectorized carbon reader enabled test("Vector2-TC_071", Include) { sql(s"""CREATE TABLE uniqdatavector2 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdatavector2 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select * from uniqdatavector2 
""").collect } //To check random measure select query with vectorized carbon reader enabled test("Vector2-TC_072", Include) { sql(s"""select cust_name,DOB,DOJ from uniqdatavector2 where cust_id=10999""").collect sql(s"""drop table uniqdatavector2""").collect } //To check select random columns and order with vectorized carbon reader enabled test("Vector2-TC_073", Include) { sql(s"""create table double1(id double, name string) STORED AS carbondata """).collect sql(s"""load data inpath '$resourcesPath/Data/InsertData/maxrange_double.csv' into table double1""").collect sql(s"""select id from double1 order by id""").collect } //To check the logs of executor with vectorized carbon reader enabled test("Vector2-TC_074", Include) { sql(s"""select id from double1 order by id""").collect } //To check for select random measures with group by and having clause with vectorized carbon reader enabled test("Vector2-TC_075", Include) { sql(s"""select id,count(*) from double1 group by id having count(*)=1""").collect } //To check for select count query with group by and having clause with vectorized carbon reader enabled test("Vector2-TC_076", Include) { sql(s"""select id,count(id) from double1 group by id having count(*)=1""").collect sql(s"""drop table double1""").collect } //To applied cast method with vectorized carbon reader enabled test("Vector2-TC_077", Include) { sql(s"""CREATE TABLE uniqdatavector22 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata""").collect sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdatavector22 OPTIONS('DELIMITER'=',' , 
'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect sql(s"""select cast(Double_COLUMN1 as int) from uniqdatavector22""").collect } //To apply sum method on a column with select query with vectorized carbon reader enabled test("Vector2-TC_078", Include) { sql(s"""select sum(CUST_ID) from uniqdatavector22""").collect } //To apply the average method on a column with select query with vectorized carbon reader enabled test("Vector2-TC_079", Include) { sql(s"""select avg(CUST_ID) from uniqdatavector22""").collect } //To apply the percentile_approx method with vectorized carbon reader enabled test("Vector2-TC_080", Include) { sql(s"""select percentile_approx(1, 0.5 ,500) from uniqdatavector22""").collect } //To apply the var_samp method with vectorized carbon reader enabled test("Vector2-TC_081", Include) { sql(s"""select var_samp(cust_id) from uniqdatavector22""").collect } //To apply the stddev_pop method with vectorized carbon reader enabled test("Vector2-TC_082", Include) { sql(s"""select stddev_pop(cust_id) from uniqdatavector22""").collect } //To apply the stddev_samp method with vectorized carbon reader enabled test("Vector2-TC_083", Include) { sql(s"""select stddev_samp(cust_id) from uniqdatavector22""").collect } //To apply percentile method with vectorized carbon reader enabled test("Vector2-TC_084", Include) { sql(s"""select percentile(0,1) from uniqdatavector22""").collect } //To apply min method with vectorized carbon reader enabled test("Vector2-TC_085", Include) { sql(s"""select min(CUST_ID) from uniqdatavector22""").collect } //To applied max method with vectorized carbon reader enabled test("Vector2-TC_086", Include) { sql(s"""select max(CUST_ID) from uniqdatavector22""").collect } //To apply sum method with plus operator with vectorized carbon reader enabled test("Vector2-TC_087", 
Include) { sql(s"""select sum(CUST_ID+1) from uniqdatavector22""").collect } //To apply sum method with minus operator with vectorized carbon reader enabled test("Vector2-TC_088", Include) { sql(s"""select sum(CUST_ID-1) from uniqdatavector22""").collect } //To apply count method with distinct operator with vectorized carbon reader enabled test("Vector2-TC_089", Include) { sql(s"""select count(DISTINCT CUST_ID) from uniqdatavector22""").collect } //To check random measure select query with AND operator and vectorized carbon reader enabled test("Vector2-TC_090", Include) { sql(s"""select cust_name,DOB,DOJ from uniqdatavector22 where cust_id=10999 and INTEGER_COLUMN1=2000 """).collect } //To check random measure select query with OR operator and vectorized carbon reader enabled test("Vector2-TC_091", Include) { sql(s"""select cust_name,DOB,DOJ from uniqdatavector22 where cust_id=10999 or INTEGER_COLUMN1=2000 """).collect } //To apply count method with if operator with vectorized carbon reader enabled test("Vector2-TC_092", Include) { sql(s"""select count(if(CUST_ID<1999,NULL,CUST_NAME)) from uniqdatavector22""").collect } //To apply in operator with vectorized carbon reader enabled test("Vector2-TC_093", Include) { sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID IN(1,22)""").collect } //To apply not in operator with vectorized carbon reader enabled test("Vector2-TC_094", Include) { sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID NOT IN(1,22)""").collect } //To apply between operator with vectorized carbon reader enabled test("Vector2-TC_095", Include) { sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID BETWEEN 1 AND 11000""").collect } //To apply not between operator with vectorized carbon reader enabled test("Vector2-TC_096", Include) { sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID NOT BETWEEN 1 AND 11000""").collect } //To apply between in operator with order by clause with vectorized carbon reader enabled 
test("Vector2-TC_097", Include) { sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID in (1,10999)order by 'CUST_ID'""").collect } //To apply between in operator with group by clause with vectorized carbon reader enabled test("Vector2-TC_098", Include) { sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID in (1,10999) group by CUST_NAME""").collect } //To apply null clause with vectorized carbon reader enabled test("Vector2-TC_099", Include) { sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID is null""").collect } //To applied not null clause with vectorized carbon reader enabled test("Vector2-TC_100", Include) { sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID is not null""").collect } //To apply > operator with vectorized carbon reader enabled test("Vector2-TC_101", Include) { sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID>1""").collect } //To apply < operator with vectorized carbon reader enabled test("Vector2-TC_102", Include) { sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID<1""").collect } //To apply != operator with vectorized carbon reader enabled test("Vector2-TC_103", Include) { sql(s"""select CUST_NAME from uniqdatavector22 where CUST_ID!=1""").collect } //To apply like clause with vectorized carbon reader enabled test("Vector2-TC_104", Include) { sql(s"""select CUST_ID from uniqdatavector22 where CUST_ID like 10999""").collect } //To apply like% clause with vectorized carbon reader enabled test("Vector2-TC_105", Include) { sql(s"""select CUST_ID from uniqdatavector22 where CUST_ID like '%10999%'""").collect } //To apply rlike clause with vectorized carbon reader enabled test("Vector2-TC_106", Include) { sql(s"""select CUST_ID from uniqdatavector22 where CUST_ID rlike 10999""").collect } //To apply rlike% clause with vectorized carbon reader enabled test("Vector2-TC_107", Include) { sql(s"""select CUST_ID from uniqdatavector22 where CUST_ID rlike '%10999'""").collect } //To apply alias 
clause with vectorized carbon reader enabled test("Vector2-TC_108", Include) { sql(s"""select count(cust_id)+10.364 as a from uniqdatavector22""").collect } //To apply aliase clause with group by clause with vectorized carbon reader enabled test("Vector2-TC_109", Include) { sql(s"""select count(cust_id)+10.364 as a from uniqdatavector22 group by CUST_ID""").collect } //To apply aliase clause with order by clause with vectorized carbon reader enabled test("Vector2-TC_110", Include) { sql(s"""select cust_id,count(cust_name) a from uniqdatavector22 group by cust_id order by cust_id""").collect } //To apply regexp_replace clause with vectorized carbon reader enabled test("Vector2-TC_111", Include) { sql(s"""select regexp_replace(cust_id, 'i', 'ment') from uniqdatavector22""").collect } //To apply date_add method with vectorized carbon reader enabled test("Vector2-TC_118", Include) { sql(s"""SELECT date_add(DOB,1) FROM uniqdatavector22""").collect } //To apply date_sub method with vectorized carbon reader enabled test("Vector2-TC_119", Include) { sql(s"""SELECT date_sub(DOB,1) FROM uniqdatavector22""").collect } //To apply current_date method with vectorized carbon reader enabled test("Vector2-TC_120", Include) { sql(s"""SELECT current_date() FROM uniqdatavector22""").collect } //To apply add_month method with vectorized carbon reader enabled test("Vector2-TC_121", Include) { sql(s"""SELECT add_months(dob,1) FROM uniqdatavector22""").collect } //To apply last_day method with vectorized carbon reader enabled test("Vector2-TC_122", Include) { sql(s"""SELECT last_day(dob) FROM uniqdatavector22""").collect } //To apply next_day method with vectorized carbon reader enabled test("Vector2-TC_123", Include) { sql(s"""SELECT next_day(dob,'monday') FROM uniqdatavector22""").collect } //To apply months_between method on carbon table test("Vector2-TC_124", Include) { sql(s"""select months_between('2016-12-28', '2017-01-30') from uniqdatavector22""").collect } //Toapply date_diff 
method with vectorized carbon reader enabled test("Vector2-TC_125", Include) { sql(s"""select datediff('2009-03-01', '2009-02-27') from uniqdatavector22""").collect } //To apply concat method with vectorized carbon reader enabled test("Vector2-TC_126", Include) { sql(s"""SELECT concat('hi','hi') FROM uniqdatavector22""").collect } //To apply lower method with vectorized carbon reader enabled test("Vector2-TC_127", Include) { sql(s"""SELECT lower('H') FROM uniqdatavector22""").collect } //To apply substr method with vectorized carbon reader enabled test("Vector2-TC_128", Include) { sql(s"""select substr(cust_id,3) from uniqdatavector22""").collect } //To apply trim method with vectorized carbon reader enabled test("Vector2-TC_129", Include) { sql(s"""select trim(cust_id) from uniqdatavector22""").collect } //To apply split method with vectorized carbon reader enabled test("Vector2-TC_130", Include) { sql(s"""select split('knoldus','ol') from uniqdatavector22""").collect } //To apply split method limit clause with vectorized carbon reader enabled test("Vector2-TC_131", Include) { sql(s"""select split('knoldus','ol') from uniqdatavector22 limit 1""").collect } //To apply reverse on carbon table with vectorized carbon reader enabled test("Vector2-TC_132", Include) { sql(s"""select reverse('knoldus') from uniqdatavector22""").collect } //To apply replace on carbon table with vectorized carbon reader enabled test("Vector2-TC_133", Include) { sql(s"""select regexp_replace('Tester', 'T', 't') from uniqdatavector22""").collect } //To apply replace with limit clause with vectorized carbon reader enabled test("Vector2-TC_134", Include) { sql(s"""select regexp_replace('Tester', 'T', 't') from uniqdatavector22 limit 1""").collect } //To apply FORMAT_STRING on carbon table with vectorized carbon reader enabled test("Vector2-TC_135", Include) { sql(s"""select format_string('data', cust_name) from uniqdatavector22""").collect } //To apply sentences method with vectorized carbon 
reader enabled test("Vector2-TC_136", Include) { sql(s"""select sentences(cust_name) from uniqdatavector22""").collect } //To apply space method on carbon table with vectorized carbon reader enabled test("Vector2-TC_137", Include) { sql(s"""select space(10) from uniqdatavector22""").collect } //To apply rtrim method with vectorized carbon reader enabled test("Vector2-TC_138", Include) { sql(s"""select rtrim(" testing ") from uniqdatavector22""").collect } //To apply ascii method with vectorized carbon reader enabled test("Vector2-TC_139", Include) { sql(s"""select ascii('A') from uniqdatavector22""").collect } //To apply utc_timestamp method with vectorized carbon reader enabled test("Vector2-TC_140", Include) { sql(s"""select from_utc_timestamp('2016-12-12 08:00:00','PST') from uniqdatavector22""").collect sql(s"""drop table uniqdatavector22""").collect } val prop = CarbonProperties.getInstance() val p1 = prop.getProperty("carbon.enable.vector.reader", CarbonCommonConstants.ENABLE_VECTOR_READER_DEFAULT) override protected def beforeAll() { // Adding new properties prop.addProperty("carbon.enable.vector.reader", "false") } override def afterAll: Unit = { //Reverting to old prop.addProperty("carbon.enable.vector.reader", p1) } }
jackylk/incubator-carbondata
integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/Vector2TestCase.scala
Scala
apache-2.0
16,144
package usbinstall import javafx.beans.property.{SimpleBooleanProperty, SimpleStringProperty} abstract class AbstractStepButton( val visible: Boolean, xdisabled: Boolean, xlabel: String, var onTrigger: () => Boolean ) { def triggered(): Unit val disableProperty = new SimpleBooleanProperty(xdisabled) def disable: Boolean = disableProperty.get def disable_=(v: Boolean): Unit = disableProperty.set(v) val labelProperty = new SimpleStringProperty(xlabel) def label: String = labelProperty.get def label_=(v: String): Unit = labelProperty.set(v) } object NoButton extends AbstractStepButton(false, false, "", () => false) { override def triggered(): Unit = {} } class StepButton( pane: StepPane, f: => Boolean, xlabel: String ) extends AbstractStepButton(true, false, xlabel, () => f) { override def triggered(): Unit = { if (onTrigger()) pane.cancelSubscriptions() } } class PreviousButton(pane: StepPane, f: => Boolean) extends StepButton(pane, f, "Previous") class NextButton(pane: StepPane, f: => Boolean) extends StepButton(pane, f, "Next") class CancelButton(pane: StepPane, f: => Boolean) extends StepButton(pane, f, "Cancel")
suiryc/usbinstall
src/main/scala/usbinstall/StepButton.scala
Scala
gpl-3.0
1,190
/** * Copyright (c) 2016 Intel Corporation  * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * *       http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.trustedanalytics.sparktk.graph.internal.ops import org.scalatest.{ WordSpec, Matchers } import org.trustedanalytics.sparktk.frame.{ Frame, DataTypes, Column, FrameSchema } import org.trustedanalytics.sparktk.frame.internal.rdd.FrameRdd import org.trustedanalytics.sparktk.graph.{ Graph } import org.apache.spark.sql.catalyst.expressions.GenericRow import org.trustedanalytics.sparktk.testutils.TestingSparkContextWordSpec class LoopyBeliefPropagationTest extends TestingSparkContextWordSpec with Matchers { private def buildGraph(): Graph = { val vertices = FrameSchema(List(Column("id", DataTypes.int32), Column("priors", DataTypes.string))) // This graph is a diamond, 1 to 3 and 4, 3 and 4 to 5 val vertexRows = FrameRdd.toRowRDD(vertices, sparkContext.parallelize(List(Array[Any](1, "1.0 0"), Array[Any](3, "1.0 0"), Array[Any](4, "1.0 0"), Array[Any](5, "0 1.0")))) val edges = FrameSchema(List(Column("src", DataTypes.int32), Column("dst", DataTypes.int32), Column("weights", DataTypes.float32))) val edgeRows = FrameRdd.toRowRDD(edges, sparkContext.parallelize( List(Array[Any](1, 3, 1), Array[Any](1, 4, 1), Array[Any](3, 5, 1), Array[Any](4, 5, 1), Array[Any](3, 4, 1)))) val edgeFrame = new Frame(edgeRows, edges) val vertexFrame = new Frame(vertexRows, vertices) val graph = new Graph(vertexFrame, edgeFrame) graph } "loopy belief propgation" when { "called with good inputs" 
should { "optimize over the priors" in { val graph = buildGraph() val posteriorBelief = graph.loopyBeliefPropagation("priors", "weights") posteriorBelief.schema.columns should equal(List(Column("id", DataTypes.int32), Column("priors", DataTypes.string), Column("posterior", DataTypes.string))) posteriorBelief.rdd.toArray.toList should equal(List( new GenericRow(Array[Any](1, "1.0 0", "[1.0,0.0]")), new GenericRow(Array[Any](3, "1.0 0", "[1.0,0.0]")), new GenericRow(Array[Any](5, "0 1.0", "[0.0,1.0]")), new GenericRow(Array[Any](4, "1.0 0", "[1.0,0.0]")))) } } "called with invalid prior" should { "throw an exception" in { val graph = buildGraph() val thrown = the[IllegalArgumentException] thrownBy graph.loopyBeliefPropagation("invalid_prior", "weights") thrown.getMessage should equal("requirement failed: Property invalid_prior not found for prior") } } "called with an invalid weight" should { "throw an exception" in { val graph = buildGraph() val thrown = the[IllegalArgumentException] thrownBy graph.loopyBeliefPropagation("prior", "invalid_prior") thrown.getMessage should equal("requirement failed: Property invalid_prior not found for edge weight") } } "called with invalid iterations" should { "throw an exception" in { val graph = buildGraph() val thrown = the[IllegalArgumentException] thrownBy graph.loopyBeliefPropagation("prior", "invalid_prior", -3) thrown.getMessage should equal("requirement failed: maxIterations must be greater than 0") } } } }
aayushidwivedi01/spark-tk
sparktk-core/src/test/scala/org/trustedanalytics/sparktk/graph/internal/ops/LoopyBeliefPropagationTest.scala
Scala
apache-2.0
3,883
/* * Copyright 2016 The BigDL Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intel.analytics.bigdl.dllib.utils.tf.loaders import com.intel.analytics.bigdl.dllib.tensor.Tensor import com.intel.analytics.bigdl.dllib.utils.T import com.intel.analytics.bigdl.dllib.utils.serializer.ModuleSerializationTest import scala.util.Random class ConcatV2LoadTFSerialTest extends ModuleSerializationTest { override def test(): Unit = { val concatv2 = new ConcatV2LoadTF[Float]().setName("concatv2LoadTF") val input = T(Tensor[Float](1, 2).apply1(_ => Random.nextFloat()), Tensor[Float](1, 2).apply1(_ => Random.nextFloat()), Tensor[Int](T(1))) runSerializationTest(concatv2, input) } }
intel-analytics/BigDL
scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/utils/tf/loaders/ConcatV2LoadTFSerialTest.scala
Scala
apache-2.0
1,241
package franka.lang sealed trait Cardinality object Cardinality { case class Finite (value : BigInt) extends Cardinality case class Infinite (aleph : Int) extends Cardinality }
bylt/franka
src/main/scala/franka/lang/Cardinality.scala
Scala
mit
189
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kafka.controller import java.util.Properties import java.util.concurrent.CountDownLatch import java.util.concurrent.atomic.AtomicReference import kafka.integration.KafkaServerTestHarness import kafka.server.KafkaConfig import kafka.utils._ import org.apache.kafka.common.TopicPartition import org.apache.kafka.common.metrics.Metrics import org.apache.log4j.Logger import org.junit.{After, Test} import org.junit.Assert._ import org.scalatest.Assertions.fail class ControllerFailoverTest extends KafkaServerTestHarness with Logging { val log = Logger.getLogger(classOf[ControllerFailoverTest]) val numNodes = 2 val numParts = 1 val msgQueueSize = 1 val topic = "topic1" val overridingProps = new Properties() val metrics = new Metrics() overridingProps.put(KafkaConfig.NumPartitionsProp, numParts.toString) override def generateConfigs = TestUtils.createBrokerConfigs(numNodes, zkConnect) .map(KafkaConfig.fromProps(_, overridingProps)) @After override def tearDown() { super.tearDown() this.metrics.close() } /** * See @link{https://issues.apache.org/jira/browse/KAFKA-2300} * for the background of this test case */ @Test def testHandleIllegalStateException() { val initialController = 
servers.find(_.kafkaController.isActive).map(_.kafkaController).getOrElse { fail("Could not find controller") } val initialEpoch = initialController.epoch // Create topic with one partition createTopic(topic, 1, 1) val topicPartition = new TopicPartition("topic1", 0) TestUtils.waitUntilTrue(() => initialController.controllerContext.partitionsInState(OnlinePartition).contains(topicPartition), s"Partition $topicPartition did not transition to online state") // Wait until we have verified that we have resigned val latch = new CountDownLatch(1) val exceptionThrown = new AtomicReference[Throwable]() val illegalStateEvent = new MockEvent(ControllerState.BrokerChange) { override def process(): Unit = { try initialController.handleIllegalState(new IllegalStateException("Thrown for test purposes")) catch { case t: Throwable => exceptionThrown.set(t) } latch.await() } } initialController.eventManager.put(illegalStateEvent) // Check that we have shutdown the scheduler (via onControllerResigned) TestUtils.waitUntilTrue(() => !initialController.kafkaScheduler.isStarted, "Scheduler was not shutdown") TestUtils.waitUntilTrue(() => !initialController.isActive, "Controller did not become inactive") latch.countDown() TestUtils.waitUntilTrue(() => Option(exceptionThrown.get()).isDefined, "handleIllegalState did not throw an exception") assertTrue(s"handleIllegalState should throw an IllegalStateException, but $exceptionThrown was thrown", exceptionThrown.get.isInstanceOf[IllegalStateException]) TestUtils.waitUntilTrue(() => { servers.exists { server => server.kafkaController.isActive && server.kafkaController.epoch > initialEpoch } }, "Failed to find controller") } }
KevinLiLu/kafka
core/src/test/scala/unit/kafka/controller/ControllerFailoverTest.scala
Scala
apache-2.0
3,919
package com.velorin import org.scalatest._ abstract class UnitSpec extends FlatSpec with Matchers with OptionValues with Inside with Inspectors
AmirHooshangi/ACMG-S
src/test/scala/com/velorin/UnitSpec.scala
Scala
gpl-3.0
148
package ui.shader.builder.value import org.scalajs.dom import ui.shader.builder.types.GlFloatType abstract class GlFloatVal extends GlValue[GlFloatType] { } object GlFloatVal { def apply(double: Double): GlValue[GlFloatType] = { new GlFloatValD(double) } def apply(float: Float): GlValue[GlFloatType] = { new GlFloatValF(float) } def apply(varName: String): GlValue[GlFloatType] = { new GlFloatValVar(varName) } implicit def floatToVal(float: Float): GlValue[GlFloatType] = { new GlFloatValF(float) } } class GlFloatValD(double: Double) extends GlValue[GlFloatType] { override def toGlsl: String = { val formatted = f"$double%8.12f" val dotPos = formatted.indexOf('.') val zeros = formatted.reverse.takeWhile(_ == '0').length formatted.substring(0, math.max(dotPos + 2, formatted.length - zeros)) } } class GlFloatValF(float: Float) extends GlValue[GlFloatType] { override def toGlsl: String = { val formatted = f"$float%8.12f" val dotPos = formatted.indexOf('.') val zeros = formatted.reverse.takeWhile(_ == '0').length formatted.substring(0, math.max(dotPos + 2, formatted.length - zeros)) } } class GlFloatValVar(name: String) extends GlValue[GlFloatType] { override def toGlsl: String = { name } }
gvatn/play-scalajs-webgl-spark
client/src/main/scala/ui/shader/builder/value/GlFloatVal.scala
Scala
mit
1,342
package htwg.scalmon.model import org.scalatest._ import scala.language.reflectiveCalls import htwg.scalmon.Listener class ModelSpec extends FlatSpec with Matchers with GivenWhenThen { def fixture = new { val m1 = new Model(1) val m5 = new Model(5) } "A Model" should "have a game size for scaling" in { fixture.m1.gameSize should be(1) fixture.m5.gameSize should be(5) } it should "have players" in { Given("a new model") Then("no players should be set") fixture.m1.playerA should be(null) fixture.m1.playerB should be(null) fixture.m5.playerA should be(null) fixture.m5.playerB should be(null) When("player A and B are set") val m = fixture.m1 val a1 = new Animal("Animal1") val a2 = new Animal("Animal2") m.playerA = new Player("A", Array(a1)) m.playerB = new Player("B", Array(a2)) m.playerA should not equal null m.playerB should not equal null Then("resetAnimals should reset all animals of both players") a1.healthPoints = 0 a2.healthPoints = 0 m.resetAnimals a1.healthPoints should be(a1.initHealthPoints) a2.healthPoints should be(a2.initHealthPoints) } it should "have a state" in { Given("a newly created model") val m = fixture.m1 Then("its state should be Init") m.state should be(Init()) When("the state is changed") m.state = RunRound(13, List()) Then("the model should return the new state") m.state should be(RunRound(13, List())) } it should "notify its attached listeners" in { Given("new listeners which are not attached to the model") val ls = new { val l1, l2 = new Listener { var count = 0 var lastInfo: Option[AbilityInfo] = None def update(info: Option[AbilityInfo]) = { count += 1; lastInfo = info } } def count = (l1.count, l2.count) } val m = fixture.m1 m.notifyListeners() Then("they should not be notified") ls.count should be(0, 0) When("a listener is added to the model") m.addListener(ls.l1) ls.count should be(0, 0) Then("it should be notified correctly") m.notifyListeners() ls.count should be(1, 0) When("multiple listeners are added") m.addListener(ls.l2) 
Then("all of them should be notified") m.notifyListeners() ls.count should be(2, 1) ls.l1.lastInfo should be(None) ls.l2.lastInfo should be(None) When("an ability info is included into the notification") m.notifyListeners(Option(AttackInfo(null, null, 100))) Then("the listener should have this info received") ls.count should be(3, 2) ls.l1.lastInfo shouldBe a[Some[AttackInfo]] ls.l2.lastInfo shouldBe a[Some[AttackInfo]] When("a listener is removed") m.removeListener(ls.l1) Then("it should not be notified anymore") m.notifyListeners() ls.count should be(3, 3) } }
themerius/scalmon
src/test/scala/htwg/scalmon/model/ModelSpec.scala
Scala
lgpl-3.0
2,898
package domala.jdbc.builder

import java.util.function.{Function, Supplier}
import java.util.stream

import domala.internal.jdbc.scalar.Scalars
import domala.internal.{OptionConverters, WrapIterator}
import domala.jdbc.entity.EntityDesc
import domala.jdbc.query.SqlSelectQuery
import domala.jdbc.{Config, SelectOptions, SqlLogType}
import org.seasar.doma.internal.jdbc.command._
import org.seasar.doma.internal.jdbc.scalar.{Scalar, ScalarException}
import org.seasar.doma.jdbc.command.{ResultSetHandler, SelectCommand}
import org.seasar.doma.jdbc.{ClassHelper, Sql}
import org.seasar.doma.message.Message
import org.seasar.doma.{DomaIllegalArgumentException, DomaNullPointerException, FetchType, MapKeyNamingType}

import scala.collection.JavaConverters._
import scala.language.experimental.macros
import scala.reflect._

/**
 * A fluent builder for SELECT statements on top of Doma's [[SqlSelectQuery]].
 *
 * SQL text and bind parameters are accumulated in `helper`; each `sql`/`param`
 * call returns a [[SubsequentSelectBuilder]] sharing the same mutable state,
 * so the builder is intended for single-threaded, linear use.  Terminal
 * `get*`/`iterator*` methods prepare the query and execute it through a
 * [[SelectCommand]] with an appropriate Doma result-set handler.
 */
class SelectBuilder(
  val config: Config,
  val helper: BuildingHelper = new BuildingHelper(),
  query: SqlSelectQuery = new SqlSelectQuery,
  paramIndex: ParamIndex = new ParamIndex()) {

  implicit val classHelper: ClassHelper = config.getClassHelper
  // Constructor side effects: seed the query with caller info and defaults.
  query.setConfig(config)
  query.setCallerClassName(getClass.getName)
  query.setFetchType(FetchType.LAZY)
  query.setSqlLogType(SqlLogType.FORMATTED)

  /** Appends a SQL fragment on a new line. Throws DomaNullPointerException on null. */
  def sql(sql: String): SelectBuilder = {
    if (sql == null) throw new DomaNullPointerException("sql")
    helper.appendSqlWithLineSeparator(sql)
    new SubsequentSelectBuilder(config, helper, query, paramIndex)
  }

  /** Removes the most recently appended fragment (used to drop trailing separators). */
  def removeLast(): SelectBuilder = {
    helper.removeLast()
    new SubsequentSelectBuilder(config, helper, query, paramIndex)
  }

  /** Appends a single bind parameter (rendered as a placeholder). */
  def param[P](paramClass: Class[P], param: P): SelectBuilder = {
    if (paramClass == null) throw new DomaNullPointerException("paramClass")
    appendParam(paramClass, param, false)
  }

  /** Appends a comma-separated list of bind parameters (e.g. for IN clauses). */
  def params[E](elementClass: Class[E], params: Iterable[E]): SelectBuilder = {
    if (elementClass == null) throw new DomaNullPointerException("elementClass")
    if (params == null) throw new DomaNullPointerException("params")
    appendParams(elementClass, params, false)
  }

  /** Appends a single value embedded as a literal instead of a placeholder. */
  def literal[P](paramClass: Class[P], param: P): SelectBuilder = {
    if (paramClass == null) throw new DomaNullPointerException("paramClass")
    appendParam(paramClass, param, true)
  }

  /** Appends a comma-separated list of literal values. */
  def literals[E](elementClass: Class[E], params: Seq[E]): SelectBuilder = {
    if (elementClass == null) throw new DomaNullPointerException("elementClass")
    if (params == null) throw new DomaNullPointerException("params")
    appendParams(elementClass, params, true)
  }

  private def appendParam[P](paramClass: Class[P], param: P, literal: Boolean) = {
    helper.appendParam(new Param(paramClass, param, paramIndex, literal))
    paramIndex.increment()
    new SubsequentSelectBuilder(config, helper, query, paramIndex)
  }

  private def appendParams[E](elementClass: Class[E], params: Iterable[E], literal: Boolean) = {
    var builder = this
    var index = 0
    for (param <- params) {
      builder = builder.appendParam(elementClass, param, literal).sql(", ")
      index += 1
    }
    // Empty list renders as "null" so the surrounding SQL stays valid;
    // otherwise the trailing ", " fragment is dropped.
    if (index == 0) builder = builder.sql("null")
    else builder = builder.removeLast()
    builder
  }

  /** Executes and returns exactly one entity row. */
  def getEntitySingleResult[RESULT](implicit entityDesc: EntityDesc[RESULT]): RESULT = {
    if (query.getMethodName == null) query.setCallerMethodName("getEntitySingleResult")
    query.setEntityType(entityDesc)
    val handler = new EntitySingleResultHandler[RESULT](entityDesc)
    execute(handler)
  }

  /** Executes and returns at most one entity row as an Option. */
  def getOptionEntitySingleResult[RESULT](implicit entityDesc: EntityDesc[RESULT]): Option[RESULT] = {
    // NOTE(review): the logged caller name says "getOptionalEntitySingleResult"
    // while this method is getOptionEntitySingleResult — confirm intended.
    if (query.getMethodName == null) query.setCallerMethodName("getOptionalEntitySingleResult")
    query.setEntityType(entityDesc)
    val handler = new OptionalEntitySingleResultHandler[RESULT](entityDesc)
    OptionConverters.asScala(execute(handler))
  }

  /** Executes and returns a single scalar value of the requested type. */
  @SuppressWarnings(Array("unchecked", "rawtypes"))
  def getScalarSingleResult[RESULT](implicit cTag: ClassTag[RESULT]): RESULT = {
    if (query.getMethodName == null) query.setCallerMethodName("getScalarSingleResult")
    val supplier = createScalarSupplier("resultClass", cTag.runtimeClass, false).asInstanceOf[Supplier[Scalar[Any, RESULT]]]
    val handler = new ScalarSingleResultHandler(supplier)
    execute(handler)
  }

  /** Executes and returns at most one scalar value as an Option. */
  @SuppressWarnings(Array("unchecked", "rawtypes"))
  def getOptionScalarSingleResult[RESULT](implicit cTag: ClassTag[RESULT]): Option[RESULT] = {
    if (query.getMethodName == null) query.setCallerMethodName("getOptionScalarSingleResult")
    val supplier = createScalarSupplier("resultClass", cTag.runtimeClass, true).asInstanceOf[Supplier[Scalar[Any, RESULT]]]
    val handler = new ScalarSingleResultHandler(supplier)
    execute(handler).asInstanceOf[Option[RESULT]]
  }

  /** Executes and returns a single row as a column-name -> value map (null if absent). */
  def getMapSingleResult(mapKeyNamingType: MapKeyNamingType = MapKeyNamingType.NONE): Map[String, AnyRef] = {
    if (mapKeyNamingType == null) throw new DomaNullPointerException("mapKeyNamingType")
    if (query.getMethodName == null) query.setCallerMethodName("getMapSingleResult")
    val handler = new MapSingleResultHandler(mapKeyNamingType)
    Option(execute(handler)).map(_.asScala.toMap).orNull
  }

  /** Executes and returns at most one row as an optional map. */
  def getOptionMapSingleResult(
    mapKeyNamingType: MapKeyNamingType = MapKeyNamingType.NONE): Option[Map[String, AnyRef]] = {
    if (mapKeyNamingType == null) throw new DomaNullPointerException("mapKeyNamingType")
    if (query.getMethodName == null) query.setCallerMethodName("getOptionMapSingleResult")
    val handler = new OptionalMapSingleResultHandler(mapKeyNamingType)
    OptionConverters.asScala(execute(handler)).map(_.asScala.toMap)
  }

  /** Executes and returns all entity rows. */
  def getEntityResultSeq[ELEMENT](implicit entityDesc: EntityDesc[ELEMENT]): Seq[ELEMENT] = {
    if (query.getMethodName == null) query.setCallerMethodName("getEntityResultSeq")
    query.setEntityType(entityDesc)
    val handler = new EntityResultListHandler[ELEMENT](entityDesc)
    execute(handler).asScala
  }

  /** Executes and returns all rows as scalars of the requested type. */
  @SuppressWarnings(Array("unchecked", "rawtypes"))
  def getScalarResultSeq[ELEMENT](
    implicit cTag: ClassTag[ELEMENT]): Seq[ELEMENT] = {
    if (query.getMethodName == null) query.setCallerMethodName("getScalarResultSeq")
    val supplier = createScalarSupplier("elementClass", cTag.runtimeClass, false).asInstanceOf[Supplier[Scalar[Any, Any]]]
    val handler = new ScalarResultListHandler(supplier)
    execute(handler).asScala.asInstanceOf[Seq[ELEMENT]]
  }

  /** Executes and returns all rows as optional scalars. */
  @SuppressWarnings(Array("unchecked", "rawtypes"))
  def getOptionalScalarResultSeq[ELEMENT](cTag: ClassTag[ELEMENT]): Seq[Option[ELEMENT]] = {
    // NOTE(review): unlike the sibling methods, cTag here is NOT implicit —
    // confirm whether that asymmetry is deliberate.
    if (query.getMethodName == null) query.setCallerMethodName("getOptionalScalarResultSeq")
    val supplier = createScalarSupplier("elementClass", cTag.runtimeClass, true).asInstanceOf[Supplier[Scalar[Any, Any]]]
    val handler = new ScalarResultListHandler(supplier)
    execute(handler).asScala.map(x => Option(x.asInstanceOf[ELEMENT]))
  }

  /** Executes and returns all rows as column-name -> value maps. */
  def getMapResultSeq(mapKeyNamingType: MapKeyNamingType = MapKeyNamingType.NONE): Seq[Map[String, AnyRef]] = {
    if (mapKeyNamingType == null) throw new DomaNullPointerException("mapKeyNamingType")
    if (query.getMethodName == null) query.setCallerMethodName("getMapResultSeq")
    val handler = new MapResultListHandler(mapKeyNamingType)
    execute(handler).asScala.map(_.asScala.toMap)
  }

  /** Streams entity rows through `mapper` while the result set is open. */
  def iteratorEntity[TARGET, RESULT](
    mapper: Iterator[TARGET] => RESULT)(implicit entityDesc: EntityDesc[TARGET]): RESULT = {
    if (mapper == null) throw new DomaNullPointerException("mapper")
    iteratorEntityInternal(mapper)
  }

  protected def iteratorEntityInternal[TARGET, RESULT](
    mapper: Iterator[TARGET] => RESULT)(implicit entityDesc: EntityDesc[TARGET]): RESULT = {
    if (query.getMethodName == null) query.setCallerMethodName("iteratorEntity")
    query.setEntityType(entityDesc)
    // Bridge Doma's java.util.stream.Stream to a Scala Iterator for the caller.
    val handler = new EntityStreamHandler(entityDesc,
      (p: java.util.stream.Stream[TARGET]) => mapper(WrapIterator.of(p)))
    execute(handler)
  }

  /** Streams scalar rows through `mapper` while the result set is open. */
  def iteratorScalar[RESULT, TARGET](
    mapper: Iterator[TARGET] => RESULT)(implicit cTag: ClassTag[TARGET]): RESULT = {
    if (mapper == null) throw new DomaNullPointerException("mapper")
    iteratorScalarInternal(cTag, mapper)
  }

  @SuppressWarnings(Array("unchecked", "rawtypes"))
  protected def iteratorScalarInternal[RESULT, TARGET](cTag: ClassTag[TARGET],
    mapper: Iterator[TARGET] => RESULT): RESULT = {
    if (query.getMethodName == null) query.setCallerMethodName("iteratorScalar")
    val supplier = createScalarSupplier("targetClass", cTag.runtimeClass, false).asInstanceOf[Supplier[Scalar[Any, TARGET]]]
    val handler = new ScalarStreamHandler(supplier,
      (p: java.util.stream.Stream[TARGET]) => mapper(WrapIterator.of(p)))
    execute(handler)
  }

  /** Streams optional scalar rows (nulls become None) through `mapper`. */
  def iteratorOptionalScalar[RESULT, TARGET](
    mapper: Iterator[Option[TARGET]] => RESULT)(implicit cTag: ClassTag[TARGET]): RESULT = {
    if (mapper == null) throw new DomaNullPointerException("mapper")
    iteratorOptionalScalarInternal(cTag, mapper)
  }

  @SuppressWarnings(Array("unchecked", "rawtypes"))
  protected def iteratorOptionalScalarInternal[RESULT, TARGET](cTag: ClassTag[TARGET],
    mapper: Iterator[Option[TARGET]] => RESULT): RESULT = {
    if (query.getMethodName == null) query.setCallerMethodName("iteratorOptionalScalar")
    val supplier: Supplier[Scalar[TARGET, TARGET]] =
      createScalarSupplier("targetClass", cTag.runtimeClass, true).asInstanceOf[Supplier[Scalar[TARGET, TARGET]]]
    // Wrap each streamed element in Option before handing it to the mapper.
    val bridgeMapper: Function[stream.Stream[TARGET], RESULT] =
      (p: java.util.stream.Stream[TARGET]) => mapper(WrapIterator.of(p).map(x => Option(x)))
    val handler = new ScalarStreamHandler[TARGET, TARGET, RESULT](supplier, bridgeMapper)
    execute(handler)
  }

  /** Streams rows as column-name -> value maps through `mapper`. */
  def iteratorMap[RESULT](
    mapper: Iterator[Map[String, AnyRef]] => RESULT,
    mapKeyNamingType: MapKeyNamingType = MapKeyNamingType.NONE): RESULT = {
    if (mapKeyNamingType == null) throw new DomaNullPointerException("mapKeyNamingType")
    if (mapper == null) throw new DomaNullPointerException("mapper")
    iteratorMapInternal(mapper, mapKeyNamingType)
  }

  protected def iteratorMapInternal[RESULT](
    mapper: Iterator[Map[String, AnyRef]] => RESULT,
    mapKeyNamingType: MapKeyNamingType = MapKeyNamingType.NONE): RESULT = {
    if (query.getMethodName == null) query.setCallerMethodName("iteratorMap")
    val handler = new MapStreamHandler[RESULT](mapKeyNamingType,
      (p: java.util.stream.Stream[java.util.Map[String, Object]]) => mapper(WrapIterator.of(p).map(_.asScala.toMap)))
    execute(handler)
  }

  /** Prepares the query, runs it through a SelectCommand, and completes it. */
  def execute[RESULT](resultSetHandler: ResultSetHandler[RESULT]): RESULT = {
    prepare()
    val command = new SelectCommand[RESULT](query, resultSetHandler)
    val result = command.execute
    query.complete()
    result
  }

  // Copies the accumulated SQL node and parameters into the query object.
  private def prepare(): Unit = {
    query.clearParameters()
    helper.getParams.foreach { p =>
      query.addParameter(p.name, p.paramClass, p.param)
    }
    query.setSqlNode(helper.getSqlNode)
    query.prepare()
  }

  // --- query configuration setters (mutate the shared query in place) ---

  def ensureResult(ensureResult: Boolean): Unit = {
    query.setResultEnsured(ensureResult)
  }

  def ensureResultMapping(ensureResultMapping: Boolean): Unit = {
    query.setResultMappingEnsured(ensureResultMapping)
  }

  def fetch(fetchType: FetchType): Unit = {
    query.setFetchType(fetchType)
  }

  def fetchSize(fetchSize: Int): Unit = {
    query.setFetchSize(fetchSize)
  }

  def maxRows(maxRows: Int): Unit = {
    query.setMaxRows(maxRows)
  }

  def queryTimeout(queryTimeout: Int): Unit = {
    query.setQueryTimeout(queryTimeout)
  }

  def sqlLogType(sqlLogType: SqlLogType): Unit = {
    if (sqlLogType == null) throw new DomaNullPointerException("sqlLogType")
    query.setSqlLogType(sqlLogType)
  }

  def callerClassName(className: String): Unit = {
    if (className == null) throw new DomaNullPointerException("className")
    query.setCallerClassName(className)
  }

  def callerMethodName(methodName: String): Unit = {
    if (methodName == null) throw new DomaNullPointerException("methodName")
    query.setCallerMethodName(methodName)
  }

  def options(options: SelectOptions): Unit = {
    if (options == null) throw new DomaNullPointerException("options")
    query.setOptions(options)
  }

  /** Prepares and returns the SQL that would be executed (for logging/debugging). */
  def getSql: Sql[_] = {
    if (query.getMethodName == null) query.setCallerMethodName("getSql")
    prepare()
    query.getSql
  }

  // Wraps Doma's Scalars.wrap; a ScalarException means the class is not a
  // supported basic/holder type, reported as DOMA2204.
  def createScalarSupplier(parameterName: String, clazz: Class[_], optional: Boolean): Supplier[Scalar[_, _]] =
    try Scalars.wrap(null, clazz, optional, config.getClassHelper)
    catch {
      case e: ScalarException =>
        throw new DomaIllegalArgumentException(
          parameterName,
          Message.DOMA2204.getMessage(clazz, e))
    }
}

/**
 * Builder returned after the first fragment: appends subsequent SQL inline
 * (no line separator) while sharing all state with the originating builder.
 */
private class SubsequentSelectBuilder (
  config: Config,
  builder: BuildingHelper,
  query: SqlSelectQuery,
  paramIndex: ParamIndex)
  extends SelectBuilder(config, builder, query, paramIndex) {

  override def sql(fragment: String): SelectBuilder = {
    helper.appendSql(fragment)
    this
  }
}

object SelectBuilder {
  /** Entry point: creates a fresh builder for the given configuration. */
  def newInstance(config: Config): SelectBuilder = {
    if (config == null) throw new DomaNullPointerException("config")
    new SelectBuilder(config)
  }
}
bakenezumi/domala
core/src/main/scala/domala/jdbc/builder/SelectBuilder.scala
Scala
apache-2.0
13,460
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.io

import java.io._

import com.ning.compress.lzf.{LZFInputStream, LZFOutputStream}
import net.jpountz.lz4.LZ4BlockOutputStream
import org.xerial.snappy.{Snappy, SnappyInputStream, SnappyOutputStream}

import org.apache.spark.SparkConf
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.util.Utils

/**
 * :: DeveloperApi ::
 * CompressionCodec allows the customization of choosing different compression implementations
 * to be used in block storage.
 *
 * @note The wire protocol for a codec is not guaranteed compatible across versions of Spark.
 *       This is intended for use as an internal compression utility within a single Spark application.
 */
@DeveloperApi
trait CompressionCodec {

  def compressedOutputStream(s: OutputStream): OutputStream

  def compressedInputStream(s: InputStream): InputStream
}

/** Factory and registry for the built-in codecs (lz4, lzf, snappy). */
private[spark] object CompressionCodec {

  // Spark configuration key selecting the codec by short or fully-qualified name.
  private val configKey = "spark.io.compression.codec"

  private[spark] def supportsConcatenationOfSerializedStreams(codec: CompressionCodec): Boolean = {
    (codec.isInstanceOf[SnappyCompressionCodec] || codec.isInstanceOf[LZFCompressionCodec]
      || codec.isInstanceOf[LZ4CompressionCodec])
  }

  // Short alias -> implementation class name.
  private val shortCompressionCodecNames = Map(
    "lz4" -> classOf[LZ4CompressionCodec].getName,
    "lzf" -> classOf[LZFCompressionCodec].getName,
    "snappy" -> classOf[SnappyCompressionCodec].getName)

  def getCodecName(conf: SparkConf): String = {
    conf.get(configKey, DEFAULT_COMPRESSION_CODEC)
  }

  def createCodec(conf: SparkConf): CompressionCodec = {
    createCodec(conf, getCodecName(conf))
  }

  /**
   * Instantiates the codec named by `codecName` (short alias or class name)
   * reflectively via its single-SparkConf constructor.
   *
   * @throws IllegalArgumentException if the class cannot be found or constructed.
   */
  def createCodec(conf: SparkConf, codecName: String): CompressionCodec = {
    // NOTE(review): toLowerCase without an explicit Locale is locale-sensitive
    // (e.g. Turkish 'I') — confirm whether Locale.ROOT is wanted here.
    val codecClass = shortCompressionCodecNames.getOrElse(codecName.toLowerCase, codecName)
    val codec = try {
      val ctor = Utils.classForName(codecClass).getConstructor(classOf[SparkConf])
      Some(ctor.newInstance(conf).asInstanceOf[CompressionCodec])
    } catch {
      case e: ClassNotFoundException => None
      case e: IllegalArgumentException => None
    }
    codec.getOrElse(throw new IllegalArgumentException(s"Codec [$codecName] is not available. " +
      s"Consider setting $configKey=$FALLBACK_COMPRESSION_CODEC"))
  }

  /**
   * Return the short version of the given codec name.
   * If it is already a short name, just return it.
   */
  def getShortName(codecName: String): String = {
    if (shortCompressionCodecNames.contains(codecName)) {
      codecName
    } else {
      // Reverse lookup: class name -> alias.
      shortCompressionCodecNames
        .collectFirst { case (k, v) if v == codecName => k }
        .getOrElse { throw new IllegalArgumentException(s"No short name for codec $codecName.") }
    }
  }

  val FALLBACK_COMPRESSION_CODEC = "snappy"
  val DEFAULT_COMPRESSION_CODEC = "lz4"
  val ALL_COMPRESSION_CODECS = shortCompressionCodecNames.values.toSeq
}

/**
 * :: DeveloperApi ::
 * LZ4 implementation of [[org.apache.spark.io.CompressionCodec]].
 * Block size can be configured by `spark.io.compression.lz4.blockSize`.
 *
 * @note The wire protocol for this codec is not guaranteed to be compatible across versions
 *       of Spark. This is intended for use as an internal compression utility within a single Spark
 *       application.
 */
@DeveloperApi
class LZ4CompressionCodec(conf: SparkConf) extends CompressionCodec {

  override def compressedOutputStream(s: OutputStream): OutputStream = {
    val blockSize = conf.getSizeAsBytes("spark.io.compression.lz4.blockSize", "32k").toInt
    new LZ4BlockOutputStream(s, blockSize)
  }

  // NOTE(review): LZ4BlockInputStream is not in the imports above — presumably
  // a class in this same package; confirm it resolves.
  override def compressedInputStream(s: InputStream): InputStream = new LZ4BlockInputStream(s)
}

/**
 * :: DeveloperApi ::
 * LZF implementation of [[org.apache.spark.io.CompressionCodec]].
 *
 * @note The wire protocol for this codec is not guaranteed to be compatible across versions
 *       of Spark. This is intended for use as an internal compression utility within a single Spark
 *       application.
 */
@DeveloperApi
class LZFCompressionCodec(conf: SparkConf) extends CompressionCodec {

  override def compressedOutputStream(s: OutputStream): OutputStream = {
    new LZFOutputStream(s).setFinishBlockOnFlush(true)
  }

  override def compressedInputStream(s: InputStream): InputStream = new LZFInputStream(s)
}

/**
 * :: DeveloperApi ::
 * Snappy implementation of [[org.apache.spark.io.CompressionCodec]].
 * Block size can be configured by `spark.io.compression.snappy.blockSize`.
 *
 * @note The wire protocol for this codec is not guaranteed to be compatible across versions
 *       of Spark. This is intended for use as an internal compression utility within a single Spark
 *       application.
 */
@DeveloperApi
class SnappyCompressionCodec(conf: SparkConf) extends CompressionCodec {
  // Forces the native-library version probe at construction time; an
  // unavailable native library surfaces here as IllegalArgumentException.
  val version = SnappyCompressionCodec.version

  override def compressedOutputStream(s: OutputStream): OutputStream = {
    val blockSize = conf.getSizeAsBytes("spark.io.compression.snappy.blockSize", "32k").toInt
    new SnappyOutputStreamWrapper(new SnappyOutputStream(s, blockSize))
  }

  override def compressedInputStream(s: InputStream): InputStream = new SnappyInputStream(s)
}

/**
 * Object guards against memory leak bug in snappy-java library:
 * (https://github.com/xerial/snappy-java/issues/131).
 * Before a new version of the library, we only call the method once and cache the result.
 */
// NOTE(review): `final` is redundant on a Scala object.
private final object SnappyCompressionCodec {
  private lazy val version: String = try {
    Snappy.getNativeLibraryVersion
  } catch {
    case e: Error => throw new IllegalArgumentException(e)
  }
}

/**
 * Wrapper over `SnappyOutputStream` which guards against write-after-close and double-close
 * issues. See SPARK-7660 for more details. This wrapping can be removed if we upgrade to a version
 * of snappy-java that contains the fix for https://github.com/xerial/snappy-java/issues/107.
 */
private final class SnappyOutputStreamWrapper(os: SnappyOutputStream) extends OutputStream {

  private[this] var closed: Boolean = false

  override def write(b: Int): Unit = {
    if (closed) {
      throw new IOException("Stream is closed")
    }
    os.write(b)
  }

  override def write(b: Array[Byte]): Unit = {
    if (closed) {
      throw new IOException("Stream is closed")
    }
    os.write(b)
  }

  override def write(b: Array[Byte], off: Int, len: Int): Unit = {
    if (closed) {
      throw new IOException("Stream is closed")
    }
    os.write(b, off, len)
  }

  override def flush(): Unit = {
    if (closed) {
      throw new IOException("Stream is closed")
    }
    os.flush()
  }

  // Idempotent close: second and later calls are no-ops.
  override def close(): Unit = {
    if (!closed) {
      closed = true
      os.close()
    }
  }
}
sh-cho/cshSpark
io/CompressionCodec.scala
Scala
apache-2.0
7,429
package com.whisk.docker.testkit.scalatest

import java.util.concurrent.ForkJoinPool

import com.spotify.docker.client.{DefaultDockerClient, DockerClient}
import com.whisk.docker.testkit._
import org.scalatest.{Args, Status, Suite, SuiteMixin}

import scala.concurrent.ExecutionContext
import scala.language.implicitConversions

/**
 * ScalaTest mixin that starts the suite's managed Docker containers before any
 * test runs and stops them after all tests finish (once per suite).
 *
 * Mix into a [[Suite]] and provide [[managedContainers]]; override
 * [[afterStart]] / [[beforeStop]] for custom setup/teardown hooks.
 */
trait DockerTestKitForAll extends SuiteMixin { self: Suite =>

  // Docker client configured from the environment (DOCKER_HOST etc.).
  val dockerClient: DockerClient = DefaultDockerClient.fromEnv().build()

  // Dedicated pool for container management work.
  val dockerExecutionContext: ExecutionContext = ExecutionContext.fromExecutor(new ForkJoinPool())

  // Abstract: the containers this suite depends on.
  val managedContainers: ManagedContainers

  val dockerTestTimeouts: DockerTestTimeouts = DockerTestTimeouts.Default

  implicit lazy val dockerExecutor: ContainerCommandExecutor =
    new ContainerCommandExecutor(dockerClient)

  lazy val containerManager = new DockerContainerManager(
    managedContainers,
    dockerExecutor,
    dockerTestTimeouts,
    dockerExecutionContext
  )

  // Wraps the whole suite run: start containers -> afterStart -> tests ->
  // beforeStop -> stop. The nested try/finally guarantees containers are
  // stopped even if the tests or beforeStop() throw.
  abstract override def run(testName: Option[String], args: Args): Status = {
    containerManager.start()
    afterStart()
    try {
      super.run(testName, args)
    } finally {
      try {
        beforeStop()
      } finally {
        containerManager.stop()
      }
    }
  }

  /** Hook invoked after the containers have been started, before tests run. */
  def afterStart(): Unit = {}

  /** Hook invoked after tests finish, before the containers are stopped. */
  def beforeStop(): Unit = {}
}
whisklabs/docker-it-scala
scalatest/src/main/scala/com/whisk/docker/testkit/scalatest/DockerTestKitForAll.scala
Scala
mit
1,301
/*
 * Copyright 2013 - 2015, Daniel Krzywicki <daniel.krzywicki@agh.edu.pl>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package pl.edu.agh.scalamas.emas

import pl.edu.agh.scalamas.genetic.GeneticOps
import pl.edu.agh.scalamas.mas.LogicTypes
import pl.edu.agh.scalamas.mas.LogicTypes.{Population, Behaviour}

/**
 * Holder for the types of agents and behaviours used in EMAS.
 */
object EmasTypes {

  // An EMAS agent: an immutable solution/fitness pair plus a mutable energy level.
  case class Agent[G <: GeneticOps[G]](val solution: G#Solution, val fitness: G#Evaluation, var energy: Int) extends LogicTypes.Agent

  // Behaviours, each with a capacity limiting how many agents may take part.
  case class Death(capacity: Int) extends Behaviour

  case class Fight(capacity: Int) extends Behaviour

  case class Reproduction(capacity: Int) extends Behaviour

  // Filters a population down to EMAS agents.
  // NOTE(review): the match on Agent[G] is type-erased — G is not checked at
  // runtime, only that the element is an EmasTypes.Agent; confirm acceptable.
  def checked[G <: GeneticOps[G]](pop: Population) = pop.collect { case a: EmasTypes.Agent[G] => a}
}
ros3n/IntOb
emas/src/main/scala/pl/edu/agh/scalamas/emas/EmasTypes.scala
Scala
mit
1,838
/*
 * The MIT License
 *
 * Copyright (c) 2016 Fulcrum Genomics LLC
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package dagr.cmdline

import dagr.core.cmdline.DagrCoreMain

/** Command-line entry point for dagr. */
object DagrMain {
  /** The packages searched for command-line tasks. */
  protected def getPackageList: List[String] = List("dagr")

  /** Delegates to [[DagrCoreMain]] and terminates the JVM with its status code. */
  def main(args: Array[String]): Unit = {
    val status = new DagrCoreMain().makeItSo(args, packageList = getPackageList)
    System.exit(status)
  }
}
fulcrumgenomics/dagr
src/main/scala/dagr/cmdline/DagrMain.scala
Scala
mit
1,512
package org.jetbrains.plugins.scala.lang.psi

import org.jetbrains.plugins.scala.lang.psi.types.recursiveUpdate.ScSubstitutor
import org.jetbrains.plugins.scala.lang.resolve.ScalaResolveResult

/**
 * @author Nikolay.Tropin
 */
package object implicits {

  // A resolve result paired with the substitutor to apply to its types.
  type Candidate = (ScalaResolveResult, ScSubstitutor)
}
jastice/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/implicits/package.scala
Scala
apache-2.0
314
package tmvault.util

import org.junit._

/** Round-trip test for [[SHA1Hash]]'s text rendering and parsing. */
class SHA1HashTest {

  /** For many payloads: hash -> toString -> parse must yield an equal hash. */
  @Test
  def toStringParseRoundtripTest() : Unit = {
    (0 until 1000).map(_.toString.getBytes("UTF-8")).foreach { payload =>
      val original = SHA1Hash.hash(payload)
      val rendered = original.toString
      val reparsed = SHA1Hash.parse(rendered)
      assert(original == reparsed)
    }
  }
}
rklaehn/tmvault
tmvault/src/test/scala/tmvault/util/SHA1HashTest.scala
Scala
apache-2.0
369
package spark

import java.util.concurrent.LinkedBlockingQueue
import java.util.concurrent.TimeUnit

import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet, Map}

// A task created by the DAG scheduler. Knows its stage ID and map output tracker generation.
abstract class DAGTask[T](val stageId: Int) extends Task[T] {
  // Snapshot of the map-output tracker generation at task-creation time.
  val gen = SparkEnv.get.mapOutputTracker.getGeneration
  override def generation: Option[Long] = Some(gen)
}

// A completion event passed by the underlying task scheduler to the DAG scheduler
case class CompletionEvent(task: DAGTask[_], reason: TaskEndReason, result: Any, accumUpdates: Map[Long, Any])

// Various possible reasons why a DAG task ended. The underlying scheduler is supposed
// to retry tasks several times for "ephemeral" failures, and only report back failures
// that require some old stages to be resubmitted, such as shuffle map fetch failures.
sealed trait TaskEndReason
case object Success extends TaskEndReason
case class FetchFailed(serverUri: String, shuffleId: Int, mapId: Int, reduceId: Int) extends TaskEndReason
case class OtherFailure(message: String) extends TaskEndReason

/**
 * A Scheduler subclass that implements stage-oriented scheduling. It computes
 * a DAG of stages for each job, keeps track of which RDDs and stage outputs
 * are materialized, and computes a minimal schedule to run the job. Subclasses
 * only need to implement the code to send a task to the cluster and to report
 * fetch failures (the submitTasks method, and code to add CompletionEvents).
 */
private trait DAGScheduler extends Scheduler with Logging {
  // Must be implemented by subclasses to start running a set of tasks
  def submitTasks(tasks: Seq[Task[_]]): Unit

  // Must be called by subclasses to report task completions or failures.
  // Events are queued and consumed by the runJob event loop below.
  def taskEnded(task: Task[_], reason: TaskEndReason, result: Any, accumUpdates: Map[Long, Any]) {
    val dagTask = task.asInstanceOf[DAGTask[_]]
    completionEvents.put(CompletionEvent(dagTask, reason, result, accumUpdates))
  }

  // The time, in millis, to wait for fetch failure events to stop coming in after
  // one is detected; this is a simplistic way to avoid resubmitting tasks in the
  // non-fetchable map stage one by one as more failure events come in
  val RESUBMIT_TIMEOUT = 2000L

  // The time, in millis, to wake up between polls of the completion queue
  // in order to potentially resubmit failed stages
  val POLL_TIMEOUT = 500L

  // Queue feeding completion/failure events from taskEnded into runJob's loop.
  private val completionEvents = new LinkedBlockingQueue[CompletionEvent]

  // Monotonically increasing stage-id counter (not thread-safe).
  var nextStageId = 0

  def newStageId() = {
    var res = nextStageId
    nextStageId += 1
    res
  }

  val idToStage = new HashMap[Int, Stage]
  // Maps a shuffle id to the map-side stage that produces its output.
  val shuffleToMapStage = new HashMap[Int, Stage]
  // Per-RDD cached-partition locations; refreshed via updateCacheLocs().
  var cacheLocs = new HashMap[Int, Array[List[String]]]

  val cacheTracker = SparkEnv.get.cacheTracker
  val mapOutputTracker = SparkEnv.get.mapOutputTracker

  def getCacheLocs(rdd: RDD[_]): Array[List[String]] = {
    cacheLocs(rdd.id)
  }

  // Refresh the cache-location snapshot from the cache tracker.
  def updateCacheLocs() {
    cacheLocs = cacheTracker.getLocationsSnapshot()
  }

  // Get or lazily create the map stage for a shuffle dependency.
  def getShuffleMapStage(shuf: ShuffleDependency[_,_,_]): Stage = {
    shuffleToMapStage.get(shuf.shuffleId) match {
      case Some(stage) => stage
      case None =>
        val stage = newStage(shuf.rdd, Some(shuf))
        shuffleToMapStage(shuf.shuffleId) = stage
        stage
    }
  }

  def newStage(rdd: RDD[_], shuffleDep: Option[ShuffleDependency[_,_,_]]): Stage = {
    // Kind of ugly: need to register RDDs with the cache here since
    // we can't do it in its constructor because # of splits is unknown
    cacheTracker.registerRDD(rdd.id, rdd.splits.size)
    val id = newStageId()
    val stage = new Stage(id, rdd, shuffleDep, getParentStages(rdd))
    idToStage(id) = stage
    stage
  }

  // Walk the dependency graph of `rdd` and collect the map stages feeding it.
  // Traversal stops at shuffle boundaries (each becomes a parent stage).
  def getParentStages(rdd: RDD[_]): List[Stage] = {
    val parents = new HashSet[Stage]
    val visited = new HashSet[RDD[_]]
    def visit(r: RDD[_]) {
      if (!visited(r)) {
        visited += r
        // Kind of ugly: need to register RDDs with the cache here since
        // we can't do it in its constructor because # of splits is unknown
        cacheTracker.registerRDD(r.id, r.splits.size)
        for (dep <- r.dependencies) {
          dep match {
            case shufDep: ShuffleDependency[_,_,_] =>
              parents += getShuffleMapStage(shufDep)
            case _ =>
              visit(dep.rdd)
          }
        }
      }
    }
    visit(rdd)
    parents.toList
  }

  // Find parent stages whose shuffle output is not yet fully available.
  // Partitions already in the cache (locs(p) != Nil) are not descended into.
  def getMissingParentStages(stage: Stage): List[Stage] = {
    val missing = new HashSet[Stage]
    val visited = new HashSet[RDD[_]]
    def visit(rdd: RDD[_]) {
      if (!visited(rdd)) {
        visited += rdd
        val locs = getCacheLocs(rdd)
        for (p <- 0 until rdd.splits.size) {
          if (locs(p) == Nil) {
            for (dep <- rdd.dependencies) {
              dep match {
                case shufDep: ShuffleDependency[_,_,_] =>
                  val stage = getShuffleMapStage(shufDep)
                  if (!stage.isAvailable) {
                    missing += stage
                  }
                case narrowDep: NarrowDependency[_] =>
                  visit(narrowDep.rdd)
              }
            }
          }
        }
      }
    }
    visit(stage.rdd)
    missing.toList
  }

  // Runs an action: builds the stage DAG, submits runnable stages, and drives
  // the completion-event loop until every requested partition has a result.
  // NOTE(review): this blocks the calling thread until the job finishes.
  override def runJob[T, U](finalRdd: RDD[T], func: (TaskContext, Iterator[T]) => U, partitions: Seq[Int], allowLocal: Boolean)
      (implicit m: ClassManifest[U])
      : Array[U] = {
    val outputParts = partitions.toArray
    val numOutputParts: Int = partitions.size
    val finalStage = newStage(finalRdd, None)
    val results = new Array[U](numOutputParts)
    val finished = new Array[Boolean](numOutputParts)
    var numFinished = 0

    val waiting = new HashSet[Stage] // stages we need to run whose parents aren't done
    val running = new HashSet[Stage] // stages we are running right now
    val failed = new HashSet[Stage]  // stages that must be resubmitted due to fetch failures
    val pendingTasks = new HashMap[Stage, HashSet[Task[_]]] // missing tasks from each stage
    var lastFetchFailureTime: Long = 0 // used to wait a bit to avoid repeated resubmits

    updateCacheLocs()

    logInfo("Final stage: " + finalStage)
    logInfo("Parents of final stage: " + finalStage.parents)
    logInfo("Missing parents: " + getMissingParentStages(finalStage))

    // Optimization for short actions like first() and take() that can be computed locally
    // without shipping tasks to the cluster.
    if (allowLocal && finalStage.parents.size == 0 && numOutputParts == 1) {
      logInfo("Computing the requested partition locally")
      val split = finalRdd.splits(outputParts(0))
      val taskContext = new TaskContext(finalStage.id, outputParts(0), 0)
      return Array(func(taskContext, finalRdd.iterator(split)))
    }

    // Submit `stage` if its parents are done; otherwise submit the parents
    // first and park `stage` in `waiting`.
    def submitStage(stage: Stage) {
      if (!waiting(stage) && !running(stage)) {
        val missing = getMissingParentStages(stage)
        if (missing == Nil) {
          logInfo("Submitting " + stage + ", which has no missing parents")
          submitMissingTasks(stage)
          running += stage
        } else {
          for (parent <- missing) {
            submitStage(parent)
          }
          waiting += stage
        }
      }
    }

    // Build and submit tasks for every not-yet-finished partition of `stage`.
    def submitMissingTasks(stage: Stage) {
      // Get our pending tasks and remember them in our pendingTasks entry
      val myPending = pendingTasks.getOrElseUpdate(stage, new HashSet)
      var tasks = ArrayBuffer[Task[_]]()
      if (stage == finalStage) {
        for (id <- 0 until numOutputParts if (!finished(id))) {
          val part = outputParts(id)
          val locs = getPreferredLocs(finalRdd, part)
          tasks += new ResultTask(finalStage.id, finalRdd, func, part, locs, id)
        }
      } else {
        for (p <- 0 until stage.numPartitions if stage.outputLocs(p) == Nil) {
          val locs = getPreferredLocs(stage.rdd, p)
          tasks += new ShuffleMapTask(stage.id, stage.rdd, stage.shuffleDep.get, p, locs)
        }
      }
      myPending ++= tasks
      submitTasks(tasks)
    }

    submitStage(finalStage)

    // Main event loop: consume completion events until all outputs are in.
    while (numFinished != numOutputParts) {
      val evt = completionEvents.poll(POLL_TIMEOUT, TimeUnit.MILLISECONDS)
      val time = System.currentTimeMillis // TODO: use a pluggable clock for testability

      // If we got an event off the queue, mark the task done or react to a fetch failure
      if (evt != null) {
        val stage = idToStage(evt.task.stageId)
        pendingTasks(stage) -= evt.task
        if (evt.reason == Success) {
          // A task ended
          logInfo("Completed " + evt.task)
          Accumulators.add(evt.accumUpdates)
          evt.task match {
            case rt: ResultTask[_, _] =>
              results(rt.outputId) = evt.result.asInstanceOf[U]
              finished(rt.outputId) = true
              numFinished += 1
            case smt: ShuffleMapTask =>
              val stage = idToStage(smt.stageId)
              // evt.result is the server URI where the map output landed.
              stage.addOutputLoc(smt.partition, evt.result.asInstanceOf[String])
              if (pendingTasks(stage).isEmpty) {
                logInfo(stage + " finished; looking for newly runnable stages")
                running -= stage
                if (stage.shuffleDep != None) {
                  mapOutputTracker.registerMapOutputs(
                    stage.shuffleDep.get.shuffleId,
                    stage.outputLocs.map(_.head).toArray)
                }
                updateCacheLocs()
                val newlyRunnable = new ArrayBuffer[Stage]
                for (stage <- waiting if getMissingParentStages(stage) == Nil) {
                  newlyRunnable += stage
                }
                waiting --= newlyRunnable
                running ++= newlyRunnable
                for (stage <- newlyRunnable) {
                  submitMissingTasks(stage)
                }
              }
          }
        } else {
          evt.reason match {
            case FetchFailed(serverUri, shuffleId, mapId, reduceId) =>
              // Mark the stage that the reducer was in as unrunnable
              val failedStage = idToStage(evt.task.stageId)
              running -= failedStage
              failed += failedStage
              // TODO: Cancel running tasks in the stage
              // NOTE(review): "resubmision" typo below is in a runtime log string; left as-is.
              logInfo("Marking " + failedStage + " for resubmision due to a fetch failure")
              // Mark the map whose fetch failed as broken in the map stage
              val mapStage = shuffleToMapStage(shuffleId)
              mapStage.removeOutputLoc(mapId, serverUri)
              mapOutputTracker.unregisterMapOutput(shuffleId, mapId, serverUri)
              logInfo("The failed fetch was from " + mapStage + "; marking it for resubmission")
              failed += mapStage
              // Remember that a fetch failed now; this is used to resubmit the broken
              // stages later, after a small wait (to give other tasks the chance to fail)
              lastFetchFailureTime = time
              // TODO: If there are a lot of fetch failures on the same node, maybe mark all
              // outputs on the node as dead.
            case _ =>
              // Non-fetch failure -- probably a bug in the job, so bail out
              throw new SparkException("Task failed: " + evt.task + ", reason: " + evt.reason)
              // TODO: Cancel all tasks that are still running
          }
        }
      } // end if (evt != null)

      // If fetches have failed recently and we've waited for the right timeout,
      // resubmit all the failed stages
      if (failed.size > 0 && time > lastFetchFailureTime + RESUBMIT_TIMEOUT) {
        logInfo("Resubmitting failed stages")
        updateCacheLocs()
        for (stage <- failed) {
          submitStage(stage)
        }
        failed.clear()
      }
    }

    return results
  }

  // Preferred locations for one partition: cache locations first, then the
  // RDD's own placement preferences, then (recursively) the first narrow
  // dependency that has any preference.
  def getPreferredLocs(rdd: RDD[_], partition: Int): List[String] = {
    // If the partition is cached, return the cache locations
    val cached = getCacheLocs(rdd)(partition)
    if (cached != Nil) {
      return cached
    }
    // If the RDD has some placement preferences (as is the case for input RDDs), get those
    val rddPrefs = rdd.preferredLocations(rdd.splits(partition)).toList
    if (rddPrefs != Nil) {
      return rddPrefs
    }
    // If the RDD has narrow dependencies, pick the first partition of the first narrow dep
    // that has any placement preferences. Ideally we would choose based on transfer sizes,
    // but this will do for now.
    rdd.dependencies.foreach(_ match {
      case n: NarrowDependency[_] =>
        for (inPart <- n.getParents(partition)) {
          val locs = getPreferredLocs(n.rdd, inPart)
          if (locs != Nil)
            return locs;
        }
      case _ =>
    })
    return Nil
  }
}
jperla/spark-advancers
core/src/main/scala/spark/DAGScheduler.scala
Scala
bsd-3-clause
12,822
import bio._
import org.scalatest.FlatSpec
import org.scalatest.matchers.ShouldMatchers

package bio.test {

  /** Spec exercising PamlWriter against an aligned nucleotide FASTA fixture. */
  class PamlWriterSpec extends FlatSpec with ShouldMatchers {
    import bio._
    import bio.DNA._
    import java.io._

    "PamlWriter" should "write PAML file" in {
      // Read the fixture alignment into gapped sequences.
      val reader = new FastaReader("./test/data/fasta/nt_aln.fa")
      val sequences = reader.map { case (id, tag, dna) =>
        new GappedSequence(id, tag, dna)
      }.toList

      val outFile = File.createTempFile("BioScala-PAML-", ".phy")
      import bio.io.Control._
      using(new FileOutputStream(outFile)) { stream =>
        // 2x write
        new PamlWriter(stream).write(sequences)
        new PamlWriter(stream).write(sequences)
      }

      sequences.head.id should equal ("PITG_04081T0")
    }
  } // Spec class
} // bio.test
shamim8888/bioscala
src/test/scala/bio/db/paml/pamlwriter_spec.scala
Scala
bsd-2-clause
841
//
// EditScriptCRGMW96.scala -- Scala object EditScriptCRGMW96
// Project OrcScala
//
// $Id: EditScriptCRGMW96.scala 2933 2011-12-15 16:26:02Z jthywissen $
//
// Copyright (c) 2011 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//

package orc.lib.progswap

import orc.ast.AST

import scala.collection.mutable.{ ArrayBuffer, HashMap, Map }
import scala.collection.mutable.MapLike
import scala.collection.generic.MutableMapFactory
import scala.collection.generic.CanBuildFrom
import scala.collection.mutable.Buffer

/** Create an edit script describing the operations necessary to
  * modify a given AST into another given AST.
  *
  * Implemented using the EditScript algorithm in:
  *
  * Chawathe, S. S., Rajaraman, A., Garcia-Molina, H., and Widom, J. 1996.
  * Change detection in hierarchically structured information. In Proceedings
  * of the 1996 ACM SIGMOD International Conference on Management of Data
  * (Montreal, Quebec, Canada, 04–06 Jun 1996). ACM, 493–504.
  *
  * @author jthywiss
  */
object EditScriptCRGMW96 {

  /** Create an AstEditScript describing the operations necessary to
    * modify <code>oldOilAst</code> into <code>newOilAst</code>.
    *
    * The old tree's bookkeeping maps are mutated in place as the script is
    * built, so that positions computed later reflect earlier edits.
    *
    * @param oldOilAst root of the tree to be transformed
    * @param newOilAst root of the target tree
    * @param matching  pre-computed old-node/new-node correspondence pairs
    * @return the computed AstEditScript
    */
  def computeEditScript[A <: AST, B <: AST](oldOilAst: A, newOilAst: B, matching: Seq[(A, B)]): AstEditScript = {
    /* Old -> new and new -> old partial maps */
    val matchOldNew = IdentityHashMap(matching.toBuffer: _*)
    val matchNewOld = matchOldNew map { case (o, n) => (n, o) }
    val editScript = new AstEditScript()

    /* Memoize a mutable children map and parent map for each AST node */
    def tabulateRelations[N <: AST](parentTable: Map[N, N], childrenTable: Map[N, Buffer[N]], parent: N) {
      val children = parent.subtrees.toBuffer.asInstanceOf[Buffer[N]]
      childrenTable += ((parent, children))
      for (child <- children) {
        parentTable += ((child, parent))
        tabulateRelations(parentTable, childrenTable, child)
      }
    }
    val parentOld = IdentityHashMap[A, A]()
    val childrenOld = IdentityHashMap[A, Buffer[A]]()
    tabulateRelations(parentOld, childrenOld, oldOilAst)
    val parentNew = IdentityHashMap[B, B]()
    val childrenNew = IdentityHashMap[B, Buffer[B]]()
    tabulateRelations(parentNew, childrenNew, newOilAst)

    /* Set of nodes that are known to be in their new position */
    val inOrder = new IdentityHashSet[AST]()

    /* Determine the new location of newNode among its siblings.
     * Returns 0 when newNode is the leftmost in-order sibling; otherwise
     * one past the index of the match of its nearest in-order left sibling. */
    def findPos(newNode: B): Int = {
      val siblingsNew = childrenNew(parentNew(newNode))
      var previousInOrderSibling = siblingsNew.head
      val iter = siblingsNew.view.filter({ inOrder.contains(_) }).iterator
      while (iter.hasNext && (previousInOrderSibling ne newNode)) {
        previousInOrderSibling = iter.next
      }
      if (previousInOrderSibling eq newNode) return 0
      val matchOfPreviousInOrderSibling = matchNewOld(previousInOrderSibling)
      val i = childrenOld(parentOld(matchOfPreviousInOrderSibling)).filter((inOrder.contains(_))).toSeq.indexOf(matchOfPreviousInOrderSibling)
      i + 1
    }

    /* Insert, Update, and Move phase: breadth-first over the NEW tree */
    for (newNode <- traverseBreadthFirst(newOilAst, childrenNew)) {
      var oldNode = matchNewOld.get(newNode)
      if (oldNode.isEmpty) {
        // Unmatched new node: emit an insert and graft it into the old tree.
        val k = findPos(newNode)
        oldNode = Some(newNode.asInstanceOf[A]) // A bit of a hack, but an oldNode is needed
        val matchOfParentNewNode = matchNewOld(parentNew(newNode))
        editScript += new InsertNode(newNode, matchOfParentNewNode, parentNew(newNode), k)
        /* Update old tree to reflect insertion */
        matchOldNew += ((oldNode.get, newNode))
        matchNewOld += ((newNode, oldNode.get))
        parentOld += ((oldNode.get, matchOfParentNewNode))
        childrenOld += ((oldNode.get, ArrayBuffer.empty))
        val siblings = childrenOld(matchOfParentNewNode)
        siblings.insert(k, oldNode.get)
        childrenOld.update(matchOfParentNewNode, siblings)
      } else if (newNode ne newOilAst) {
        // Matched non-root node: emit a replace, plus a move if its parent changed.
        val parentOldNode = parentOld(oldNode.get)
        val matchOfParentNewNode = matchNewOld(parentNew(newNode))
        editScript += new ReplaceNode(oldNode.get, newNode)
        if (matchOfParentNewNode != parentOldNode) {
          val k = findPos(newNode)
          editScript += new MoveNode(oldNode.get, newNode, matchOfParentNewNode, parentNew(newNode), k)
          /* Update old tree to reflect move */
          parentOld.update(oldNode.get, matchOfParentNewNode)
          childrenOld.update(parentOldNode, childrenOld(parentOldNode).filterNot(_ eq oldNode.get))
          val siblings = childrenOld(matchOfParentNewNode)
          siblings.insert(k, oldNode.get)
          childrenOld.update(matchOfParentNewNode, siblings)
        }
      } else {
        // The only operation that is available on the tree roots is "replace"
        editScript += new ReplaceNode(oldOilAst, newOilAst)
      }
      /* Align children of old/new nodes: LCS of matched children determines
       * which are already "in order"; the rest are moved into place. */
      val childrenOldNode = childrenOld(oldNode.get)
      val childrenNewNode = childrenNew(newNode)
      val s1 = childrenOldNode.filter({ n: A => matchOldNew.isDefinedAt(n) && childrenNewNode.exists(_ == matchOldNew(n)) }).toIndexedSeq
      val s2 = childrenNewNode.filter({ n: B => matchNewOld.isDefinedAt(n) && childrenOldNode.exists(_ == matchNewOld(n)) }).toIndexedSeq
      val p = LCSMyers86.lcs(s1, s2, { matchOldNew(_: A) eq (_: B) })
      p._1.foreach(inOrder.add(_))
      p._2.foreach(inOrder.add(_))
      for (oldChild <- s1 if !inOrder.contains(oldChild)) {
        val newChild = matchOldNew(oldChild)
        val k = findPos(newChild)
        editScript += new MoveNode(oldChild, newChild, oldNode.get, newNode, k)
        /* Update old tree to reflect move */
        childrenOldNode -= (oldChild)
        childrenOldNode.insert(k, oldChild)
        inOrder.add(oldChild)
        inOrder.add(matchOldNew(oldChild))
      }
      childrenOld.update(oldNode.get, childrenOldNode)
    }

    /* Delete phase: any old node still unmatched is removed (post-order,
     * so children are deleted before their parents) */
    for (oldNode <- traversePostOrder(oldOilAst, childrenOld)) {
      if (matchOldNew.get(oldNode).isEmpty) {
        editScript += new DeleteNode(oldNode, parentOld(oldNode))
      }
    }

    /* Assert matchNewOld is total */
    assert((matchNewOld.keys.toSeq diff traversePostOrder(newOilAst, childrenNew).toSeq).isEmpty)

    editScript
  }

  //TODO: Rewrite these as lazy.
  // Post-order traversal: children (recursively) before the node itself.
  def traversePostOrder[A](node: A, childMap: A => Traversable[A]): Traversable[A] = {
    val children = childMap(node).toSeq
    (children flatMap { traversePostOrder(_, childMap) }) :+ node
  }

  // Breadth-first traversal using the queue itself as the result buffer.
  def traverseBreadthFirst[A](root: A, childMap: A => TraversableOnce[A]): Traversable[A] = {
    val q = ArrayBuffer[A]()
    q += root
    /* Note q.iterator, q.forEach, q.map, etc., and for statements
     * don't work when the underlying collection changes. */
    var i = 0
    while (i < q.size) {
      val node = q(i)
      i += 1
      q ++= childMap(node)
    }
    q
  }
}

/** A mutable map from keys of type A to values of type B.
  * Keys are compared for equality using reference equality (<code>AnyRef.eq</code>),
  * in contrast to other Map classes, which use semantic equality (<code>Any.==</code>).
  * Analogously, identity hash codes are used for hashing.
  *
  * NOTE(review): this overrides HashMap's protected elemHashCode/elemEquals
  * hooks, which are internal to the 2.x collections — verify they still exist
  * on the Scala version in use before upgrading.
  *
  * @author jthywiss
  */
class IdentityHashMap[A, B] extends HashMap[A, B] with MapLike[A, B, IdentityHashMap[A, B]] {
  override def empty: IdentityHashMap[A, B] = new IdentityHashMap[A, B]()
  override protected def elemHashCode(key: A) = System.identityHashCode(key)
  override protected def elemEquals(key1: A, key2: A): Boolean = key1.asInstanceOf[AnyRef] eq key2.asInstanceOf[AnyRef]
  override def par = throw new UnsupportedOperationException("IdentityHashMap.par not implemented")
}

/** Provides a set of operations to create IdentityHashMap values.
  *
  * @author jthywiss
  */
object IdentityHashMap extends MutableMapFactory[IdentityHashMap] {
  implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), IdentityHashMap[A, B]] = new MapCanBuildFrom[A, B]
  override def empty[A, B]: IdentityHashMap[A, B] = new IdentityHashMap[A, B]()
}

/** A mutable set of values of type A.
  * Values are compared for equality using reference equality (<code>AnyRef.eq</code>),
  * in contrast to other Set classes, which use semantic equality (<code>Any.==</code>).
  * Analogously, identity hash codes are used for hashing.
  *
  * [Minimal implementation, only <code>add</code> and <code>contains</code> methods.]
  *
  * @author jthywiss
  */
class IdentityHashSet[A] {
  // Backing identity map; each element maps to itself.
  val map = IdentityHashMap[A, A]()
  // Returns true if the element was newly added.
  def add(elem: A): Boolean = { map.put(elem, elem).isEmpty }
  def contains(elem: A): Boolean = map.contains(elem)
}
laurenyew/cOrcS
src/orc/lib/progswap/EditScriptCRGMW96.scala
Scala
bsd-3-clause
9,018
/*
 * The MIT License
 *
 * Copyright (c) 2017 Fulcrum Genomics LLC
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package dagr.tasks.fgbio

import dagr.core.execsystem.{Cores, ResourceSet}
import dagr.core.tasksystem.{Pipe, VariableResources}
import dagr.tasks.DagrDef.PathToBam
import dagr.tasks.DataTypes.SamOrBam

import scala.collection.mutable.ListBuffer

/** Task wrapping fgbio's CallDuplexConsensusReads tool, with a variable
  * thread count negotiated between `minThreads` and `maxThreads`. */
class CallDuplexConsensusReads(val in: PathToBam,
                               val out: PathToBam,
                               val readNamePrefix: Option[String] = None,
                               val readGroupId: Option[String] = None,
                               val errorRatePreUmi: Option[Int] = None,
                               val errorRatePostUmi: Option[Int] = None,
                               val minInputBaseQuality: Option[Int] = None,
                               val minReads: Seq[Int] = Seq.empty,
                               val maxReadsPerStrand: Option[Int] = None,
                               val minThreads: Int = 1,
                               val maxThreads: Int = 32
                              ) extends FgBioTask with VariableResources with Pipe[SamOrBam,SamOrBam] {

  /** Requests between `minThreads` and `maxThreads` cores, keeping the task's memory requirement. */
  override def pickResources(resources: ResourceSet): Option[ResourceSet] = {
    val minCores = Cores(minThreads)
    val maxCores = Cores(maxThreads)
    resources.subset(minCores=minCores, maxCores=maxCores, memory=this.resources.memory)
  }

  /** Builds the fgbio command-line arguments in tool order. */
  override protected def addFgBioArgs(buffer: ListBuffer[Any]): Unit = {
    // Emit "<flag> <value>" only when the optional value is present.
    def appendOpt(flag: String, value: Option[Any]): Unit = value.foreach(v => buffer.append(flag, v))

    buffer.append("-i", in)
    buffer.append("-o", out)
    appendOpt("-p", readNamePrefix)
    appendOpt("-R", readGroupId)
    appendOpt("-1", errorRatePreUmi)
    appendOpt("-2", errorRatePostUmi)
    appendOpt("-m", minInputBaseQuality)
    if (minReads.nonEmpty) {
      buffer.append("-M")
      buffer.append(minReads:_*)
    }
    appendOpt("--max-reads-per-strand", maxReadsPerStrand)
    buffer.append("--threads", resources.cores.toInt)
  }
}
fulcrumgenomics/dagr
tasks/src/main/scala/dagr/tasks/fgbio/CallDuplexConsensusReads.scala
Scala
mit
3,177
/*
 *  RotateWindow.scala
 *  (FScape)
 *
 *  Copyright (c) 2001-2022 Hanns Holger Rutz. All rights reserved.
 *
 *  This software is published under the GNU Affero General Public License v3+
 *
 *
 *  For further information, please contact Hanns Holger Rutz at
 *  contact@sciss.de
 */

package de.sciss.fscape
package graph

import de.sciss.fscape.Graph.{ProductReader, RefMapIn}
import de.sciss.fscape.UGenSource.unwrap
import de.sciss.fscape.stream.{StreamIn, StreamOut}

import scala.collection.immutable.{IndexedSeq => Vec}

// Companion: deserializes a RotateWindow from the graph reader format.
object RotateWindow extends ProductReader[RotateWindow] {
  override def read(in: RefMapIn, key: String, arity: Int): RotateWindow = {
    // Exactly the three constructor fields are expected in serialized form.
    require (arity == 3)
    val _in     = in.readGE()
    val _size   = in.readGE()
    val _amount = in.readGE()
    new RotateWindow(_in, _size, _amount)
  }
}
/** A UGen that rotates the contents of a window, wrapping around its boundaries.
  * For example, it can be used to align the phases prior to FFT so that the sample
  * that was formerly in the centre of the window moves to the beginning of the window.
  *
  * @param in      the signal to window and resize
  * @param size    the input window size
  * @param amount  the rotation amount in sample frames. Positive values "move" the contents
  *                to the right, negative values "move" the contents to the left. The amount
  *                is taken modulus `size`.
  */
final case class RotateWindow(in: GE, size: GE, amount: GE = 0) extends UGenSource.SingleOut {

  // Expand the three graph-element inputs into UGen inputs.
  protected def makeUGens(implicit b: UGenGraph.Builder): UGenInLike =
    unwrap(this, Vector(in.expand, size.expand, amount.expand))

  protected def makeUGen(args: Vec[UGenIn])(implicit b: UGenGraph.Builder): UGenInLike =
    UGen.SingleOut(this, args)

  // Wires the stream-level implementation; `import in.tpe` supplies the
  // element type evidence for the dependently-typed stream constructor.
  private[fscape] def makeStream(args: Vec[StreamIn])(implicit b: stream.Builder): StreamOut = {
    val Vec(in, size, amount) = args: @unchecked
    import in.tpe
    val out = stream.RotateWindow[in.A, in.Buf](in = in.toElem, size = size.toInt, amount = amount.toInt)
    tpe.mkStreamOut(out)
  }
}
Sciss/FScape-next
core/shared/src/main/scala/de/sciss/fscape/graph/RotateWindow.scala
Scala
agpl-3.0
2,062
/* Copyright 2016-2019 UniCredit S.p.A.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package unicredit.lethe
package client

import boopickle.Default._

import crypto.NoCrypter
import serialization.BooSerializer
import transport.Remote

/** A [[StandardClient]] configured with boopickle-based serialization and a
  * pass-through crypter (NoCrypter), i.e. it stores/transmits values without
  * encryption. Requires a boopickle `Pickler` for the payload type `A`.
  */
class UnencryptedClient[A: Pickler](remote: Remote)
  extends StandardClient[A](new BooSerializer[A], new NoCrypter, remote)
unicredit/lethe
src/main/scala/unicredit/lethe/client/UnencryptedClient.scala
Scala
apache-2.0
862
package com.cterm2.miniflags import net.minecraft.nbt._ import net.minecraft.world._ import net.minecraft.client.multiplayer.WorldClient import net.minecraft.entity.player.{EntityPlayer, EntityPlayerMP} import net.minecraft.util.ChatComponentText import cpw.mods.fml.relauncher.{Side, SideOnly} import net.minecraftforge.common.DimensionManager import scalaz._, Scalaz._ import common.EnumColor final case class Coordinate(x: Int, y: Int, z: Int) final case class Link(src: Coordinate, dest: Coordinate) final case class Term(pos: Coordinate, color: EnumColor.Type, var name: Option[String]) // Client Side Structure of Links between Flags object ClientLinkManager { import scala.collection.mutable._ private var _terms = MutableList[Term]() private var _links = MutableList[Link]() def links = this._links //* readonly "links" def terms = this._terms //* readonly "terms" def addTerm(pos: Coordinate, color: EnumColor.Type) { ModInstance.logger.info(s"Added Terminal at (${pos.x}, ${pos.y}, ${pos.z}) with color ${color.getClass.getSimpleName}") this._terms += Term(pos, color, None) } def updateName(pos: Coordinate, str: String) { ModInstance.logger.info(s"Client Side UpdateName: $str") this._terms filter { case Term(tp, _, _) => tp == pos } foreach (_.name = Some(str)) } def addLink(src: Coordinate, dest: Coordinate) { this._links += Link(src, dest) } def unregisterTerm(p: Coordinate) { this._links = this.links filterNot { case Link(s, d) => s == p || d == p } this._terms = this.terms filterNot { case Term(tp, _, _) => tp == p } } def initialize() { this._links = MutableList[Link]() this._terms = MutableList[Term]() } def getLinkDestination(coord: Coordinate) = this.links collect { case Link(cs, d) if cs == coord => d } headOption def getTermFromID(world: World, id: Long) = { val coord = Coordinate(TileData.xCoordFromID(id), TileData.yCoordFromID(id), TileData.zCoordFromID(id)) this.terms find { case Term(pos, _, _) => coord == pos } } def getTermFromIDStr(world: World, id: 
String) = getTermFromID(world, java.lang.Long.parseUnsignedLong(id)) } // Server Side Structure of Links and Object Coordinates of Flags object ObjectManager { final val ID = ModInstance.ID + "_FlagObjectManager" def instanceForWorld(world: World) = { assert(!world.isRemote) val mappedID = s"${ID}-${world.provider.dimensionId}" (Option(world.loadItemData(classOf[ObjectManager], mappedID)) match { case Some(instance: ObjectManager) => Some(instance) case None => Option(new ObjectManager(mappedID)) map (x => { world.setItemData(mappedID, x); x }) case _ => None }) map (_.setWorld(world)) } } final class ObjectManager(id: String) extends WorldSavedData(id) { import com.cterm2.tetra.ActiveNBTRecord._, ObjectManager._ import scalaz._, Scalaz._ private var worldObj: World = null def setWorld(w: World) = { this.worldObj = w; this } private def getTag(c: Any) = c match { case Term(Coordinate(x, y, z), color, name) => val tag = new NBTTagCompound tag("x") = x; tag("y") = y; tag("z") = z; tag("cint") = color.value; tag("name") = name tag case Link(Coordinate(sx, sy, sz), Coordinate(dx, dy, dz)) => val tag = new NBTTagCompound tag("xSrc") = sx; tag("ySrc") = sy; tag("zSrc") = sz tag("xDest") = dx; tag("yDest") = dy; tag("zDest") = dz tag } private def getTermFromTag(tag: NBTTagCompound) = (tag[Int]("x") :: tag[Int]("y") :: tag[Int]("z") :: tag[Int]("cint") :: Nil).sequence map { case List(x, y, z, cint) => Term(Coordinate(x, y, z), EnumColor fromValue cint, tag[String]("name")) } private def getLinkFromTag(tag: NBTTagCompound) = (tag[Int]("xSrc") :: tag[Int]("ySrc") :: tag[Int]("zSrc") :: tag[Int]("xDest") :: tag[Int]("yDest") :: tag[Int]("zDest") :: Nil).sequence map { case List(sx, sy, sz, dx, dy, dz) => Link(Coordinate(sx, sy, sz), Coordinate(dx, dy, dz)) } private var _terminals = Seq[Term]() private var _links = Seq[Link]() // Read-only Uniform Accessing and Auto Marking as Dirty def terminals = this._terminals def links = this._links private def terminals_=(list: 
Seq[Term]) { this._terminals = list; this.markDirty() } private def links_=(list: Seq[Link]) { this._links = list; this.markDirty() } def register(x: Int, y: Int, z: Int, color: EnumColor.Type) { this.terminals = this.terminals :+ Term(Coordinate(x, y, z), color, None) ModInstance.logger.info(s"Registered Terminal at ($x, $y, $z) with color ${color.getClass.getSimpleName}") intercommands.NewTerm(Coordinate(x, y, z), color) broadcastIn worldObj.provider.dimensionId } def updateName(pos: Coordinate, str: String) { this.terminals filter { case Term(tp, _, _) => tp == pos } foreach (_.name = Some(str)) Option(this.worldObj.getTileEntity(pos.x, pos.y, pos.z)) foreach { case x: TileData => x.name = str case _ => ModInstance.logger.warn("TileData not found") } } def unregister(x: Int, y: Int, z: Int) { val crd = Coordinate(x, y, z) this.terminals = this.terminals filterNot { case Term(pos, _, _) => pos == crd } this.links = this.links filterNot { case Link(cs, cd) => cs == crd || cd == crd } intercommands.UnregisterTerm(crd) broadcastIn worldObj.provider.dimensionId } def link(player: EntityPlayer, sx: Int, sy: Int, sz: Int, dx: Int, dy: Int, dz: Int) { val (src, dst) = (Coordinate(sx, sy, sz), Coordinate(dx, dy, dz)) val termCoords = this.terminals map { case Term(pos, _, _) => pos } if((termCoords contains src) && (termCoords contains dst)) { this.links = this.links :+ Link(src, dst) intercommands.NewLink(src, dst) broadcastIn player.dimension player.addChatComponentMessage(new ChatComponentText(s"Successfully linked from ($sx, $sy, $sz) to ($dx, $dy, $dz)")) common.playLinkedSound(this.worldObj, sx, sy, sz) } else ModInstance.logger.warn(s"Invalid Linking from ($sx, $sy, $sz) to ($dx, $dy, $dz)") } def getLinkDestinationFrom(x: Int, y: Int, z: Int) = getLinkDestinationFrom_(Coordinate(x, y, z)) def getLinkedCoordinate(x: Int, y: Int, z: Int) = getLinkedCoordinate_(Coordinate(x, y, z)) // impl // private def getLinkDestinationFrom_(coord: Coordinate) = this.links find { 
case Link(src, _) => src == coord } map { case Link(_, t) => t } private def getLinkedCoordinate_(coord: Coordinate) = this.links collect { case Link(src, dest) if src == coord => dest case Link(src, dest) if dest == coord => src } headOption override def writeToNBT(tag: NBTTagCompound) { // ModInstance.logger.info("Saving World Flag Data...") val tagTerminals = new NBTTagList val tagLinks = new NBTTagList this.terminals map getTag foreach tagTerminals.appendTag this.links map getTag foreach tagLinks.appendTag tag("Terminals") = tagTerminals tag("Links") = tagLinks } override def readFromNBT(tag: NBTTagCompound) { // ModInstance.logger.info("Loading World Flag Data...") for(terminals <- tag[NBTTagList]("Terminals")) { this.terminals = ((0 until terminals.tagCount).toList map terminals.getCompoundTagAt map getTermFromTag).sequence | Nil } for(links <- tag[NBTTagList]("Links")) { this.links = ((0 until links.tagCount).toList map links.getCompoundTagAt map getLinkFromTag).sequence | Nil } } def synchronizeAllLinks(player: EntityPlayerMP) { intercommands.InitializeDimension dispatchTo player for(Term(p, c, n) <- this.terminals) { intercommands.NewTerm(p, c) dispatchTo player n foreach { intercommands.UpdateFlagName(p, _) dispatchTo player } } for(Link(src, dest) <- this.links) { intercommands.NewLink(src, dest) dispatchTo player } } }
Pctg-x8/miniflags
src/objectManager.scala
Scala
lgpl-2.1
7,636
package org.jetbrains.plugins.scala.tasty import dotty.tools.tasty.TastyFormat.NameTags // dotty.tools.tasty.TastyFormat.NameTags // dotty.tools.dotc.core.NameKinds // dotty.tools.dotc.core.StdNames private class TermName(private val value: String) extends AnyVal { override def toString: String = value def asSimpleName: TermName = new TermName(value) } // TODO Read structure & tag instead of a plain String? // TODO Handle specific tags object TermName { val EmptyTermName: TermName = new TermName("<empty>") def simpleNameKindOfTag(tag: Int)(name: TermName): TermName = new TermName(name.value + "$") def uniqueNameKindOfSeparator(separator: String)(original: TermName, num: Int): TermName = new TermName(separator + num) def numberedNameKindOfTag(tag: Int)(name: TermName, num: Int): TermName = new TermName(name.value + "$default$" + num) def qualifiedNameKindOfTag(tag: Int)(qualifier: TermName, simpleName: TermName): TermName = new TermName(qualifier.value + "." + simpleName.value) def SignedName(original: TermName, sig: String, target: TermName): TermName = new TermName(original.value + sig) }
JetBrains/intellij-scala
tasty/runtime/src/org/jetbrains/plugins/scala/tasty/TermName.scala
Scala
apache-2.0
1,128
/* * Copyright 2021 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.hmrc.ct.ct600j.v3 import org.joda.time.LocalDate import uk.gov.hmrc.ct.box._ import uk.gov.hmrc.ct.ct600j.v3.retriever.CT600JBoxRetriever case class J45A(value: Option[LocalDate]) extends SchemeDateBox{ override def validate(boxRetriever: CT600JBoxRetriever): Set[CtValidation] = validateSchemeDate(boxRetriever.j40(), boxRetriever.j40A(), boxRetriever.j45()) }
hmrc/ct-calculations
src/main/scala/uk/gov/hmrc/ct/ct600j/v3/J45A.scala
Scala
apache-2.0
992
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.carbondata.examples import org.apache.carbondata.core.constants.CarbonCommonConstants import org.apache.carbondata.core.util.CarbonProperties import org.apache.carbondata.examples.util.ExampleUtils object DataUpdateDeleteExample { def main(args: Array[String]) { val cc = ExampleUtils.createCarbonContext("DataUpdateDeleteExample") val testData = ExampleUtils.currentPath + "/src/main/resources/data.csv" val testData1 = ExampleUtils.currentPath + "/src/main/resources/data_update.csv" // Specify date format based on raw data CarbonProperties.getInstance() .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "yyyy/MM/dd") cc.sql("DROP TABLE IF EXISTS t3") cc.sql("DROP TABLE IF EXISTS update_table") // Create table, 6 dimensions, 1 measure cc.sql(""" CREATE TABLE IF NOT EXISTS t3 (ID Int, date Date, country String, name String, phonetype String, serialname char(10), salary Int) STORED BY 'carbondata' """) cc.sql(s""" LOAD DATA LOCAL INPATH '$testData' INTO TABLE t3 """) // 1.Update data with simple SET cc.sql(""" SELECT * FROM t3 ORDER BY ID """).show() // Update data where salary < 15003 cc.sql(""" UPDATE t3 SET (t3.country) = ('india') WHERE t3.salary < 15003 """).show() cc.sql(""" 
UPDATE t3 SET (t3.salary) = (t3.salary + 9) WHERE t3.name = 'aaa1' """).show() // Query data again after the above update cc.sql(""" SELECT * FROM t3 ORDER BY ID """).show() // 2.Update data with subquery result SET cc.sql(""" CREATE TABLE IF NOT EXISTS update_table (ID Int, country String, name String, phonetype String, serialname char(10), salary Int) STORED BY 'carbondata' """) cc.sql(s""" LOAD DATA LOCAL INPATH '$testData1' INTO TABLE update_table """) cc.sql(""" UPDATE t3 SET (t3.country, t3.name) = (SELECT u.country, u.name FROM update_table u WHERE u.id = 5) WHERE t3.id < 5""").show() // Query data again after the above update cc.sql(""" SELECT * FROM t3 ORDER BY ID """).show() // 3.Update data with join query result SET cc.sql(""" UPDATE t3 SET (t3.country, t3.salary) = (SELECT u.country, f.salary FROM update_table u FULL JOIN update_table f WHERE u.id = 8 and f.id=6) WHERE t3.id >6""").show() // Query data again after the above update cc.sql(""" SELECT * FROM t3 ORDER BY ID """).show() // 4.Delete data where salary > 15005 cc.sql(""" DELETE FROM t3 WHERE salary > 15005 """).show() // Query data again after delete data cc.sql(""" SELECT * FROM t3 ORDER BY ID """).show() // Drop table cc.sql("DROP TABLE IF EXISTS t3") cc.sql("DROP TABLE IF EXISTS update_table") } }
ksimar/incubator-carbondata
examples/spark/src/main/scala/org/apache/carbondata/examples/DataUpdateDeleteExample.scala
Scala
apache-2.0
3,872
package com.maxmouchet.vamk.timetables.parser.table.sources import com.maxmouchet.vamk.timetables.parser.table.HTMLTableParser class RemoteTableSource(parser: HTMLTableParser) extends TableSource { def getTable: Array[Array[String]] = parser.parse }
OpenLamas/vamk-timetables
lib/scala/src/main/scala/com/maxmouchet/vamk/timetables/parser/table/sources/RemoteTableSource.scala
Scala
mit
254
package is.hail import is.hail.backend.spark.SparkBackend import org.testng.annotations.Test class HailContextSuite extends HailSuite { @Test def testGetOrCreate(): Unit = { val backend = SparkBackend.getOrCreate() val hc2 = HailContext.getOrCreate(backend) assert(hc == hc2) } }
hail-is/hail
hail/src/test/scala/is/hail/HailContextSuite.scala
Scala
mit
298
package spire.math import java.lang.Math import scala.annotation.tailrec import scala.math.{ScalaNumber, ScalaNumericConversions} import scala.{specialized => sp} import spire.algebra._ import spire.syntax.field._ import spire.syntax.isReal._ import spire.syntax.nroot._ object Quaternion extends QuaternionInstances { def i[@sp(Float, Double) A](implicit f: Rig[A]): Quaternion[A] = Quaternion(f.zero, f.one, f.zero, f.zero) def j[@sp(Float, Double) A](implicit f: Rig[A]): Quaternion[A] = Quaternion(f.zero, f.zero, f.one, f.zero) def k[@sp(Float, Double) A](implicit f: Rig[A]): Quaternion[A] = Quaternion(f.zero, f.zero, f.zero, f.one) def zero[@sp(Float, Double) A](implicit f: Semiring[A]): Quaternion[A] = Quaternion(f.zero, f.zero, f.zero, f.zero) def one[@sp(Float, Double) A](implicit f: Rig[A]): Quaternion[A] = Quaternion(f.one, f.zero, f.zero, f.zero) def apply[@sp(Float, Double) A](a: A)(implicit f: Semiring[A]): Quaternion[A] = Quaternion(a, f.zero, f.zero, f.zero) def apply[@sp(Float, Double) A](c: Complex[A])(implicit f: Semiring[A]): Quaternion[A] = Quaternion(c.real, c.imag, f.zero, f.zero) } // really a skew field private[math] trait QuaternionAlgebra[A] extends Field[Quaternion[A]] with Eq[Quaternion[A]] with NRoot[Quaternion[A]] with InnerProductSpace[Quaternion[A], A] with FieldAlgebra[Quaternion[A], A] { implicit def f: Fractional[A] implicit def t: Trig[A] implicit def r: IsReal[A] def eqv(x: Quaternion[A], y: Quaternion[A]): Boolean = x == y override def neqv(x: Quaternion[A], y: Quaternion[A]): Boolean = x != y override def minus(a: Quaternion[A], b: Quaternion[A]): Quaternion[A] = a - b def negate(a: Quaternion[A]): Quaternion[A] = -a def one: Quaternion[A] = Quaternion.one[A] def plus(a: Quaternion[A], b: Quaternion[A]): Quaternion[A] = a + b override def pow(a: Quaternion[A], b: Int): Quaternion[A] = a.pow(b) override def times(a: Quaternion[A], b: Quaternion[A]): Quaternion[A] = a * b def zero: Quaternion[A] = Quaternion.zero[A] def div(a: 
Quaternion[A], b: Quaternion[A]): Quaternion[A] = a / b def quot(a: Quaternion[A], b: Quaternion[A]): Quaternion[A] = a /~ b def mod(a: Quaternion[A], b: Quaternion[A]): Quaternion[A] = a % b def gcd(a: Quaternion[A], b: Quaternion[A]): Quaternion[A] = { @tailrec def _gcd(a: Quaternion[A], b: Quaternion[A]): Quaternion[A] = if (b.isZero) a else _gcd(b, a - (a / b).round * b) _gcd(a, b) } def nroot(a: Quaternion[A], k: Int): Quaternion[A] = a.nroot(k) override def sqrt(a: Quaternion[A]): Quaternion[A] = a.sqrt def fpow(a: Quaternion[A], b: Quaternion[A]): Quaternion[A] = a.fpow(b.r) //FIXME def timesl(a: A, q: Quaternion[A]): Quaternion[A] = q * a def dot(x: Quaternion[A], y: Quaternion[A]): A = x.dot(y) } trait QuaternionInstances { implicit def QuaternionAlgebra[A](implicit fr: Fractional[A], tr: Trig[A], isr: IsReal[A]) = new QuaternionAlgebra[A] { val f = fr val t = tr val r = isr def scalar = f def nroot = f } } final case class Quaternion[@sp(Float, Double) A](r: A, i: A, j: A, k: A) extends ScalaNumber with ScalaNumericConversions with Serializable { lhs => // junky ScalaNumber stuff override def byteValue: Byte = longValue.toByte override def shortValue: Short = longValue.toShort def intValue: Int = longValue.toInt override def longValue: Long = anyToLong(r) def floatValue: Float = doubleValue.toFloat def doubleValue: Double = anyToDouble(r) private[this] def sillyIsReal: Boolean = anyIsZero(i) && anyIsZero(j) && anyIsZero(k) def underlying: Object = this def isWhole: Boolean = sillyIsReal && anyIsWhole(r) override final def isValidInt: Boolean = sillyIsReal && anyIsValidInt(r) // important to keep in sync with Complex[_] override def hashCode: Int = if (sillyIsReal) r.## else (19 * r.##) + (41 * i.##) + (13 * j.##) + (77 * k.##) + 97 // not typesafe, so this is the best we can do :( override def equals(that: Any): Boolean = that match { case that: Quaternion[_] => r == that.r && i == that.i && j == that.j && k == that.k case that: Complex[_] => r == 
that.real && i == that.imag && anyIsZero(j) && anyIsZero(k) case that => sillyIsReal && r == that } def isZero(implicit o: IsReal[A]): Boolean = r.isZero && i.isZero && j.isZero && k.isZero def isReal(implicit o: IsReal[A]): Boolean = i.isZero && j.isZero && k.isZero def isPure(implicit o: IsReal[A]): Boolean = r.isZero def real(implicit s: Semiring[A]): Quaternion[A] = Quaternion(r) def pure(implicit s: Semiring[A]): Quaternion[A] = Quaternion(s.zero, i, j, k) def abs(implicit f: Field[A], o: IsReal[A], n: NRoot[A]): A = (r.pow(2) + i.pow(2) + j.pow(2) + k.pow(2)).sqrt def pureAbs(implicit f: Field[A], o: IsReal[A], n: NRoot[A]): A = (i.pow(2) + j.pow(2) + k.pow(2)).sqrt def eqv(rhs: Quaternion[A])(implicit o: Eq[A]): Boolean = lhs.r === rhs.r && lhs.i === rhs.i && lhs.j === rhs.j && lhs.k === rhs.k def neqv(rhs: Quaternion[A])(implicit o: Eq[A]): Boolean = lhs.r =!= rhs.r && lhs.i =!= rhs.i && lhs.j =!= rhs.j && lhs.k =!= rhs.k override def toString: String = s"($r + ${i}i + ${j}j + ${k}k)" def toComplex: Complex[A] = Complex(r, i) def signum(implicit o: IsReal[A]): Int = r.signum match { case 0 => i.signum match { case 0 => j.signum match { case 0 => k.signum case n => n } case n => n } case n => n } def quaternionSignum(implicit f: Field[A], o: IsReal[A], n: NRoot[A]): Quaternion[A] = if (isZero) this else this / abs def pureSignum(implicit f: Field[A], o: IsReal[A], n: NRoot[A]): Quaternion[A] = if (isReal) Quaternion.zero[A] else (pure / pureAbs) def unary_-(implicit s: Rng[A]): Quaternion[A] = Quaternion(-r, -i, -j, -k) def conjugate(implicit s: Rng[A]): Quaternion[A] = Quaternion(r, -i, -j, -k) def reciprocal(implicit f: Field[A]): Quaternion[A] = conjugate / (r.pow(2) + i.pow(2) + j.pow(2) + k.pow(2)) def sqrt(implicit f: Field[A], o: IsReal[A], n0: NRoot[A]): Quaternion[A] = if (!isReal) { val n = (r + abs).sqrt Quaternion(n, i / n, j / n, k / n) / f.fromInt(2).sqrt } else if (r.signum >= 0) { Quaternion(r.sqrt) } else { Quaternion(f.zero, r.abs.sqrt, 
f.zero, f.zero) } def nroot(m: Int)(implicit f: Field[A], o: IsReal[A], n0: NRoot[A], tr: Trig[A]): Quaternion[A] = if (m <= 0) { throw new IllegalArgumentException(s"illegal root: $m") } else if (m == 1) { this } else if (!isReal) { val s = pureAbs val n = abs val t = acos(r / n) val v = Quaternion(f.zero, i / s, j / s, k / s) val e = if (sin(t).signum >= 0) v else -v val tm = t / m (e * sin(tm) + cos(tm)) * n.nroot(m) } else if (r.signum >= 0) { Quaternion(r.nroot(m)) } else { Quaternion(Complex(r).nroot(m)) } def unit(implicit f: Field[A], o: IsReal[A], n: NRoot[A]): Quaternion[A] = Quaternion(r.pow(2), i.pow(2), j.pow(2), k.pow(2)) / abs def +(rhs: A)(implicit s: Semiring[A]): Quaternion[A] = Quaternion(r + rhs, i, j, k) def +(rhs: Complex[A])(implicit s: Semiring[A]): Quaternion[A] = Quaternion(r + rhs.real, i + rhs.imag, j, k) def +(rhs: Quaternion[A])(implicit s: Semiring[A]): Quaternion[A] = Quaternion(lhs.r + rhs.r, lhs.i + rhs.i, lhs.j + rhs.j, lhs.k + rhs.k) def -(rhs: A)(implicit s: Rng[A]): Quaternion[A] = Quaternion(r - rhs, i, j, k) def -(rhs: Complex[A])(implicit s: Rng[A]): Quaternion[A] = Quaternion(r - rhs.real, i - rhs.imag, j, k) def -(rhs: Quaternion[A])(implicit s: Rng[A]): Quaternion[A] = Quaternion(lhs.r - rhs.r, lhs.i - rhs.i, lhs.j - rhs.j, lhs.k - rhs.k) def *(rhs: A)(implicit s: Semiring[A]): Quaternion[A] = Quaternion(r * rhs, i * rhs, j * rhs, k * rhs) def *(rhs: Complex[A])(implicit s: Rng[A]): Quaternion[A] = Quaternion( (r * rhs.real) - (i * rhs.imag), (r * rhs.imag) + (i * rhs.real), (j * rhs.real) + (k * rhs.imag), (j * rhs.imag) + (k * rhs.real) ) def *(rhs: Quaternion[A])(implicit s: Rng[A]): Quaternion[A] = Quaternion( (lhs.r * rhs.r) - (lhs.i * rhs.i) - (lhs.j * rhs.j) - (lhs.k * rhs.k), (lhs.r * rhs.i) + (lhs.i * rhs.r) + (lhs.j * rhs.k) - (lhs.k * rhs.j), (lhs.r * rhs.j) - (lhs.i * rhs.k) + (lhs.j * rhs.r) + (lhs.k * rhs.i), (lhs.r * rhs.k) + (lhs.i * rhs.j) - (lhs.j * rhs.i) + (lhs.k * rhs.r) ) def /(rhs: A)(implicit f: 
Field[A]): Quaternion[A] = Quaternion(r / rhs, i / rhs, j / rhs, k / rhs) def /(rhs: Complex[A])(implicit f: Field[A]): Quaternion[A] = lhs * Quaternion(rhs).reciprocal def /(rhs: Quaternion[A])(implicit f: Field[A]): Quaternion[A] = lhs * rhs.reciprocal def pow(k: Int)(implicit s: Ring[A]): Quaternion[A] = { @tailrec def loop(p: Quaternion[A], b: Quaternion[A], e: Int): Quaternion[A] = if (e == 0) p else if ((e & 1) == 1) loop(p * b, b * b, e >>> 1) else loop(p, b * b, e >>> 1) if (k >= 0) loop(Quaternion.one[A], this, k) else throw new IllegalArgumentException(s"illegal exponent: $k") } def **(k: Int)(implicit s: Ring[A]): Quaternion[A] = pow(k) def fpow(k0: A)(implicit f: Field[A], o: IsReal[A], n0: NRoot[A], tr: Trig[A]): Quaternion[A] = if (k0.signum < 0) { Quaternion.zero } else if (k0 == f.zero) { Quaternion.one } else if (k0 == f.one) { this } else if (!isReal) { val s = (i ** 2 + j ** 2 + k ** 2).sqrt val v = Quaternion(f.zero, i / s, j / s, k / s) val n = abs val t = acos(r / n) (Quaternion(cos(t * k0)) + v * sin(t * k0)) * n.fpow(k0) } else if (r.signum >= 0) { Quaternion(r.fpow(k0)) } else { Quaternion(Complex(r).pow(Complex(k0))) } def floor(implicit o: IsReal[A]): Quaternion[A] = Quaternion(r.floor, i.floor, j.floor, k.floor) def ceil(implicit o: IsReal[A]): Quaternion[A] = Quaternion(r.ceil, i.ceil, j.ceil, k.ceil) def round(implicit o: IsReal[A]): Quaternion[A] = Quaternion(r.round, i.round, j.round, k.round) // TODO: instead of floor for /~, should be round-toward-zero def /~(rhs: A)(implicit f: Field[A], o: IsReal[A]): Quaternion[A] = (lhs / rhs).floor def /~(rhs: Complex[A])(implicit f: Field[A], o: IsReal[A]): Quaternion[A] = (lhs / rhs).floor def /~(rhs: Quaternion[A])(implicit f: Field[A], o: IsReal[A]): Quaternion[A] = (lhs / rhs).floor def %(rhs: A)(implicit f: Field[A], o: IsReal[A]): Quaternion[A] = lhs - (lhs /~ rhs) def %(rhs: Complex[A])(implicit f: Field[A], o: IsReal[A]): Quaternion[A] = lhs - (lhs /~ rhs) def %(rhs: 
Quaternion[A])(implicit f: Field[A], o: IsReal[A]): Quaternion[A] = lhs - (lhs /~ rhs) def /%(rhs: A)(implicit f: Field[A], o: IsReal[A]): (Quaternion[A], Quaternion[A]) = { val q = lhs /~ rhs (q, lhs - q) } def /%(rhs: Complex[A])(implicit f: Field[A], o: IsReal[A]): (Quaternion[A], Quaternion[A]) = { val q = lhs /~ rhs (q, lhs - q) } def /%(rhs: Quaternion[A])(implicit f: Field[A], o: IsReal[A]): (Quaternion[A], Quaternion[A]) = { val q = lhs /~ rhs (q, lhs - q) } def dot(rhs: Quaternion[A])(implicit f: Field[A]): A = (lhs.conjugate * rhs + rhs.conjugate * lhs).r / f.fromInt(2) }
lrytz/spire
core/src/main/scala/spire/math/Quaternion.scala
Scala
mit
11,446
package org.scaladebugger.tool.commands import org.scaladebugger.api.utils.JDITools import org.scaladebugger.test.helpers.ParallelMockFunSpec import org.scalatest.concurrent.Eventually import test.{ToolConstants, ToolFixtures, ToolTestUtilities} class IgnoreBothCommandIntegrationSpec extends ParallelMockFunSpec with ToolFixtures with ToolTestUtilities with Eventually { implicit override val patienceConfig = PatienceConfig( timeout = scaled(ToolConstants.EventuallyTimeout), interval = scaled(ToolConstants.EventuallyInterval) ) describe("IgnoreBothCommand") { it("should delete a specific pending exception request by class name") { val testClass = "org.scaladebugger.test.exceptions.InsideTryBlockException" val testFile = JDITools.scalaClassStringToFileString(testClass) val testExceptionName = "org.scaladebugger.test.exceptions.CustomException" val testFakeExceptionName = "org.invalid.exception" // Create exception requests before connecting to the JVM val q = "\\"" val virtualTerminal = newVirtualTerminal() virtualTerminal.newInputLine(s"catch $q$testExceptionName$q") virtualTerminal.newInputLine(s"catchc $q$testExceptionName$q") virtualTerminal.newInputLine(s"catchu $q$testExceptionName$q") virtualTerminal.newInputLine(s"catch $q$testFakeExceptionName$q") virtualTerminal.newInputLine(s"catchc $q$testFakeExceptionName$q") virtualTerminal.newInputLine(s"catchu $q$testFakeExceptionName$q") withToolRunningUsingTerminal( className = testClass, virtualTerminal = virtualTerminal ) { (vt, sm, start) => logTimeTaken({ // Verify our exception requests were made eventually { val svm = sm.state.scalaVirtualMachines.head val ers = svm.exceptionRequests.map(er => (er.className, er.notifyCaught, er.notifyUncaught, er.isPending)) ers should contain theSameElementsAs Seq( (testExceptionName, true, true, false), (testExceptionName, true, false, false), (testExceptionName, false, true, false), (testFakeExceptionName, true, true, true), (testFakeExceptionName, true, false, true), 
(testFakeExceptionName, false, true, true) ) } // Delete the exception requests that are pending vt.newInputLine(s"ignore $q$testFakeExceptionName$q") // Verify our exception requests were deleted eventually { val svm = sm.state.scalaVirtualMachines.head val ers = svm.exceptionRequests.map(er => (er.className, er.notifyCaught, er.notifyUncaught, er.isPending)) ers should contain theSameElementsAs Seq( (testExceptionName, true, true, false), (testExceptionName, true, false, false), (testExceptionName, false, true, false) ) } }) } } it("should delete a specific active exception request by class name") { val testClass = "org.scaladebugger.test.exceptions.InsideTryBlockException" val testFile = JDITools.scalaClassStringToFileString(testClass) val testExceptionName = "org.scaladebugger.test.exceptions.CustomException" val testFakeExceptionName = "org.invalid.exception" // Create exception requests before connecting to the JVM val q = "\\"" val virtualTerminal = newVirtualTerminal() virtualTerminal.newInputLine(s"catch $q$testExceptionName$q") virtualTerminal.newInputLine(s"catchc $q$testExceptionName$q") virtualTerminal.newInputLine(s"catchu $q$testExceptionName$q") virtualTerminal.newInputLine(s"catch $q$testFakeExceptionName$q") virtualTerminal.newInputLine(s"catchc $q$testFakeExceptionName$q") virtualTerminal.newInputLine(s"catchu $q$testFakeExceptionName$q") withToolRunningUsingTerminal( className = testClass, virtualTerminal = virtualTerminal ) { (vt, sm, start) => logTimeTaken({ // Verify our exception requests were made eventually { val svm = sm.state.scalaVirtualMachines.head val ers = svm.exceptionRequests.map(er => (er.className, er.notifyCaught, er.notifyUncaught, er.isPending)) ers should contain theSameElementsAs Seq( (testExceptionName, true, true, false), (testExceptionName, true, false, false), (testExceptionName, false, true, false), (testFakeExceptionName, true, true, true), (testFakeExceptionName, true, false, true), (testFakeExceptionName, false, 
true, true) ) } // Delete the exception requests that are active vt.newInputLine(s"ignore $q$testExceptionName$q") // Verify our exception requests were deleted eventually { val svm = sm.state.scalaVirtualMachines.head val ers = svm.exceptionRequests.map(er => (er.className, er.notifyCaught, er.notifyUncaught, er.isPending)) ers should contain theSameElementsAs Seq( (testFakeExceptionName, true, true, true), (testFakeExceptionName, true, false, true), (testFakeExceptionName, false, true, true) ) } }) } } it("should delete all pending and active exception requests matching a wildcard") { val testClass = "org.scaladebugger.test.exceptions.InsideTryBlockException" val testFile = JDITools.scalaClassStringToFileString(testClass) val testExceptionName = "org.scaladebugger.test.exceptions.CustomException" val testFakeExceptionName = "org.invalid.exception" // Create exception requests before connecting to the JVM val q = "\\"" val virtualTerminal = newVirtualTerminal() virtualTerminal.newInputLine(s"catch $q$testExceptionName$q") virtualTerminal.newInputLine(s"catchc $q$testExceptionName$q") virtualTerminal.newInputLine(s"catchu $q$testExceptionName$q") virtualTerminal.newInputLine(s"catch $q$testFakeExceptionName$q") virtualTerminal.newInputLine(s"catchc $q$testFakeExceptionName$q") virtualTerminal.newInputLine(s"catchu $q$testFakeExceptionName$q") withToolRunningUsingTerminal( className = testClass, virtualTerminal = virtualTerminal ) { (vt, sm, start) => logTimeTaken({ // Verify our exception requests were made eventually { val svm = sm.state.scalaVirtualMachines.head val ers = svm.exceptionRequests.map(er => (er.className, er.notifyCaught, er.notifyUncaught, er.isPending)) ers should contain theSameElementsAs Seq( (testExceptionName, true, true, false), (testExceptionName, true, false, false), (testExceptionName, false, true, false), (testFakeExceptionName, true, true, true), (testFakeExceptionName, true, false, true), (testFakeExceptionName, false, true, true) ) } // 
Delete the exception requests vt.newInputLine("ignore \\"org.*\\"") // Verify our exception requests were deleted eventually { val svm = sm.state.scalaVirtualMachines.head val ers = svm.exceptionRequests.map(er => (er.className, er.notifyCaught, er.notifyUncaught, er.isPending)) ers should be (empty) } }) } } it("should delete all pending and active exception requests if no class name given") { val testClass = "org.scaladebugger.test.exceptions.InsideTryBlockException" val testFile = JDITools.scalaClassStringToFileString(testClass) val testExceptionName = "org.scaladebugger.test.exceptions.CustomException" val testFakeExceptionName = "org.invalid.exception" // Create exception requests before connecting to the JVM val q = "\\"" val virtualTerminal = newVirtualTerminal() virtualTerminal.newInputLine(s"catch $q$testExceptionName$q") virtualTerminal.newInputLine(s"catchc $q$testExceptionName$q") virtualTerminal.newInputLine(s"catchu $q$testExceptionName$q") virtualTerminal.newInputLine(s"catch $q$testFakeExceptionName$q") virtualTerminal.newInputLine(s"catchc $q$testFakeExceptionName$q") virtualTerminal.newInputLine(s"catchu $q$testFakeExceptionName$q") withToolRunningUsingTerminal( className = testClass, virtualTerminal = virtualTerminal ) { (vt, sm, start) => logTimeTaken({ // Verify our exception requests were made eventually { val svm = sm.state.scalaVirtualMachines.head val ers = svm.exceptionRequests.map(er => (er.className, er.notifyCaught, er.notifyUncaught, er.isPending)) ers should contain theSameElementsAs Seq( (testExceptionName, true, true, false), (testExceptionName, true, false, false), (testExceptionName, false, true, false), (testFakeExceptionName, true, true, true), (testFakeExceptionName, true, false, true), (testFakeExceptionName, false, true, true) ) } // Delete the exception requests vt.newInputLine(s"ignore") // Verify our exception requests were deleted eventually { val svm = sm.state.scalaVirtualMachines.head val ers = 
svm.exceptionRequests.map(er => (er.className, er.notifyCaught, er.notifyUncaught, er.isPending)) ers should be (empty) } }) } } } }
chipsenkbeil/scala-debugger
scala-debugger-tool/src/it/scala/org/scaladebugger/tool/commands/IgnoreBothCommandIntegrationSpec.scala
Scala
apache-2.0
9,891
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.deploy.yarn import java.io.File import java.nio.ByteBuffer import java.util.Collections import scala.collection.JavaConverters._ import scala.collection.mutable.{HashMap, ListBuffer} import org.apache.hadoop.fs.Path import org.apache.hadoop.io.DataOutputBuffer import org.apache.hadoop.security.UserGroupInformation import org.apache.hadoop.yarn.api._ import org.apache.hadoop.yarn.api.ApplicationConstants.Environment import org.apache.hadoop.yarn.api.records._ import org.apache.hadoop.yarn.client.api.NMClient import org.apache.hadoop.yarn.conf.YarnConfiguration import org.apache.hadoop.yarn.ipc.YarnRPC import org.apache.hadoop.yarn.util.{ConverterUtils, Records} import org.apache.spark.{SecurityManager, SparkConf, SparkException} import org.apache.spark.internal.Logging import org.apache.spark.internal.config._ import org.apache.spark.network.util.JavaUtils import org.apache.spark.util.Utils private[yarn] class ExecutorRunnable( container: Option[Container], conf: YarnConfiguration, sparkConf: SparkConf, masterAddress: String, executorId: String, hostname: String, executorMemory: Int, executorCores: Int, appId: String, securityMgr: SecurityManager, localResources: Map[String, 
LocalResource]) extends Logging { var rpc: YarnRPC = YarnRPC.create(conf) var nmClient: NMClient = _ def run(): Unit = { logDebug("Starting Executor Container") nmClient = NMClient.createNMClient() nmClient.init(conf) nmClient.start() startContainer() } def launchContextDebugInfo(): String = { val commands = prepareCommand() val env = prepareEnvironment() s""" |=============================================================================== |YARN executor launch context: | env: |${Utils.redact(sparkConf, env.toSeq).map { case (k, v) => s" $k -> $v\\n" }.mkString} | command: | ${commands.mkString(" \\\\ \\n ")} | | resources: |${localResources.map { case (k, v) => s" $k -> $v\\n" }.mkString} |===============================================================================""".stripMargin } def startContainer(): java.util.Map[String, ByteBuffer] = { val ctx = Records.newRecord(classOf[ContainerLaunchContext]) .asInstanceOf[ContainerLaunchContext] val env = prepareEnvironment().asJava ctx.setLocalResources(localResources.asJava) ctx.setEnvironment(env) val credentials = UserGroupInformation.getCurrentUser().getCredentials() val dob = new DataOutputBuffer() credentials.writeTokenStorageToStream(dob) ctx.setTokens(ByteBuffer.wrap(dob.getData())) val commands = prepareCommand() ctx.setCommands(commands.asJava) ctx.setApplicationACLs( YarnSparkHadoopUtil.getApplicationAclsForYarn(securityMgr).asJava) // If external shuffle service is enabled, register with the Yarn shuffle service already // started on the NodeManager and, if authentication is enabled, provide it with our secret // key for fetching shuffle files later if (sparkConf.get(SHUFFLE_SERVICE_ENABLED)) { val secretString = securityMgr.getSecretKey() val secretBytes = if (secretString != null) { // This conversion must match how the YarnShuffleService decodes our secret JavaUtils.stringToBytes(secretString) } else { // Authentication is not enabled, so just provide dummy metadata ByteBuffer.allocate(0) } 
ctx.setServiceData(Collections.singletonMap("spark_shuffle", secretBytes)) } // Send the start request to the ContainerManager try { nmClient.startContainer(container.get, ctx) } catch { case ex: Exception => throw new SparkException(s"Exception while starting container ${container.get.getId}" + s" on host $hostname", ex) } } private def prepareCommand(): List[String] = { // Extra options for the JVM val javaOpts = ListBuffer[String]() // Set the environment variable through a command prefix // to append to the existing value of the variable var prefixEnv: Option[String] = None // Set the JVM memory val executorMemoryString = executorMemory + "m" javaOpts += "-Xmx" + executorMemoryString // Set extra Java options for the executor, if defined sparkConf.get(EXECUTOR_JAVA_OPTIONS).foreach { opts => javaOpts ++= Utils.splitCommandString(opts).map(YarnSparkHadoopUtil.escapeForShell) } sparkConf.get(EXECUTOR_LIBRARY_PATH).foreach { p => prefixEnv = Some(Client.getClusterPath(sparkConf, Utils.libraryPathEnvPrefix(Seq(p)))) } javaOpts += "-Djava.io.tmpdir=" + new Path(Environment.PWD.$$(), YarnConfiguration.DEFAULT_CONTAINER_TEMP_DIR) // Certain configs need to be passed here because they are needed before the Executor // registers with the Scheduler and transfers the spark configs. Since the Executor backend // uses RPC to connect to the scheduler, the RPC settings are needed as well as the // authentication settings. sparkConf.getAll .filter { case (k, v) => SparkConf.isExecutorStartupConf(k) } .foreach { case (k, v) => javaOpts += YarnSparkHadoopUtil.escapeForShell(s"-D$k=$v") } // Commenting it out for now - so that people can refer to the properties if required. Remove // it once cpuset version is pushed out. 
// The context is, default gc for server class machines end up using all cores to do gc - hence // if there are multiple containers in same node, spark gc effects all other containers // performance (which can also be other spark containers) // Instead of using this, rely on cpusets by YARN to enforce spark behaves 'properly' in // multi-tenant environments. Not sure how default java gc behaves if it is limited to subset // of cores on a node. /* else { // If no java_opts specified, default to using -XX:+CMSIncrementalMode // It might be possible that other modes/config is being done in // spark.executor.extraJavaOptions, so we don't want to mess with it. // In our expts, using (default) throughput collector has severe perf ramifications in // multi-tenant machines // The options are based on // http://www.oracle.com/technetwork/java/gc-tuning-5-138395.html#0.0.0.%20When%20to%20Use // %20the%20Concurrent%20Low%20Pause%20Collector|outline javaOpts += "-XX:+UseConcMarkSweepGC" javaOpts += "-XX:+CMSIncrementalMode" javaOpts += "-XX:+CMSIncrementalPacing" javaOpts += "-XX:CMSIncrementalDutyCycleMin=0" javaOpts += "-XX:CMSIncrementalDutyCycle=10" } */ // For log4j configuration to reference javaOpts += ("-Dspark.yarn.app.container.log.dir=" + ApplicationConstants.LOG_DIR_EXPANSION_VAR) val userClassPath = Client.getUserClasspath(sparkConf).flatMap { uri => val absPath = if (new File(uri.getPath()).isAbsolute()) { Client.getClusterPath(sparkConf, uri.getPath()) } else { Client.buildPath(Environment.PWD.$(), uri.getPath()) } Seq("--user-class-path", "file:" + absPath) }.toSeq YarnSparkHadoopUtil.addOutOfMemoryErrorArgument(javaOpts) val commands = prefixEnv ++ Seq(Environment.JAVA_HOME.$$() + "/bin/java", "-server") ++ javaOpts ++ Seq("org.apache.spark.executor.CoarseGrainedExecutorBackend", "--driver-url", masterAddress, "--executor-id", executorId, "--hostname", hostname, "--cores", executorCores.toString, "--app-id", appId) ++ userClassPath ++ Seq( 
s"1>${ApplicationConstants.LOG_DIR_EXPANSION_VAR}/stdout", s"2>${ApplicationConstants.LOG_DIR_EXPANSION_VAR}/stderr") // TODO: it would be nicer to just make sure there are no null commands here commands.map(s => if (s == null) "null" else s).toList } private def prepareEnvironment(): HashMap[String, String] = { val env = new HashMap[String, String]() Client.populateClasspath(null, conf, sparkConf, env, sparkConf.get(EXECUTOR_CLASS_PATH)) // lookup appropriate http scheme for container log urls val yarnHttpPolicy = conf.get( YarnConfiguration.YARN_HTTP_POLICY_KEY, YarnConfiguration.YARN_HTTP_POLICY_DEFAULT ) val httpScheme = if (yarnHttpPolicy == "HTTPS_ONLY") "https://" else "http://" System.getenv().asScala.filterKeys(_.startsWith("SPARK")) .foreach { case (k, v) => env(k) = v } sparkConf.getExecutorEnv.foreach { case (key, value) => if (key == Environment.CLASSPATH.name()) { // If the key of env variable is CLASSPATH, we assume it is a path and append it. // This is kept for backward compatibility and consistency with hadoop YarnSparkHadoopUtil.addPathToEnvironment(env, key, value) } else { // For other env variables, simply overwrite the value. env(key) = value } } // Add log urls container.foreach { c => sys.env.get("SPARK_USER").foreach { user => val containerId = ConverterUtils.toString(c.getId) val address = c.getNodeHttpAddress val baseUrl = s"$httpScheme$address/node/containerlogs/$containerId/$user" env("SPARK_LOG_URL_STDERR") = s"$baseUrl/stderr?start=-4096" env("SPARK_LOG_URL_STDOUT") = s"$baseUrl/stdout?start=-4096" } } env } }
brad-kaiser/spark
resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala
Scala
apache-2.0
10,326
/**************************************************************************** * Copyright (c) 2011, Monnet Project * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Monnet Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE MONNET PROJECT BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
********************************************************************************/ package eu.monnetproject.kap.laif.editor import eu.monnetproject.kap.laif._ import eu.monnetproject.kap.laif.rules._ import eu.monnetproject.lang.Language import eu.monnetproject.l10n.LocalizationLexicon import java.util.logging.Logger import java.net.URI import java.io._ import javax.servlet.http._ import org.apache.commons.fileupload._ import org.apache.commons.fileupload.disk._ import org.apache.commons.fileupload.servlet._ import org.osgi.framework._ import scala.collection.JavaConversions._ import scala.xml._ import scala.xml.Utility._ import scalasemweb.rdf.model._ /** * * @author John McCrae */ object HTMLConversions { private val log = Logger.getLogger(this.getClass().getName()) implicit def pimpElem(elem : Elem) = new { def %%(attrs : (String,String)*) = { var elem2 = elem for(attr <- attrs) { elem2 = elem % Attribute(None,attr._1,Text(attr._2),Null) } elem2 } } def makeValue(values : List[LAIFValue]) :Seq[Elem] = { val htmlValues = for(value <- values) yield { (value match { case resource : LAIFVariable => <span class="laifvariable">{resource.id()}</span> case value : LAIFLiteral => <span class="laifliteral">{value.value()}</span> case userCall : LAIFUserCall => <span> <span class="laifusercall_function">{ userCall.functionFrag() }</span> <span class="laifusercall_args">{ makeValue(userCall.args().toList) }</span> </span> %% ("class" -> (if(userCall.args.isEmpty) { "laifusercall_hidden"} else { "laifusercall"})) case formCall : LAIFFormCall => <span class="laifformcall"> <span class="laifformcall_form">{makeValue(formCall.args().toList)}</span> { /*for((key,value) <- formCall.props) yield { <span class="laiffrom_call_props"> <span class="laifformcall_propkey">{nn2Str(key)}</span> <span class="laifformcall_propval">{nn2Str(value)}</span> </span> } */ <span>todo</span> } </span> case form : LAIFDescriptionCall => <span class="laifdescriptioncall">DESC: 
{makeValue(form.args().toList)}</span> case epCall : LAIFEntryPropCall => <span class="laifentrypropcall"> TODO </span> case arg : LAIFLowerFirst => <span class="laiflowerfirst">Lower First: {makeValue(arg.args().toList)}</span> case arg : LAIFUpperFirst => <span class="laifupperfirst">Upper First: {makeValue(arg.args().toList)}</span> case arg : LAIFAllLower => <span class="laifalllower">All Lower: {makeValue(arg.args().toList)}</span> case arg : LAIFAllUpper => <span class="laifallupper">All Upper: {makeValue(arg.args().toList)}</span> case _ => <span/> }) } if(htmlValues.isEmpty || (htmlValues.last \\ "@class" == "laifliteral" && htmlValues.head \\ "@class" == "laifliteral")) { htmlValues } else if(htmlValues.head \\ "@class" == "laifliteral") { htmlValues :+ <span class="laifliteral">&#160;</span> } else if(htmlValues.last \\ "@class" == "laifliteral") { <span class="laifliteral">&#160;</span> +: htmlValues } else { <span class="laifliteral">&#160;</span> +: htmlValues :+ <span class="laifliteral">&#160;</span> } } private def clean(nodes : Elem) : Elem = { nodes.copy(child = for(node <- nodes.child if (node \\ "@class").text != "laifliteral" || node.text != "") yield { node match { case elem : Elem => clean(elem) case other => other } } ) } def unmake(nodes : Node) : Seq[LAIFValue] = { unmake2(clean(trim(nodes).asInstanceOf[Elem])) } private def unmake2(nodes : Node) : Seq[LAIFValue] = { if(nodes.label == "head") { trim(nodes).child flatMap { node => unmake(node) } } else { for(node <- trim(nodes)) yield { (node \\ "@class").text match { case "laifvariable" => new LAIFVariable(node.child.head.text) case "laifliteral" => new LAIFLiteral(node.text) case "laifusercall" => new LAIFUserCall(URI.create(node.child.head.text),node.child(1).child flatMap { unmake(_)} toList) case "laifusercall_hidden" => new LAIFUserCall(URI.create(node.child.head.text),node.child(1).child flatMap { unmake(_)} toList) case "laifformcall" => new 
LAIFFormCall(URI.create(node.child.head.child(0).text), (for(arg <- node.child.tail) yield { arg match { case <span><span>{propkey}</span><span>{propval}</span></span> => { unmakeRes(propkey).uri -> unmakeRes(propval).uri } } }).toMap) case "laifdescriptioncall" => new LAIFDescriptionCall(unmake(node.child(1)).head) case "laifentrypropcall" => new LAIFEntryPropCall(URI.create(node.child(0).head.text), unmake(node.child(1).head).head.asInstanceOf[LAIFVariable], (for(matcher <- node.child(2).child) yield { matcher match { case <span><span>{value}</span><span>{generates}</span></span> => { value.uri -> new LAIFCase(unmake(generates).toList) } } }).toMap[URI,LAIFCase],null) case "laiflowerfirst" => new LAIFLowerFirst(unmake(node.child(0)).head) case "laifupperfirst" => new LAIFUpperFirst(unmake(node.child(0)).head) case "laifalllower" => new LAIFAllLower(unmake(node.child(0)).head) case "laifallupper" => new LAIFAllUpper(unmake(node.child(0)).head) } } } } private val httpRegex = """<?(http://[^>]*)>?""".r private val qnameRegex = """(.*):(.*)""".r private var nsMap = Map[String,NameSpace]("lemon" -> NameSpace("lemon","http://www.monnet-project.eu/lemon#"), "laif" -> NameSpace("laif","http://www.monnet-project.eu/laif#"), "lexinfo" -> NameSpace("lexinfo","http://www.lexinfo.net/ontology/2.0/lexinfo#")) private def nameSpace(ns : String) : NameSpace = nsMap.get(ns).getOrElse({ log.warning("failed to resolve ns " + ns) RDF.base }) def updateNameSpace(nn : Resource) = { nn match { case QName(ns,_) => { nsMap += (ns.id -> ns) } case _ => } } private implicit def unmakeRes(res : Node) : NamedNode = unmakeName(res.text) def unmakeName(string : String) = string match { case httpRegex(uriString) => uriString.uri case qnameRegex(prefix,suffix) => nameSpace(prefix) & suffix } implicit def nn2Str(nn : Resource) = nn match { case QName(ns,_) => nsMap += (ns.id -> ns) ; nn.toString() case _ => nn.toString() } }
monnetproject/kap
laif.editor/src/main/scala/eu/monnetproject/kap/laif/editor/HTMLConversions.scala
Scala
bsd-3-clause
8,912
// Copyright 2014,2015,2016,2017,2018,2019,2020 Commonwealth Bank of Australia // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package commbank.grimlock.scalding.examples import commbank.grimlock.framework.Cell import commbank.grimlock.framework.content.Content import commbank.grimlock.framework.encoding.{ DateCodec, StringCodec, Value } import commbank.grimlock.framework.environment.implicits._ import commbank.grimlock.framework.metadata.ContinuousSchema import commbank.grimlock.framework.position.{ Along, Position } import commbank.grimlock.framework.window.Window import commbank.grimlock.scalding.Persist import commbank.grimlock.scalding.environment.Context import commbank.grimlock.scalding.environment.implicits._ import com.twitter.scalding.{ Args, Job } import java.util.Date import shapeless.{ HList, HNil } import shapeless.nat.{ _0, _1, _2 } // Simple gradient feature generator case class Gradient[ P <: HList, S <: HList, R <: HList, Q <: HList ](implicit ev1: Value.Box[String], ev2: Position.IndexConstraints.Aux[P, _2, Value[Date]], ev3: Position.IndexConstraints.Aux[R, _0, Value[Date]], ev4: Position.AppendConstraints.Aux[S, Value[String], Q] ) extends Window[P, S, R, Q] { type I = (Long, Option[Double]) type T = (Long, Option[Double], Value[Date]) type O = (Option[Double], Value[Date], Value[Date]) val DayInMillis = 1000 * 60 * 60 * 24 // Prepare the sliding window, the state is the time and the value. 
def prepare(cell: Cell[P]): I = (cell.position(_2).value.getTime, cell.content.value.as[Double]) // Initialise state to the time, value and remainder coordinates. def initialise(rem: Position[R], in: I): (T, TraversableOnce[O]) = ((in._1, in._2, rem(_0)), List()) // For each new cell, output the difference with the previous cell (contained in `t`). def update(rem: Position[R], in: I, t: T): (T, TraversableOnce[O]) = { // Get current date from `in` and previous date from `t` and compute number of days between the dates. val days = (in._1 - t._1) / DayInMillis // Get the difference between current and previous values. val delta = in._2.flatMap(dc => t._2.map(dt => dc - dt)) // Generate the gradient (delta / days). val grad = delta.map(vd => vd / days) // Update state to be current `in` and `rem`, and output the gradient. ((in._1, in._2, rem(_0)), List((grad, rem(_0), t._3))) } // If a gradient is available, output a cell for it. def present(pos: Position[S], out: O): TraversableOnce[Cell[Q]] = out._1.map(grad => Cell(pos.append(out._3.toShortString + ".to." + out._2.toShortString), Content(ContinuousSchema[Double](), grad)) ) } class DerivedData(args: Args) extends Job(args) { // Define implicit context. implicit val ctx = Context() import ctx.encoder // Path to data files, output folder val path = args.getOrElse("path", "../../data") val output = "scalding" // Generate gradient features: // 1/ Read the data as 3D matrix (instance x feature x date). // 2/ Proceed with only the data (ignoring errors). // 3/ Compute gradients along the date axis. The result is a 3D matrix (instance x feature x gradient). // 4/ Melt third dimension (gradients) into second dimension. The result is a 2D matrix (instance x // feature.from.gradient) // 5/ Persist 2D gradient features. 
ctx .read( s"${path}/exampleDerived.txt", Persist.textLoader, Cell.shortStringParser(StringCodec :: StringCodec :: DateCodec() :: HNil, "|") ) .data .slide(Along(_2))(true, Gradient()) .contract(_2, _1, Value.concatenate[Value[String], Value[String]](".from.")) .saveAsText(ctx, s"./demo.${output}/gradient.out", Cell.toShortString(true, "|")) .toUnit }
CommBank/grimlock
grimlock-examples/src/main/scala/commbank/grimlock/scalding/DerivedData.scala
Scala
apache-2.0
4,262
package com.querydsl.scala import java.io.File import java.io.File.pathSeparator import com.google.common.base.Charsets import com.google.common.io.Files import scala.tools.nsc._ import scala.tools.nsc.reporters.ConsoleReporter object CompileTestUtils { private def jarPathOfClass(className: String) = { Class.forName(className).getProtectionDomain.getCodeSource.getLocation } private val currentLibraries = this.getClass.getClassLoader.asInstanceOf[java.net.URLClassLoader].getURLs.toList private val cp = jarPathOfClass("scala.tools.nsc.Interpreter") :: jarPathOfClass("scala.ScalaObject") :: currentLibraries private val env = new Settings() env.classpath.value = cp.mkString(pathSeparator) env.usejavacp.value = true //env.d.value = "target" env.stopAfter.value = List("refchecks") def assertCompileSuccess(file: File): Unit = { assertCompileSuccess(recursiveFileList(file)) } def assertCompileSuccess(files: Traversable[File]): Unit = { val reporter = new ConsoleReporter(env) val g = new Global(env, reporter) val run = new g.Run run.compile(files.map(_.getPath).toList) if (reporter.hasErrors) { throw new AssertionError("Compilation failed") } } def assertCompileSuccess(source: String): Unit = { val file = File.createTempFile("source", ".scala") try { Files.write(source, file, Charsets.UTF_8) assertCompileSuccess(file) } finally { file.delete() } } private def recursiveFileList(file: File): Array[File] = { if (file.isDirectory) { file.listFiles.flatMap(recursiveFileList) } else { Array(file) } } }
balazs-zsoldos/querydsl
querydsl-scala/src/test/scala/com/querydsl/scala/CompileTestUtils.scala
Scala
apache-2.0
1,656
// See LICENSE for license details.
package fringe

import chisel3.Module
import chisel3.iotesters.{ChiselFlatSpec, Driver, PeekPokeTester}

/**
 * PeekPokeTester that carries the application's command-line arguments and
 * offers colored PASS/FAIL console reporting helpers.
 */
abstract class ArgsTester[+T <: Module](c: T)(implicit args: Array[String]) extends PeekPokeTester(c) {
  def printFail(msg: String) = println(Console.BLACK + Console.RED_B + s"FAIL: $msg" + Console.RESET)
  def printPass(msg: String) = println(Console.BLACK + Console.GREEN_B + s"PASS: $msg" + Console.RESET)
}

/** Shared main-entry logic for Fringe generator applications. */
trait CommonMain {
  /**
   * 'args' variable that holds commandline arguments
   * TODO: Is using a var the best way to handle this?
   */
  implicit var args: Array[String] = _

  /** Arguments destined for Chisel vs. arguments destined for the tester. */
  case class SplitArgs(chiselArgs: Array[String], testArgs: Array[String])

  type DUTType <: Module

  /** Factory for the device under test. */
  def dut: () => DUTType

  /** Factory for the tester driving the DUT. */
  def tester: DUTType => ArgsTester[DUTType]

  /** Returns true iff `t` names a supported Fringe generation target. */
  def supportedTarget(t: String) = t match {
    case "aws"       => true
    case "aws-sim"   => true
    case "zynq"      => true
    case "zcu"       => true
    case "verilator" => true
    case "vcs"       => true
    case "xsim"      => true
    case "de1soc"    => true
    case "arria10"   => true
    case "asic"      => true
    case _           => false
  }

  /** The first command-line argument selects the target; defaults to "verilator". */
  def target = if (args.nonEmpty) args(0) else "verilator"

  /**
   * Splits the raw argument array at the "--testArgs" separator into
   * Chisel-facing and tester-facing argument lists. When no Chisel arguments
   * are present at all, "--help" is substituted so the driver prints usage.
   */
  def separateChiselArgs(args: Array[String]) = {
    // FIX: this constant was previously declared but unused — the literal
    // "--testArgs" was repeated inline twice below.
    val argSeparator = "--testArgs"
    val (chiselArgs, otherArgs) =
      if (args.contains(argSeparator)) {
        args.splitAt(args.indexOf(argSeparator))
      } else {
        (args, Array[String]())
      }
    val actualChiselArgs = if (chiselArgs.isEmpty) Array("--help") else chiselArgs
    val testArgs = otherArgs.drop(1)  // drop the "--testArgs" marker itself
    SplitArgs(actualChiselArgs, testArgs)
  }

  /**
   * Entry point: validates the target, then either runs an external simulator
   * command ("--test-command"), emits Verilog only ("--verilog"), or runs the
   * full Chisel driver with the tester.
   */
  def main(args: Array[String]): Unit = {
    val splitArgs = separateChiselArgs(args)
    this.args = splitArgs.testArgs
    Predef.assert(supportedTarget(target), s"ERROR: Unsupported Fringe target '$target'")

    if (splitArgs.chiselArgs.contains("--test-command")) {
      // Run the DUT against an externally supplied simulator invocation.
      val cmd = splitArgs.chiselArgs(splitArgs.chiselArgs.indexOf("--test-command") + 1)
      Driver.run(dut, cmd)(tester)
    } else if (splitArgs.chiselArgs.contains("--verilog")) {
      // Generate Verilog only, into a per-target output directory.
      chisel3.Driver.execute(Array[String]("--target-dir", s"verilog-${target}"), dut)
    } else {
      Driver.execute(splitArgs.chiselArgs, dut)(tester)
    }
  }
}
stanford-ppl/spatial-lang
spatial/core/resources/chiselgen/template-level/fringeHW/CommonMain.scala
Scala
mit
2,273
package net.pointsgame.actor

import akka.actor.{ActorRef, ActorSystem, Actor, Props}
import net.liftweb.common.Loggable

import scala.collection.mutable

/** Owns the root actor system and hands out one room actor per room name. */
object AkkaManager extends Loggable {
  private val system = ActorSystem("root")
  // Cache of already-created room actors, keyed by room name.
  private val roomList = mutable.HashMap[String, ActorRef]()
  //	private val roomSupervisor = system.actorOf(Props[Rooms], "rooms")

  /** Returns the actor for `name`, creating and caching it on first request. Thread-safe. */
  def getRoom(name: String) = synchronized {
    roomList.getOrElseUpdate(
      name,
      system.actorOf(Props(new Room(name) with Chatting with Gaming), name)
    )
    //roomSupervisor.path./(name).toString))
  }

  /** Terminates the whole actor system. */
  def shutdown(): Unit = system.terminate()
}

/** Currently only logs any message it receives as unknown. */
class Rooms extends Actor with Loggable {
  def receive = {
    case unexpected => logger.error(s"room actor received unknown message $unexpected")
  }
}
vn971/points-wip
modules/lift-server/src/main/scala/net/pointsgame/actor/AkkaManager.scala
Scala
agpl-3.0
763
package sample.context

import java.util.Locale
import java.util.ResourceBundle

import scala.beans.BeanProperty
import scala.collection.JavaConverters._

import org.springframework.boot.context.properties.ConfigurationProperties
import org.springframework.context.support.ResourceBundleMessageSource

/**
 * Provides simple, cached access to ResourceBundles.
 *
 * Use this component for i18n message properties consumed as whole catalogs
 * (e.g. serving label lists over an API). ResourceBundles are managed
 * separately from the standard MessageSource (which targets plain string
 * conversion), so they must be configured under [extension.messages]
 * rather than spring.message.
 */
@ConfigurationProperties(prefix = "extension.messages")
class ResourceBundleHandler {
  /** Encoding used when reading the property files. */
  @BeanProperty
  var encoding: String = "UTF-8"

  val factory: ResourceBundleFactory = new ResourceBundleFactory()
  // Cache of loaded bundles, keyed by "basename_languageTag".
  val bundleMap: scala.collection.mutable.Map[String, ResourceBundle] = scala.collection.mutable.Map()

  /** Bundle for the default locale. */
  def get(basename: String): ResourceBundle =
    get(basename, Locale.getDefault)

  /** Bundle for the given basename/locale, loaded and cached on first use. */
  def get(basename: String, locale: Locale): ResourceBundle =
    this.synchronized(
      bundleMap.getOrElseUpdate(keyname(basename, locale), factory.create(basename, locale, encoding))
    )

  private def keyname(basename: String, locale: Locale): String =
    s"${basename}_${locale.toLanguageTag()}"

  /**
   * Returns the label key/value Map of the given message source.
   * basename does not need to include the file extension (.properties).
   */
  def labels(basename: String): Map[String, String] =
    labels(basename, Locale.getDefault)

  def labels(basename: String, locale: Locale): Map[String, String] = {
    // BUG FIX: previously called get(basename), silently ignoring the
    // requested locale and always returning default-locale labels.
    val bundle = get(basename, locale)
    bundle.keySet().asScala.map(key => (key, bundle.getString(key))).toMap
  }
}

/**
 * Factory that obtains ResourceBundles via Spring's MessageSource machinery,
 * allowing the property-file encoding to be specified.
 */
class ResourceBundleFactory extends ResourceBundleMessageSource {
  def create(basename: String, locale: Locale, encoding: String): ResourceBundle = {
    this.setDefaultEncoding(encoding)
    Option(getResourceBundle(basename, locale)).getOrElse(
      throw new IllegalArgumentException("指定されたbasenameのリソースファイルは見つかりませんでした。[]"))
  }
}
jkazama/sample-boot-scala
src/main/scala/sample/context/ResourceBundleHandler.scala
Scala
mit
2,590
package com.thesamet.intellij

import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.editor.Document
import com.intellij.openapi.vfs.VirtualFile
import scalariform.formatter.ScalaFormatter
import scalariform.formatter.preferences.AlignSingleLineCaseStatements.MaxArrowIndent
import scalariform.formatter.preferences._

/** Runs the Scalariform formatter over an IntelliJ document when it contains Scala code. */
object ScalariformFormatter {

  /** Pairs a virtual file with its open editor document. */
  case class FileDocument(file: VirtualFile, document: Document) {
    // Only files whose registered file type is "Scala" are formatted.
    def isScala: Boolean = file.getFileType.getName == "Scala"
  }

  /**
   * Formats the document if present and Scala; the document text is replaced
   * only when formatting actually changed it, via an IDE write action.
   */
  def format(optFileDoc: Option[FileDocument]) = {
    // Lazily built so preferences are only fetched when a Scala file is present.
    lazy val pref = formattingPreferences
    optFileDoc.filter(_.isScala)
      .foreach {
        fileDoc =>
          val source = fileDoc.document.getText()
          val formatted = ScalaFormatter.format(source, formattingPreferences = pref)
          if (source != formatted) {
            // Document mutation must happen inside an IntelliJ write action.
            ApplicationManager.getApplication.runWriteAction(new Runnable {
              override def run(): Unit = {
                fileDoc.document.setText(formatted)
              }
            })
          }
      }
  }

  /** Builds Scalariform FormattingPreferences from the plugin's application-level settings component. */
  def formattingPreferences: FormattingPreferences = {
    val component: ScalariformApplicationComponent =
      ApplicationManager.getApplication.getComponent(classOf[ScalariformApplicationComponent])

    // Each preference mirrors one checkbox/field in the settings UI.
    FormattingPreferences.setPreference(RewriteArrowSymbols, component.isRewriteArrowSymbols)
      .setPreference(IndentSpaces, component.getIndentSpaces.toInt)
      .setPreference(SpaceBeforeColon, component.isSpaceBeforeColon)
      .setPreference(CompactStringConcatenation, component.isCompactStringConcatenation)
      .setPreference(PreserveSpaceBeforeArguments, component.isPreserveSpaceBeforeArguments)
      .setPreference(AlignParameters, component.isAlignParameters)
      .setPreference(AlignArguments, component.isAlignArguments)
      .setPreference(DoubleIndentClassDeclaration, component.isDoubleIndentClassDeclaration)
      .setPreference(FormatXml, component.isFormatXML)
      .setPreference(IndentPackageBlocks, component.isIndentPackageBlocks)
      .setPreference(AlignSingleLineCaseStatements, component.isAlignSingleLineCase)
      .setPreference(MaxArrowIndent, component.getAlignSingleLineCaseStatementsMaxArrowIndent.toInt)
      .setPreference(IndentLocalDefs, component.isIndentLocalDefs)
      .setPreference(SpaceInsideParentheses, component.isSpaceInsideParenthesis)
      .setPreference(SpaceInsideBrackets, component.isSpaceInsideBrackets)
      .setPreference(SpacesWithinPatternBinders, component.isSpacesWithinPatternBinders)
      .setPreference(MultilineScaladocCommentsStartOnFirstLine, component.isMultilineScalaDocCommentsStartOnFirstLine)
      .setPreference(IndentWithTabs, component.isIndentWithTabs)
      .setPreference(CompactControlReadability, component.isCompactControlReadability)
      .setPreference(PlaceScaladocAsterisksBeneathSecondAsterisk, component.isPlaceScalaDocAsteriskBeneathSecondAsterisk)
      .setPreference(SpacesAroundMultiImports, component.isSpacesAroundMultiImports)
  }
}
Mistyputt/scalariform-intellij-plugin
src/com/thesamet/intellij/ScalariformFormatter.scala
Scala
apache-2.0
3,037
package scredis.io

import com.typesafe.scalalogging.LazyLogging
import akka.actor._
import scredis.protocol.Request
import scredis.protocol.requests.ConnectionRequests.{ Auth, Select, Quit }
import scredis.protocol.requests.ServerRequests.{ ClientSetName, Shutdown }

import scala.concurrent.{ ExecutionContext, Future }
import scala.concurrent.duration._

import java.util.concurrent.{ CountDownLatch, TimeUnit }

/**
 * Base class for Akka-backed Redis connections. Tracks connection-level state
 * (password, selected database, client name) as volatile fields and exposes a
 * latch-based way to wait for the listener actor's termination.
 */
abstract class AbstractAkkaConnection(
  protected val system: ActorSystem,
  val host: String,
  val port: Int,
  @volatile protected var passwordOpt: Option[String],
  @volatile protected var database: Int,
  @volatile protected var nameOpt: Option[String],
  protected val decodersCount: Int,
  protected val receiveTimeoutOpt: Option[FiniteDuration],
  protected val connectTimeout: FiniteDuration,
  protected val maxWriteBatchSize: Int,
  protected val tcpSendBufferSizeHint: Int,
  protected val tcpReceiveBufferSizeHint: Int,
  protected val akkaListenerDispatcherPath: String,
  protected val akkaIODispatcherPath: String,
  protected val akkaDecoderDispatcherPath: String
) extends Connection with LazyLogging {

  // Counted down once the listener actor terminates (see WatchActor below).
  private val shutdownLatch = new CountDownLatch(1)

  @volatile protected var isShuttingDown = false

  override implicit val dispatcher = system.dispatcher

  // The actor that owns the underlying network connection; provided by subclasses.
  protected val listenerActor: ActorRef

  /**
   * Mirrors requests that change connection-level state into the local fields,
   * so a reconnect can restore password/database/name.
   */
  protected def updateState(request: Request[_]): Unit = request match {
    case Auth(password) => if (password.isEmpty) {
      passwordOpt = None
    } else {
      passwordOpt = Some(password)
    }
    case Select(database) => this.database = database
    case ClientSetName(name) => if (name.isEmpty) {
      nameOpt = None
    } else {
      nameOpt = Some(name)
    }
    case Quit() | Shutdown(_) => isShuttingDown = true
    case _ =>
  }

  protected def getPasswordOpt: Option[String] = passwordOpt
  protected def getDatabase: Int = database
  protected def getNameOpt: Option[String] = nameOpt

  // Spawns a WatchActor that trips the shutdown latch when the listener dies.
  protected def watchTermination(): Unit = system.actorOf(
    Props(
      classOf[WatchActor],
      listenerActor,
      shutdownLatch
    )
  )

  /**
   * Waits for all the internal actors to be shutdown.
   *
   * @note This method is usually called after issuing a QUIT command
   *
   * @param timeout amount of time to wait
   */
  def awaitTermination(timeout: Duration = Duration.Inf): Unit = {
    if (timeout.isFinite) {
      shutdownLatch.await(timeout.toMillis, TimeUnit.MILLISECONDS)
    } else {
      shutdownLatch.await()
    }
  }
}

/** Watches `actor`; on its termination, counts down the latch and stops itself. */
class WatchActor(actor: ActorRef, shutdownLatch: CountDownLatch) extends Actor {
  def receive: Receive = {
    case Terminated(_) => {
      shutdownLatch.countDown()
      context.stop(self)
    }
  }
  context.watch(actor)
}
Livestream/scredis
src/main/scala/scredis/io/AbstractAkkaConnection.scala
Scala
apache-2.0
2,764
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.expressions

import java.sql.{Date, Time, Timestamp}
import java.util.{Calendar, TimeZone}

import org.apache.calcite.avatica.util.TimeUnit
import org.apache.calcite.rex.RexNode
import org.apache.calcite.sql.SqlIntervalQualifier
import org.apache.calcite.sql.`type`.SqlTypeName
import org.apache.calcite.sql.parser.SqlParserPos
import org.apache.calcite.tools.RelBuilder
import org.apache.calcite.util.{DateString, TimeString, TimestampString}
import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, LocalTimeTypeInfo, SqlTimeTypeInfo, TypeInformation}
import org.apache.flink.table.calcite.FlinkTypeFactory
import org.apache.flink.table.typeutils.TimeIntervalTypeInfo

object Literal {
  private[flink] val UTC = TimeZone.getTimeZone("UTC")

  /**
   * Wraps a raw Scala/Java value in a typed [[Literal]], inferring the Flink
   * TypeInformation from the runtime type. Throws MatchError for unsupported types.
   */
  private[flink] def apply(l: Any): Literal = l match {
    case i: Int => Literal(i, BasicTypeInfo.INT_TYPE_INFO)
    case s: Short => Literal(s, BasicTypeInfo.SHORT_TYPE_INFO)
    case b: Byte => Literal(b, BasicTypeInfo.BYTE_TYPE_INFO)
    case l: Long => Literal(l, BasicTypeInfo.LONG_TYPE_INFO)
    case d: Double => Literal(d, BasicTypeInfo.DOUBLE_TYPE_INFO)
    case f: Float => Literal(f, BasicTypeInfo.FLOAT_TYPE_INFO)
    case str: String => Literal(str, BasicTypeInfo.STRING_TYPE_INFO)
    case bool: Boolean => Literal(bool, BasicTypeInfo.BOOLEAN_TYPE_INFO)
    case javaDec: java.math.BigDecimal => Literal(javaDec, BasicTypeInfo.BIG_DEC_TYPE_INFO)
    // Scala BigDecimal is normalized to its underlying java.math.BigDecimal.
    case scalaDec: scala.math.BigDecimal =>
      Literal(scalaDec.bigDecimal, BasicTypeInfo.BIG_DEC_TYPE_INFO)
    case sqlDate: Date => Literal(sqlDate, SqlTimeTypeInfo.DATE)
    case sqlTime: Time => Literal(sqlTime, SqlTimeTypeInfo.TIME)
    case sqlTimestamp: Timestamp => Literal(sqlTimestamp, SqlTimeTypeInfo.TIMESTAMP)
  }
}

/** A typed literal expression, convertible to a Calcite RexNode. */
case class Literal(value: Any, resultType: TypeInformation[_]) extends LeafExpression {

  // Table-API style string form, e.g. "2018-01-01.toDate" or "12.millis".
  override def toString: String = resultType match {
    case _: BasicTypeInfo[_] => value.toString
    case _@SqlTimeTypeInfo.DATE => value.toString + ".toDate"
    case _@SqlTimeTypeInfo.TIME => value.toString + ".toTime"
    case _@SqlTimeTypeInfo.TIMESTAMP => value.toString + ".toTimestamp"
    case _@TimeIntervalTypeInfo.INTERVAL_MILLIS => value.toString + ".millis"
    case _@TimeIntervalTypeInfo.INTERVAL_MONTHS => value.toString + ".months"
    case _ => s"Literal($value, $resultType)"
  }

  /** Builds the Calcite literal RexNode corresponding to this value and type. */
  override private[flink] def toRexNode(implicit relBuilder: RelBuilder): RexNode = {
    resultType match {
      case BasicTypeInfo.BIG_DEC_TYPE_INFO =>
        val bigDecValue = value.asInstanceOf[java.math.BigDecimal]
        val decType = relBuilder.getTypeFactory.createSqlType(SqlTypeName.DECIMAL)
        relBuilder.getRexBuilder.makeExactLiteral(bigDecValue, decType)

      // create BIGINT literals for long type
      case BasicTypeInfo.LONG_TYPE_INFO =>
        val bigint = java.math.BigDecimal.valueOf(value.asInstanceOf[Long])
        relBuilder.getRexBuilder.makeBigintLiteral(bigint)

      // date/time
      case SqlTimeTypeInfo.DATE =>
        val datestr = DateString.fromCalendarFields(valueAsCalendar)
        relBuilder.getRexBuilder.makeDateLiteral(datestr)
      case SqlTimeTypeInfo.TIME =>
        val timestr = TimeString.fromCalendarFields(valueAsCalendar)
        relBuilder.getRexBuilder.makeTimeLiteral(timestr, 0)
      case SqlTimeTypeInfo.TIMESTAMP =>
        val timestampstr = TimestampString.fromCalendarFields(valueAsCalendar)
        // precision 3: millisecond resolution
        relBuilder.getRexBuilder.makeTimestampLiteral(timestampstr, 3)

      case TimeIntervalTypeInfo.INTERVAL_MONTHS =>
        val interval = java.math.BigDecimal.valueOf(value.asInstanceOf[Int])
        val intervalQualifier = new SqlIntervalQualifier(
          TimeUnit.YEAR,
          TimeUnit.MONTH,
          SqlParserPos.ZERO)
        relBuilder.getRexBuilder.makeIntervalLiteral(interval, intervalQualifier)

      case TimeIntervalTypeInfo.INTERVAL_MILLIS =>
        val interval = java.math.BigDecimal.valueOf(value.asInstanceOf[Long])
        val intervalQualifier = new SqlIntervalQualifier(
          TimeUnit.DAY,
          TimeUnit.SECOND,
          SqlParserPos.ZERO)
        relBuilder.getRexBuilder.makeIntervalLiteral(interval, intervalQualifier)

      case _ => relBuilder.literal(value)
    }
  }

  /**
   * Convert a Date value to a Calendar. Calcite's fromCalendarField functions use the
   * Calendar.get methods, so the raw values of the individual fields are preserved when
   * converted to the String formats.
   *
   * @return get the Calendar value
   */
  private def valueAsCalendar: Calendar = {
    val date = value.asInstanceOf[java.util.Date]
    val cal = Calendar.getInstance
    cal.setTime(date)
    cal
  }
}

@deprecated(
  "Use nullOf(TypeInformation) instead. It is available through the implicit Scala DSL.",
  "1.8.0")
case class Null(resultType: TypeInformation[_]) extends LeafExpression {

  override def toString = s"null"

  // A NULL literal is represented as a cast of the Calcite null constant to the target type.
  override private[flink] def toRexNode(implicit relBuilder: RelBuilder): RexNode = {
    val rexBuilder = relBuilder.getRexBuilder
    val typeFactory = relBuilder.getTypeFactory.asInstanceOf[FlinkTypeFactory]
    rexBuilder
      .makeCast(
        typeFactory.createTypeFromTypeInfo(resultType, isNullable = true),
        rexBuilder.constantNull())
  }
}
hequn8128/flink
flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/expressions/literals.scala
Scala
apache-2.0
6,082
// Negative compiler test (path: tests/untried/neg/t1422.scala): this definition
// is expected to be rejected — a case-class parameter declared `private[this]`
// conflicts with the accessors the compiler must synthesize for case classes.
case class A(private[this] val foo:String) {
}
folone/dotty
tests/untried/neg/t1422.scala
Scala
bsd-3-clause
47
/***********************************************************************
 * Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License, Version 2.0
 * which accompanies this distribution and is available at
 * http://www.opensource.org/licenses/apache2.0.php.
 ***********************************************************************/

package org.locationtech.geomesa.index.index.attribute.legacy

import org.locationtech.geomesa.index.api.ShardStrategy.AttributeShardStrategy
import org.locationtech.geomesa.index.api._
import org.locationtech.geomesa.index.geotools.GeoMesaDataStore
import org.locationtech.geomesa.index.index.LegacyTableNaming
import org.locationtech.geomesa.index.index.attribute.legacy.AttributeIndexV6.AttributeIndexKeySpaceV6
import org.locationtech.geomesa.index.index.attribute.legacy.AttributeIndexV7.AttributeIndexKeySpaceV7
import org.locationtech.geomesa.index.index.attribute.{AttributeIndexKey, AttributeIndexKeySpace, AttributeIndexValues}
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
import org.locationtech.geomesa.utils.index.IndexMode.IndexMode
import org.opengis.feature.simple.SimpleFeatureType

/**
 * Attribute index with secondary z-curve indexing. Z-indexing is based on the sft and will be
 * one of Z3, XZ3, Z2, XZ2. Shards come after the attribute number, instead of before it.
 */
class AttributeIndexV6 protected (ds: GeoMesaDataStore[_],
                                  sft: SimpleFeatureType,
                                  version: Int,
                                  attribute: String,
                                  secondaries: Seq[String],
                                  mode: IndexMode)
    extends AttributeIndexV7(ds, sft, version, attribute, secondaries, mode)
        with LegacyTableNaming[AttributeIndexValues[Any], AttributeIndexKey] {

  def this(ds: GeoMesaDataStore[_], sft: SimpleFeatureType, attribute: String, secondaries: Seq[String], mode: IndexMode) =
    this(ds, sft, 6, attribute, secondaries, mode)

  override val keySpace: AttributeIndexKeySpace = {
    val sharding = AttributeShardStrategy(sft)
    if (sharding.shards.nonEmpty) {
      // if sharding, we need to swap the shard bytes with the idx bytes
      new AttributeIndexKeySpaceV6(sft, sft.getTableSharingBytes, sharding, attribute)
    } else {
      // otherwise we can skip the swap and use the parent class
      new AttributeIndexKeySpaceV7(sft, sft.getTableSharingBytes, sharding, attribute)
    }
  }

  override protected val fallbackTableNameKey: String = "tables.idx.attr.name"
}

object AttributeIndexV6 {

  /**
   * Map from (sharing, shard, idx0, idx1) to (sharing, idx0, idx1, shard)
   *
   * Rows in the attribute table have the following layout:
   *
   * - 1 byte identifying the sft (OPTIONAL - only if table is shared)
   * - 2 bytes storing the index of the attribute in the sft
   * - 1 byte shard (OPTIONAL)
   * - n bytes storing the lexicoded attribute value
   * - NULLBYTE as a separator
   * - n bytes storing the secondary z-index of the feature - identified by getSecondaryIndexKeyLength
   * - n bytes storing the feature ID
   */
  class AttributeIndexKeySpaceV6(sft: SimpleFeatureType,
                                 sharing: Array[Byte],
                                 sharding: ShardStrategy,
                                 attributeField: String)
      extends AttributeIndexKeySpaceV7(sft, sharing, sharding, attributeField) {

    // Byte position where the swapped (shard, idx0, idx1) triple begins.
    private val offset = sharing.length

    override def toIndexKey(writable: WritableFeature,
                            tier: Array[Byte],
                            id: Array[Byte],
                            lenient: Boolean): RowKeyValue[AttributeIndexKey] = {
      // Build the V7-layout key, then swap bytes in place to the legacy V6 layout.
      val key = super.toIndexKey(writable, tier, id, lenient)
      key match {
        case kv: SingleRowKeyValue[AttributeIndexKey] => swap(kv.row)
        case kv: MultiRowKeyValue[AttributeIndexKey] => kv.rows.foreach(swap)
      }
      key
    }

    override def getRangeBytes(ranges: Iterator[ScanRange[AttributeIndexKey]], tier: Boolean): Iterator[ByteRange] = {
      // Apply the same in-place swap to every range bound so scans match the V6 layout.
      super.getRangeBytes(ranges, tier).map {
        case BoundedByteRange(lower, upper) => BoundedByteRange(swap(lower), swap(upper))
        case SingleRowByteRange(row)        => SingleRowByteRange(swap(row))

        case LowerBoundedByteRange(lower, upper) => LowerBoundedByteRange(swap(lower), swap(upper))
        case UpperBoundedByteRange(lower, upper) => UpperBoundedByteRange(swap(lower), swap(upper))
        case UnboundedByteRange(lower, upper)    => UnboundedByteRange(swap(lower), swap(upper))

        case r => throw new NotImplementedError(s"Unexpected byte range: $r")
      }
    }

    // Rotates the byte at `offset` two positions right: (shard, idx0, idx1) -> (idx0, idx1, shard).
    // NOTE(review): only guards against empty arrays; looks like non-empty inputs are
    // assumed to have at least offset + 3 bytes — confirm against the V7 key layout.
    private def swap(bytes: Array[Byte]): Array[Byte] = {
      if (bytes.length > 0) {
        val shard = bytes(offset)
        bytes(offset) = bytes(offset + 1)
        bytes(offset + 1) = bytes(offset + 2)
        bytes(offset + 2) = shard
      }
      bytes
    }
  }
}
aheyne/geomesa
geomesa-index-api/src/main/scala/org/locationtech/geomesa/index/index/attribute/legacy/AttributeIndexV6.scala
Scala
apache-2.0
5,152
/*
 * Copyright (C) 2012 The Regents of The University California.
 * All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package shark

import scala.language.implicitConversions

import shark.execution.serialization.KryoSerializationWrapper
import shark.execution.serialization.OperatorSerializationWrapper

/**
 * Package-level implicit conversions that transparently unwrap the
 * serialization wrappers used to ship operators and values between nodes.
 */
package object execution {

  // XXXDesc in Hive is the subclass of Serializable
  type HiveDesc = java.io.Serializable

  /** Unwraps a serialized operator back to the operator itself. */
  implicit def opSerWrapper2op[T <: Operator[_ <: HiveDesc]](
      wrapper: OperatorSerializationWrapper[T]): T = wrapper.value

  /** Unwraps any Kryo-serialized value back to the underlying object. */
  implicit def kryoWrapper2object[T](wrapper: KryoSerializationWrapper[T]): T =
    wrapper.value
}
sameeragarwal/blinkdb
src/main/scala/shark/execution/package.scala
Scala
apache-2.0
1,189
package rotationsymmetry.sxgboost

import org.apache.spark.ml.param._

/**
 * Tunable parameters for the SparkXGBoost estimator, following the Spark ML
 * Params convention: each `final val` declares a parameter (with its
 * user-facing description) and `setDefault` supplies its default value.
 */
trait SparkXGBoostParams extends Params {

  final val alpha: DoubleParam = new DoubleParam(this, "alpha",
    "L1 regularization term on weights")
  setDefault(alpha -> 0.0)

  final val lambda: DoubleParam = new DoubleParam(this, "lambda",
    "L2 regularization term on weights")
  setDefault(lambda -> 0.0)

  final val eta: DoubleParam = new DoubleParam(this, "eta",
    "learning rate (aka step size) for gradient boosting")
  setDefault(eta -> 1.0)

  final val gamma: DoubleParam = new DoubleParam(this, "gamma",
    "minimum loss reduction required to make a further partition on a leaf node of the tree")
  setDefault(gamma -> 0.0)

  final val numTrees: IntParam = new IntParam(this, "numTrees",
    "number of trees to be grown in the boosting algorithm")
  setDefault(numTrees -> 1)

  final val maxDepth: IntParam = new IntParam(this, "maxDepth",
    "maximum depth of a tree. A tree with one root and two leaves is considered to have depth = 1")
  setDefault(maxDepth -> 5)

  final val minInstanceWeight: DoubleParam = new DoubleParam(this, "minInstanceWeight",
    "minimum weight (aka, number of data instance) required to make a further partition on a leaf node of the tree")
  setDefault(minInstanceWeight -> 1.0)

  final val sampleRatio: DoubleParam = new DoubleParam(this, "sampleRatio",
    "sample ratio of rows in bagging")
  setDefault(sampleRatio -> 1.0)

  final val featureSampleRatio: DoubleParam = new DoubleParam(this, "featureSampleRatio",
    "sample ratio of columns when constructing each tree")
  setDefault(featureSampleRatio -> 1.0)

  // Controls memory/communication trade-off during tree growth.
  final val maxConcurrentNodes: IntParam = new IntParam(this, "maxConcurrentNodes",
    "maximal number of nodes to be process in one pass of the training data")
  setDefault(maxConcurrentNodes -> 50)

  final val maxBins: IntParam = new IntParam(this, "maxBins",
    "maximal number of bins for continuous variables")
  setDefault(maxBins -> 32)

  final val seed: LongParam = new LongParam(this, "seed",
    "random seed")
  setDefault(seed, 1L)
}
rotationsymmetry/sparkxgboost
src/main/scala/rotationsymmetry/sxgboost/SparkXGBoostParams.scala
Scala
apache-2.0
2,095
package com.nulabinc.backlog.r2b.redmine.service

import javax.inject.Inject

import com.nulabinc.backlog.migration.common.utils.Logging
import com.taskadapter.redmineapi.RedmineManager
import com.taskadapter.redmineapi.bean.IssuePriority

import scala.jdk.CollectionConverters._

/**
 * Fetches issue priorities from a Redmine server.
 *
 * @author uchida
 */
class PriorityServiceImpl @Inject() (redmine: RedmineManager) extends PriorityService with Logging {

  /**
   * Returns all issue priorities defined on the server; on failure the error
   * is logged and an empty sequence is returned (best-effort, not rethrown).
   */
  override def allPriorities(): Seq[IssuePriority] = {
    import scala.util.control.NonFatal
    try {
      redmine.getIssueManager.getIssuePriorities.asScala.toSeq
    } catch {
      // FIX: previously caught Throwable, which would also swallow fatal VM
      // errors (OutOfMemoryError, etc.). NonFatal lets those propagate.
      case NonFatal(e) =>
        logger.warn(e.getMessage, e)
        Seq.empty[IssuePriority]
    }
  }
}
nulab/BacklogMigration-Redmine
redmine/src/main/scala/com/nulabinc/backlog/r2b/redmine/service/PriorityServiceImpl.scala
Scala
mit
671
package hercules.test.utils

import akka.actor.ActorRef
import akka.actor.Props
import akka.actor.Actor
import akka.actor.ActorLogging
import akka.contrib.pattern.ClusterReceptionistExtension

/** Companion: message envelope and Props factory for the fake master. */
object FakeMaster {

  /** Marks a message as having been routed through the (fake) master. */
  case class MasterWrapped(msg: Any)

  def props(testProbe: ActorRef): Props = Props(new FakeMaster(testProbe))
}

/**
 * Test stand-in for the master actor: it registers itself with the cluster
 * receptionist (as the real master would) and forwards every message it
 * receives, wrapped in MasterWrapped, to the supplied test probe.
 */
class FakeMaster(testProbe: ActorRef) extends Actor with ActorLogging {

  // The master will register it self to the cluster receptionist.
  ClusterReceptionistExtension(context.system).registerService(self)

  def receive() = {
    case message =>
      testProbe.tell(FakeMaster.MasterWrapped(message), sender)
  }
}
johandahlberg/hercules
src/test/scala/hercules/test/utils/FakeMaster.scala
Scala
mit
646
/**
 * Copyright 2016, deepsense.io
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.deepsense.deeplang.doperations

import java.io.{File, IOException}

import scala.reflect.runtime.{universe => ru}

import io.deepsense.commons.utils.Version
import io.deepsense.commons.utils.FileOperations.deleteRecursivelyIfExists
import io.deepsense.deeplang.DOperation.Id
import io.deepsense.deeplang.documentation.OperationDocumentation
import io.deepsense.deeplang.doperables.Transformer
import io.deepsense.deeplang.doperations.exceptions.DeepSenseIOException
import io.deepsense.deeplang.params.{BooleanParam, Params, StringParam}
import io.deepsense.deeplang.{DOperation1To0, ExecutionContext}

import java.net.URI
import org.apache.hadoop.fs.{FileSystem, Path}

/**
 * Operation that persists a Transformer to a directory, either on the local
 * filesystem or on HDFS (detected from the "hdfs://" path prefix).
 */
case class WriteTransformer()
  extends DOperation1To0[Transformer]
  with Params
  with OperationDocumentation {

  override val id: Id = "58368deb-68d0-4657-ae3f-145160cb1e2b"
  override val name: String = "Write Transformer"
  override val description: String = "Writes a Transformer to a directory"

  override val since: Version = Version(1, 1, 0)

  // When true (the default), an existing transformer at the same path is removed first.
  val shouldOverwrite = BooleanParam(
    name = "overwrite",
    description = Some("Should an existing transformer with the same name be overwritten?")
  )
  setDefault(shouldOverwrite, true)

  def getShouldOverwrite: Boolean = $(shouldOverwrite)
  def setShouldOverwrite(value: Boolean): this.type = set(shouldOverwrite, value)

  val outputPath = StringParam(
    name = "output path",
    description = Some("The output path for writing the Transformer."))

  def getOutputPath: String = $(outputPath)
  def setOutputPath(value: String): this.type = set(outputPath, value)

  val params: Array[io.deepsense.deeplang.params.Param[_]] = Array(outputPath, shouldOverwrite)

  /** Saves the transformer, optionally clearing the target directory first; IO failures are wrapped. */
  override protected def execute(transformer: Transformer)(context: ExecutionContext): Unit = {
    val outputDictPath = getOutputPath
    try {
      if (getShouldOverwrite) {
        removeDirectory(context, outputDictPath)
      }
      transformer.save(context, outputDictPath)
    } catch {
      case e: IOException =>
        logger.error(s"WriteTransformer error. Could not write transformer to the directory", e)
        throw DeepSenseIOException(e)
    }
  }

  // Deletes `path` recursively, dispatching on whether it lives on HDFS or locally.
  private def removeDirectory(context: ExecutionContext, path: String): Unit = {
    if (path.startsWith("hdfs://")) {
      val configuration = context.sparkContext.hadoopConfiguration
      val hdfs = FileSystem.get(new URI(extractHdfsAddress(path)), configuration)
      hdfs.delete(new Path(path), true)
    } else {
      deleteRecursivelyIfExists(new File(path))
    }
  }

  // Extracts the "hdfs://host" prefix from a full HDFS path via regex capture groups.
  private def extractHdfsAddress(path: String): String = {
    // first group: "hdfs://ip.addr.of.hdfs", second group: "/some/path/on/hdfs"
    val regex = "(hdfs:\\\\/\\\\/[^\\\\/]*)(.*)".r
    val regex(hdfsAddress, _) = path
    hdfsAddress
  }

  @transient
  override lazy val tTagTI_0: ru.TypeTag[Transformer] = ru.typeTag[Transformer]
}

object WriteTransformer {
  def apply(outputPath: String): WriteTransformer = {
    new WriteTransformer().setOutputPath(outputPath)
  }
}
deepsense-io/seahorse-workflow-executor
deeplang/src/main/scala/io/deepsense/deeplang/doperations/WriteTransformer.scala
Scala
apache-2.0
3,640
// Run test for Dotty's TupledFunction: composing functions of many arities
// via a generic `andThen` extension (uses the old Scala 3 extension-method
// syntax `def [T...](x: T) name ...`).
object Test {
  def main(args: Array[String]): Unit = {
    // 2-arg -> 3-tuple composed with a 3-arg function.
    val f1 = (x1: Int, x2: Int) => (x1, x2, x1 + x2)
    val g1 = (x1: Int, x2: Int, x3: Int) => x1 + x2 + x3
    val h1 = f1.andThen(g1)
    println(h1(1, 2))

    // 2-arg -> 5-tuple composed with a 5-arg function.
    val f2 = (x1: Int, x2: Int) => (1, x1, x2, x1 + x2, x1 * x2)
    val g2 = (x1: Int, x2: Int, x3: Int, x4: Int, x5: Int) => (x1 + x2, x3 + x4 + x5)
    val h2 = f2.andThen(g2)
    println(h2(1, 2))

    // Composition of compositions.
    val h3 = h2.andThen(h1)
    println(h3(1, 2))

    // 25-arity case: exercises the maximum tuple-function width.
    val f25 = (x0: Int, x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int, x23: Int, x24: Int) => (2 * x0, 2 * x1, 2 * x2, 2 * x3, 2 * x4, 2 * x5, 2 * x6, 2 * x7, 2 * x8, 2 * x9, 2 * x10, 2 * x11, 2 * x12, 2 * x13, 2 * x14, 2 * x15, 2 * x16, 2 * x17, 2 * x18, 2 * x19, 2 * x20, 2 * x21, 2 * x22, 2 * x23, 2 * x24)
    val g25 = (x0: Int, x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int, x23: Int, x24: Int) => (3 * x0, 3 * x1, 3 * x2, 3 * x3, 3 * x4, 3 * x5, 3 * x6, 3 * x7, 3 * x8, 3 * x9, 3 * x10, 3 * x11, 3 * x12, 3 * x13, 3 * x14, 3 * x15, 3 * x16, 3 * x17, 3 * x18, 3 * x19, 3 * x20, 3 * x21, 3 * x22, 3 * x23, 3 * x24)
    val h25 = f25.andThen(g25)
    println(h25(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25))
  }

  /** Composes two instances of TupledFunctions in a new TupledFunctions, with this function applied first
   *
   *  @tparam F a function type
   *  @tparam G a function type
   *  @tparam FArgs the tuple type with the same types as the function arguments of F
   *  @tparam GArgs the tuple type with the same types as the function arguments of G and return type of F
   *  @tparam R the return type of G
   */
  def [F, G, FArgs <: Tuple, GArgs <: Tuple, R](f: F) andThen (g: G)(using tf: TupledFunction[F, FArgs => GArgs], tg: TupledFunction[G, GArgs => R]): FArgs => R = {
    x => tg.tupled(g)(tf.tupled(f)(x))
  }
}
som-snytt/dotty
tests/run/tupled-function-andThen.scala
Scala
apache-2.0
2,221
package example

import akka.actor.{ActorSystem, Props}
import example.StudentMessages.Study

/**
 * Demo entry point: wires a teacher and a student actor together and sends
 * the student a single Study message before shutting the system down.
 */
object SchoolApplication extends App {
  implicit val system = ActorSystem("SchoolApplication")

  val teacher = system.actorOf(Props[TeacherActor], "teacher")
  val student = system.actorOf(Props(new StudentActor2(teacher)), "student")

  student ! Study

  // Give the actors a moment to process the message before terminating.
  Thread.sleep(1000)
  system.terminate()
}
jvorhauer/akka-workshop
src/main/scala/example/SchoolApplication.scala
Scala
apache-2.0
394
package com.wuyuntao.aeneas.dsl import com.wuyuntao.aeneas.util.Text abstract class Query extends Table { def name = "{}_queries".format(Text.underscore(getClass.getSimpleName)) }
wuyuntao/Aeneas
aeneas-dsl/src/main/scala/com/wuyuntao/aeneas/dsl/Query.scala
Scala
apache-2.0
189
package net.tridb.core case class Athlete(uid : String, name : String)
tridb/core
src/main/scala/tridb/core/Athlete.scala
Scala
mit
72
package org.jetbrains.plugins.scala package lang package psi package stubs import com.intellij.psi.stubs.StubElement trait ScPackagingStub extends StubElement[api.toplevel.ScPackaging] { def packageName: String def parentPackageName: String def isExplicit: Boolean }
JetBrains/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/stubs/ScPackagingStub.scala
Scala
apache-2.0
277
trait GenTraversableLike[+A, +Repr] extends Any object O { (null: Any) match { case _: LongTraversableLike[_] => } } trait LongTraversable extends LongTraversableLike[LongTraversable] trait LongTraversableLike[+Repr <: LongTraversableLike[Repr]] extends GenTraversableLike[Any, Repr] /* % scalac-hash v2.11.0-M8 test/files/pos/t1786-cycle.scala [warn] v2.11.0-M8 failed, using closest available test/files/pos/t1786-cycle.scala:11: error: illegal cyclic reference involving trait LongTraversableLike trait LongTraversableLike[+Repr <: LongTraversableLike[Repr]] extends GenTraversableLike[Any, Repr] ^ one error found Okay again after SI-1786 was reverted. |-- object O BYVALmode-EXPRmode (site: package <empty>) | |-- super EXPRmode-POLYmode-QUALmode (silent: <init> in O) | | |-- this EXPRmode (silent: <init> in O) | | | \\-> O.type | | \\-> O.type | |-- (null: Any) match { case (_: LongTraversableLike[(_ @ <em... BYVALmode-EXPRmode (site: value <local O> in O) | | |-- (null: Any) BYVALmode-EXPRmode (site: value <local O> in O) | | | |-- Any TYPEmode (site: value <local O> in O) | | | | \\-> Any | | | |-- null : pt=Any EXPRmode (site: value <local O> in O) | | | | \\-> Null(null) | | | \\-> Any | | |-- (_: LongTraversableLike[(_ @ <empty>)]) : pt=Any PATTERNmode (site: value <local O> in O) enrichment only | | | |-- LongTraversableLike[(_ @ <empty>)] TYPEPATmode-TYPEmode (site: value <local O> in O) enrichment only | | | | |-- <: LongTraversableLike[Repr] TYPEmode (site: type Repr in <empty>) | | | | | |-- LongTraversableLike[Repr] TYPEmode (site: type Repr in <empty>) | | | | | | |-- Repr NOmode (site: type Repr in <empty>) | | | | | | | \\-> Repr | | | | | | \\-> LongTraversableLike[Repr] | | | | | [adapt] <: LongTraversableLike[Repr] is now a TypeTree( <: LongTraversableLike[Repr]) | | | | | \\-> <: LongTraversableLike[Repr] | | | | |-- (_ @ <empty>) TYPEPATmode-TYPEmode (site: value <local O> in O) enrichment only | | | | | \\-> _ | | | | |-- GenTraversableLike 
FUNmode-TYPEmode (site: trait LongTraversableLike) | | | | | \\-> GenTraversableLike | | | | |-- GenTraversableLike[Any, Repr] TYPEmode (site: trait LongTraversableLike) | | | | | |-- Any TYPEmode (site: trait LongTraversableLike) | | | | | | \\-> Any | | | | | |-- Repr TYPEmode (site: trait LongTraversableLike) | | | | | | \\-> Repr | | | | | caught scala.reflect.internal.Symbols$CyclicReference: illegal cyclic reference involving trait LongTraversableLike: while typing GenTraversableLike[Any, Repr] test/files/pos/t1786-cycle.scala:11: error: illegal cyclic reference involving trait LongTraversableLike trait LongTraversableLike[+Repr <: LongTraversableLike[Repr]] extends GenT */
felixmulder/scala
test/files/pos/t1786-cycle.scala
Scala
bsd-3-clause
3,115
package org.ai4fm.proofprocess.ui.internal import java.net.{MalformedURLException, URL} import org.eclipse.jface.resource.ImageDescriptor /** ProofProcess image definitions. * * When images are used in label providers (e.g. where Image) is required, they must be disposed manually. * For convenience, [[org.eclipse.jface.resource.ResourceManager]] could be used. * * @author Andrius Velykis */ object PProcessImages { private lazy val ICON_BASE_URL = PProcessUIPlugin.plugin.getBundle.getEntry("icons/") val MISSING_ICON = ImageDescriptor.getMissingImageDescriptor lazy val GOAL_IN = create("goal_in.gif") lazy val GOAL_OUT = create("goal_out.gif") lazy val SUCCESS = create("success.gif") lazy val FILTER = create("filter.gif") private def create(iconPath: String) = { try { val url = new URL(ICON_BASE_URL, iconPath) ImageDescriptor.createFromURL(url) } catch { case _: MalformedURLException => MISSING_ICON } } }
andriusvelykis/proofprocess
org.ai4fm.proofprocess.ui/src/org/ai4fm/proofprocess/ui/internal/PProcessImages.scala
Scala
epl-1.0
987
package lore.compiler.constraints import lore.compiler.constraints.ReturnConstraints.{DeadCode, DefinitelyReturns, ImpossibleReturn, IsReturnAllowed} import lore.compiler.feedback.{Feedback, Reporter} import lore.compiler.syntax.visitor.{CombiningTopLevelExprVisitor, TopLevelExprVisitor, VerificationTopLevelExprVisitor} import lore.compiler.syntax.{ExprNode, TopLevelExprNode} object ReturnConstraints { type DefinitelyReturns = Boolean type IsReturnAllowed = Boolean case class DeadCode(node: TopLevelExprNode) extends Feedback.Error(node) { override def message = s"This node represents dead code after a previous return." } case class ImpossibleReturn(node: TopLevelExprNode.ReturnNode) extends Feedback.Error(node) { override def message = s"You cannot return inside this expression." } /** * Verifies the following two constraints: * * - Any return must not be followed by code in the same block. This effectively disallows dead code after a return * top-level expression. * - Constructions such as `if ({ return 0 }) a else b` are not allowed. Returning should only be possible from * top-level expressions. */ def verify(body: TopLevelExprNode)(implicit reporter: Reporter): Unit = { verifyNoDeadCode(body) verifyReturnsAllowed(body) } private def verifyNoDeadCode(body: TopLevelExprNode)(implicit reporter: Reporter): Unit = { TopLevelExprVisitor.visit(new ReturnDeadCodeVisitor())(body) } private def verifyReturnsAllowed(body: TopLevelExprNode)(implicit reporter: Reporter): Unit = { new ReturnAllowedApplicator().visit(body, true) } } /** * Ensures that expressions cannot follow a `return` expression. Reports errors for any violating expressions. 
*/ private class ReturnDeadCodeVisitor(implicit reporter: Reporter) extends CombiningTopLevelExprVisitor.Identity[DefinitelyReturns] { override def combine(returns: Vector[DefinitelyReturns]): DefinitelyReturns = { if (returns.isEmpty) false else returns.forall(identity) } override def visit(node: TopLevelExprNode, returns: Vector[DefinitelyReturns]): DefinitelyReturns = node match { case TopLevelExprNode.ReturnNode(_, _) => true case ExprNode.BlockNode(expressions, _) => // Check that a return statement isn't followed by any other code. If we have a "definitely returns" at any // point before the last element, this is such a point. if (returns.isEmpty) false else { val returnIndex = returns.init.indexOf(true) if (returnIndex >= 0) { val firstDeadNode = expressions(returnIndex + 1) reporter.error(DeadCode(firstDeadNode)) // If we report `true` here, the DeadCode error will potentially be reported multiple times. Hence, even // though there was a `return` expression, we don't want this to be reported up the chain. false } else returns.last } case ExprNode.IfElseNode(_, _, _, _) => returns.tail.forall(identity) // `tail` ignores the condition. case ExprNode.WhileNode(_, _, _) => returns.last // `last` ignores the condition. case ExprNode.ForNode(_, _, _) => returns.last // `last` ignores the condition. case _ => super.visit(node, returns) } } /** * Ensures that only top-level expressions contain a return. Reports errors for any violating expressions. 
*/ private class ReturnAllowedApplicator(implicit reporter: Reporter) extends TopLevelExprVisitor.Applicator[Unit, IsReturnAllowed](new VerificationTopLevelExprVisitor { }) { override def handleMatch(node: TopLevelExprNode, isReturnAllowed: IsReturnAllowed): Unit = node match { case TopLevelExprNode.VariableDeclarationNode(_, _, _, value, _) => visit(value, isReturnAllowed) case TopLevelExprNode.AssignmentNode(address, value, _) => visit(address, false) visit(value, isReturnAllowed) case node@TopLevelExprNode.ReturnNode(expr, _) => visit(expr, false) if (!isReturnAllowed) { reporter.error(ImpossibleReturn(node)) } case ExprNode.BlockNode(expressions, _) => expressions.foreach(statement => visit(statement, isReturnAllowed)) case ExprNode.IfElseNode(condition, onTrue, onFalse, _) => visit(condition, false) visit(onTrue, isReturnAllowed) onFalse.foreach(visit(_, isReturnAllowed)) case ExprNode.CondNode(cases, _) => cases.foreach { condCase => visit(condCase.condition, false) visit(condCase.body, isReturnAllowed) } case ExprNode.WhileNode(condition, body, _) => visit(condition, false) visit(body, isReturnAllowed) case ExprNode.ForNode(extractors, body, _) => extractors.foreach { case ExprNode.ExtractorNode(_, collection, _) => visit(collection, false) } visit(body, isReturnAllowed) case _ => super.handleMatch(node, false) } }
marcopennekamp/lore
compiler/src/lore/compiler/constraints/ReturnConstraints.scala
Scala
mit
4,911
package jkm.cineclub.raft import jkm.cineclub.raft.PersistentState._ import com.typesafe.config.{ConfigFactory, Config, ConfigValue} import scala.collection.JavaConversions._ /** * Created with IntelliJ IDEA. * User: cineclub * Date: 12/26/13 * Time: 9:04 PM * To change this template use File | Settings | File Templates. */ class RaftConfig { import RaftConfig._ var id:RaftMemberId=null var serviceAddress:TcpAddress=null var persistentStateDBInfo:DBInfo=null var logEntryDBInfo:DBInfo=null var membership:RaftMembership=null var addressTable:Map[RaftMemberId,TcpAddress]=null var electionTimeout:Int = -1 } object RaftConfig { case class TcpAddress(hostname:String,port:Int) case class DBInfo(dbName:String,dbRootPath:String) { override def toString= "("+"dbName="+dbName+","+"dbRootPath="+dbRootPath+")" } def convertToTcpAddress(a:ConfigValue): TcpAddress= { val address=a.unwrapped().asInstanceOf[java.util.ArrayList[Object]].toList val hostname=address(0).asInstanceOf[String] val port = address(1).asInstanceOf[Int] TcpAddress(hostname,port) } implicit def convertConfigToTcpAddress(a:Config): TcpAddress= { val hostname=a.getString("hostname") val port = a.getInt("port") TcpAddress(hostname,port) } implicit def convertConfigToDBInfo(a:Config):DBInfo = { val dbName=a.getString("dbName") var dbRootPath=a.getString("rootPath") if (dbRootPath.isEmpty) dbRootPath=null DBInfo(dbName,dbRootPath) } def getRaftMembership(a:Config):RaftMembership={ RaftMembership( RaftMembership.getConfigType(a.getString("configType")), a.getStringList("newMembers").toList , a.getStringList("oldMembers").toList ) } def readConfig(configName:String,prefix:String):RaftConfig ={ val conf=ConfigFactory.load(configName) if (conf==null) return null val raftConfig=new RaftConfig def addPrefix(path:String) =prefix+"."+ path raftConfig.id=conf.getString(addPrefix("id")) raftConfig.serviceAddress=conf.getConfig(addPrefix("serviceAddress")) 
raftConfig.persistentStateDBInfo=conf.getConfig(addPrefix("persistentStateDB")) raftConfig.logEntryDBInfo=conf.getConfig(addPrefix("logEntryDB")) raftConfig.membership=getRaftMembership(conf.getConfig(addPrefix("init.membership"))) raftConfig.addressTable=conf.getConfig(addPrefix("init.addressTable")).entrySet().toList.map( x=> (x.getKey,convertToTcpAddress(x.getValue))).toMap raftConfig.electionTimeout=conf.getInt( addPrefix("init.electionTimeout")) raftConfig } def printRaftConfig(raftConfig:RaftConfig) ={ println("id="+raftConfig.id) println("serverAddress="+raftConfig.serviceAddress) println("persistentStateDBInfo="+raftConfig.persistentStateDBInfo) println("logEntryDBInfo="+raftConfig.logEntryDBInfo) println("members="+raftConfig.membership) println("addressTable="+raftConfig.addressTable) println("electionTimeout="+raftConfig.electionTimeout) } }
stepist/scalaraft
src/main/scala/jkm/cineclub/raft/RaftConfig.scala
Scala
apache-2.0
3,008
package slick.migration.dialect import java.net.{URL, URLClassLoader} import java.sql.Driver import com.typesafe.config.Config import scala.collection.mutable import scala.slick.driver.{JdbcProfile => SlickDriver} import scala.slick.jdbc.meta._ import scala.slick.jdbc.{StaticQuery => Q} class DbInit(confName: String, val driver: SlickDriver) { private lazy val database = driver.backend.Database private lazy val config: Config = TestConfig.testConfig(confName) private val jdbcDriver = confString("driver") private val create = confStrings("create") private val postCreate = confStrings("postCreate") private val drop = confStrings("drop") override def toString = confString("testConn.url") def confOptionalString(path: String) = if (config.hasPath(path)) Some(config.getString(path)) else None def confString(path: String) = confOptionalString(path).getOrElse(null) def confStrings(path: String) = TestConfig.getStrings(config, path).getOrElse(Nil) def databaseFor(path: String): driver.Backend#Database = database.forConfig(path, config, loadCustomDriver().getOrElse(null)) def conn = databaseFor("testConn") def cleanUpBefore() = databaseFor("adminConn") withSession { implicit session => if (drop.nonEmpty || create.nonEmpty) { println(s"[Creating test database $this]") for (s <- drop) (Q.u + s).execute for (s <- create) (Q.u + s).execute } if (postCreate.nonEmpty) { conn withSession { implicit session => for (s <- postCreate) (Q.u + s).execute } } } def cleanUpAfter() = databaseFor("adminConn") withSession { implicit session => if (drop.nonEmpty) { println(s"[Dropping test database $this]") for (s <- drop) (Q.u + s).execute } } def getTables = conn withSession { implicit s => MTable.getTables(None, None, None, Some(Seq("TABLE"))).list } def getPrimaryKeys(table: String): Seq[MPrimaryKey] = { val t = findTable(table) conn withSession { implicit s => t.map { mt => MPrimaryKey.getPrimaryKeys(mt.name).list } getOrElse Nil } } def getForeignKeys(table: String): Seq[MForeignKey] = { 
val t = findTable(table) conn withSession { implicit s => t.map { mt => MForeignKey.getImportedKeys(mt.name).list } getOrElse Nil } } def getIndexes(table: String, unique: Boolean = false): Seq[MIndexInfo] = { val t = findTable(table) conn withSession { implicit s => t.map { mt => MIndexInfo.getIndexInfo(mt.name, unique).list } getOrElse Nil } } def getColumns(table: String, column: String = "%"): Seq[MColumn] = { val t = findTable(table) conn withSession { implicit s => t.map { mt => MColumn.getColumns(mt.name, column).list } getOrElse Nil } } def findTable(table: String) = getTables.find(_.name.name == table) def quoteIdentifier(id: String): String = { val s = new StringBuilder(id.length + 4) append '"' for(c <- id) if(c == '"') s append "\\"\\"" else s append c (s append '"').toString } private def loadCustomDriver() = confOptionalString("driverJar").map { jar => DbInit.getCustomDriver(jar, jdbcDriver) } } object DbInit { // A cache for custom drivers to avoid excessive reloading and memory leaks private[this] val driverCache = new mutable.HashMap[(String, String), Driver]() def getCustomDriver(url: String, driverClass: String): Driver = synchronized { driverCache.getOrElseUpdate((url, driverClass), new URLClassLoader(Array(new URL(url)), getClass.getClassLoader).loadClass(driverClass).newInstance.asInstanceOf[Driver] ) } }
itryapitsin/slick-migration
drivers/sybase/src/test/scala/slick/migration/dialect/DbInit.scala
Scala
apache-2.0
3,680
import io.SdfIO import parsers.ResourceUrlParser import services.ResourceCopyService import scala.concurrent.Await import scala.concurrent.duration.Duration object SimpleFileDownloaderClient extends App { override def main(args: Array[String]): Unit = { val stdIO = new SdfIO val resourceUrlParser = new ResourceUrlParser val resourceCopyService = new ResourceCopyService(resourceUrlParser) stdIO.printWelcomeMessage() val destinationDirectory = stdIO.getDestinationDirectory val resourceUrls = stdIO.getUrls val process = resourceCopyService.copyResources(resourceUrls, destinationDirectory) Await.result(process, Duration.Inf) stdIO.printExitMessage() } }
arpanchaudhury/SFD
src/main/scala/SimpleFileDownloaderClient.scala
Scala
mit
704
/* * Copyright (C) 2005, The Beangle Software. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.beangle.data.dao import org.beangle.commons.collection.page.Page import org.beangle.commons.collection.page.PageLimit import org.beangle.commons.collection.page.SinglePage import org.beangle.data.model.Entity /** * QueryPage class. * * @author chaostone */ class QueryPage[T <: Entity[_]](query: LimitQuery[T], val entityDao: EntityDao) extends AbstractQueryPage[T](query) { next() def moveTo(pageIndex: Int): Page[T] = { query.limit(PageLimit(pageIndex, query.limit.pageSize)) updatePage(entityDao.search(query).asInstanceOf[SinglePage[T]]) this } }
beangle/data
orm/src/main/scala/org/beangle/data/dao/QueryPage.scala
Scala
lgpl-3.0
1,310
/* * Copyright (c) 2014-2020 by The Monix Project Developers. * See the project homepage at: https://monix.io * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monix.execution import monix.execution.Cancelable.IsDummy import monix.execution.CancelableFuture.{Async, Never, Pure} import monix.execution.cancelables.{ChainedCancelable, SingleAssignCancelable} import monix.execution.schedulers.TrampolinedRunnable import monix.execution.schedulers.TrampolineExecutionContext.immediate import scala.concurrent._ import scala.concurrent.duration.Duration import scala.reflect.ClassTag import scala.util.{Failure, Success, Try} import scala.util.control.NonFatal /** Represents an asynchronous computation that can be canceled * as long as it isn't complete. */ sealed abstract class CancelableFuture[+A] extends Future[A] with Cancelable { self => /** Returns this future's underlying [[Cancelable]] reference. */ private[monix] def cancelable: Cancelable /** Returns the underlying `Future` reference. 
*/ private[monix] def underlying: Future[A] override final def failed: CancelableFuture[Throwable] = { implicit val ec = immediate transformWith { case Success(_) => CancelableFuture.failed(new NoSuchElementException("failed")) case Failure(e) => CancelableFuture.successful(e) } } override final def transform[S](s: (A) => S, f: (Throwable) => Throwable)(implicit executor: ExecutionContext): CancelableFuture[S] = transform { case Success(a) => Success(s(a)) case Failure(e) => Failure(f(e)) } override final def map[S](f: (A) => S)(implicit executor: ExecutionContext): CancelableFuture[S] = transform { case Success(a) => Success(f(a)) case fail => fail.asInstanceOf[Try[S]] } override final def filter(p: (A) => Boolean)(implicit executor: ExecutionContext): CancelableFuture[A] = transform { case Success(a) if !p(a) => throw new NoSuchElementException("Future.filter predicate is not satisfied") case pass => pass } override final def collect[S](pf: PartialFunction[A, S])(implicit executor: ExecutionContext): CancelableFuture[S] = transform { case Success(a) => if (pf.isDefinedAt(a)) Success(pf(a)) else throw new NoSuchElementException("Future.collect partial function is not defined at: " + a) case fail @ Failure(_) => fail.asInstanceOf[Failure[S]] } override final def recover[U >: A](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): CancelableFuture[U] = transform { case ref @ Success(_) => ref case Failure(e) => if (!pf.isDefinedAt(e)) throw e Success(pf(e)) } override final def recoverWith[U >: A](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): CancelableFuture[U] = transformWith { case Success(_) => this case Failure(e) => if (!pf.isDefinedAt(e)) this else pf(e) } override final def zip[U](that: Future[U]): CancelableFuture[(A, U)] = { implicit val ec = immediate for (a <- this; b <- that) yield (a, b) } override final def fallbackTo[U >: A](that: Future[U]): CancelableFuture[U] = { implicit val ec = immediate 
transformWith { case Success(_) => this case Failure(_) => that } } override final def mapTo[S](implicit tag: ClassTag[S]): CancelableFuture[S] = { this match { case Async(other, cRef) => CancelableFuture(other.mapTo[S], cRef) case _: Pure[_] => CancelableFuture(super.mapTo[S], Cancelable.empty) case Never => Never } } override final def andThen[U](pf: PartialFunction[Try[A], U])(implicit executor: ExecutionContext): CancelableFuture[A] = transformWith { r => if (pf.isDefinedAt(r)) pf(r) this } override final def flatMap[S](f: (A) => Future[S])(implicit executor: ExecutionContext): CancelableFuture[S] = transformWith { case Success(s) => f(s) case Failure(_) => this.asInstanceOf[CancelableFuture[S]] } def transform[S](f: Try[A] => Try[S])(implicit executor: ExecutionContext): CancelableFuture[S] = { val next = FutureUtils.transform(underlying, f) CancelableFuture(next, cancelable) } def transformWith[S](f: Try[A] => Future[S])(implicit executor: ExecutionContext): CancelableFuture[S] = { // Cancelable reference that needs to be chained with other // cancelable references created in the loop val cRef = ChainedCancelable(cancelable) // FutureUtils will use a polyfill for Scala 2.11 and will // use the real `transformWith` on Scala 2.12 val f2 = FutureUtils.transformWith( underlying, { result: Try[A] => val nextRef: Future[S] = try f(result) catch { case e if NonFatal(e) => Future.failed(e) } // Checking to see if we are dealing with a "flatMap" // future, in which case we need to chain the cancelable // reference in order to not create a memory leak nextRef match { case ref: CancelableFuture[_] if ref ne Never => val cf = ref.asInstanceOf[CancelableFuture[S]] // If the resulting Future is completed, there's no reason // to chain cancelable tokens if (!cf.isCompleted) cf.cancelable match { case cRef2: ChainedCancelable => // Chaining ensures we don't leak cRef2.forwardTo(cRef) case cRef2 => if (!cRef2.isInstanceOf[IsDummy]) cRef := cRef2 } // Returning underlying b/c 
otherwise we leak memory in // infinite loops cf.underlying case _ => nextRef } } ) CancelableFuture(f2, cRef) } } object CancelableFuture extends internal.CancelableFutureForPlatform { /** Builder for a [[CancelableFuture]]. * * @param underlying is an underlying `Future` reference that will respond to `onComplete` calls * @param cancelable is a [[monix.execution.Cancelable Cancelable]] * that can be used to cancel the active computation */ def apply[A](underlying: Future[A], cancelable: Cancelable): CancelableFuture[A] = new Async[A](underlying, cancelable) /** Promotes a strict `value` to a [[CancelableFuture]] that's * already complete. * * @param value is the value that's going to be signaled in the * `onComplete` callback. */ def successful[A](value: A): CancelableFuture[A] = new Pure[A](Success(value)) /** Promotes a strict `Throwable` to a [[CancelableFuture]] that's * already complete with a failure. * * @param e is the error that's going to be signaled in the * `onComplete` callback. */ def failed[A](e: Throwable): CancelableFuture[A] = new Pure[A](Failure(e)) /** Promotes a strict `value` to a [[CancelableFuture]] that's * already complete. * * Alias for [[successful]]. * * @param value is the value that's going to be signaled in the * `onComplete` callback. */ def pure[A](value: A): CancelableFuture[A] = successful(value) /** Promotes a strict `Throwable` to a [[CancelableFuture]] that's * already complete with a failure. * * Alias for [[failed]]. * * @param e is the error that's going to be signaled in the * `onComplete` callback. */ def raiseError[A](e: Throwable): CancelableFuture[A] = failed(e) /** An already completed [[CancelableFuture]]. */ val unit: CancelableFuture[Unit] = successful(()) /** Returns a [[CancelableFuture]] instance that will never complete. */ final def never[A]: CancelableFuture[A] = Never /** Promotes a strict `Try[A]` to a [[CancelableFuture]] that's * already complete. 
* * @param value is the `Try[A]` value that's going to be signaled * in the `onComplete` callback. */ def fromTry[A](value: Try[A]): CancelableFuture[A] = new Pure[A](value) /** Given a registration function that can execute an asynchronous * process, executes it and builds a [[CancelableFuture]] value * out of it. * * The given `registration` function can return a [[Cancelable]] * reference that can be used to cancel the executed async process. * This reference can be [[Cancelable.empty empty]]. * * {{{ * def delayedResult[A](f: => A)(implicit s: Scheduler): CancelableFuture[A] = * CancelableFuture.async { complete => * val task = s.scheduleOnce(10.seconds) { complete(Try(f)) } * * Cancelable { () => * println("Cancelling!") * task.cancel() * } * } * }}} * * This is much like working with Scala's * [[scala.concurrent.Promise Promise]], only safer. */ def async[A](register: (Try[A] => Unit) => Cancelable)(implicit ec: ExecutionContext): CancelableFuture[A] = { val p = Promise[A]() val cRef = SingleAssignCancelable() // Light async boundary to guard against stack overflows ec.execute(new TrampolinedRunnable { def run(): Unit = { try { cRef := register { v => p.complete(v); () } () } catch { case e if NonFatal(e) => if (!p.tryComplete(Failure(e))) ec.reportFailure(e) } } }) CancelableFuture(p.future, cRef) } /** A [[CancelableFuture]] instance that will never complete. 
*/ private[execution] object Never extends CancelableFuture[Nothing] { def onComplete[U](f: (Try[Nothing]) => U)(implicit executor: ExecutionContext): Unit = () val isCompleted = false val value = None def cancelable = Cancelable.empty def underlying = this @scala.throws[Exception](classOf[Exception]) def result(atMost: Duration)(implicit permit: CanAwait): Nothing = throw new TimeoutException("This CancelableFuture will never finish!") @scala.throws[InterruptedException](classOf[InterruptedException]) @scala.throws[TimeoutException](classOf[TimeoutException]) def ready(atMost: Duration)(implicit permit: CanAwait): this.type = throw new TimeoutException("This CancelableFuture will never finish!") def cancel(): Unit = () override def transform[S](f: (Try[Nothing]) => Try[S])(implicit executor: ExecutionContext): CancelableFuture[S] = this override def transformWith[S](f: (Try[Nothing]) => Future[S])(implicit executor: ExecutionContext): CancelableFuture[S] = this } /** An internal [[CancelableFuture]] implementation. */ private[execution] final class Pure[+A](immediate: Try[A]) extends CancelableFuture[A] { def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this def result(atMost: Duration)(implicit permit: CanAwait): A = immediate.get def cancelable = Cancelable.empty val underlying = Future.fromTry(immediate) def cancel(): Unit = () def isCompleted: Boolean = true def value: Option[Try[A]] = underlying.value def onComplete[U](f: (Try[A]) => U)(implicit executor: ExecutionContext): Unit = executor.execute(new Runnable { def run(): Unit = { f(immediate); () } }) } /** An actual [[CancelableFuture]] implementation; internal. 
*/ private[execution] final case class Async[+A](underlying: Future[A], cancelable: Cancelable) extends CancelableFuture[A] { override def onComplete[U](f: (Try[A]) => U)(implicit executor: ExecutionContext): Unit = underlying.onComplete(f)(executor) override def isCompleted: Boolean = underlying.isCompleted override def value: Option[Try[A]] = underlying.value @throws[Exception](classOf[Exception]) def result(atMost: Duration)(implicit permit: CanAwait): A = underlying.result(atMost)(permit) @throws[InterruptedException](classOf[InterruptedException]) @throws[TimeoutException](classOf[TimeoutException]) def ready(atMost: Duration)(implicit permit: CanAwait): this.type = { underlying.ready(atMost)(permit) this } override def cancel(): Unit = cancelable.cancel() } }
alexandru/monifu
monix-execution/shared/src/main/scala/monix/execution/CancelableFuture.scala
Scala
apache-2.0
12,868
package com.googlecode.kanbanik.migrate import com.googlecode.kanbanik.db.HasMongoConnection import com.mongodb.casbah.Imports._ import com.mongodb.casbah.commons.MongoDBObject import com.googlecode.kanbanik.commands.CreateUserCommand import com.mongodb.DBObject import com.googlecode.kanbanik.model._ import org.bson.types.ObjectId import com.mongodb.BasicDBList import com.googlecode.kanbanik.builders.TaskBuilder import com.googlecode.kanbanik.commons._ import com.googlecode.kanbanik.dtos.{WorkfloVerticalSizing, ManipulateUserDto} class MigrateDb extends HasMongoConnection { val versionMigrations = Map( 1 -> List(new From1To2, new From2To3, new From3To4), 2 -> List(new From2To3, new From3To4), 3 -> List(new From3To4) ) def migrateDbIfNeeded { System.out.println("migration started") using(createConnection) { conn => val version = coll(conn, Coll.KanbanikVersion).findOne() if (version.isDefined) { val curVersion = version.get.get("version").asInstanceOf[Int] System.out.println("version defined and is: " + curVersion) runAllFrom(curVersion) } else { System.out.println("version is 1") coll(conn, Coll.KanbanikVersion) += MongoDBObject("version" -> 1) runAllFrom(1) } } } def runAllFrom(curVersion: Int) { System.out.println("running all from: " + curVersion) val migrationParts = versionMigrations.get(curVersion) if (migrationParts.isDefined) { for (part <- migrationParts.get) { System.out.println("START migration using " + part.getClass) part.migrate System.out.println("END migration using " + part.getClass) } } } } trait MigrationPart extends HasMongoConnection { def migrate; def setVersionTo(version: Int) { using(createConnection) { conn => coll(conn, Coll.KanbanikVersion).update(MongoDBObject(), $set("version" -> version)) } } } // from 0.2.3 -> 0.2.4 class From1To2 extends MigrationPart { def migrate { // create a default user val userDto = ManipulateUserDto( "admin", "Default User", null, "sessionId", 1, "admin", "admin") // create the first user new 
CreateUserCommand().execute(userDto) setVersionTo(2) } } // from 0.2.4 -> 0.2.5 class From2To3 extends MigrationPart { private val oldTasksCollection = "tasks" private val boardsCollection = "boards" private lazy val taskBuilder = new TaskBuilder() def migrate { val classesOfServices = createDefaultClassesOfService() migrateBoard migrateTasks(classesOfServices) deleteOldTasks() setVersionTo(3) } def migrateBoard() { def migrateBalanced(board: DBObject, boards: List[Board]) { val id = board.get(Board.Fields.id.toString()).asInstanceOf[ObjectId] val balanced = board.get("balanceWorkflowitems") val isBalanced = if (balanced == null) true else balanced.asInstanceOf[Boolean] if (!isBalanced) { // consider only non balanced one as the balanced is the default val boardToUpdate = boards.find(_.id.get == id).getOrElse(return) boardToUpdate.copy(workfloVerticalSizing = WorkfloVerticalSizing.MIN_POSSIBLE).store } } using(createConnection) { conn => val rawBoards = coll(conn, Coll.Boards).find(MongoDBObject()) val realBoards = Board.all(false) rawBoards.foreach(migrateBalanced(_, realBoards)) } } def createDefaultClassesOfService() = { Map(2 -> new ClassOfService( None, "Standard", "For typical tasks that flow through the system.", "5d8eef", 1).store) + (0 -> new ClassOfService( None, "Expedite", "For critical tasks. 
Can break all rules on the system.", "e21714", 1).store) + (3 -> new ClassOfService( None, "Intangible", "Nice to have but not critical.", "1eaa25", 1).store) + (1 -> new ClassOfService( None, "Fixed Delivery Date", "Has to be done until specified date.", "ffeb00", 1).store) } def migrateTasks(classesOfService: Map[Int, ClassOfService]) { using(createConnection) { conn => val oldTasks = coll(conn, oldTasksCollection).find().map(asOldEntity(_)) val newTasks = oldTasks.map(_.asNewTask(classesOfService)) var order = 0 for (newTask <- newTasks if (newTask.project != null && newTask.workflowitem != null)) { newTask.copy(order = Integer.toString(order)).store order += 100 } } def asOldEntity(dbObject: DBObject) = { new OldTask( Some(dbObject.get(Task.Fields.id.toString()).asInstanceOf[ObjectId]), dbObject.get(Task.Fields.name.toString()).asInstanceOf[String], dbObject.get(Task.Fields.description.toString()).asInstanceOf[String], dbObject.get(Task.Fields.classOfService.toString()).asInstanceOf[Int], dbObject.get(Task.Fields.ticketId.toString()).asInstanceOf[String], { val res = dbObject.get(Task.Fields.version.toString()) if (res == null) { 1 } else { res.asInstanceOf[Int] } }, dbObject.get(Task.Fields.workflowitem.toString()).asInstanceOf[ObjectId]) } } def deleteOldTasks() { using(createConnection) { conn => coll(conn, oldTasksCollection).remove(MongoDBObject()) } } case class NewTask(id: Option[ObjectId], name: String, description: String, classOfService: Option[ClassOfService], ticketId: String, version: Int, order: String, assignee: Option[User], dueData: String, workflowitem: Workflowitem, project: Project) { object Fields extends DocumentField { val description = Value("description") val ticketId = Value("ticketId") val order = Value("order") val projectId = Value("projectId") val workflowitem = Value("workflowitem") val classOfService = Value("classOfService") val assignee = Value("assignee") val dueDate = Value("dueDate") } def asDBObject(entity: NewTask): 
DBObject = { MongoDBObject( Fields.id.toString() -> { if (entity.id == null || !entity.id.isDefined) new ObjectId else entity.id }, Fields.name.toString() -> entity.name, Fields.description.toString() -> entity.description, Fields.classOfService.toString() -> entity.classOfService, Fields.ticketId.toString() -> entity.ticketId, Fields.version.toString() -> entity.version, Fields.order.toString() -> entity.order, Fields.projectId.toString() -> entity.project.id, Fields.classOfService.toString() -> { if (entity.classOfService.isDefined) entity.classOfService.get.id else None }, Fields.assignee.toString() -> { if (entity.assignee.isDefined) entity.assignee.get.name else None }, Fields.dueDate.toString() -> entity.dueData, Fields.workflowitem.toString() -> entity.workflowitem.id.getOrElse(throw new IllegalArgumentException("Task can not exist without a workflowitem"))) } def store() { using(createConnection) { conn => val update = $push(Coll.Tasks.toString() -> asDBObject(this)) coll(conn, Coll.Boards).findAndModify(MongoDBObject(Fields.id.toString() -> workflowitem.parentWorkflow.board.id.get), null, null, false, update, true, false) } } } class OldTask(val id: Option[ObjectId], val name: String, val description: String, val classOfService: Int, val ticketId: String, val version: Int, val workflowitemId: ObjectId) { def asNewTask(classesOfService: Map[Int, ClassOfService]): NewTask = { new NewTask( None, // because I want to create a new one name, description, { if (classesOfService.contains(classOfService)) { classesOfService.get(classOfService) } else { classesOfService.get(2) } }, ticketId, 1, // because I basically want to create a new one "", None, "", findWorkflowitem(), findProject() ) } def findWorkflowitem(): Workflowitem = { val board = Board.all(false).find(board => board.workflow.containsItem(Workflowitem().copy(id = Some(workflowitemId)))).getOrElse(return null) val workflowitem = board.workflow.findItem(Workflowitem().copy(id = Some(workflowitemId))) 
workflowitem.orNull } def findProject(): Project = { using(createConnection) { conn => val projects = for (project <- coll(conn, Coll.Projects).find() if (isOnProject(project))) yield project if (projects == null || projects.isEmpty) { null } else { val projectId = projects.next.get(Project.Fields.id.toString()) Project().copy(id = Some(projectId.asInstanceOf[ObjectId])) } } } def isOnProject(dbObject: DBObject): Boolean = { val tasks = dbObject.get("tasks") if (tasks == null || tasks == None) { false } else { if (tasks.isInstanceOf[List[_]]) { tasks.asInstanceOf[List[_]].contains(id.get) } else { tasks.asInstanceOf[BasicDBList].toScalaList.contains(id.get) } } } } } // from 0.2.5 -> 0.2.6 class From3To4 extends MigrationPart { def migrate { using(createConnection) { conn => { for (board <- coll(conn, "boards").find()) { val tasks = board.get("tasks") if (tasks != null && tasks.isInstanceOf[BasicDBList]) { val list = board.get("tasks").asInstanceOf[BasicDBList].toArray().toList.asInstanceOf[List[DBObject]] val oldTaskIds = list.map(_.get(Fields.id.toString)) // create new tasks for (task <- list) { asNewTask(task, board.get(Fields.id.toString()).asInstanceOf[ObjectId]).store } // delete old tasks for (oldId <- oldTaskIds) { val update = $pull(Coll.Tasks.toString() -> MongoDBObject(Fields.id.toString() -> oldId.asInstanceOf[ObjectId])) coll(conn, Coll.Boards).update(MongoDBObject(Board.Fields.id.toString() -> board.get(Fields.id.toString()).asInstanceOf[ObjectId]), update) } } } } } setVersionTo(4) } object Fields extends DocumentField { val description = Value("description") val ticketId = Value("ticketId") val order = Value("order") val projectId = Value("projectId") val workflowitem = Value("workflowitem") val classOfService = Value("classOfService") val assignee = Value("assignee") val dueDate = Value("dueDate") val boardId = Value("boardId") } def asNewTask(dbObject: DBObject, boardId: ObjectId): Task = { new Task( None, 
dbObject.get(Fields.name.toString()).asInstanceOf[String], dbObject.get(Fields.description.toString()).asInstanceOf[String], loadOrNone[ObjectId, ClassOfService](Fields.classOfService.toString(), dbObject, loadClassOfService(_)), dbObject.get(Fields.ticketId.toString()).asInstanceOf[String], { val version = dbObject.getWithDefault[Int](Fields.version, 1) if (version != 0) { version } else { 1 } }, dbObject.get(Fields.order.toString()).asInstanceOf[String], loadOrNone[String, User](Fields.assignee.toString(), dbObject, loadUser(_)), dbObject.getWithDefault[String](Fields.dueDate, ""), dbObject.get(Fields.workflowitem.toString()).asInstanceOf[ObjectId], boardId, dbObject.get(Fields.projectId.toString()).asInstanceOf[ObjectId], None ) } def loadOrNone[T, R](dbField: String, dbObject: DBObject, f: T => Option[R]): Option[R] = { val res = dbObject.get(dbField) if (res == null) { None } else { f(res.asInstanceOf[T]) } } def loadClassOfService(id: ObjectId) = { try { Some(ClassOfService.byId(id)) } catch { case e: IllegalArgumentException => None } } def loadUser(name: String) = { try { Some(User.byId(name)) } catch { case e: IllegalArgumentException => None } } }
nagyistoce/kanbanik
kanbanik-server/src/main/scala/com/googlecode/kanbanik/migrate/MigrateDb.scala
Scala
apache-2.0
12,688
package org.jetbrains.sbt.project.data.service import com.intellij.openapi.externalSystem.model.{DataNode, ProjectKeys} import com.intellij.openapi.externalSystem.service.project.ProjectStructureHelper import com.intellij.openapi.module.Module import com.intellij.openapi.project.Project /** * @author Nikolay Obedin * @since 6/4/15. */ trait SafeProjectStructureHelper { val helper: ProjectStructureHelper def getIdeModuleByNode(node: DataNode[_], project: Project): Option[Module] = for { moduleData <- Option(node.getData(ProjectKeys.MODULE)) module <- Option(helper.findIdeModule(moduleData, project)) } yield module }
SergeevPavel/intellij-scala
src/org/jetbrains/sbt/project/data/service/SafeProjectStructureHelper.scala
Scala
apache-2.0
653
package com.haskforce.cabal.lang.psi.impl import com.intellij.psi.PsiElement import com.haskforce.cabal.lang.psi._ import com.haskforce.utils.PQ trait BuildDependsImpl extends PsiElement { /** Retrieves the package names as strings. */ def getPackageNames: Array[String] = PQ.getChildOfType(this, classOf[Dependencies]) match { case None => Array.empty case Some(el) => val res = PQ.streamChildren(el, classOf[Dependency]).flatMap(c => PQ.getChildNodes(c, CabalTypes.DEPENDENCY_NAME).headOption.map(_.getText) ).toArray res } }
carymrobbins/intellij-haskforce
src/com/haskforce/cabal/lang/psi/impl/BuildDependsImpl.scala
Scala
apache-2.0
584
import java.sql.DriverManager import scala.collection.mutable import sh.echo.simqle._ object Main extends App { Class.forName("org.h2.Driver") val db = Db("jdbc:h2:mem:demo;MODE=MYSQL;DB_CLOSE_DELAY=-1") db.withConnection { conn ⇒ val stmt = conn.createStatement() stmt.execute("create table users (id int, name varchar(255))") stmt.execute("insert into users (id, name) values (1, 'Alice'), (2, 'Bob')") stmt.close() } case class User(id: Int, name: String) println { query[User]("select * from users") } def query[T](sql: String)(implicit rm: RowMapper[T]): List[T] = db.withConnection { conn ⇒ val stmt = conn.createStatement() val rs = stmt.executeQuery(sql) val buffer = mutable.Buffer.empty[T] while (rs.next()) buffer += rm.get(rs) rs.close() stmt.close() buffer.toList } }
echojc/simqle-sdu
code/simqle-full/src/main/scala/Main.scala
Scala
mit
878
/* * Copyright 2014–2020 SlamData Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package quasar.api.push.param import cats.{Eq, Show} import scala.{math, Boolean, Int, Product, Serializable} sealed trait IntegerStep extends Product with Serializable with (Int => Boolean) object IntegerStep { final case class Factor(init: Int, n: Int) extends IntegerStep { def apply(i: Int): Boolean = i % n == init } final case class Power(init: Int, n: Int) extends IntegerStep { def apply(i: Int): Boolean = math.round(math.log(i.toDouble) / math.log(n.toDouble)) == init } implicit val equal: Eq[IntegerStep] = Eq.fromUniversalEquals[IntegerStep] implicit val show: Show[IntegerStep] = Show.fromToString[IntegerStep] }
slamdata/quasar
api/src/main/scala/quasar/api/push/param/IntegerStep.scala
Scala
apache-2.0
1,279
package mesosphere.marathon.core.task.tracker.impl import mesosphere.marathon.Protos.MarathonTask import mesosphere.marathon.core.task.Task import mesosphere.marathon.core.task.Task.LocalVolumeId import mesosphere.marathon.core.task.state.MarathonTaskStatus import mesosphere.marathon.state.{ PathId, Timestamp } import mesosphere.marathon.test.{ MarathonTestHelper, Mockito } import mesosphere.marathon.SerializationFailedException import org.apache.mesos.Protos._ import org.apache.mesos.{ Protos => MesosProtos } import org.scalatest.{ FunSuite, GivenWhenThen, Matchers } import scala.collection.immutable.Seq class TaskSerializerTest extends FunSuite with Mockito with Matchers with GivenWhenThen { import scala.collection.JavaConverters._ val f = new Fixture test("minimal marathonTask => Task") { Given("a minimal MarathonTask") val now = MarathonTestHelper.clock.now() val taskProto = MarathonTask.newBuilder() .setId("task") .setVersion(now.toString) .setStagedAt(now.toDateTime.getMillis) .setMarathonTaskStatus(MarathonTask.MarathonTaskStatus.Running) .setHost(f.sampleHost).build() When("we convert it to task") val task = TaskSerializer.fromProto(taskProto) Then("we get a minimal task State") val expectedState = MarathonTestHelper.mininimalTask(f.taskId.idString, now, None, MarathonTaskStatus.Running) task should be(expectedState) When("we serialize it again") val marathonTask2 = TaskSerializer.toProto(task) Then("we get the original state back") marathonTask2 should equal(taskProto) } test("full marathonTask with no networking => Task") { val f = new Fixture Given("a MarathonTask with all fields and host ports") val taskProto = f.completeTask When("we convert it to task") val task = TaskSerializer.fromProto(taskProto) Then("we get the expected task state") val expectedState = f.fullSampleTaskStateWithoutNetworking task should be(expectedState) When("we serialize it again") val marathonTask2 = TaskSerializer.toProto(task) Then("we get the original state back") 
marathonTask2 should equal(taskProto) } test("full marathonTask with host ports => Task") { val f = new Fixture Given("a MarathonTask with all fields and host ports") val samplePorts = Seq(80, 81) val taskProto = f.completeTask.toBuilder .addAllPorts(samplePorts.map(Integer.valueOf(_)).asJava) .build() When("we convert it to task") val task = TaskSerializer.fromProto(taskProto) Then("we get the expected task state") val expectedState = f.fullSampleTaskStateWithoutNetworking.copy(hostPorts = samplePorts) task should be(expectedState) When("we serialize it again") val marathonTask2 = TaskSerializer.toProto(task) Then("we get the original state back") marathonTask2 should equal(taskProto) } test("full marathonTask with NetworkInfoList in Status => Task") { val f = new Fixture Given("a MarathonTask with all fields and status with network infos") val taskProto = f.completeTask.toBuilder .setStatus( TaskStatus.newBuilder() .setTaskId(f.taskId.mesosTaskId) .setState(TaskState.TASK_RUNNING) .setContainerStatus(ContainerStatus.newBuilder().addAllNetworkInfos(f.sampleNetworks.asJava)) ) .build() When("we convert it to task") val task = TaskSerializer.fromProto(taskProto) Then("we get the expected task state") import MarathonTestHelper.Implicits._ val expectedState = f.fullSampleTaskStateWithoutNetworking.withNetworkInfos(f.sampleNetworks) task should be(expectedState) When("we serialize it again") val marathonTask2 = TaskSerializer.toProto(task) Then("we get the original state back") marathonTask2 should equal(taskProto) } test("Reserved <=> Proto") { val f = new Fixture Given("a reserved task") val proto = f.Resident.reservedProto When("We convert it to a task") val taskProto = TaskSerializer.fromProto(proto) Then("We get a correct representation") taskProto should equal (f.Resident.reservedState) When("We serialize it again") val serialized = TaskSerializer.toProto(taskProto) Then("We get the original state back") serialized should equal(proto) } 
test("LaunchedOnReservation <=> Proto") { val f = new Fixture Given("a LaunchedOnReservation proto") val proto = f.Resident.launchedOnReservationProto When("We convert it to a task") val task = TaskSerializer.fromProto(proto) Then("We get a correct representation") task should equal (f.Resident.launchedOnReservationState) When("We serialize it again") val serialized = TaskSerializer.toProto(task) Then("We get the original state back") serialized should equal(proto) } test("Failure case: Reserved has no Reservation") { val f = new Fixture Given("a Reserved proto missing reservation") val proto = f.Resident.reservedProtoWithoutReservation When("We convert it to a task") val error = intercept[SerializationFailedException] { TaskSerializer.fromProto(proto) } Then("We get a SerializationFailedException") error.message should startWith("Unable to deserialize") } class Fixture { private[this] val appId = PathId.fromSafePath("/test") val taskId = Task.Id("task") val sampleHost: String = "host.some" private[this] val sampleAttributes: Iterable[Attribute] = Iterable(attribute("label1", "value1")) private[this] val stagedAtLong: Long = 1 private[this] val startedAtLong: Long = 2 private[this] val appVersion: Timestamp = Timestamp(3) private[this] val sampleTaskStatus: TaskStatus = MesosProtos.TaskStatus.newBuilder() .setTaskId(MesosProtos.TaskID.newBuilder().setValue(taskId.idString)) .setState(MesosProtos.TaskState.TASK_RUNNING) .build() private[this] val sampleSlaveId: MesosProtos.SlaveID.Builder = MesosProtos.SlaveID.newBuilder().setValue("slaveId") val sampleNetworks: Seq[MesosProtos.NetworkInfo] = Seq( MesosProtos.NetworkInfo.newBuilder() .addIpAddresses(MesosProtos.NetworkInfo.IPAddress.newBuilder().setIpAddress("1.2.3.4")) .build() ) val fullSampleTaskStateWithoutNetworking: Task.LaunchedOnReservation = Task.LaunchedOnReservation( taskId, Task.AgentInfo(host = sampleHost, agentId = Some(sampleSlaveId.getValue), attributes = sampleAttributes), runSpecVersion = 
appVersion, status = Task.Status( stagedAt = Timestamp(stagedAtLong), startedAt = Some(Timestamp(startedAtLong)), mesosStatus = Some(sampleTaskStatus), taskStatus = MarathonTaskStatus.Running ), hostPorts = Seq.empty, reservation = Task.Reservation( Seq(LocalVolumeId(appId, "my-volume", "uuid-123")), Task.Reservation.State.Launched) ) val completeTask = MarathonTask .newBuilder() .setId(taskId.idString) .setHost(sampleHost) .addAllAttributes(sampleAttributes.asJava) .setStagedAt(stagedAtLong) .setStartedAt(startedAtLong) .setVersion(appVersion.toString) .setStatus(sampleTaskStatus) .setSlaveId(sampleSlaveId) .setMarathonTaskStatus(MarathonTask.MarathonTaskStatus.Running) .setReservation(MarathonTask.Reservation.newBuilder .addLocalVolumeIds(LocalVolumeId(appId, "my-volume", "uuid-123").idString) .setState(MarathonTask.Reservation.State.newBuilder() .setType(MarathonTask.Reservation.State.Type.Launched))) .build() private[this] def attribute(name: String, textValue: String): MesosProtos.Attribute = { val text = MesosProtos.Value.Text.newBuilder().setValue(textValue) MesosProtos.Attribute.newBuilder().setName(name).setType(MesosProtos.Value.Type.TEXT).setText(text).build() } object Resident { import scala.collection.JavaConverters._ import scala.concurrent.duration._ private[this] val appId = PathId("/test") private[this] val taskId = Task.Id("reserved1") private[this] val host = "some.host" private[this] val agentId = "agent-1" private[this] val now = MarathonTestHelper.clock.now() private[this] val containerPath = "containerPath" private[this] val uuid = "uuid" private[this] val attributes = Iterable.empty[MesosProtos.Attribute] private[this] val localVolumeIds = Seq(Task.LocalVolumeId(appId, containerPath, uuid)) private[this] val stagedAt = now - 1.minute private[this] val startedAt = now - 55.seconds private[this] val mesosStatus = MarathonTestHelper.statusForState(taskId.idString, MesosProtos.TaskState.TASK_RUNNING) private[this] val status = 
Task.Status(stagedAt, Some(startedAt), Some(mesosStatus), taskStatus = MarathonTaskStatus.Running) private[this] val hostPorts = Seq(1, 2, 3) def reservedProto = MarathonTask.newBuilder() .setId(taskId.idString) .setHost(host) .setSlaveId(MesosProtos.SlaveID.newBuilder().setValue(agentId)) .addAllAttributes(attributes.asJava) .setMarathonTaskStatus(MarathonTask.MarathonTaskStatus.Reserved) .setReservation(MarathonTask.Reservation.newBuilder() .addAllLocalVolumeIds(localVolumeIds.map(_.idString).asJava) .setState(MarathonTask.Reservation.State.newBuilder() .setType(MarathonTask.Reservation.State.Type.New) .setTimeout(MarathonTask.Reservation.State.Timeout.newBuilder() .setInitiated(now.toDateTime.getMillis) .setDeadline((now + 1.minute).toDateTime.getMillis) .setReason(MarathonTask.Reservation.State.Timeout.Reason.ReservationTimeout)))) .build() def reservedState = Task.Reserved( Task.Id(taskId.idString), Task.AgentInfo(host = host, agentId = Some(agentId), attributes), reservation = Task.Reservation(localVolumeIds, Task.Reservation.State.New(Some(Task.Reservation.Timeout( initiated = now, deadline = now + 1.minute, reason = Task.Reservation.Timeout.Reason.ReservationTimeout)))), status = Task.Status(stagedAt = Timestamp(0), taskStatus = MarathonTaskStatus.Reserved) ) def launchedEphemeralProto = MarathonTask.newBuilder() .setId(taskId.idString) .setHost(host) .setSlaveId(MesosProtos.SlaveID.newBuilder().setValue(agentId)) .addAllAttributes(attributes.asJava) .setVersion(appVersion.toString) .setStagedAt(stagedAt.toDateTime.getMillis) .setStartedAt(startedAt.toDateTime.getMillis) .setStatus(mesosStatus) .setMarathonTaskStatus(MarathonTask.MarathonTaskStatus.Running) .addAllPorts(hostPorts.map(Integer.valueOf).asJava) .build() def launchedOnReservationProto = launchedEphemeralProto.toBuilder .setReservation(MarathonTask.Reservation.newBuilder() .addAllLocalVolumeIds(localVolumeIds.map(_.idString).asJava) .setState(MarathonTask.Reservation.State.newBuilder() 
.setType(MarathonTask.Reservation.State.Type.Launched))) .build() def launchedOnReservationState = Task.LaunchedOnReservation( taskId, Task.AgentInfo(host = host, agentId = Some(agentId), attributes), appVersion, status, hostPorts, Task.Reservation(localVolumeIds, Task.Reservation.State.Launched) ) def reservedProtoWithoutReservation = reservedProto.toBuilder.clearReservation().build() } } }
timcharper/marathon
src/test/scala/mesosphere/marathon/core/task/tracker/impl/TaskSerializerTest.scala
Scala
apache-2.0
11,785
package be.cmpg.simulatedDataAnalysis import java.io.FileWriter class ROCPlotValuesCreator(selected: Set[String], positives: Set[String], all: Set[String]) { val value = { val negatives = all.--(positives) val truePositives = positives.&(selected) val trueNegatives = (negatives).&(all.--(selected)) val falsePositives = selected.&(negatives) val falseNegatives = positives.--(selected) new ROCPlotValue(truePositives.size.toDouble, falsePositives.size.toDouble, trueNegatives.size.toDouble, falseNegatives.size.toDouble) //val sensitivity = truePositives.size.toDouble / (truePositives.size.toDouble + falseNegatives.size.toDouble) //val positivePredictiveValue = truePositives.size.toDouble / (truePositives.size.toDouble + falsePositives.size.toDouble) //(positivePredictiveValue , sensitivity) } def get = value }
spulido99/SSA
src/main/scala/be/cmpg/simulatedDataAnalysis/ROCPlotValuesCreator.scala
Scala
gpl-2.0
880
package com.voldy.beans import com.fasterxml.jackson.annotation.JsonProperty import com.fasterxml.jackson.annotation.JsonIgnoreProperties import scala.beans.BeanProperty @JsonIgnoreProperties(ignoreUnknown = true) class RoutingDetails (){ @BeanProperty @JsonProperty("customer_name") var customer_name: String = _ @BeanProperty @JsonProperty("routing_number") @BeanProperty var routing_number: String = _ @JsonProperty("change_date") @BeanProperty var change_date: String = _ @JsonProperty("data_view_code") @BeanProperty var data_view_code: String = _ @JsonProperty("message") @BeanProperty var message: String = _ @JsonProperty("record_type_code") @BeanProperty var record_type_code: String = _ @JsonProperty("zip") @BeanProperty var zip: String = _ @JsonProperty("office_code") @BeanProperty var office_code: String = _ @JsonProperty("telephone") @BeanProperty var telephone: String = _ @JsonProperty("rn") @BeanProperty var rn: String = _ @JsonProperty("address") @BeanProperty var address: String = _ @JsonProperty("code") @BeanProperty var code: String = _ @JsonProperty("state") @BeanProperty var state: String = _ @JsonProperty("new_routing_number") @BeanProperty var new_routing_number: String = _ @JsonProperty("institution_status_code") @BeanProperty var institution_status_code: String = _ @JsonProperty("city") @BeanProperty var city: String = _ def this(customer_name:String,routing_number:String) { this() this.customer_name = customer_name this.routing_number = routing_number } }
amoghrao2003/cmpe273-assignment2
src/main/scala/com/voldy/beans/RoutingDetails.scala
Scala
mit
1,624
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.exchange import scala.collection.mutable import scala.collection.mutable.ArrayBuffer import org.apache.spark.broadcast import org.apache.spark.rdd.RDD import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.expressions.Attribute import org.apache.spark.sql.catalyst.rules.Rule import org.apache.spark.sql.execution.{LeafExecNode, SparkPlan, UnaryExecNode} import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types.StructType /** * Base class for operators that exchange data among multiple threads or processes. * * Exchanges are the key class of operators that enable parallelism. Although the implementation * differs significantly, the concept is similar to the exchange operator described in * "Volcano -- An Extensible and Parallel Query Evaluation System" by Goetz Graefe. 
*/ abstract class Exchange extends UnaryExecNode { override def output: Seq[Attribute] = child.output } /** * A wrapper for reused exchange to have different output, because two exchanges which produce * logically identical output will have distinct sets of output attribute ids, so we need to * preserve the original ids because they're what downstream operators are expecting. */ case class ReusedExchangeExec(override val output: Seq[Attribute], child: Exchange) extends LeafExecNode { // Ignore this wrapper for canonicalizing. override lazy val canonicalized: SparkPlan = child.canonicalized def doExecute(): RDD[InternalRow] = { child.execute() } override protected[sql] def doExecuteBroadcast[T](): broadcast.Broadcast[T] = { child.executeBroadcast() } } /** * Find out duplicated exchanges in the spark plan, then use the same exchange for all the * references. */ case class ReuseExchange(conf: SQLConf) extends Rule[SparkPlan] { def apply(plan: SparkPlan): SparkPlan = { if (!conf.exchangeReuseEnabled) { return plan } // Build a hash map using schema of exchanges to avoid O(N*N) sameResult calls. val exchanges = mutable.HashMap[StructType, ArrayBuffer[Exchange]]() plan.transformUp { case exchange: Exchange => // the exchanges that have same results usually also have same schemas (same column names). val sameSchema = exchanges.getOrElseUpdate(exchange.schema, ArrayBuffer[Exchange]()) val samePlan = sameSchema.find { e => exchange.sameResult(e) } if (samePlan.isDefined) { // Keep the output of this exchange, the following plans require that to resolve // attributes. ReusedExchangeExec(exchange.output, samePlan.get) } else { sameSchema += exchange exchange } } } }
MLnick/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/exchange/Exchange.scala
Scala
apache-2.0
3,558
// Copyright: 2010 - 2018 https://github.com/ensime/ensime-server/graphs // License: http://www.gnu.org/licenses/gpl-3.0.en.html package org.ensime.core.javac import scala.collection.JavaConverters._ import scala.collection.breakOut import akka.event.slf4j.SLF4JLogging import com.sun.source.tree._ import com.sun.source.util.TreePath import javax.lang.model.`type`._ import javax.lang.model.element._ import javax.lang.model.element.ElementKind._ import org.ensime.core.{ DocFqn, DocSig } import org.ensime.indexer._ trait Helpers extends UnsafeHelpers with SLF4JLogging { private implicit class EnhancedElement(e: Element) { def isOf(kinds: ElementKind*): Boolean = kinds.exists(_ == e.getKind) } def typeMirror(c: Compilation, t: Tree): Option[TypeMirror] = Option(c.trees.getTypeMirror(c.trees.getPath(c.compilationUnit, t))) def typeElement(c: Compilation, t: Tree): Option[Element] = typeMirror(c, t).map(c.types.asElement) def element(c: Compilation, path: TreePath): Option[Element] = Option(c.trees.getElement(path)) .orElse(unsafeGetElement(path.getLeaf)) .orElse( Option(c.trees.getTypeMirror(path)) .flatMap(t => Option(c.types.asElement(t))) ) def toSymbolName(fqn: FullyQualifiedName): String = fqn match { case m: MethodName => val owner = m.owner.fqnString val name = m.name s"$owner.$name" case x => x.fqnString } private def showParam(d: DescriptorType): String = d match { case a: ArrayDescriptor => showParam(a.fqn) case c: ClassName => c.fqnString } def fqn(el: Element): Option[FullyQualifiedName] = el match { case e: ExecutableElement => descriptor(e).map { descriptor => val name = e.getSimpleName.toString val params = descriptor.params .map(showParam) .mkString(",") MethodName( ClassName.fromFqn(e.getEnclosingElement.toString), s"$name($params)", descriptor ) } case e: VariableElement if e.isOf(PARAMETER, LOCAL_VARIABLE) => Some(ClassName(PackageName(Nil), e.toString)) case e: VariableElement if e.isOf(FIELD) => Some( FieldName( ClassName.fromFqn( 
e.getEnclosingElement.toString ), e.getSimpleName.toString ) ) case e: VariableElement if e.isOf(ENUM_CONSTANT) => fqn(e.asType()).map( FieldName(_, e.getSimpleName.toString) ) case e => fqn(e.asType()) } private def descriptor(e: ExecutableElement): Option[Descriptor] = fqn(e.getReturnType).map { returnType => val params: List[DescriptorType] = e.getParameters.asScala.flatMap(p => fqn(p.asType()))(breakOut) Descriptor(params, returnType) } def path(c: Compilation, t: Tree): Option[TreePath] = Option(c.trees.getPath(c.compilationUnit, t)) def fqn(c: Compilation, t: Tree): Option[FullyQualifiedName] = path(c, t).flatMap(fqn(c, _)) def fqn(c: Compilation, p: TreePath): Option[FullyQualifiedName] = element(c, p).flatMap(fqn(_)) def fqn(tm: TypeMirror): Option[ClassName] = Some(ClassName.fromFqn(tm.toString)) def toDocSig(fqn: FullyQualifiedName): DocSig = fqn match { case p: PackageName => DocSig(DocFqn(p.parent.fqnString, p.path.last), None) case c: ClassName => DocSig(DocFqn(c.pack.fqnString, c.name), None) case m: MethodName => DocSig(DocFqn(m.owner.pack.fqnString, m.owner.name), Some(m.name)) case f: FieldName => DocSig(DocFqn(f.owner.fqnString, f.name), Some(f.name)) } }
yyadavalli/ensime-server
core/src/main/scala/org/ensime/core/javac/Helpers.scala
Scala
gpl-3.0
3,603
/* This file is part of ZikDroid. * Copyright (C) 2015 Sacha Delanoue <contact@shaac.me> * * ZikDroid is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * ZikDroid is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with ZikDroid. If not, see <http://www.gnu.org/licenses/>. */ package me.shaac.zikdroid object Protocol { // Byte array to send to open a session with device val start = Array[Byte](0, 0, 0) // Get byte array to send to get information corresponding to API request def getRequest(request: String): Array[Byte] = pack("GET " + request) // Get byte array to send to set values to corresponding API request def setRequest(request: String, arguments: String): Array[Byte] = pack("SET " + request + "?arg=" + arguments) // TODO function getting an XML from a byte array: array[7:] // Protocol is the following: first 2 bytes are packet size, then a minimal // value byte, and then the bytes of the string message private def pack(request: String): Array[Byte] = { val n = request.size + 3 // Entire size of the final byte array Array[Byte]((n >> 8).toByte, n.toByte, Byte.MinValue) ++ request.getBytes } } // They are a few others protocol features, for sending firmware on device for // instance, but I would not risk trying it.
Shaac/ZikDroid
src/Protocol.scala
Scala
gpl-3.0
1,735
/* * DigiSSHD - DigiControl component for Android Platform * Copyright (c) 2012, Alexey Aksenov ezh@ezh.msk.ru. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 3 or any later * version, as published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 3 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 3 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * */ package org.digimead.digi.ctrl.sshd.service.option import org.digimead.digi.ctrl.lib.util.Android import org.digimead.digi.ctrl.sshd.service.OptionBlock.Item import android.content.Context import android.view.LayoutInflater import android.view.View trait TextViewItem extends Item { def getView(context: Context, inflater: LayoutInflater): View = inflater.inflate(Android.getId(context, "option_list_item_value", "layout"), null) }
ezh/android-component-DigiSSHD
src/main/scala/org/digimead/digi/ctrl/sshd/service/option/TextViewItem.scala
Scala
gpl-3.0
1,404
package controllers import io.apibuilder.api.v0.models.json._ import db.MembershipsDao import java.util.UUID import javax.inject.{Inject, Singleton} import play.api.libs.json.Json @Singleton class Memberships @Inject() ( val apiBuilderControllerComponents: ApiBuilderControllerComponents, membershipsDao: MembershipsDao ) extends ApibuilderController { def get( organizationGuid: Option[UUID], organizationKey: Option[String], userGuid: Option[UUID], role: Option[String], limit: Long = 25, offset: Long = 0 ) = Identified { request => Ok( Json.toJson( membershipsDao.findAll( request.authorization, organizationGuid = organizationGuid, organizationKey = organizationKey, userGuid = userGuid, role = role, limit = Some(limit), offset = offset ) ) ) } def getByGuid(guid: UUID) = Identified { request => membershipsDao.findByGuid(request.authorization, guid) match { case None => NotFound case Some(membership) => { if (membershipsDao.isUserAdmin(request.user, membership.organization)) { Ok(Json.toJson(membership)) } else { Unauthorized } } } } def deleteByGuid(guid: UUID) = Identified { request => membershipsDao.findByGuid(request.authorization, guid) match { case None => NoContent case Some(membership) => { if (membershipsDao.isUserAdmin(request.user, membership.organization)) { membershipsDao.softDelete(request.user, membership) NoContent } else { Unauthorized } } } } }
apicollective/apibuilder
api/app/controllers/Memberships.scala
Scala
mit
1,685
/* * Copyright 2015 Dennis Vriend * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.test.week4 import com.test.TestSpec import rx.lang.scala._ import scala.concurrent.duration._ class RxOperatorsTest extends TestSpec { /** * Observables are asynchronous streams of data. * * Contrary to Futures, they can return multiple values. * * In ReactiveX an `observer` subscribes to an `Observable`. * * Then that observer `reacts` to whatever item or sequence * of items the Observable `emits`. * * So, the most important part is that Observable(s) emit * items (stuff) that observers can subscribe to and react * upon. * * The workflow is: * 1. Create an Observable (note, it emits items!!) * 2. write an 'item'-processing pipeline, eg. * take (2) items, then convert those to a list * 3. Subscribe to the observable * 4. React to the emitted and transformed items */ /** * Create an observable that emits 0, 1, 2, ... with a delay * of duration between consecutive numbers. */ def observableThatEmitsNumbers: Observable[Long] = Observable.interval(200.millis) /** * Create an observable that emits no data to the observer * and immediately invokes its onCompleted method. 
*/ def emptyObservable: Observable[Nothing] = Observable.empty "Observer that emits numbers" should "emit the number 0" in { val observable = observableThatEmitsNumbers // the observable will take 2 items from the stream // then it will automatically unsubscribe .take(2) // for testing, it is better to have a BlockingObservable, normally // all operations are asynchronous and non-blocking .toBlocking // Subscribe to the observable and take the head of the stream, when there are // no items, return None // Returns an Option with the very first item emitted by the source // Observable, or None if the source Observable is empty. val result: Option[Long] = observable.headOption result.headOption should not be empty result.value shouldBe 0 } it should "emit 0, 1" in { // the `.toList` returns an Observable that emits a single item, a List composed of all the items emitted by the // source Observable. Be careful not to use this operator on Observables that emit infinite or very large numbers // of items, as you do not have the option to unsubscribe. 
observableThatEmitsNumbers .take(2) .toBlocking .toList shouldBe List(0, 1) } it should "transform the items using .map()" in { observableThatEmitsNumbers .drop(3) .take(2) .map(n ⇒ "number: " + n) .toBlocking .toList shouldBe List("number: 3", "number: 4") } it should "merge two streams" in { val o1 = observableThatEmitsNumbers.drop(3).take(3) // 3, 4, 5 val o2 = observableThatEmitsNumbers.take(3) // 0, 1, 2 o1.merge(o2) .take(6) .toBlocking .toList shouldBe List(0, 1, 2, 3, 4, 5) } "An empty observable" should "emit nothing" in { emptyObservable .toBlocking .headOption shouldBe empty } it should "return an empty list" in { emptyObservable.toList.toBlocking.head shouldBe empty emptyObservable.toList.toBlocking.head shouldBe Nil } "A list" should "convert to Observable" in { val o1 = List(0, 1, 2).toObservable val o2 = List(3, 4, 5).toObservable o1.merge(o2) .take(6) .toBlocking .toList shouldBe List(0, 1, 2, 3, 4, 5) } it should "convert to Observable in reverse" in { val o1 = List(0, 1, 2).toObservable val o2 = List(3, 4, 5).toObservable o2.merge(o1) .take(6) .toBlocking .toList shouldBe List(3, 4, 5, 0, 1, 2) } "Observables" should "be used as follows" in { observableThatEmitsNumbers // emit numbers .slidingBuffer(count = 2, skip = 1) // buffer 2 elements, skip 1, so (0, 1), (1, 2), (2, 3) etc .take(3) // take 2 pairs, then unsubscribe automatically .toBlocking .toList should contain inOrder (Seq(0, 1), Seq(1, 2)) } it should "filter" in { observableThatEmitsNumbers // emit numbers .filter(_ % 2 == 0) // only emit elements that are even numbers .slidingBuffer(count = 2, skip = 2) // buffer 2 elements and skip 2 (0, 2), (4, 6) etc .take(2) // take 2 pairs .toBlocking .toList should contain inOrder (Seq(0, 2), Seq(4, 6)) } it should "flatMap" in { val o1 = observableThatEmitsNumbers.take(2) val o2 = observableThatEmitsNumbers.take(5) o1.flatMap(_ ⇒ o2) .toBlocking .toList should not be empty // the content of the resulting list is non-deterministic. 
// this is because, in contrary to iterables, observables are asynchronous // the function you are flat-mapping over will produce its values asynchronously } it should "merge" in { val o1 = observableThatEmitsNumbers.take(2) val o2 = observableThatEmitsNumbers.take(5) o1.merge(o2) .toBlocking .toList shouldBe List(0, 0, 1, 1, 2, 3, 4) } it should "concat" in { val o1 = observableThatEmitsNumbers.take(2) val o2 = observableThatEmitsNumbers.take(5) (o1 ++ o2) .toBlocking .toList shouldBe List(0, 1, 0, 1, 2, 3, 4) } it should "sum" in { observableThatEmitsNumbers .take(5) .sum .toBlocking .head shouldBe 10 } it should "count" in { observableThatEmitsNumbers .take(5) .countLong .toBlocking .head shouldBe 5 } it should "zip" in { val o1 = observableThatEmitsNumbers.take(3) val o2 = observableThatEmitsNumbers.drop(5).take(5) o1.zip(o2) .toBlocking .toList shouldBe List((0, 5), (1, 6), (2, 7)) } // The marble diagram of the sheet is wrong, the three observables // each emit only 3's, or 2's or 1's not zero and ones "flattening nested streams" should "return the correct sequence" in { val xs: Observable[Int] = Observable.from(List(3, 2, 1)) val yss: Observable[Observable[Int]] = xs.map(x ⇒ Observable.interval(x seconds).map(_ ⇒ x).take(2)) val zs: Observable[Int] = yss.flatten zs.toBlocking.toList match { case List(1, 1, 2, 3, 2, 3) ⇒ case List(1, 2, 1, 3, 2, 3) ⇒ case u ⇒ fail("Unexpected: " + u) } } "Concatenating nested streams" should "return the correct sequence" in { Observable.from(List(3, 2, 1)) .map(x ⇒ Observable.interval(x seconds).map(_ ⇒ x).take(2)) .concat .toBlocking .toList shouldBe List(3, 3, 2, 2, 1, 1) // note, never use concat, because it must wait until all streams terminate // before the streams can be concatenated. } }
dnvriend/reactive-programming
src/test/scala/com/test/week4/RxOperatorsTest.scala
Scala
apache-2.0
7,307
package com.xhachi.gae4s.datastore import com.google.appengine.api.datastore.{Key => LLKey, KeyFactory} import scala.reflect.ClassTag object EntityMeta { import scala.language.experimental.macros implicit def createMeta[E <: Entity[E]]: EntityMeta[E] = macro EntityMacro.createMeta[E] } trait EntityType { type EntityType <: Entity[EntityType] } abstract class EntityMeta[E <: Entity[E]: ClassTag] extends Serializable with EntityType { type EntityType = E def ancestorType: Option[Class[_ <: Entity[_]]] def entityType: Class[_ <: Entity[E]] def kind: String = entityType.getName val key = new KeyProperty[EntityType]("__key__") with IndexedProperty[Key[EntityType]] with Getter[E, Key[EntityType]] { def getValueFromEntity(e: E): Key[EntityType] = e.key } def properties: Seq[Property[_]] = Seq(key) def property(name: String): Option[Property[_]] = properties.find(_.name == name) def versionProperty: Option[VersionProperty] = None def versionEnabled: Boolean = versionProperty.isDefined def version(e: EntityType): Option[Long] = versionProperty.map(_.getValueFromLLEntity(toLLEntity(e))) def createEntity(key: Key[EntityType]): EntityType final def toEntity(from: com.google.appengine.api.datastore.Entity): EntityType = { val to = createEntity(createKey(from.getKey)) for (p <- properties if p.isInstanceOf[Setter[_, _]] && from.hasProperty(p.name)) { val value = p.getValueFromLLEntity(from).asInstanceOf[p.PropertyType] val setter: Setter[E, p.PropertyType] = p.asInstanceOf[Setter[E, p.PropertyType]] value match { case null => setter.setValueToEntity(to, null.asInstanceOf[p.PropertyType]) case v: p.PropertyType => setter.setValueToEntity(to, value.asInstanceOf[p.PropertyType]) case v => throw new IllegalStateException("Stored value type is invalid: " + v.getClass) } } to } final def toLLEntity(from: EntityType): com.google.appengine.api.datastore.Entity = { val to = new com.google.appengine.api.datastore.Entity(from.key.key) for (p <- properties if p.isInstanceOf[Getter[_, _]]) { 
val getter = p.asInstanceOf[Getter[E, p.PropertyType]] val value = getter.getValueFromEntity(from) value match { case null => p.asInstanceOf[Property[p.PropertyType]].setValueToLLEntity(to)(null.asInstanceOf[p.PropertyType]) case v: p.PropertyType => p.asInstanceOf[Property[p.PropertyType]].setValueToLLEntity(to)(v.asInstanceOf[p.PropertyType]) case v => throw new IllegalStateException("Store value type is invalid: " + v.getClass) } } to } def createKey(key: LLKey) = Key[EntityType](key) def createKeyWithName(name: String) = { val key = KeyFactory.createKey(kind, name) Key[EntityType](key) } def createKeyWithId(id: Long) = { Key[EntityType](KeyFactory.createKey(kind, id)) } def createKeyWithName(parent: Key[_], name: String) = { val key = KeyFactory.createKey(parent.key, kind, name) Key[EntityType](key) } def createKeyWithId(parent: Key[_], id: Long) = { val key = KeyFactory.createKey(parent.key, kind, id) Key[EntityType](key) } def fromKeyStrong(keyString: String): Key[EntityType] = { val key = Key[EntityType](KeyFactory.stringToKey(keyString)) assert(key.kind == kind) key } def toKeyStrong(key: Key[_]): String = KeyFactory.keyToString(key.key) def toString(entity: E): String = { val values = properties.map{ case p: Getter[E,_] => s"${p.name}:${p.getValueFromEntity(entity)}" case p => s"${p.name}:???" } val k = key.getValueFromEntity(entity) values.mkString(s"$kind(key:$k, ", ", ", ")") } }
thachi/gae4s
core/src/main/scala/com/xhachi/gae4s/datastore/EntityMeta.scala
Scala
apache-2.0
3,751
/* * Copyright (C) 2014 GRNET S.A. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package gr.grnet.egi package vmcatcher import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper} import com.fasterxml.jackson.core.util.DefaultPrettyPrinter import com.fasterxml.jackson.core.{JsonGenerator, JsonParser, JsonFactory} import scala.collection.JavaConverters._ import com.fasterxml.jackson.core.`type`.TypeReference import java.io.StringWriter /** * */ object Json { val mapper = new ObjectMapper() mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES) val JMapTypeRef = new TypeReference[java.util.Map[String, String]] {} val jf = new JsonFactory(mapper) val pp = new DefaultPrettyPrinter() jf.configure(JsonParser.Feature.ALLOW_COMMENTS, true) jf.configure(JsonGenerator.Feature.QUOTE_FIELD_NAMES, true) jf.configure(JsonGenerator.Feature.WRITE_NUMBERS_AS_STRINGS, true) def stringMapOfJson(json: String): Map[String, String] = { val jmap = mapper.readValue[java.util.Map[String, String]](json, JMapTypeRef) jmap.asScala.toMap } def jsonOfMap[T](map: Map[String, T], pretty: Boolean = true): String = { val jmap = deepScalaToJava(map) val sw = new StringWriter() val jg = jf.createGenerator(sw) if(pretty) { jg.setPrettyPrinter(pp) } jg.writeObject(jmap) sw.toString } }
grnet/snf-vmcatcher
src/main/scala/gr/grnet/egi/vmcatcher/Json.scala
Scala
gpl-3.0
1,972
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package whisk.core.containerpool.docker import java.io.FileNotFoundException import java.nio.file.Files import java.nio.file.Paths import scala.concurrent.ExecutionContext import scala.concurrent.Future import scala.util.Failure import scala.util.Success import scala.util.Try import akka.event.Logging.ErrorLevel import whisk.common.Logging import whisk.common.LoggingMarkers import whisk.common.TransactionId /** * Serves as interface to the docker CLI tool. * * Be cautious with the ExecutionContext passed to this, as the * calls to the CLI are blocking. * * You only need one instance (and you shouldn't get more). */ class DockerClient(dockerHost: Option[String] = None)(executionContext: ExecutionContext)(implicit log: Logging) extends DockerApi with ProcessRunner { implicit private val ec = executionContext // Determines how to run docker. Failure to find a Docker binary implies // a failure to initialize this instance of DockerClient. 
protected val dockerCmd: Seq[String] = { val alternatives = List("/usr/bin/docker", "/usr/local/bin/docker") val dockerBin = Try { alternatives.find(a => Files.isExecutable(Paths.get(a))).get } getOrElse { throw new FileNotFoundException(s"Couldn't locate docker binary (tried: ${alternatives.mkString(", ")}).") } val host = dockerHost.map(host => Seq("--host", s"tcp://$host")).getOrElse(Seq.empty[String]) Seq(dockerBin) ++ host } def run(image: String, args: Seq[String] = Seq.empty[String])(implicit transid: TransactionId): Future[ContainerId] = runCmd((Seq("run", "-d") ++ args ++ Seq(image)): _*).map(ContainerId.apply) def inspectIPAddress(id: ContainerId, network: String)(implicit transid: TransactionId): Future[ContainerIp] = runCmd("inspect", "--format", s"{{.NetworkSettings.Networks.${network}.IPAddress}}", id.asString).flatMap { _ match { case "<no value>" => Future.failed(new NoSuchElementException) case stdout => Future.successful(ContainerIp(stdout)) } } def pause(id: ContainerId)(implicit transid: TransactionId): Future[Unit] = runCmd("pause", id.asString).map(_ => ()) def unpause(id: ContainerId)(implicit transid: TransactionId): Future[Unit] = runCmd("unpause", id.asString).map(_ => ()) def rm(id: ContainerId)(implicit transid: TransactionId): Future[Unit] = runCmd("rm", "-f", id.asString).map(_ => ()) def ps(filters: Seq[(String, String)] = Seq(), all: Boolean = false)(implicit transid: TransactionId): Future[Seq[ContainerId]] = { val filterArgs = filters.map { case (attr, value) => Seq("--filter", s"$attr=$value") }.flatten val allArg = if (all) Seq("--all") else Seq.empty[String] val cmd = Seq("ps", "--quiet", "--no-trunc") ++ allArg ++ filterArgs runCmd(cmd: _*).map(_.lines.toSeq.map(ContainerId.apply)) } def pull(image: String)(implicit transid: TransactionId): Future[Unit] = runCmd("pull", image).map(_ => ()) private def runCmd(args: String*)(implicit transid: TransactionId): Future[String] = { val cmd = dockerCmd ++ args val start = 
transid.started(this, LoggingMarkers.INVOKER_DOCKER_CMD(args.head), s"running ${cmd.mkString(" ")}") executeProcess(cmd: _*).andThen { case Success(_) => transid.finished(this, start) case Failure(t) => transid.failed(this, start, t.getMessage, ErrorLevel) } } } case class ContainerId(val asString: String) { require(asString.nonEmpty, "ContainerId must not be empty") } case class ContainerIp(val asString: String) { require(asString.nonEmpty, "ContainerIp must not be empty") } trait DockerApi { /** * Spawns a container in detached mode. * * @param image the image to start the container with * @param args arguments for the docker run command * @return id of the started container */ def run(image: String, args: Seq[String] = Seq.empty[String])(implicit transid: TransactionId): Future[ContainerId] /** * Gets the IP address of a given container. * * A container may have more than one network. The container has an * IP address in each of these networks such that the network name * is needed. * * @param id the id of the container to get the IP address from * @param network name of the network to get the IP address from * @return ip of the container */ def inspectIPAddress(id: ContainerId, network: String)(implicit transid: TransactionId): Future[ContainerIp] /** * Pauses the container with the given id. * * @param id the id of the container to pause * @return a Future completing according to the command's exit-code */ def pause(id: ContainerId)(implicit transid: TransactionId): Future[Unit] /** * Unpauses the container with the given id. * * @param id the id of the container to unpause * @return a Future completing according to the command's exit-code */ def unpause(id: ContainerId)(implicit transid: TransactionId): Future[Unit] /** * Removes the container with the given id. 
* * @param id the id of the container to remove * @return a Future completing according to the command's exit-code */ def rm(id: ContainerId)(implicit transid: TransactionId): Future[Unit] /** * Returns a list of ContainerIds in the system. * * @param filters Filters to apply to the 'ps' command * @param all Whether or not to return stopped containers as well * @return A list of ContainerIds */ def ps(filters: Seq[(String, String)] = Seq(), all: Boolean = false)(implicit transid: TransactionId): Future[Seq[ContainerId]] /** * Pulls the given image. * * @param image the image to pull * @return a Future completing once the pull is complete */ def pull(image: String)(implicit transid: TransactionId): Future[Unit] }
prccaraujo/openwhisk
core/invoker/src/main/scala/whisk/core/containerpool/docker/DockerClient.scala
Scala
apache-2.0
6,905
// AORTA is copyright (C) 2012 Dustin Carlino, Mike Depinet, and Piyush // Khandelwal of UT Austin // License: GNU GPL v2 package utexas.aorta.map import scala.collection.mutable import utexas.aorta.map.make.MapStateWriter import utexas.aorta.common.{Util, StateReader, VertexID, EdgeID, RoadID} class Graph( val roads: Array[Road], val edges: Array[Edge], val vertices: Array[Vertex], val artifacts: Array[RoadArtifact], val width: Double, val height: Double, val offX: Double, val offY: Double, val scale: Double, val name: String ) { ////////////////////////////////////////////////////////////////////////////// // Deterministic state // TODO if we squish down IDs, it can be an array too! val turns = vertices.foldLeft(List[Turn]())( (l, v) => v.turns.toList ++ l ).map(t => t.id -> t).toMap ////////////////////////////////////////////////////////////////////////////// // Meta def serialize(w: MapStateWriter) { w.doubles(width, height, offX, offY, scale) w.int(roads.size) roads.foreach(r => r.serialize(w)) w.int(edges.size) edges.foreach(e => e.serialize(w)) w.int(vertices.size) vertices.foreach(v => v.serialize(w)) w.string(name) w.int(artifacts.size) artifacts.foreach(a => a.serialize(w)) } ////////////////////////////////////////////////////////////////////////////// // Queries def traversables() = edges ++ turns.values def get_v(id: VertexID) = vertices(id.int) def get_e(id: EdgeID) = edges(id.int) def get_r(id: RoadID) = roads(id.int) // TODO file library def basename = name.replace("maps/", "").replace(".map", "") } // It's a bit funky, but the actual graph instance doesn't have this; we do. // TODO This is the only remaining global mutable state singleton remaining as of May 2014, and // there's a proper fix that's hard. For now, leave it, it's fine, just can't simultaneously // simulate unless both sims use the same map. 
object Graph { var width = 0.0 var height = 0.0 var xoff = 0.0 var yoff = 0.0 var scale = 0.0 private val cached_graphs = new mutable.HashMap[String, Graph]() // this MUST be set before world_to_gps is called. // TODO get rid of this approach once GPS coordinates always retained def set_params(w: Double, h: Double, x: Double, y: Double, s: Double) { width = w height = h xoff = x yoff = y scale = s } // inverts what PreGraph1's normalize() does. def world_to_gps(x: Double, y: Double) = Coordinate( (x / scale) - xoff, ((height - y) / scale) - yoff ) // TODO traversables have Queues and vertices have Intersections for speed // Set fresh_copy to true to force a new version of everything, otherwise caching'll return the // same copy var fresh_copy = false def load(fn: String): Graph = { if (fresh_copy || !cached_graphs.contains(fn)) { print(s"Loading $fn...") cached_graphs(fn) = unserialize(Util.reader(fn)) println(s"\\rLoaded $fn. ") } return cached_graphs(fn) } def unserialize(r: StateReader): Graph = { // Set these before loading any traversables, since length'll be computed from em val w = r.double val h = r.double val xo = r.double val yo = r.double val s = r.double set_params(w, h, xo, yo, s) val roads = Range(0, r.int).map(_ => Road.unserialize(r)).toArray val edges = Range(0, r.int).map(_ => Edge.unserialize(r, roads)).toArray val vertices = Range(0, r.int).map(_ => Vertex.unserialize(r, edges)).toArray val name = r.string val artifacts = Range(0, r.int).map(_ => RoadArtifact.unserialize(r)).toArray val g = new Graph(roads, edges, vertices, artifacts, w, h, xo, yo, s, name) // Dependency between roads, edges, and vertices is cyclic, so have to set up one of these. g.roads.foreach(r => r.setup(vertices)) return g } }
dabreegster/aorta
utexas/aorta/map/Graph.scala
Scala
gpl-2.0
3,868
/* __ *\\ ** ________ ___ / / ___ __ ____ Scala.js Test Suite ** ** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL ** ** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-js.org/ ** ** /____/\\___/_/ |_/____/_/ | |__/ /____/ ** ** |/____/ ** \\* */ package org.scalajs.testsuite.jsinterop import org.junit.Test import org.scalajs.testsuite.junit.JUnitUtil class `1_TestName` { // scalastyle:ignore @Test def `a test with name 1_TestName`(): Unit = () } class eval { // scalastyle:ignore @Test def `a test with name eval`(): Unit = () } class `\\u1f4a7` { // scalastyle:ignore @Test def `a test with name \\u1f4a7`(): Unit = () } class StrangeNamedTests { @Test def testName1(): Unit = { // This should not fail JUnitUtil.loadBootstrapper("org.scalajs.testsuite.jsinterop.1_TestName") } @Test def testName2(): Unit = { // This should not fail JUnitUtil.loadBootstrapper("org.scalajs.testsuite.jsinterop.eval") } @Test def testName3(): Unit = { // This should not fail JUnitUtil.loadBootstrapper("org.scalajs.testsuite.jsinterop.\\u1f4a7") } }
lrytz/scala-js
test-suite/js/src/test/scala/org/scalajs/testsuite/jsinterop/StrangeNamedTests.scala
Scala
bsd-3-clause
1,364
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.spark.sql.sources.v2

import org.apache.spark.sql.{AnalysisException, QueryTest}
import org.apache.spark.sql.execution.datasources.FileFormat
import org.apache.spark.sql.execution.datasources.parquet.{ParquetFileFormat, ParquetTest}
import org.apache.spark.sql.execution.datasources.v2.FileDataSourceV2
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources.v2.reader.ScanBuilder
import org.apache.spark.sql.sources.v2.writer.WriteBuilder
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types.StructType

/**
 * A file-based DataSource V2 stub that registers itself under the "parquet"
 * short name and falls back to [[ParquetFileFormat]] (the V1 implementation)
 * for writes. Its table is read-only, so reads exercise the V2 path.
 */
class DummyReadOnlyFileDataSourceV2 extends FileDataSourceV2 {

  override def fallBackFileFormat: Class[_ <: FileFormat] = classOf[ParquetFileFormat]

  override def shortName(): String = "parquet"

  override def getTable(options: DataSourceOptions): Table = {
    new DummyReadOnlyFileTable
  }
}

/**
 * A batch-readable table whose scan builder always throws, so any attempt to
 * read through the V2 path fails with the "Dummy file reader" message that
 * the suite below asserts on.
 */
class DummyReadOnlyFileTable extends Table with SupportsBatchRead {

  override def name(): String = "dummy"

  override def schema(): StructType = StructType(Nil)

  override def newScanBuilder(options: DataSourceOptions): ScanBuilder = {
    throw new AnalysisException("Dummy file reader")
  }
}

/**
 * Mirror of [[DummyReadOnlyFileDataSourceV2]] for the write side: same
 * "parquet" short name and V1 fallback format, but its table is write-only,
 * so writes exercise the V2 path.
 */
class DummyWriteOnlyFileDataSourceV2 extends FileDataSourceV2 {

  override def fallBackFileFormat: Class[_ <: FileFormat] = classOf[ParquetFileFormat]

  override def shortName(): String = "parquet"

  override def getTable(options: DataSourceOptions): Table = {
    new DummyWriteOnlyFileTable
  }
}

/**
 * A batch-writable table whose write builder always throws, so any attempt to
 * write through the V2 path fails with the "Dummy file writer" message that
 * the suite below asserts on.
 */
class DummyWriteOnlyFileTable extends Table with SupportsBatchWrite {

  override def name(): String = "dummy"

  override def schema(): StructType = StructType(Nil)

  override def newWriteBuilder(options: DataSourceOptions): WriteBuilder =
    throw new AnalysisException("Dummy file writer")
}

/**
 * Verifies the V2 -> V1 fallback rules for file-based data sources:
 * a V2 source that only supports one direction (read or write) falls back to
 * its V1 [[FileFormat]] for the other direction, and the
 * USE_V1_SOURCE_READER_LIST / USE_V1_SOURCE_WRITER_LIST configs force the V1
 * path for sources named in the (case-insensitive) list.
 */
class FileDataSourceV2FallBackSuite extends QueryTest with SharedSQLContext {

  // Fully-qualified class names used as the `format(...)` identifiers below.
  private val dummyParquetReaderV2 = classOf[DummyReadOnlyFileDataSourceV2].getName
  private val dummyParquetWriterV2 = classOf[DummyWriteOnlyFileDataSourceV2].getName

  test("Fall back to v1 when writing to file with read only FileDataSourceV2") {
    val df = spark.range(10).toDF()
    withTempPath { file =>
      val path = file.getCanonicalPath
      // Writing file should fall back to v1 and succeed.
      df.write.format(dummyParquetReaderV2).save(path)
      // Validate write result with [[ParquetFileFormat]].
      checkAnswer(spark.read.parquet(path), df)
      // Dummy File reader should fail as expected (the V2 read path is used).
      val exception = intercept[AnalysisException] {
        spark.read.format(dummyParquetReaderV2).load(path).collect()
      }
      assert(exception.message.equals("Dummy file reader"))
    }
  }

  test("Fall back read path to v1 with configuration USE_V1_SOURCE_READER_LIST") {
    val df = spark.range(10).toDF()
    withTempPath { file =>
      val path = file.getCanonicalPath
      df.write.parquet(path)
      // Each list contains the dummy source (by short name "parquet" in two
      // casings, or by fully-qualified class name), so V1 must be used.
      Seq(
        "foo,parquet,bar",
        "ParQuet,bar,foo",
        s"foobar,$dummyParquetReaderV2"
      ).foreach { fallbackReaders =>
        withSQLConf(SQLConf.USE_V1_SOURCE_READER_LIST.key -> fallbackReaders) {
          // Reading file should fall back to v1 and succeed.
          checkAnswer(spark.read.format(dummyParquetReaderV2).load(path), df)
          checkAnswer(sql(s"SELECT * FROM parquet.`$path`"), df)
        }
      }
      withSQLConf(SQLConf.USE_V1_SOURCE_READER_LIST.key -> "foo,bar") {
        // Dummy file reader should fail as USE_V1_SOURCE_READER_LIST doesn't
        // include it, so the (throwing) V2 read path is taken.
        val exception = intercept[AnalysisException] {
          spark.read.format(dummyParquetReaderV2).load(path).collect()
        }
        assert(exception.message.equals("Dummy file reader"))
      }
    }
  }

  test("Fall back to v1 when reading file with write only FileDataSourceV2") {
    val df = spark.range(10).toDF()
    withTempPath { file =>
      val path = file.getCanonicalPath
      // Dummy File writer should fail as expected (the V2 write path is used).
      val exception = intercept[AnalysisException] {
        df.write.format(dummyParquetWriterV2).save(path)
      }
      assert(exception.message.equals("Dummy file writer"))
      df.write.parquet(path)
      // Fallback reads to V1
      checkAnswer(spark.read.format(dummyParquetWriterV2).load(path), df)
    }
  }

  test("Fall back write path to v1 with configuration USE_V1_SOURCE_WRITER_LIST") {
    val df = spark.range(10).toDF()
    // Same matching rules as the reader-list test above, for the write side.
    Seq(
      "foo,parquet,bar",
      "ParQuet,bar,foo",
      s"foobar,$dummyParquetWriterV2"
    ).foreach { fallbackWriters =>
      withSQLConf(SQLConf.USE_V1_SOURCE_WRITER_LIST.key -> fallbackWriters) {
        withTempPath { file =>
          val path = file.getCanonicalPath
          // Writes should fall back to v1 and succeed.
          df.write.format(dummyParquetWriterV2).save(path)
          checkAnswer(spark.read.parquet(path), df)
        }
      }
    }
    withSQLConf(SQLConf.USE_V1_SOURCE_WRITER_LIST.key -> "foo,bar") {
      withTempPath { file =>
        val path = file.getCanonicalPath
        // Dummy file writer should fail as USE_V1_SOURCE_WRITER_LIST doesn't
        // include it, so the (throwing) V2 write path is taken.
        val exception = intercept[AnalysisException] {
          df.write.format(dummyParquetWriterV2).save(path)
        }
        assert(exception.message.equals("Dummy file writer"))
      }
    }
  }
}
WindCanDie/spark
sql/core/src/test/scala/org/apache/spark/sql/sources/v2/FileDataSourceV2FallBackSuite.scala
Scala
apache-2.0
6,124
package se.gigurra.leavu3.datamodel

import com.github.gigurra.heisenberg.MapData._
import com.github.gigurra.heisenberg.{Schema, Parsed}

/**
 * Parsed view of the game's version metadata, extracted from a raw
 * [[SourceData]] map according to the fields declared on the companion
 * [[Schema]].
 *
 * @param source raw key/value data to parse (empty by default, in which case
 *               each field falls back to its schema default)
 */
case class GameVersion(source: SourceData = Map.empty) extends SafeParsed[GameVersion.type] {
  // Joins numeric version components into a dotted string, e.g. Seq(1, 5, 3) -> "1.5.3".
  private def dotted(parts: Seq[Int]): String = parts.mkString(".")

  val productName = parse(schema.productName)
  val fileVersion: String = dotted(parse(schema.fileVersion))
  val productVersion: String = dotted(parse(schema.productVersion))
}

/**
 * Schema for [[GameVersion]]: declares the expected keys in the source map
 * and the defaults used when a key is absent.
 */
object GameVersion extends Schema[GameVersion] {
  val productName = required[String]("ProductName", default = "")
  val fileVersion = required[Seq[Int]]("FileVersion", default = Seq.empty)
  val productVersion = required[Seq[Int]]("ProductVersion", default = Seq.empty)
}
GiGurra/leavu3
src/main/scala/se/gigurra/leavu3/datamodel/GameVersion.scala
Scala
mit
693
package ch.epfl.perfNetwork.drawn

import scala.scalajs.js.Date
import ch.epfl.perfNetwork.webapp.Algebra._

/**
 * @author Thibault Urien
 *
 * Holds the information about the time scale.
 * Contains each day that has a commit in the graph, sorted in chronological
 * order. Each day (as a (dayOfMonth, month, year) triple, in the order
 * produced by js.Date's getDate/getMonth/getFullYear) is paired with the
 * absolute display x coordinate of its first second.
 */
sealed class StretchyTimeScale(val timesclale: Vector[((Int, Int, Int), Double)])

/**
 * @author Thibault Urien
 *
 * Creates a StretchyTimeScale and sets each vertex's absolute display x
 * coordinate as a side effect.
 */
object StretchyTimeScale {
  // Lets a StretchyTimeScale be used directly wherever its underlying vector is expected.
  implicit def asVec(s: StretchyTimeScale): Vector[((Int, Int, Int), Double)] = s.timesclale

  /**
   * @param timeScale the ratio second/pixel used to display the data
   * @param commitMinDist the minimal horizontal displayed distance between two commits.
   * @param commits the list of all existing commits sorted by date
   * @return a StretchyTimeScale created so that no two commits have a displayed
   *         horizontal distance lower than commitMinDist
   *
   * In addition to creating the time scale, this method also sets the absolute
   * displayed x of each commit (mutates each Vertex in `commits`).
   * NOTE(review): assumes `commits` is non-empty (`commits.head`) — confirm at call sites.
   */
  def apply(timeScale: Double, commitMinDist: Int, commits: Seq[Vertex]) = {
    val distortion = new StretchyDays(commits.head, commitMinDist, timeScale)
    commits.tail foreach distortion.addCommit
    // daysLocation is built newest-first (heads are prepended), so reverse
    // yields chronological order.
    new StretchyTimeScale(distortion.daysLocation.reverse.toVector)
  }

  /**
   * Mutable accumulator: commits are fed in chronological order via addCommit,
   * which assigns each Vertex's x and stretches subsequent days whenever two
   * commits would be displayed closer than commitMinDist pixels.
   */
  private class StretchyDays(firstComit: Vertex, val commitMinDist: Int, val timeScale: Double) {
    // Days seen so far, most recent first; each paired with the absolute
    // display x of the day's first second (including accumulated stretch).
    var daysLocation: Seq[((Int, Int, Int), Double)] = {
      val firstDate = new Date
      // Vertex.date is in seconds; js.Date.setTime expects milliseconds.
      firstDate.setTime(1000.0 * firstComit.date)
      firstComit.x = (firstComit.date * timeScale).toInt
      val firstDay = (firstDate.getDate(), firstDate.getMonth(), firstDate.getFullYear())
      Seq((firstDay, toPx(firstDay, timeScale)))
    }
    // The previously-processed commit; each new commit is positioned relative to it.
    var lastComit = firstComit

    def addCommit(commit: Vertex) = {
      // Unstretched pixel distance from the previous commit.
      val dist = ((commit.date - lastComit.date) * timeScale)
      commit.x = dist + lastComit.x
      val keyDate = new Date
      // NOTE(review): the key is taken one day after the commit's time — the
      // stretch is applied starting from the *next* day; confirm intent.
      keyDate.setTime(1000.0 * commit.date + aDay_mSecond)
      val timeKey = (keyDate.getDate(), keyDate.getMonth(), keyDate.getFullYear())
      // Extra pixels needed to keep this commit at least commitMinDist from the previous one.
      val addedOffset = if (dist < commitMinDist) {
        val dif = commitMinDist - dist
        dif
      } else 0
      commit.x += addedOffset
      appendOffset(timeKey, addedOffset)
      lastComit = commit
    }

    /**
     * Ensures daysLocation's head is `timeKey`, filling in any missing days at
     * their regular (unstretched) width, then folds `addedOffset` into the
     * head day's x so later days inherit the stretch.
     */
    private def appendOffset(timeKey: (Int, Int, Int), addedOffset: Double): Unit = {
      while (daysLocation.head._1 != timeKey) {
        val oldHead = daysLocation.head
        val d = new Date
        // Advance one calendar day past the current head.
        d.setTime(Date.UTC(oldHead._1._3, oldHead._1._2, oldHead._1._1) + aDay_mSecond)
        val newHead = ((d.getDate(), d.getMonth(), d.getFullYear()), aScaledDaySecond + oldHead._2)
        daysLocation = newHead +: daysLocation
        // Invariant: x coordinates strictly increase toward the head.
        assert(daysLocation.head._2 > daysLocation.tail.head._2)
      }
      if (addedOffset != 0) daysLocation = (timeKey, addedOffset + daysLocation.head._2) +: daysLocation.tail
    }

    // Day triple -> UTC seconds of that day's first second.
    private def toUTC(d: (Int, Int, Int)) = (Date.UTC(d._3, d._2, d._1) / 1000.0)
    // Day triple -> absolute (unstretched) display x.
    private def toPx(d: (Int, Int, Int), scale: Double) = (toUTC(d) * scale)
    // Displayed width of one full day, in pixels.
    private def aScaledDaySecond = (60 * 60 * 24 * timeScale)
    // One day in milliseconds.
    private def aDay_mSecond = 60 * 60 * 24 * 1000.0
  }
}
ThibaultUrien/SemesterProject
js/src/main/scala/ch/epfl/perfNetwork/drawn/StretchyTimeScale.scala
Scala
bsd-3-clause
3,343
package hulk.oauth

import java.util.Date

import akka.http.scaladsl.model.HttpMethods
import hulk.HulkHttpServer
import hulk.auth.{Authorized, OAuthGrantFlow, OAuthGrantFlowData}
import hulk.http._
import hulk.routing.{RouteDef, Router}
import play.api.libs.json.Json

import scala.concurrent.Future
import scalaoauth2.provider._
import scala.concurrent.ExecutionContext.Implicits.global

/**
 * Example application demonstrating the OAuth 2.0 authorization-code grant
 * flow with hulk.
 *
 * See https://tools.ietf.org/html/rfc6749 for more info.
 */
object OAuthGrantFlowApplication extends App {
  val router = new OAuthGrantRouter()
  HulkHttpServer(router).run()
}

/** Maps the OAuth endpoints (authorization, token, protected resource) to controller actions. */
class OAuthGrantRouter() extends Router {
  val oAuthGrantController = new OAuthGrantController()

  override def router: Map[RouteDef, Action] = Map(
    (HttpMethods.POST, "/login") -> Action(), // Login to app
    (HttpMethods.POST, "/authorization") -> oAuthGrantController.authorization,
    (HttpMethods.POST, "/token") -> oAuthGrantController.token,
    (HttpMethods.GET, "/restrictedResource") -> oAuthGrantController.restrictedResource
  )
}

/** Controller implementing the three steps of the authorization-code grant. */
class OAuthGrantController() {

  /**
   * Step 1: issues an authorization code for the given clientId/responseType/
   * redirectUri taken from the JSON request body. Fails the future with
   * IllegalArgumentException when the body is not JSON.
   */
  def authorization = Action { request =>
    request.body.asJson().flatMap { jsOpt =>
      val json = jsOpt.getOrElse(throw new IllegalArgumentException())
      // Stub code generator — a real app would persist and bind the code to the client.
      val f = (clientId: String, responseType: String, redirectUri: Option[String]) => Future.successful("code")
      // NOTE(review): `\\` is play-json's deep search (returns a Seq); the
      // single-key lookup `\` looks intended here — confirm against the
      // original source (this may be an escaping artifact of the dump).
      val codeFuture = OAuthGrantFlow.code((json \\ "clientId").as[String],
        (json \\ "responseType").as[String], (json \\ "redirectUri").asOpt[String], f)
      codeFuture.map(c => Ok(Json.obj("code" -> c)))
    }
  }

  /**
   * Step 2: exchanges the authorization code for an access/refresh token pair.
   * NOTE(review): `.get` on the Authorization header throws
   * NoSuchElementException when the header is absent — confirm whether a 400
   * response is wanted instead.
   */
  def token = Action { request =>
    val grantAuthHandler = new GrantAuthorizationHandler()
    val grantFlowData = OAuthGrantFlowData(request.httpHeader.find(_.name() == "Authorization").get,
      "authorization_code", "authCode", Some("redirectUri"), None)
    val grantResultFuture = OAuthGrantFlow.token(grantFlowData, grantAuthHandler)

    grantResultFuture.map(grantResult => {
      Ok(Json.obj("access_token" -> grantResult.accessToken,
        "refresh_token" -> grantResult.refreshToken,
        "expires_in" -> grantResult.expiresIn))
    })
  }

  val oAuthGrantProtectedResourceHandler = new OAuthGrantProtectedResourceHandler()

  /** Step 3: a resource that only responds 200 for requests carrying a valid access token. */
  def restrictedResource = Action { Authorized(oAuthGrantProtectedResourceHandler) { request =>
    Future.successful(Ok())
  }}
}

/**
 * Stub AuthorizationHandler: accepts every client/code and always hands out
 * the same token. These functions should properly validate the input and
 * store / retrieve the data from a db in a real application.
 */
class GrantAuthorizationHandler extends AuthorizationHandler[TestUser] {

  override def validateClient(request: AuthorizationRequest): Future[Boolean] = Future.successful(true)

  override def createAccessToken(authInfo: AuthInfo[TestUser]): Future[AccessToken] =
    Future.successful(AccessToken("accessToken", None, None, None, new Date()))

  override def refreshAccessToken(authInfo: AuthInfo[TestUser], refreshToken: String): Future[AccessToken] =
    Future.successful(AccessToken("accessToken", None, None, None, new Date()))

  override def findAuthInfoByRefreshToken(refreshToken: String): Future[Option[AuthInfo[TestUser]]] =
    Future.successful(Some(AuthInfo(TestUser(), Some("clientId"), None, None)))

  // Future.successful instead of Future.apply: the value is already computed,
  // so there is no need to schedule it on the global execution context.
  override def getStoredAccessToken(authInfo: AuthInfo[TestUser]): Future[Option[AccessToken]] =
    Future.successful(Some(AccessToken("accessToken", None, None, None, new Date())))

  override def findAuthInfoByCode(code: String): Future[Option[AuthInfo[TestUser]]] =
    Future.successful(Some(AuthInfo(TestUser(), Some("clientId"), None, None)))

  override def findUser(request: AuthorizationRequest): Future[Option[TestUser]] =
    Future.successful(Some(TestUser()))

  // Explicit unit value: `Future.successful()` relied on deprecated
  // auto-adaptation of an empty argument list to `()`.
  override def deleteAuthCode(code: String): Future[Unit] = Future.successful(())
}

/** Stub ProtectedResourceHandler: treats every presented token as valid. */
class OAuthGrantProtectedResourceHandler extends ProtectedResourceHandler[TestUser] {

  override def findAuthInfoByAccessToken(accessToken: AccessToken): Future[Option[AuthInfo[TestUser]]] =
    Future.successful(Some(AuthInfo(TestUser(), Some("clientId"), None, None)))

  // Future.successful: already-computed value, no execution context needed.
  override def findAccessToken(token: String): Future[Option[AccessToken]] =
    Future.successful(Some(AccessToken("accessToken", None, None, None, new Date())))
}
reneweb/hulk
examples/src/main/scala/hulk/oauth/OAuthGrantFlowApplication.scala
Scala
apache-2.0
4,098