code
stringlengths
5
1M
repo_name
stringlengths
5
109
path
stringlengths
6
208
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
5
1M
/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc.
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

import scala.util.parsing.combinator.RegexParsers

import org.junit.Test
import org.junit.Assert.assertEquals

import scala.language.implicitConversions

/** Regression test for expected-failure combinator (`-`) on RegexParsers. */
class t1229 extends RegexParsers {
  // Non-negative decimal integer with no leading zeros.
  // BUGFIX: triple-quoted strings do no escape processing, so the digit class
  // must be written `\d`; the original `\\d` denoted a literal backslash
  // followed by `d` and could never match "21" or "42", contradicting the
  // first assertion below.
  val number = """0|[1-9]\d*""".r ^^ { _.toInt }

  // Accepts any number EXCEPT the literal input "42" (`p - q` = p but not q).
  val parser: Parser[Int] = number - "42"

  @Test
  def test: Unit = {
    assertEquals("[1.3] parsed: 21", parse(phrase(parser), "21").toString)
    // ParseResult failure toString format: message, blank line, offending
    // input line, caret under the failure column.
    val expected =
      """[1.1] failure: Expected failure

42
^"""
    assertEquals(expected, parse(phrase(parser), "42").toString)
  }
}
scala/scala-parser-combinators
shared/src/test/scala/scala/util/parsing/combinator/t1229.scala
Scala
apache-2.0
811
/*
 * The MIT License
 *
 * Copyright (c) 2019 Fulcrum Genomics LLC
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package com.fulcrumgenomics.vcf.api

import com.fulcrumgenomics.FgBioDef._
import com.fulcrumgenomics.testing.UnitSpec
import com.fulcrumgenomics.vcf.api.Allele.{NoCallAllele, SimpleAllele, SpannedAllele, SymbolicAllele}

/** Unit tests for [[Allele]] construction, caching and equality semantics. */
class AlleleTest extends UnitSpec {

  "Allele.apply()" should "generate SimpleAlleles for base strings" in {
    Allele("ACT").isInstanceOf[SimpleAllele] shouldBe true
    Allele("ACT").value shouldBe "ACT"
    Allele("G").value   shouldBe "G"
    Allele("g").value   shouldBe "g"
  }

  it should "return cached instances for single-base alleles" in {
    // Different case yields distinct cached instances...
    Allele("A") eq Allele("a") shouldBe false
    // ...but the same single base always resolves to the same instance.
    for (base <- "ACGTNacgtn".map(_.toString)) {
      Allele(base) eq Allele(base) shouldBe true
    }
  }

  it should "return the constant no-call allele for no-calls" in {
    Allele(".") shouldBe NoCallAllele
    Allele(".") eq NoCallAllele shouldBe true
  }

  it should "return the constant spanned allele for *" in {
    Allele("*") eq SpannedAllele shouldBe true
  }

  it should "construct symbolic alleles for string wrapped in <>" in {
    Allele("<NON_REF>").isInstanceOf[SymbolicAllele] shouldBe true
    Allele("<NON_REF>").value shouldBe "<NON_REF>"
  }

  it should "reject alleles with non-ACGTN bases in them" in {
    an[IllegalArgumentException] shouldBe thrownBy { Allele("ARR") }
    an[IllegalArgumentException] shouldBe thrownBy { Allele("ABC") }
    an[IllegalArgumentException] shouldBe thrownBy { Allele("Y") }
  }

  "Allele equality" should "be case insensitive" in {
    // Distinct bases are never equal regardless of case handling.
    Allele("T") == Allele("A") shouldBe false
    for (base <- "ACGTN".map(_.toString)) {
      val uc = Allele(base)
      val lc = Allele(base.toLowerCase)
      uc.hashCode() shouldBe lc.hashCode()
      uc == lc shouldBe true
    }
  }
}
fulcrumgenomics/fgbio
src/test/scala/com/fulcrumgenomics/vcf/api/AlleleTest.scala
Scala
mit
2,887
package com.lkroll.ep.mapviewer.graphics

import com.lkroll.ep.mapviewer.datamodel.{Bathyscaphe => BathyscapheData, AstronomicalObject}
import com.lkroll.ep.mapviewer.{ExtObject3D, Main, SceneContainer}
import org.denigma.threejs._
import scala.scalajs.js
import js.JSConverters._
import squants._
import squants.space._

/** Renders a bathyscaphe settlement as a small sphere plus a tactical overlay marker. */
class Bathyscaphe(val settlement: BathyscapheData) extends GraphicsObject with Overlayed {

  val radius = 5.0

  protected val geometry = new SphereGeometry(radius, 12, 8)
  protected val material = new MeshLambertMaterial(Bathyscaphe.materialParams(settlement.name))

  val mesh = {
    val sphere = new Mesh(geometry, material)
    sphere.name = settlement.name
    GraphicsObjects.put(sphere, this)
    sphere
  }

  lazy val overlay = {
    val marker = TacticalOverlay.from(settlement)
    GraphicsObjects.put(marker.mesh, this)
    marker
  }

  override def moveTo(pos: Vector3): Unit = {
    mesh.moveTo(pos)
    overlay.moveTo(pos)
  }

  override def addToScene(scene: SceneContainer): Unit = {
    scene.addSceneObject(this, mesh)
    scene.addOverlayObject(this, overlay.mesh)
  }

  // Reused across update() calls to avoid allocating a quaternion per frame.
  val meshRotation = new Quaternion()

  override def update(t: Time): Unit = {
    val pSnap = settlement.position.at(t)
    // Unit direction from the origin through the settlement's position.
    val dir = pSnap.pos.clone()
    dir.normalize()
    // Offset the sphere inward by its own radius before placing it.
    val offset = dir.clone()
    offset.multiplyScalar(-radius)
    offset.add(pSnap.pos)
    moveTo(offset)
    //dir.multiplyScalar(-1.0); // face down instead of up
    meshRotation.setFromUnitVectors(vYup, dir)
    mesh.setRotationFromQuaternion(meshRotation)
  }

  override def children = List.empty[GraphicsObject]

  override def name = mesh.name
  override def position = mesh.position
  override def id = mesh.id

  override def data: Option[AstronomicalObject] = Some(settlement)

  override def boundingRadius: Double = radius
}

object Bathyscaphe {

  /** Material parameters shared by every bathyscaphe mesh; `name` is currently unused. */
  def materialParams(name: String): MeshLambertMaterialParameters =
    js.Dynamic
      .literal(
        color = new Color(0xFCD19C)
        // wireframe = true
      )
      .asInstanceOf[MeshLambertMaterialParameters]

  def fromData(data: BathyscapheData): Bathyscaphe = new Bathyscaphe(data)
}
Bathtor/ep-explorer
src/main/scala/com/lkroll/ep/mapviewer/graphics/Bathyscaphe.scala
Scala
mit
2,129
/*
 * Copyright 2016 MongoDB, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.mongodb.scala.gridfs

import java.util.concurrent.TimeUnit

import scala.concurrent.duration.Duration

import com.mongodb.async.client.gridfs.GridFSFindIterable

import org.mongodb.scala.bson.conversions.Bson
import org.mongodb.scala.internal.ObservableHelper.observe
import org.mongodb.scala.{Observable, Observer}

/**
 * Observable representing the GridFS Files Collection.
 *
 * Each setter delegates to the wrapped Java-driver [[GridFSFindIterable]]
 * and returns `this` so calls can be chained fluently.
 *
 * @since 1.2
 */
case class GridFSFindObservable(private val wrapped: GridFSFindIterable) extends Observable[GridFSFile] {

  /**
   * Sets the query filter to apply to the query.
   *
   * Filters may target the filename as well as nested metadata stored with
   * the file data, e.g.:
   *
   * {{{
   * Filters.and(Filters.eq("filename", "mongodb.png"), Filters.eq("metadata.contentType", "image/png"));
   * }}}
   *
   * @param filter the filter, which may be null.
   * @return this
   * @see [[http://docs.mongodb.org/manual/reference/method/db.collection.find/ Filter]]
   * @see [[org.mongodb.scala.model.Filters]]
   */
  def filter(filter: Bson): GridFSFindObservable = {
    wrapped.filter(filter)
    this
  }

  /**
   * Sets the limit to apply.
   *
   * @param limit the limit, which may be null
   * @return this
   * @see [[http://docs.mongodb.org/manual/reference/method/cursor.limit/#cursor.limit Limit]]
   */
  def limit(limit: Int): GridFSFindObservable = {
    wrapped.limit(limit)
    this
  }

  /**
   * Sets the number of documents to skip.
   *
   * @param skip the number of documents to skip
   * @return this
   * @see [[http://docs.mongodb.org/manual/reference/method/cursor.skip/#cursor.skip Skip]]
   */
  def skip(skip: Int): GridFSFindObservable = {
    wrapped.skip(skip)
    this
  }

  /**
   * Sets the sort criteria to apply to the query.
   *
   * @param sort the sort criteria, which may be null.
   * @return this
   * @see [[http://docs.mongodb.org/manual/reference/method/cursor.sort/ Sort]]
   */
  def sort(sort: Bson): GridFSFindObservable = {
    wrapped.sort(sort)
    this
  }

  /**
   * The server normally times out idle cursors after an inactivity period (10 minutes)
   * to prevent excess memory use. Set this option to prevent that.
   *
   * @param noCursorTimeout true if cursor timeout is disabled
   * @return this
   */
  def noCursorTimeout(noCursorTimeout: Boolean): GridFSFindObservable = {
    wrapped.noCursorTimeout(noCursorTimeout)
    this
  }

  /**
   * Sets the maximum execution time on the server for this operation.
   *
   * The duration is converted to whole milliseconds before being passed on.
   *
   * @see [[http://docs.mongodb.org/manual/reference/operator/meta/maxTimeMS/ Max Time]]
   * @param duration the duration
   * @return this
   */
  def maxTime(duration: Duration): GridFSFindObservable = {
    wrapped.maxTime(duration.toMillis, TimeUnit.MILLISECONDS)
    this
  }

  /**
   * Sets the number of documents to return per batch.
   *
   * @param batchSize the batch size
   * @return this
   * @see [[http://docs.mongodb.org/manual/reference/method/cursor.batchSize/#cursor.batchSize Batch Size]]
   */
  def batchSize(batchSize: Int): GridFSFindObservable = {
    wrapped.batchSize(batchSize)
    this
  }

  /**
   * Request `Observable` to start streaming data.
   *
   * This is a "factory method" and can be called multiple times, each time starting
   * a new [[org.mongodb.scala.Subscription]]. Each `Subscription` will work for only
   * a single [[Observer]].
   *
   * If the `Observable` rejects the subscription attempt or otherwise fails it will
   * signal the error via [[Observer.onError]].
   *
   * @param observer the `Observer` that will consume signals from this `Observable`
   */
  override def subscribe(observer: Observer[_ >: GridFSFile]): Unit = observe(wrapped).subscribe(observer)
}
jCalamari/mongo-scala-driver
driver/src/main/scala/org/mongodb/scala/gridfs/GridFSFindObservable.scala
Scala
apache-2.0
4,339
/*
 * Copyright (c) 2015 Robert Conrad - All Rights Reserved.
 * Unauthorized copying of this file, via any medium is strictly prohibited.
 * This file is proprietary and confidential.
 * Last modified by rconrad, 1/4/15 7:16 PM
 */

package base.entity.user.mock

import java.util.UUID

import base.common.random.RandomService
import base.common.service.ServiceImpl
import base.common.time.DateTimeHelper
import base.entity.Tables.UserRow
import base.entity.auth.context.AuthContext
import base.entity.service.CrudServiceImplHelper
import base.entity.user.UserService
import base.entity.user.model.{ PostResetRequest, PostUserRequest, PutUserRequest, UserModel }

/**
 * Fake UserService will do whatever you like
 * @author rconrad
 */
class UserServiceMock()
    extends ServiceImpl
    with UserService
    with CrudServiceImplHelper[UserModel]
    with DateTimeHelper {

  // UUID that will be handed out on the next create(); rotated afterwards.
  private var userUUID = RandomService().uuid
  // In-memory stand-in for the users table.
  private var users = Map[UUID, UserRow]()

  /**
   * Create a new User in the database and get a UserResponse
   */
  def create(implicit authCtx: AuthContext, input: PostUserRequest) = {
    val assignedId = nextUserUUID
    val userRow = input.toRow(now).copy(uuid = assignedId)
    users += assignedId -> userRow
    UserModel(userRow)
  }

  /**
   * Update an User in the database and get a UserResponse
   */
  def update(implicit authCtx: AuthContext, uuid: UUID, input: PutUserRequest) =
    throw new Exception("not implemented")

  /**
   * Get an User Response from the database
   */
  def get(implicit authCtx: AuthContext, uuid: UUID) =
    throw new Exception("not implemented")

  /**
   * Begin the process of resetting a user's password. Will create a reset code and send an email
   * to the user with a link to hit resetComplete with that code
   */
  def resetInitiate(input: PostResetRequest) =
    throw new Exception("not implemented")

  /**
   * Complete the process of resetting a user's password. Users will have received a code in their
   * email which they supply to this function in order to finally set their password to whatever
   * it was assigned in the email
   */
  def resetComplete(resetCode: String) =
    throw new Exception("not implemented")

  /**
   * Get an User
   */
  def get(uuid: UUID) = users(uuid)

  /**
   * Get the next userId that will be assigned
   */
  def getNextUserUUID = userUUID

  /**
   * Get the next available invoiceId and increment
   */
  private def nextUserUUID = {
    val current = userUUID
    userUUID = RandomService().uuid
    current
  }
}
robconrad/base-api
project-entity/src/test/scala/base/entity/user/mock/UserServiceMock.scala
Scala
mit
2,492
package org.bitcoins.spvnode.messages.control

import org.bitcoins.spvnode.gen.ControlMessageGenerator
import org.scalacheck.{ Prop, Properties }

/**
 * Created by chris on 7/20/16.
 */
class FilterLoadMessageSpec extends Properties("FilterLoadMessageSpec") {

  // Round-trip property: re-parsing a message's hex encoding must yield an
  // equal message (serialization symmetry).
  property("Serialization symmetry") =
    Prop.forAll(ControlMessageGenerator.filterLoadMessage) { original =>
      FilterLoadMessage(original.hex) == original
    }
}
Christewart/bitcoin-s-spv-node
src/test/scala/org/bitcoins/spvnode/messages/control/FilterLoadMessageSpec.scala
Scala
mit
435
/* ************************************************************************************* * Copyright 2011 Normation SAS ************************************************************************************* * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * In accordance with the terms of section 7 (7. Additional Terms.) of * the GNU Affero GPL v3, the copyright holders add the following * Additional permissions: * Notwithstanding to the terms of section 5 (5. Conveying Modified Source * Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3 * licence, when you create a Related Module, this Related Module is * not considered as a part of the work and may be distributed under the * license agreement of your choice. * A "Related Module" means a set of sources files including their * documentation that, without modification of the Source Code, enables * supplementary functions or services in addition to those offered by * the Software. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>. 
* ************************************************************************************* */ package com.normation.rudder.services.marshalling import scala.xml.NodeSeq import net.liftweb.common._ import net.liftweb.common.Box._ import com.normation.rudder.services.queries.CmdbQueryParser import net.liftweb.common.Failure import com.normation.rudder.domain.nodes.NodeGroup import com.normation.rudder.domain.policies.Rule import com.normation.rudder.domain.policies.Directive import com.normation.cfclerk.domain.TechniqueName import com.normation.rudder.domain.policies.SectionVal import scala.xml.{Node => XNode} import com.normation.utils.Control.sequence import com.normation.cfclerk.domain.TechniqueVersion import net.liftweb.util.Helpers.tryo import com.normation.rudder.domain.policies.DirectiveId import com.normation.inventory.domain.NodeId import com.normation.rudder.domain.policies.RuleTarget import com.normation.rudder.domain.nodes.NodeGroupId import com.normation.rudder.domain.policies.RuleId import com.normation.rudder.domain.policies.ActiveTechniqueCategory import com.normation.rudder.domain.policies.ActiveTechniqueCategoryId import com.normation.rudder.domain.policies.ActiveTechnique import com.normation.rudder.domain.policies.ActiveTechniqueId import org.joda.time.format.ISODateTimeFormat import com.normation.rudder.batch.SuccessStatus import com.normation.rudder.batch.ErrorStatus import com.normation.rudder.batch.NoStatus import com.normation.rudder.batch.CurrentDeploymentStatus import com.normation.rudder.domain.nodes.NodeGroupCategory import com.normation.rudder.domain.nodes.NodeGroupCategoryId import scala.xml.Node import com.normation.rudder.domain.Constants._ import com.normation.rudder.domain.workflows.DirectiveChange import com.normation.rudder.domain.workflows.NodeGroupChange import com.normation.rudder.domain.workflows.DirectiveChanges import com.normation.rudder.domain.workflows.NodeGroupChanges import com.normation.rudder.domain.nodes.NodeGroupId 
import com.normation.rudder.domain.nodes.NodeGroupId import com.normation.rudder.domain.workflows.NodeGroupChanges import com.normation.rudder.domain.nodes._ import com.normation.rudder.domain.workflows.NodeGroupChange import com.normation.rudder.domain.workflows.NodeGroupChangeItem import com.normation.eventlog.EventActor import com.normation.rudder.domain.policies.DirectiveId import com.normation.rudder.domain.workflows.DirectiveChanges import com.normation.rudder.domain.policies._ import com.normation.cfclerk.domain.TechniqueName import com.normation.cfclerk.services.TechniqueRepository import com.normation.cfclerk.domain.TechniqueId import com.normation.rudder.domain.workflows.DirectiveChangeItem import com.normation.cfclerk.xmlparsers.SectionSpecParser import com.normation.cfclerk.domain.TechniqueId import com.normation.rudder.domain.workflows.RuleChange import com.normation.rudder.domain.workflows.RuleChangeItem import com.normation.rudder.domain.workflows.RuleChanges import scala.util.Try import scala.util.Success import scala.util.{Failure => Catch} import com.normation.rudder.domain.logger.ApplicationLogger class DirectiveUnserialisationImpl extends DirectiveUnserialisation { override def parseSectionVal(xml:NodeSeq) : Box[SectionVal] = { def recValParseSection(elt:XNode) : Box[(String, SectionVal)] = { if(elt.label != "section") Failure("Bad XML, awaiting a <section> and get: " + elt) else { for { name <- (elt \\ "@name").headOption ?~! ("Missing required attribute 'name' for <section>: " + elt) // Seq( (var name , var value ) ) vars <- sequence( elt \\ "var" ) { xmlVar => for { n <- (xmlVar \\ "@name").headOption ?~! 
("Missing required attribute 'name' for <var>: " + xmlVar) } yield { (n.text , xmlVar.text) } } // Seq ( SectionVal ) sections <- sequence( elt \\ "section" ) { sectionXml => recValParseSection(sectionXml) } } yield { val s = sections.groupBy { case(n,s) => n }.map { case(n,seq) => (n,seq.map { case (_,section) => section } ) } (name.text, SectionVal(s, vars.toMap)) } } } for { root <- (xml \\ "section").toList match { case Nil => Failure("Missing required tag <section> in: " + xml) case node :: Nil => Full(node) case x => Failure("Found several <section> tag in XML, but only one root section is allowed: " + xml) } (_ , sectionVal) <- recValParseSection(root) } yield { sectionVal } } override def unserialise(xml:XNode) : Box[(TechniqueName, Directive, SectionVal)] = { for { directive <- { if(xml.label == XML_TAG_DIRECTIVE) Full(xml) else Failure("Entry type is not a <%s>: %s".format(XML_TAG_DIRECTIVE, xml)) } fileFormatOk <- TestFileFormat(directive) id <- (directive \\ "id").headOption.map( _.text ) ?~! ("Missing attribute 'id' in entry type directive : " + xml) ptName <- (directive \\ "techniqueName").headOption.map( _.text ) ?~! ("Missing attribute 'techniqueName' in entry type directive : " + xml) name <- (directive \\ "displayName").headOption.map( _.text ) ?~! ("Missing attribute 'displayName' in entry type directive : " + xml) techniqueVersion <- (directive \\ "techniqueVersion").headOption.map( x => TechniqueVersion(x.text) ) ?~! ("Missing attribute 'techniqueVersion' in entry type directive : " + xml) sectionVal <- parseSectionVal(directive) shortDescription <- (directive \\ "shortDescription").headOption.map( _.text ) ?~! ("Missing attribute 'shortDescription' in entry type directive : " + xml) longDescription <- (directive \\ "longDescription").headOption.map( _.text ) ?~! ("Missing attribute 'longDescription' in entry type directive : " + xml) isEnabled <- (directive \\ "isEnabled").headOption.flatMap(s => tryo { s.text.toBoolean } ) ?~! 
("Missing attribute 'isEnabled' in entry type directive : " + xml) priority <- (directive \\ "priority").headOption.flatMap(s => tryo { s.text.toInt } ) ?~! ("Missing or bad attribute 'priority' in entry type directive : " + xml) isSystem <- (directive \\ "isSystem").headOption.flatMap(s => tryo { s.text.toBoolean } ) ?~! ("Missing attribute 'isSystem' in entry type directive : " + xml) directiveIds = (directive \\ "directiveIds" \\ "id" ).map( n => DirectiveId( n.text ) ).toSet } yield { ( TechniqueName(ptName) , Directive( id = DirectiveId(id) , name = name , techniqueVersion = techniqueVersion , parameters = SectionVal.toMapVariables(sectionVal) , shortDescription = shortDescription , longDescription = longDescription , priority = priority , isEnabled = isEnabled , isSystem = isSystem ) , sectionVal ) } } } class NodeGroupCategoryUnserialisationImpl extends NodeGroupCategoryUnserialisation { def unserialise(entry:XNode): Box[NodeGroupCategory] = { for { category <- { if(entry.label == XML_TAG_NODE_GROUP_CATEGORY) Full(entry) else Failure("Entry type is not a <%s>: %s".format(XML_TAG_NODE_GROUP_CATEGORY, entry)) } fileFormatOk <- TestFileFormat(category) id <- (category \\ "id").headOption.map( _.text ) ?~! ("Missing attribute 'id' in entry type groupLibraryCategory : " + entry) name <- (category \\ "displayName").headOption.map( _.text ) ?~! ("Missing attribute 'displayName' in entry type groupLibraryCategory : " + entry) description <- (category \\ "description").headOption.map( _.text ) ?~! ("Missing attribute 'description' in entry type groupLibraryCategory : " + entry) isSystem <- (category \\ "isSystem").headOption.flatMap(s => tryo { s.text.toBoolean } ) ?~! 
("Missing attribute 'isSystem' in entry type groupLibraryCategory : " + entry) } yield { NodeGroupCategory( id = NodeGroupCategoryId(id) , name = name , description = description , items = Nil , children = Nil , isSystem = isSystem ) } } } class NodeGroupUnserialisationImpl( cmdbQueryParser: CmdbQueryParser ) extends NodeGroupUnserialisation { def unserialise(entry:XNode) : Box[NodeGroup] = { for { group <- { if(entry.label == XML_TAG_NODE_GROUP) Full(entry) else Failure("Entry type is not a <%s>: %s".format(XML_TAG_NODE_GROUP, entry)) } fileFormatOk <- TestFileFormat(group) id <- (group \\ "id").headOption.map( _.text ) ?~! ("Missing attribute 'id' in entry type nodeGroup : " + entry) name <- (group \\ "displayName").headOption.map( _.text ) ?~! ("Missing attribute 'displayName' in entry type nodeGroup : " + entry) description <- (group \\ "description").headOption.map( _.text ) ?~! ("Missing attribute 'description' in entry type nodeGroup : " + entry) query <- (group \\ "query").headOption match { case None => Full(None) case Some(s) => if(s.text.size == 0) Full(None) else cmdbQueryParser(s.text).map( Some(_) ) } isDynamic <- (group \\ "isDynamic").headOption.flatMap(s => tryo { s.text.toBoolean } ) ?~! ("Missing attribute 'isDynamic' in entry type nodeGroup : " + entry) serverList = if (isDynamic) { Set[NodeId]() } else { (group \\ "nodeIds" \\ "id" ).map( n => NodeId( n.text ) ).toSet } isEnabled <- (group \\ "isEnabled").headOption.flatMap(s => tryo { s.text.toBoolean } ) ?~! ("Missing attribute 'isEnabled' in entry type nodeGroup : " + entry) isSystem <- (group \\ "isSystem").headOption.flatMap(s => tryo { s.text.toBoolean } ) ?~! 
("Missing attribute 'isSystem' in entry type nodeGroup : " + entry) } yield { NodeGroup( id = NodeGroupId(id) , name = name , description = description , query = query , isDynamic = isDynamic , serverList = serverList , isEnabled = isEnabled , isSystem = isSystem ) } } } class RuleUnserialisationImpl extends RuleUnserialisation { def unserialise(entry:XNode) : Box[Rule] = { for { rule <- { if(entry.label == XML_TAG_RULE) Full(entry) else Failure("Entry type is not a <%s>: %s".format(XML_TAG_RULE, entry)) } fileFormatOk <- TestFileFormat(rule) id <- (rule \\ "id").headOption.map( _.text ) ?~! ("Missing attribute 'id' in entry type rule: " + entry) name <- (rule \\ "displayName").headOption.map( _.text ) ?~! ("Missing attribute 'displayName' in entry type rule: " + entry) shortDescription <- (rule \\ "shortDescription").headOption.map( _.text ) ?~! ("Missing attribute 'shortDescription' in entry type rule: " + entry) longDescription <- (rule \\ "longDescription").headOption.map( _.text ) ?~! ("Missing attribute 'longDescription' in entry type rule: " + entry) isEnabled <- (rule \\ "isEnabled").headOption.flatMap(s => tryo { s.text.toBoolean } ) ?~! ("Missing attribute 'isEnabled' in entry type rule: " + entry) isSystem <- (rule \\ "isSystem").headOption.flatMap(s => tryo { s.text.toBoolean } ) ?~! ("Missing attribute 'isSystem' in entry type rule: " + entry) targets <- sequence((rule \\ "targets" \\ "target")) { t => RuleTarget.unser(t.text) } ?~! 
("Invalid attribute in 'target' entry: " + entry) directiveIds = (rule \\ "directiveIds" \\ "id" ).map( n => DirectiveId( n.text ) ).toSet } yield { Rule( id = RuleId(id) , name = name // current serial should to be in Rule, we set it to 0 // its value must be handled by the caller of the unserialisation , serial = 0 , targets = targets.toSet , directiveIds = directiveIds , shortDescription = shortDescription , longDescription = longDescription , isEnabledStatus = isEnabled , isSystem = isSystem ) } } } class ActiveTechniqueCategoryUnserialisationImpl extends ActiveTechniqueCategoryUnserialisation { def unserialise(entry:XNode): Box[ActiveTechniqueCategory] = { for { uptc <- { if(entry.label == XML_TAG_ACTIVE_TECHNIQUE_CATEGORY) Full(entry) else Failure("Entry type is not a <%s>: %s".format(XML_TAG_ACTIVE_TECHNIQUE_CATEGORY, entry)) } fileFormatOk <- TestFileFormat(uptc) id <- (uptc \\ "id").headOption.map( _.text ) ?~! ("Missing attribute 'id' in entry type policyLibraryCategory : " + entry) name <- (uptc \\ "displayName").headOption.map( _.text ) ?~! ("Missing attribute 'displayName' in entry type policyLibraryCategory : " + entry) description <- (uptc \\ "description").headOption.map( _.text ) ?~! ("Missing attribute 'description' in entry type policyLibraryCategory : " + entry) isSystem <- (uptc \\ "isSystem").headOption.flatMap(s => tryo { s.text.toBoolean } ) ?~! 
("Missing attribute 'isSystem' in entry type policyLibraryCategory : " + entry) } yield { ActiveTechniqueCategory( id = ActiveTechniqueCategoryId(id) , name = name , description = description , items = Nil , children = Nil , isSystem = isSystem ) } } } class ActiveTechniqueUnserialisationImpl extends ActiveTechniqueUnserialisation { //we expect acceptation date to be in ISO-8601 format private[this] val dateFormatter = ISODateTimeFormat.dateTime def unserialise(entry:XNode): Box[ActiveTechnique] = { for { activeTechnique <- { if(entry.label == XML_TAG_ACTIVE_TECHNIQUE) Full(entry) else Failure("Entry type is not a <%s>: ".format(XML_TAG_ACTIVE_TECHNIQUE, entry)) } fileFormatOk <- TestFileFormat(activeTechnique) id <- (activeTechnique \\ "id").headOption.map( _.text ) ?~! ("Missing attribute 'id' in entry type policyLibraryTemplate : " + entry) ptName <- (activeTechnique \\ "techniqueName").headOption.map( _.text ) ?~! ("Missing attribute 'displayName' in entry type policyLibraryTemplate : " + entry) isSystem <- (activeTechnique \\ "isSystem").headOption.flatMap(s => tryo { s.text.toBoolean } ) ?~! ("Missing attribute 'isSystem' in entry type policyLibraryTemplate : " + entry) isEnabled <- (activeTechnique \\ "isEnabled").headOption.flatMap(s => tryo { s.text.toBoolean } ) ?~! ("Missing attribute 'isEnabled' in entry type policyLibraryTemplate : " + entry) acceptationDates <- sequence(activeTechnique \\ "versions" \\ "version" ) { version => for { ptVersionName <- version.attribute("name").map( _.text) ?~! 
"Missing attribute 'name' for acceptation date in PT '%s' (%s): '%s'".format(ptName, id, version) ptVersion <- tryo { TechniqueVersion(ptVersionName) } acceptationDate <- tryo { dateFormatter.parseDateTime(version.text) } } yield { (ptVersion, acceptationDate) } } acceptationMap <- { val map = acceptationDates.toMap if(map.size != acceptationDates.size) Failure("There exists a duplicate polity template version in the acceptation date map: " + acceptationDates.mkString("; ")) else Full(map) } } yield { ActiveTechnique( id = ActiveTechniqueId(id) , techniqueName = TechniqueName(ptName) , acceptationDatetimes = acceptationMap , directives = Nil , isEnabled = isEnabled , isSystem = isSystem ) } } } class DeploymentStatusUnserialisationImpl extends DeploymentStatusUnserialisation { def unserialise(entry:XNode) : Box[CurrentDeploymentStatus] = { for { depStatus <- { if(entry.label == XML_TAG_DEPLOYMENT_STATUS) Full(entry) else Failure("Entry type is not a <%s>: %s".format(XML_TAG_DEPLOYMENT_STATUS, entry)) } fileFormatOk <- TestFileFormat(depStatus) id <- (depStatus \\ "id").headOption.flatMap(s => tryo {s.text.toLong } ) ?~! ("Missing attribute 'id' in entry type deploymentStatus : " + entry) status <- (depStatus \\ "status").headOption.map( _.text ) ?~! ("Missing attribute 'status' in entry type deploymentStatus : " + entry) started <- (depStatus \\ "started").headOption.flatMap(s => tryo { ISODateTimeFormat.dateTimeParser.parseDateTime(s.text) } ) ?~! ("Missing or bad attribute 'started' in entry type deploymentStatus : " + entry) ended <- (depStatus \\ "ended").headOption.flatMap(s => tryo { ISODateTimeFormat.dateTimeParser.parseDateTime(s.text) } ) ?~! 
("Missing or bad attribute 'ended' in entry type deploymentStatus : " + entry) errorMessage <- (depStatus \\ "errorMessage").headOption match { case None => Full(None) case Some(s) => if(s.text.size == 0) Full(None) else Full(Some(s.text)) } } yield { status match { case "success" => SuccessStatus(id, started, ended, Map()) case "failure" => ErrorStatus(id, started, ended, errorMessage.map(x => Failure(x)).getOrElse(Failure("")) ) case s => NoStatus } } } } /** * That trait allow to unserialise change request changes from an XML file. * */ class ChangeRequestChangesUnserialisationImpl ( nodeGroupUnserialiser : NodeGroupUnserialisation , directiveUnserialiser : DirectiveUnserialisation , ruleUnserialiser : RuleUnserialisation , techRepo : TechniqueRepository , sectionSpecUnserialiser : SectionSpecParser ) extends ChangeRequestChangesUnserialisation with Loggable { def unserialise(xml:XNode): Box[(Box[Map[DirectiveId,DirectiveChanges]],Map[NodeGroupId,NodeGroupChanges],Map[RuleId,RuleChanges])] = { def unserialiseNodeGroupChange(changeRequest:XNode): Box[Map[NodeGroupId,NodeGroupChanges]]= { (for { groupsNode <- (changeRequest \\ "groups").headOption ?~! s"Missing child 'groups' in entry type changeRequest : ${xml}" } yield { (groupsNode\\"group").flatMap{ group => for { nodeGroupId <- group.attribute("id").map(id => NodeGroupId(id.text)) ?~! s"Missing attribute 'id' in entry type changeRequest group changes : ${group}" initialNode <- (group \\ "initialState").headOption initialState <- (initialNode \\ "nodeGroup").headOption match { case Some(initialState) => nodeGroupUnserialiser.unserialise(initialState) match { case Full(group) => Full(Some(group)) case eb : EmptyBox => eb ?~! 
"could not unserialize group" } case None => Full(None) } changeNode <- (group \\ "firstChange" \\ "change").headOption actor <- (changeNode \\\\ "actor").headOption.map(actor => EventActor(actor.text)) date <- (changeNode \\\\ "date").headOption.map(date => ISODateTimeFormat.dateTimeParser.parseDateTime(date.text)) reason = (changeNode \\\\ "reason").headOption.map(_.text) diff <- (changeNode \\\\ "diff").headOption.flatMap(_.attribute("action").headOption.map(_.text)) diffGroup <- (changeNode \\\\ "nodeGroup").headOption changeGroup <- nodeGroupUnserialiser.unserialise(diffGroup) change <- diff match { case "add" => Full(AddNodeGroupDiff(changeGroup)) case "delete" => Full(DeleteNodeGroupDiff(changeGroup)) case "modifyTo" => Full(ModifyToNodeGroupDiff(changeGroup)) case _ => Failure("should not happen") } } yield { val groupChange = NodeGroupChange(initialState,NodeGroupChangeItem(actor,date,reason,change),Seq()) (nodeGroupId -> NodeGroupChanges(groupChange,Seq())) } }.toMap }) } def unserialiseDirectiveChange(changeRequest:XNode): Box[Map[DirectiveId,DirectiveChanges]]= { (for { directivesNode <- (changeRequest \\ "directives").headOption ?~! s"Missing child 'directives' in entry type changeRequest : ${xml}" } yield { (directivesNode\\"directive").flatMap{ directive => for { directiveId <- directive.attribute("id").map(id => DirectiveId(id.text)) ?~! s"Missing attribute 'id' in entry type changeRequest directive changes : ${directive}" initialNode <- (directive \\ "initialState").headOption initialState <- (initialNode \\\\ "directive").headOption match { case Some(initialState) => directiveUnserialiser.unserialise(initialState) match { case Full((techName,directive,_)) => Full(Some((techName,directive))) case eb : EmptyBox => eb ?~! 
"could not unserialize directive" } case None => Full(None) } changeNode <- (directive \\ "firstChange" \\ "change").headOption actor <- (changeNode \\\\ "actor").headOption.map(actor => EventActor(actor.text)) date <- (changeNode \\\\ "date").headOption.map(date => ISODateTimeFormat.dateTimeParser.parseDateTime(date.text)) reason = (changeNode \\\\ "reason").headOption.map(_.text) diff <- (changeNode \\\\ "diff").headOption.flatMap(_.attribute("action").headOption.map(_.text)) diffDirective <- (changeNode \\\\ "directive").headOption (techniqueName,changeDirective,_) <- directiveUnserialiser.unserialise(diffDirective) change <- { diff match { case "add" => Full(AddDirectiveDiff(techniqueName,changeDirective)) case "delete" => Full(DeleteDirectiveDiff(techniqueName,changeDirective)) case "modifyTo" => (changeNode \\\\ "rootSection").headOption match { case Some(rsXml) => val techId = TechniqueId(techniqueName,changeDirective.techniqueVersion) val rootSection = sectionSpecUnserialiser.parseSectionsInPolicy(rsXml, techId, techniqueName.value) Full(ModifyToDirectiveDiff(techniqueName,changeDirective,rootSection)) case None => Failure(s"Could not find rootSection node in ${changeNode}") } case _ => Failure("should not happen") } } } yield { val directiveChange = DirectiveChange(initialState.map{case (techName,directive) => val rootSection = techRepo.get(TechniqueId(techName,directive.techniqueVersion)).map(_.rootSection).get (techName,directive,rootSection)},DirectiveChangeItem(actor,date,reason,change),Seq()) (directiveId -> DirectiveChanges(directiveChange,Seq())) } }.toMap }) } def unserialiseRuleChange(changeRequest:XNode): Box[Map[RuleId,RuleChanges]]= { (for { rulesNode <- (changeRequest \\ "rules").headOption ?~! s"Missing child 'rules' in entry type changeRequest : ${xml}" } yield { (rulesNode\\"rule").flatMap{ rule => for { ruleId <- rule.attribute("id").map(id => RuleId(id.text)) ?~! 
s"Missing attribute 'id' in entry type changeRequest rule changes : ${rule}" initialRule <- (rule \\ "initialState").headOption initialState <- (initialRule \\ "rule").headOption match { case Some(initialState) => ruleUnserialiser.unserialise(initialState) match { case Full(rule) => Full(Some(rule)) case eb : EmptyBox => eb ?~! "could not unserialize rule" } case None => Full(None) } changeRule <- (rule \\ "firstChange" \\ "change").headOption actor <- (changeRule \\\\ "actor").headOption.map(actor => EventActor(actor.text)) date <- (changeRule \\\\ "date").headOption.map(date => ISODateTimeFormat.dateTimeParser.parseDateTime(date.text)) reason = (changeRule \\\\ "reason").headOption.map(_.text) diff <- (changeRule \\\\ "diff").headOption.flatMap(_.attribute("action").headOption.map(_.text)) diffRule <- (changeRule \\\\ "rule").headOption changeRule <- ruleUnserialiser.unserialise(diffRule) change <- diff match { case "add" => Full(AddRuleDiff(changeRule)) case "delete" => Full(DeleteRuleDiff(changeRule)) case "modifyTo" => Full(ModifyToRuleDiff(changeRule)) case _ => Failure("should not happen") } } yield { val ruleChange = RuleChange(initialState,RuleChangeItem(actor,date,reason,change),Seq()) (ruleId -> RuleChanges(ruleChange,Seq())) } }.toMap }) } for { changeRequest <- { if(xml.label == XML_TAG_CHANGE_REQUEST) Full(xml) else Failure("Entry type is not a <%s>: ".format(XML_TAG_CHANGE_REQUEST, xml)) } fileFormatOk <- TestFileFormat(changeRequest) groups <- unserialiseNodeGroupChange(changeRequest) directives = Try { unserialiseDirectiveChange(changeRequest) } match { case Success(change) => change case Catch(e) => Failure(s"Could not deserialize directives changes cause ${e.getMessage()}") } rules <- unserialiseRuleChange(changeRequest) } yield { (directives,groups, rules) } } }
jooooooon/rudder
rudder-core/src/main/scala/com/normation/rudder/services/marshalling/XmlUnserialisationImpl.scala
Scala
agpl-3.0
28,969
package warsztat.groups.dao import java.util.UUID import warsztat.groups.domain.Group import scala.concurrent.Future trait GroupDao { def saveGroup(group: Group): Future[Unit] def addUserToGroup(groupId: UUID, userId: UUID): Future[Unit] }
bjankie1/warsztat-scala
src/main/scala/warsztat/groups/dao/GroupDao.scala
Scala
apache-2.0
251
package com.gx.typeclasses /** * Copyright 2017 josephguan * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ // type classes trait Speakable[T] { def say(): String }
josephguan/scala-design-patterns
structural/type-classes/src/main/scala/com/gx/typeclasses/Speakable.scala
Scala
apache-2.0
698
package akka.ainterface.remote import akka.ainterface.datatype._ import akka.ainterface.datatype.interpolation._ import akka.ainterface.test.arbitrary.AinterfaceArbitrary._ import org.scalacheck.Arbitrary.arbitrary import org.scalacheck.{Arbitrary, Gen} class SendSpec extends BaseSpec { // TODO: tests all formats private[this] def genErlTerm(level: Int): Gen[ErlTerm] = { def genScalar: Gen[ErlTerm] = Gen.oneOf( arbitrary[ErlInteger], arbitrary[ErlFloat], arbitrary[ErlAtom], arbitrary[ErlBitString], arbitrary[ErlBitStringImpl], arbitrary[ErlBinary], arbitrary[ErlReference], arbitrary[ErlNewReference], arbitrary[ErlExternalFun], //arbitrary[ErlPort], arbitrary[ErlPid] ) genScalar } implicit lazy val arbErlTerm: Arbitrary[ErlTerm] = Arbitrary(genErlTerm(0)) "ErlTermCodec" should { "encode/decode round-trip" in { forAll { term: ErlTerm => process.send(atom"echo", atom"erltest@okumin-mini.local", erl"{${process.self}, mofu}") val echo = process.receive() match { case ErlTuple(from: ErlPid, _) => from } process.send(echo, ErlTuple(process.self, term)) val response = process.receive() assert(response === ErlTuple(echo, term)) } } } }
ainterface/ainterface
ainterface-integration-test/src/test/scala/akka/ainterface/remote/SendSpec.scala
Scala
apache-2.0
1,319
package fr.hurrycane.routes import akka.actor.ActorRef import akka.http.scaladsl.model.StatusCodes import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route import akka.http.scaladsl.server.directives.MethodDirectives.{ get, post } import akka.http.scaladsl.server.directives.PathDirectives.path import akka.http.scaladsl.server.directives.RouteDirectives.complete import akka.http.scaladsl.settings.RoutingSettings import akka.pattern.ask import akka.stream.ActorMaterializer import akka.util.Timeout import fr.hurrycane.dto.RequestDto import fr.hurrycane.entity.ActionPerformed import fr.hurrycane.registry.MessageRegistryActor._ import fr.hurrycane.registry.{ Message, Messages } import fr.hurrycane.tools.JsonSupport import scala.concurrent.duration._ import scala.concurrent.{ ExecutionContext, Future } trait MessageRoutes extends JsonSupport { implicit lazy val messageTimeout: Timeout = Timeout(5.seconds) def messageRoutes(messageRegistryActor: ActorRef, askTimeout: FiniteDuration)(implicit mat: ActorMaterializer, ec: ExecutionContext, rs: RoutingSettings): Route = pathPrefix("message") { concat( pathEnd { concat( get { val messages: Future[Future[Messages]] = (messageRegistryActor ? GetMessages).mapTo[Future[Messages]] complete(messages) }, post { entity(as[RequestDto]) { message => println("RECEIVE POST REQUEST") val userCreated: Future[Future[ActionPerformed]] = (messageRegistryActor ? SendMessage(message)).mapTo[Future[ActionPerformed]] onSuccess(userCreated) { performed => println("COMPLETE REQUEST") complete((StatusCodes.Created, performed)) } } }) }, path(Segment) { name => concat( get { val maybeUser: Future[Future[Option[Message]]] = (messageRegistryActor ? GetMessage(name)).mapTo[Future[Option[Message]]] rejectEmptyResponse { complete(maybeUser) } }) }) } }
haris44/Bot-API
src/main/scala/fr/hurrycane/routes/MessageRoutes.scala
Scala
bsd-3-clause
2,231
package com.whisk.docker.testkit import java.nio.charset.StandardCharsets import java.util.concurrent.TimeUnit import com.google.common.io.Closeables import com.spotify.docker.client.DockerClient.{AttachParameter, RemoveContainerParam} import com.spotify.docker.client.messages._ import com.spotify.docker.client.{DockerClient, LogMessage, LogStream} import scala.concurrent.{ExecutionContext, Future, Promise} class StartFailedException(msg: String) extends Exception(msg) class ContainerCommandExecutor(val client: DockerClient) { def createContainer( spec: ContainerSpec )(implicit ec: ExecutionContext): Future[ContainerCreation] = { Future( scala.concurrent.blocking(client.createContainer(spec.containerConfig(), spec.name.orNull)) ) } def startContainer(id: String)(implicit ec: ExecutionContext): Future[Unit] = { Future(scala.concurrent.blocking(client.startContainer(id))) } def runningContainer(id: String)(implicit ec: ExecutionContext): Future[ContainerInfo] = { def inspect() = { Future(scala.concurrent.blocking(client.inspectContainer(id))).flatMap { info => val status = info.state().status() val badStates = Set("removing", "paused", "exited", "dead") if (status == "running") { Future.successful(info) } else if (badStates(status)) { Future.failed(new StartFailedException("container is in unexpected state: " + status)) } else { Future.failed(new Exception("not running yet")) } } } def attempt(rest: Int): Future[ContainerInfo] = { inspect().recoverWith { case e: StartFailedException => Future.failed(e) case _ if rest > 0 => RetryUtils.withDelay(TimeUnit.SECONDS.toMillis(1))(attempt(rest - 1)) case _ => Future.failed(new StartFailedException("failed to get container in running state")) } } attempt(10) } private def logStreamFuture(id: String, withErr: Boolean)(implicit ec: ExecutionContext ): Future[LogStream] = { val baseParams = List(AttachParameter.STDOUT, AttachParameter.STREAM, AttachParameter.LOGS) val logParams = if (withErr) AttachParameter.STDERR :: 
baseParams else baseParams Future(scala.concurrent.blocking(client.attachContainer(id, logParams: _*))) } def withLogStreamLines(id: String, withErr: Boolean)( f: String => Unit )(implicit ec: ExecutionContext): Unit = { logStreamFuture(id, withErr).foreach { stream => stream.forEachRemaining(new java.util.function.Consumer[LogMessage] { override def accept(t: LogMessage): Unit = { val str = StandardCharsets.US_ASCII.decode(t.content()).toString f(s"[$id] $str") } }) } } def withLogStreamLinesRequirement(id: String, withErr: Boolean)( f: String => Boolean )(implicit ec: ExecutionContext): Future[Unit] = { logStreamFuture(id, withErr).flatMap { stream => val p = Promise[Unit]() Future { stream.forEachRemaining(new java.util.function.Consumer[LogMessage] { override def accept(t: LogMessage): Unit = { val str = StandardCharsets.US_ASCII.decode(t.content()).toString if (f(str)) { p.trySuccess(()) Closeables.close(stream, true) } } }) } p.future } } def remove(id: String, force: Boolean, removeVolumes: Boolean)(implicit ec: ExecutionContext ): Future[Unit] = { Future( scala.concurrent.blocking( client.removeContainer( id, RemoveContainerParam.forceKill(force), RemoveContainerParam.removeVolumes(removeVolumes) ) ) ) } def close(): Unit = { Closeables.close(client, true) } }
whisklabs/docker-it-scala
core/src/main/scala/com/whisk/docker/testkit/ContainerCommandExecutor.scala
Scala
mit
3,824
package opentrack.jp.journey import opentrack.jp.station.CRS case class CallingPoint(station: CRS, arrivalTime: Option[Int], departureTime: Option[Int])
open-track/journey-planner-scala
app/opentrack/jp/journey/CallingPoint.scala
Scala
gpl-3.0
155
/* * Copyright 2016 Nicolas Rinaudo * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kantan.codecs.enumeratum.values import kantan.codecs.enumeratum.laws.discipline.EnumeratedLong import kantan.codecs.enumeratum.laws.discipline.arbitrary._ import kantan.codecs.laws.discipline.{StringCodecTests, StringDecoderTests, StringEncoderTests} import kantan.codecs.laws.discipline.DisciplineSuite class LongEnumCodecTests extends DisciplineSuite { checkAll("StringDecoder[EnumeratedLong]", StringDecoderTests[EnumeratedLong].decoder[Int, Int]) checkAll("StringEncoder[EnumeratedLong]", StringEncoderTests[EnumeratedLong].encoder[Int, Int]) checkAll("StringCodec[EnumeratedLong]", StringCodecTests[EnumeratedLong].codec[Int, Int]) }
nrinaudo/kantan.codecs
enumeratum/core/shared/src/test/scala/kantan/codecs/enumeratum/values/LongEnumCodecTests.scala
Scala
apache-2.0
1,257
package code.snippet import _root_.scala.xml.{NodeSeq, Text} import _root_.net.liftweb.util._ import _root_.net.liftweb.mapper._ import _root_.net.liftweb.common._ import _root_.net.liftweb.http.S import net.liftweb.http.{S,DispatchSnippet,Paginator,PaginatorSnippet, SortedPaginator,SortedPaginatorSnippet} import net.liftweb.mapper.view.{SortedMapperPaginatorSnippet,SortedMapperPaginator} import _root_.java.util.Date import code.lib._ import code.model.Message import code.model.Recipient import Helpers._ import S.? class Messages extends DispatchSnippet { override def dispatch = { case "count" => count _ case "all" => all _ case "top" => top _ case "paginate" => paginator.paginate _ case "detail" => detail _ } def count(xhtml: NodeSeq): NodeSeq = Text(Message.count.toString) val paginator = new SortedMapperPaginatorSnippet(Message,Message.id, "ID" -> Message.id){ override def itemsPerPage = 20 _sort = (0,false) override def prevXml: NodeSeq = Text(?("Prev")) override def nextXml: NodeSeq = Text(?("Next")) override def firstXml: NodeSeq = Text(?("First")) override def lastXml: NodeSeq = Text(?("Last")) } protected def many(messages: List[Message], xhtml: NodeSeq): NodeSeq = messages.flatMap(a => single(a,xhtml)) protected def single(m: Message, xhtml: NodeSeq): NodeSeq = { val humanSubject = if (m.subject.is == null || m.subject.is == "") { "(No Subject)" } else { m.subject } bind("a", xhtml, "recipients" -> m.recipientsPrintable, "sender" -> m.sender, "subject" -> m.subject, "sent" -> <abbr class="timeago" title={m.atomSentDate}>{m.sentDate}</abbr>, "linkedsubject" -%> <a href={m.url}>{humanSubject}</a> ) } // Display all entries the paginator returns def all(xhtml: NodeSeq): NodeSeq = many(paginator.page,xhtml) // Show pagination links def paginate(xhtml: NodeSeq) { paginator.paginate(xhtml) } // Show most recent, no pagination offsets def top(xhtml: NodeSeq) = { val count = S.attr("count", _.toInt) openOr 20 many(Message.findAll(MaxRows(count), 
OrderBy(Message.id, Descending)),xhtml) } def detail(xhtml: NodeSeq) = { val msgid = S.param("msgId") getOrElse {"0"} val message : Box[Message] = Message.getMessageById(msgid.toLong) message match { case Full(m) => bind("message", xhtml, "sender" -> m.sender, "subject" -> m.subject, "sent" -> <abbr class="timeago" title={m.atomSentDate}>{m.sentDate}</abbr>, "textContent" -> m.textContent, "recipients" -> m.recipientsPrintable, "headers" -> m.getHeaders(), "attachments" -> m.getAttachments().flatMap({x=>x})) case _ => <strong>{?("Could not find message")}</strong> } } }
scsibug/fermata
src/main/scala/code/snippet/Messages.scala
Scala
bsd-3-clause
2,825
package com.teamisotope.techexpansion.util import net.minecraft.client.resources.I18n import net.minecraft.entity.player.EntityPlayer import net.minecraft.util.text.{TextComponentString, TextFormatting} import net.minecraftforge.event.entity.EntityJoinWorldEvent import net.minecraftforge.fml.common.eventhandler.SubscribeEvent object Handlers { @SubscribeEvent def entityJoinWorldEvent(e: EntityJoinWorldEvent): Unit = { // TODO: Update checker? } }
collaborationmods/TechExpansion
src/main/scala/com/teamisotope/techexpansion/util/Handlers.scala
Scala
gpl-3.0
465
package ch09 /* * 3. Write a Scala code snippet that reads a file and prints all words * with more than 12 characters to the console. Extra credit if you can * do this in a single line. * */ import scala.io.Source object ex03 extends App { var fileName = "" if (args.length < 1) { println("No input from the command line. Exiting.") sys.exit(-1) } fileName = args(0) var source = Source.fromFile(fileName, "UTF-8") for(line <- source.getLines; word <- line.split("\\\\s+") if (word.length > 12)) println(word) }
tuxdna/scala-for-the-impatient-exercises
src/main/scala/ch09/ex03.scala
Scala
apache-2.0
538
/* The MIT License (MIT) Copyright (c) 2014 Martin Snyder Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package com.martinsnyder.datastore.test import org.scalatest.FunSpec import com.martinsnyder.datastore.{ DataStore, EqualsCondition, Record, UniqueConstraint } import scala.util.{ Failure, Success, Try } import com.martinsnyder.datastore.DataStore.ConstraintViolation import com.martinsnyder.datastore.inmemory.InMemoryDataStore object ConstraintAndTransactionTest { case class MyRecord(value: String) extends Record } abstract class ConstraintAndTransactionTest extends FunSpec { import ConstraintAndTransactionTest._ val dataStore: DataStore describe("DataStore") { it("lets me insert a record") { val myRecord = MyRecord("testInsert") val insertResult = dataStore.withConnection(_.inTransaction(_.createRecords(Seq(myRecord)))) assert(insertResult.isSuccess) } it("lets me retrieve a stored record") { val myRecord = MyRecord("testRetrieve") val insertResult = dataStore.withConnection(_.inTransaction(_.createRecords(Seq(myRecord)))) assert(insertResult.isSuccess) val records = dataStore.withConnection(_.retrieveRecords[MyRecord](EqualsCondition("value", "testRetrieve"))) assert(records == Success(Seq(myRecord))) } it("disallows duplicate records") { val myRecord = MyRecord("testDuplicate") val insertResult = dataStore.withConnection(_.inTransaction(_.createRecords(Seq(myRecord)))) assert(insertResult.isSuccess) dataStore.withConnection(_.inTransaction(_.createRecords(Seq(myRecord)))) match { case Failure(ConstraintViolation(_)) => // Good case _ => fail("duplicate insert allowed") } } it("supports transaction rollback") { val myRecord = MyRecord("testTransactionRollback") val transactionResult = dataStore.withConnection(_.inTransaction(connection => Try({ val insertResult = connection.createRecords(Seq(myRecord)) assert(insertResult.isSuccess) val records = connection.retrieveRecords[MyRecord](EqualsCondition("value", "testTransactionRollback")) assert(records == Success(Seq(myRecord))) throw new Exception("whoops!") }))) assert(transactionResult.isFailure) val records = 
dataStore.withConnection(_.retrieveRecords[MyRecord](EqualsCondition("value", "testTransactionRollback"))) assert(records == Success(Seq())) } it("rejects conflicting transactions") { val myRecord = MyRecord("testTransactionConflict") dataStore.withConnection(connection => { val transaction1Result = connection.inTransaction(writeConnection1 => Try({ val insertResult1 = writeConnection1.createRecords(Seq(myRecord)) assert(insertResult1.isSuccess) val transaction2Result = connection.inTransaction(writeConnection2 => Try({ val insertResult2 = writeConnection2.createRecords(Seq(myRecord)) assert(insertResult2.isSuccess) })) assert(transaction2Result.isSuccess) })) assert(transaction1Result.isFailure) }) } it("lets me delete a stored record") { val myRecord = MyRecord("testDelete") val insertResult = dataStore.withConnection(_.inTransaction(_.createRecords(Seq(myRecord)))) assert(insertResult.isSuccess) // Should NOT be permitted to insert it again val duplicateInsert1Result = dataStore.withConnection(_.inTransaction(_.createRecords(Seq(myRecord)))) assert(duplicateInsert1Result.isFailure) val records = dataStore.withConnection(_.retrieveRecords[MyRecord](EqualsCondition("value", "testDelete"))) assert(records == Success(Seq(myRecord))) val deleteResult = dataStore.withConnection(_.inTransaction(_.deleteRecords[MyRecord](EqualsCondition("value", "testDelete")))) assert(deleteResult == Success(Seq(MyRecord("testDelete")))) // Should be permitted to insert it again val duplicateInsert2Result = dataStore.withConnection(_.inTransaction(_.createRecords(Seq(myRecord)))) assert(duplicateInsert2Result.isSuccess) } } } class InMemoryDataStoreConstraintAndTransactionTest extends ConstraintAndTransactionTest { import ConstraintAndTransactionTest._ override val dataStore = new InMemoryDataStore(List( UniqueConstraint(classOf[MyRecord].getName, "value") )) }
MartinSnyder/scala-transactional-datastore
inmemory_store/src/test/scala/com/martinsnyder/datastore/test/ConstraintAndTransactionTest.scala
Scala
mit
5,489
package monocle.std import monocle.TestUtil._ import monocle.law.PrismLaws import org.specs2.scalaz.Spec class TheseSpec extends Spec { checkAll("These - Disjunction" , PrismLaws( theseDisjunction[Int, String])) }
CapeSepias/Monocle
test/src/test/scala/monocle/std/TheseSpec.scala
Scala
mit
218
package ru.dgolubets.reactjs.server.actors import java.io.{File, PrintWriter} import java.nio.file.Files import scala.language.postfixOps import io.circe.Json import org.scalatest._ import ru.dgolubets.reactjs.server._ /** * Integration tests for RenderActor actor. */ class RenderServerActorSpec extends WordSpec with ActorSpecLike with Matchers with BeforeAndAfterEach { import Messages._ val tempDir = Files.createTempDirectory("RenderServerActorSpec") val watchSettings = WatchSettings(tempDir.toFile) val renderScript = """ |function render(state){ | return "<div></div>" |} """.stripMargin val renderSource = ScriptSource.fromString(renderScript) override def afterAll(): Unit = { super.afterAll() better.files.File(tempDir).delete(true) } override def afterEach(): Unit = { super.afterEach() better.files.File(tempDir).clear() } "RenderServerActor" when { "not watching" should { "render" in { disposableActor(RenderServerActor.props(RenderServerSettings(Seq(renderSource)))) { server => server ! RenderRequest("render", Json.Null) expectMsgPF() { case RenderResponse(Right(_)) => } } } } "watching" should { "postpone rendering until ready" in { val files = Seq( new File(tempDir.toFile, "file1") ) val sources = Seq(renderSource) ++ files.map(ScriptSource.fromFile(_)) disposableActor(RenderServerActor.props(RenderServerSettings(sources, watch = Some(watchSettings)))) { server => server ! RenderRequest("render", Json.Null) expectNoMessage() for (f <- files) { f.createNewFile() } expectMsgPF() { case RenderResponse(Right(_)) => } } } "return success" in { val sources = Seq(renderSource) disposableActor(RenderServerActor.props(RenderServerSettings(sources, watch = Some(watchSettings)))) { server => Thread.sleep(100) // wait for source monitor to report server ! 
RenderRequest("render", Json.Null) expectMsgPF() { case RenderResponse(Right(_)) => } } } "if render actors fail to initialize wait for sources to change" in { val fakeSource = new File(tempDir.toFile, "fake_source") fakeSource.createNewFile() def overwriteSource(text: String): Unit = { val writer = new PrintWriter(fakeSource) writer.println(text) writer.close() } overwriteSource("(gibberish that should make it fail to load {") val sources = Seq(ScriptSource.fromFile(fakeSource)) disposableActor(RenderServerActor.props(RenderServerSettings(sources, watch = Some(watchSettings)))) { server => // this should not produce any result yet, cos renderer should have failed to load server ! RenderRequest("render", Json.Null) expectNoMessage() // update the source with valid script overwriteSource(renderScript) // we should get our response server ! RenderRequest("render", Json.Null) expectMsgPF(hint = "should get response after the source is updated") { case RenderResponse(Right(_)) => } } } } } }
DGolubets/reactjs-server
src/test/scala/ru/dgolubets/reactjs/server/actors/RenderServerActorSpec.scala
Scala
mit
3,406
package com.bazaarvoice.sswf

import com.amazonaws.services.simpleworkflow.AmazonSimpleWorkflowClient
import com.amazonaws.services.simpleworkflow.model._
import com.bazaarvoice.sswf.model.history.StepsHistory
import com.bazaarvoice.sswf.model.result.{InProgress, StepResult, Success}
import com.bazaarvoice.sswf.model.{DefinedStep, ScheduledStep, StepInput}
import com.bazaarvoice.sswf.service.{StepActionWorker, StepDecisionWorker, WorkflowManagement}
import org.joda.time.Duration
import org.scalatest.FlatSpec

import scala.collection.JavaConverters._

/**
 * Workflow definition used by [[WaitTimeTest]]: DUMMY, WAIT, DUMMY, DUMMY.
 * Each DUMMY step sleeps ~500 ms and succeeds; the WAIT step reports
 * InProgress four times (decrementing a counter) before finally succeeding,
 * which drives the timer-decision / cumulative-time assertions in the test.
 */
class WaitTimeTestWorkflowDef() extends WorkflowDefinition[String, WaitTimeSteps] {
  // Remaining number of InProgress responses the WAIT step will produce.
  private var waitStepInvocationCount: Int = 4

  override def workflow(input: String): _root_.java.util.List[ScheduledStep[WaitTimeSteps]] =
    List[ScheduledStep[WaitTimeSteps]](DefinedStep(WaitTimeSteps.DUMMY_STEP), DefinedStep(WaitTimeSteps.WAIT_STEP), DefinedStep(WaitTimeSteps.DUMMY_STEP), DefinedStep(WaitTimeSteps.DUMMY_STEP)).asJava

  override def onFinish(workflowId: String, runId: String, input: String, history: StepsHistory[String, WaitTimeSteps], message: String): Unit = ()

  override def onCancel(workflowId: String, runId: String, input: String, history: StepsHistory[String, WaitTimeSteps], message: String): Unit = ()

  override def onFail(workflowId: String, runId: String, input: String, history: StepsHistory[String, WaitTimeSteps], message: String): Unit = ()

  override def act(step: WaitTimeSteps, input: String, stepInput: StepInput, heartbeatCallback: HeartbeatCallback, execution: WorkflowExecution): StepResult =
    step match {
      case WaitTimeSteps.DUMMY_STEP =>
        // Sleep so each invocation accrues measurable activity time.
        Thread.sleep(500)
        Success(None)
      case WaitTimeSteps.WAIT_STEP =>
        if (waitStepInvocationCount > 0) {
          Thread.sleep(500)
          waitStepInvocationCount -= 1
          InProgress(Some(s"waiting!!"))
        } else {
          Success(None)
        }
    }
}

/**
 * Integration test (runs against the real SWF service in the "sswf-tests"
 * domain) verifying invocation counts, timer decisions and cumulative
 * activity-time accounting across repeated InProgress invocations of a step.
 */
class WaitTimeTest extends FlatSpec {
  // Identity parser: the workflow input is already a plain String.
  val parser = new InputParser[String] {
    override def serialize(input: String): String = input
    override def deserialize(inputString: String): String = inputString
  }

  private val domain: String = "sswf-tests"
  private val wf: String = "wait-test"
  private val swf: AmazonSimpleWorkflowClient = new AmazonSimpleWorkflowClient()
  private val logger: Logger = new SilentLogger

  val manager = new WorkflowManagement[String, WaitTimeSteps](domain, wf, "0.0", wf, swf, inputParser = parser, log = logger)
  val definition = new WaitTimeTestWorkflowDef()
  val actor = new StepActionWorker[String, WaitTimeSteps](domain, wf, swf, parser, definition, log = logger)
  val decider = new StepDecisionWorker[String, WaitTimeSteps](domain, wf, swf, parser, definition, logger)

  "Each Step" should " have 2 invocations " in {
    manager.registerWorkflow()

    val workflow: WorkflowExecution = manager.startWorkflow("B", "")
    try {
      // First DUMMY step completes immediately (no timer expected).
      assert(waitForStepResult(makeTimerDecision = false, 0).getResult === StepResult.serialize(Success(None)))
      //wait step invocations begin...
      assert(waitForStepResult(makeTimerDecision = false, 0).getResult === StepResult.serialize(InProgress(Some("waiting!!"))))
      //2nd invocation for wait step
      assert(waitForStepResult(makeTimerDecision = true, 2).getResult === StepResult.serialize(InProgress(Some("waiting!!"))))
      //3rd invocation for wait step
      assert(waitForStepResult(makeTimerDecision = true, 4).getResult === StepResult.serialize(InProgress(Some("waiting!!"))))
      //4th invocation for wait step
      assert(waitForStepResult(makeTimerDecision = true, 4).getResult === StepResult.serialize(InProgress(Some("waiting!!"))))
      //5th invocation for wait step
      assert(waitForStepResult(makeTimerDecision = true, 4).getResult === StepResult.serialize(Success(None)))
      //2nd dummy step invocation
      assert(waitForStepResult(makeTimerDecision = false, 0).getResult === StepResult.serialize(Success(None)))
      //final dummy step invocation
      assert(waitForStepResult(makeTimerDecision = false, 0).getResult === StepResult.serialize(Success(None)))

      val wfHistory = manager.describeExecution(workflow.getWorkflowId, workflow.getRunId)
      // Expected events: start + 1st DUMMY + 5 WAIT invocations + 2 DUMMY = 9.
      assert(wfHistory.events.size() == 9)

      val firstDummyStep = wfHistory.events.get(1)
      assert(firstDummyStep.invocations == 1)
      //case 1: verify cumulative time over single invocation
      assert(firstDummyStep.cumulativeActivityTime.getMillis >= firstDummyStep.end.get.getMillis - firstDummyStep.start.getMillis)

      val firstWaitStepInProgress = wfHistory.events.get(2)
      assert(firstWaitStepInProgress.cumulativeActivityTime.getMillis >= 500)
      assert(firstWaitStepInProgress.invocations == 1)

      val secondWaitStepInProgress = wfHistory.events.get(3)
      assert(secondWaitStepInProgress.invocations == 2)
      val secondWaitStepCumulativeTime: Duration = new Duration(firstWaitStepInProgress.start, secondWaitStepInProgress.end.get)
      assert(secondWaitStepInProgress.cumulativeActivityTime.getMillis >= secondWaitStepCumulativeTime.getMillis)
      assert(secondWaitStepInProgress.cumulativeActivityTime.getMillis >= firstWaitStepInProgress.cumulativeActivityTime.getMillis)

      val thirdWaitStepInProgress = wfHistory.events.get(4)
      assert(thirdWaitStepInProgress.invocations == 3)
      val thirdWaitStepCumulativeTime: Duration = new Duration(firstWaitStepInProgress.start, thirdWaitStepInProgress.end.get)
      assert(thirdWaitStepInProgress.cumulativeActivityTime.getMillis >= thirdWaitStepCumulativeTime.getMillis)
      assert(thirdWaitStepCumulativeTime.getMillis > secondWaitStepCumulativeTime.getMillis)

      val fourthWaitStepInProgress = wfHistory.events.get(5)
      assert(fourthWaitStepInProgress.invocations == 4)

      //case 2: verify cumulative time over two in-progress invocations
      //A STEP1: InProgress()
      //B STEP1: Success()    verify: B.cum >= B.end - A.start
      val fifthWaitStep = wfHistory.events.get(6)
      assert(fifthWaitStep.invocations == 5)
      assert(fifthWaitStep.cumulativeActivityTime.getMillis > 0)
      assert(fifthWaitStep.cumulativeActivityTime.getMillis >= new Duration(fourthWaitStepInProgress.start, fifthWaitStep.end.get).getMillis)

      //case 3: verify accumulative time over successive invocations
      //A STEP0: Success()    verify: A.cum >= A.end - A.start
      //B STEP0: Success()    verify: B.cum >= B.end - B.start AND B.cum != 0 AND A.cum != 0 AND B.cum < B.end - A.start
      val secondDummyStep = wfHistory.events.get(7)
      val secondDummyStepDuration = new Duration(secondDummyStep.start, secondDummyStep.end.get)
      assert(secondDummyStep.invocations == 1)
      assert(secondDummyStep.cumulativeActivityTime.getMillis > 0)
      assert(secondDummyStep.cumulativeActivityTime.getMillis >= secondDummyStepDuration.getMillis)

      val thirdDummyStep = wfHistory.events.get(8)
      val thirdDummyStepDuration = new Duration(thirdDummyStep.start, thirdDummyStep.end.get)
      assert(thirdDummyStep.invocations == 1)
      assert(thirdDummyStep.cumulativeActivityTime.getMillis > 0)
      assert(thirdDummyStep.cumulativeActivityTime.getMillis >= thirdDummyStepDuration.getMillis)
      assert(thirdDummyStep.cumulativeActivityTime.getMillis < new Duration(secondDummyStep.start, thirdDummyStep.end.get).getMillis)
    } finally {
      try {
        swf.terminateWorkflowExecution(new TerminateWorkflowExecutionRequest().withDomain(domain).withWorkflowId(workflow.getWorkflowId).withRunId(workflow.getRunId))
      } catch {
        case e: UnknownResourceException => () // this means the test passed and the workflow got cancelled
      }
    }
  }

  /**
   * Drives one step to completion: optionally consumes a StartTimer decision
   * (asserting the requested fire timeout), then takes the ScheduleActivityTask
   * decision and runs the activity, returning the activity's completion request.
   */
  def waitForStepResult(makeTimerDecision: Boolean, waitTimeRequested: Int): RespondActivityTaskCompletedRequest = {
    if (makeTimerDecision) {
      val decisionTask: DecisionTask = untilNotNull(decider.pollForDecisionsToMake())
      val decision: RespondDecisionTaskCompletedRequest = decider.makeDecision(decisionTask)
      assert(decision.getDecisions.asScala.exists((d: Decision) => d.getDecisionType == DecisionType.StartTimer.toString))
      assert(decision.getDecisions.asScala.exists((d: Decision) => d.getStartTimerDecisionAttributes.getStartToFireTimeout.toInt == waitTimeRequested))
    }
    val scheduleActivityDecisionTask: DecisionTask = untilNotNull(decider.pollForDecisionsToMake())
    val scheduleActivityDecision: RespondDecisionTaskCompletedRequest = decider.makeDecision(scheduleActivityDecisionTask)
    assert(scheduleActivityDecision.getDecisions.asScala.exists((d: Decision) => d.getDecisionType == DecisionType.ScheduleActivityTask.toString))

    val activityTask: ActivityTask = untilNotNull(actor.pollForWork())
    actor.doWork(activityTask)
  }

  // Busy-polls `action` until it returns a non-null value (SWF long polling
  // may return null when no task is available).
  def untilNotNull[R](action: => R): R = {
    while (true) {
      val r: R = action
      if (r != null) {
        return r
      }
    }
    throw new Exception()
  }
}
bazaarvoice/super-simple-workflow
sswf-core/src/it/scala/com/bazaarvoice/sswf/WaitTimeTest.scala
Scala
apache-2.0
8,984
package com.github.mdr.mash.parser

import com.github.mdr.mash.lexer.{ LexerResult, MashLexer }
import com.github.mdr.mash.parser.ConcreteSyntax.Program
import com.github.mdr.mash.utils.PointedRegion

import scala.language.implicitConversions

/** Describes a parse failure: the message and the input region it points at. */
case class ParseError(message: String, location: PointedRegion)

object MashParser {

  /**
   * Strict parse of the given source: the first syntax problem is reported
   * as a Left(ParseError) instead of a forgiving best-effort tree.
   */
  def parse(s: String, mish: Boolean = false): Either[ParseError, Program] =
    try
      Right(parseProgram(s, forgiving = false, mish = mish))
    catch {
      case e: MashParserException => Left(e.parseError)
    }

  /** Forgiving parse: always produces a Program, recovering from syntax errors. */
  def parseForgiving(s: String, mish: Boolean = false): Program =
    parseProgram(s, forgiving = true, mish = mish)

  /** Tokenises the source and hands the token stream to the parser. */
  private def parseProgram(s: String, forgiving: Boolean = true, mish: Boolean = false): Program =
    parseProgram(MashLexer.tokenise(s, forgiving = forgiving, mish = mish), forgiving, mish)

  /**
   * Parses an already-tokenised input. In mish mode the whole input is read
   * as a single mish expression wrapped in a Program node.
   */
  def parseProgram(lexerResult: LexerResult, forgiving: Boolean, mish: Boolean): Program = {
    val parser = new MashParse(lexerResult, initialForgiving = forgiving)
    if (mish)
      Program(None, parser.mishExpr())
    else
      parser.program()
  }

}
mdr/mash
src/main/scala/com/github/mdr/mash/parser/MashParser.scala
Scala
mit
1,159
package com.socrata.datacoordinator.secondary.feedback

import java.io.IOException

import com.rojoma.json.util.JsonArrayIterator.ElementDecodeException
import com.rojoma.json.v3.ast.{JArray, JObject, JString, JValue}
import com.rojoma.json.v3.codec.{DecodeError, JsonDecode}
import com.rojoma.json.v3.io.{JValueEventIterator, JsonLexException, JsonReaderException}
import com.rojoma.json.v3.util._
import com.rojoma.simplearm.v2.{ResourceScope, using}
import com.socrata.datacoordinator.id.UserColumnId
import com.socrata.datacoordinator.secondary
import com.socrata.datacoordinator.util.collection.MutableColumnIdMap
import com.socrata.http.client.{HttpClient, RequestBuilder, SimpleHttpRequest}
import com.socrata.http.client.exceptions.{ContentTypeException, HttpClientException}

// Exported row data: the user-visible primary key column plus a lazy row iterator.
case class RowData[CV](pk: UserColumnId, rows: Iterator[secondary.Row[CV]])

abstract class DataCoordinatorClient[CT, CV](typeFromJValue: JValue => Option[CT],
                                             datasetInternalName: String,
                                             fromJValueFunc: CT => JValue => Option[CV]) {

  /**
   * Export the rows of a dataset for the given columns
   * @param columnSet must contain at least one column
   */
  def exportRows(columnSet: Seq[UserColumnId], cookie: CookieSchema, resourceScope: ResourceScope): Either[RequestFailure, Either[ColumnsDoNotExist, RowData[CV]]]

  /**
   * Post mutation script to data-coordinator
   * @return On success return None
   *         On failure return Some RequestFailure or UpdateSchemaFailure
   */
  def postMutationScript(script: JArray, cookie: CookieSchema): Option[Either[RequestFailure, UpdateSchemaFailure]]
}

object HttpDataCoordinatorClient {
  // Partially applies the shared infrastructure (http client, discovery, retries,
  // type decoding) leaving (datasetInternalName, value decoder) for the caller.
  def apply[CT,CV](httpClient: HttpClient,
                   hostAndPort: String => Option[(String, Int)],
                   retries: Int,
                   typeFromJValue: JValue => Option[CT]): (String, CT => JValue => Option[CV]) => HttpDataCoordinatorClient[CT,CV] =
    new HttpDataCoordinatorClient[CT,CV](httpClient, hostAndPort, retries, typeFromJValue, _, _)
}

/**
 * HTTP implementation of [[DataCoordinatorClient]]. Requests are retried up to
 * `retries` times on IO / http-client / JSON-reader failures; well-known error
 * responses (dataset missing, unknown columns, busy) are mapped to typed
 * failure values rather than exceptions.
 *
 * NOTE(review): several handlers use non-local `return` from inside closures to
 * short-circuit with a failure value — restructure with care.
 */
class HttpDataCoordinatorClient[CT,CV](httpClient: HttpClient,
                                       hostAndPort: String => Option[(String, Int)],
                                       retries: Int,
                                       typeFromJValue: JValue => Option[CT],
                                       datasetInternalName: String,
                                       fromJValueFunc: CT => JValue => Option[CV])
  extends DataCoordinatorClient[CT,CV](typeFromJValue, datasetInternalName, fromJValueFunc) {

  val log = org.slf4j.LoggerFactory.getLogger(classOf[HttpDataCoordinatorClient[CT,CV]])

  // Resolves the data-coordinator instance from the "instance.xxx" prefix of
  // the dataset internal name; None when service discovery has no host yet.
  private def datasetEndpoint: Option[String] = {
    datasetInternalName.lastIndexOf('.') match {
      case -1 =>
        log.error("Could not extract data-coordinator instance name from dataset: {}", datasetInternalName)
        throw new Exception(s"Could not extract data-coordinator instance name from dataset: $datasetInternalName")
      case n =>
        hostAndPort(datasetInternalName.substring(0, n)) match {
          case Some((host, port)) => Some(s"http://$host:$port/dataset/$datasetInternalName")
          case None => None
        }
    }
  }

  // Logs and wraps an unexpected condition as an UnexpectedError failure.
  private def unexpectedError[T](message: String, cause: Throwable = null): Left[UnexpectedError, T] = {
    log.error(message, cause)
    Left(UnexpectedError(message, cause))
  }

  private def unexpectedErrorForResponse(message: String, code: Int, info: Any, cause: Exception = null) =
    unexpectedError(s"$message from data-coordinator for status $code: $info", cause)

  private val unexpected = "Received an unexpected error"
  private val uninterpretable = "Unable to interpret error response"

  private def unexpectedResponse(code: Int, response: ErrorResponse) = unexpectedErrorForResponse(unexpected, code, response)
  private def uninterpretableResponse(code: Int, response: ErrorResponse) = unexpectedErrorForResponse(uninterpretable, code, response)
  private def uninterpretableResponse(code: Int, error: DecodeError) = unexpectedErrorForResponse(uninterpretable, code, error)

  // Runs `actions`, retrying on IOException / HttpClientException /
  // JsonReaderException until the attempts are exhausted. The `return actions`
  // inside the try means a successful run exits immediately; only the caught
  // exception falls through to the retry logic.
  private def retrying[T](actions: => Either[RequestFailure, T], remainingAttempts: Int = retries): Either[RequestFailure, T] = {
    val failure = try {
      return actions
    } catch {
      case e: IOException => e
      case e: HttpClientException => e
      case e: JsonReaderException => e
    }
    log.info("Failure occurred while posting mutation script: {}", failure.getMessage)
    if (remainingAttempts > 0) retrying[T](actions, remainingAttempts - 1)
    else unexpectedError(s"Ran out of retry attempts after failure: ${failure.getMessage}", failure)
  }

  // Error codes the data-coordinator is known to return.
  val UpdateDatasetDoesNotExist = "update.dataset.does-not-exist"
  val UpdateRowUnknownColumn = "update.row.unknown-column"
  val ReqExportUnknownColumns = "req.export.unknown-columns"

  case class ErrorResponse(errorCode: String, data: JObject)
  implicit val erCodec = AutomaticJsonCodecBuilder[ErrorResponse]

  /**
   * Shared request driver: executes the request (with retries) and dispatches
   * on the HTTP status code. 200 streams the JSON array body to
   * `successHandle`; 400 is decoded and passed to `badRequestHandle`; 404 maps
   * the dataset-missing error code; 409 means the coordinator is busy (no
   * retry); anything else throws IOException to force a retry.
   */
  private def doRequest[T](request: () => SimpleHttpRequest,
                           message: String,
                           successHandle: Iterator[JValue] => Either[UnexpectedError, T],
                           badRequestHandle: ErrorResponse => Either[UnexpectedError, T],
                           serverErrorMessageExtra: String,
                           resourceScope: ResourceScope): Either[RequestFailure, T] = {
    retrying[T] {
      val resp = httpClient.execute(request(), resourceScope)
      val start = System.nanoTime()

      // Wraps body decoding so a non-JSON content type becomes an
      // UnexpectedError instead of propagating (and retrying).
      def logContentTypeFailure[V](v: => JValue)(f: JValue => Either[RequestFailure, T]): Either[RequestFailure, T] =
        try {
          f(v)
        } catch {
          case e: ContentTypeException =>
            log.warn("The response from data-coordinator was not a valid JSON content type! The request we sent was: {}", request.toString)
            unexpectedError("Unable to understand data-coordinator's response", e) // no retry here
        }

      resp.resultCode match {
        case 200 =>
          // success! ... well maybe...
          val end = System.nanoTime()
          log.info("{} in {}ms", message, (end - start) / 1000000) // this is kind of a lie... hmm
          val response = JsonArrayIterator.fromReader[JValue](resp.reader())
          // Tie the streaming iterator's lifetime to the underlying response.
          successHandle(resourceScope.openUnmanaged(response, transitiveClose = List(resp)))
        case 400 =>
          logContentTypeFailure(resp.jValue()) {
            JsonDecode.fromJValue[ErrorResponse](_) match {
              case Right(response) => badRequestHandle(response)
              case Left(e) => uninterpretableResponse(400, e)
            }}
        case 404 =>
          // { "errorCode" : "update.dataset.does-not-exist"
          // , "data" : { "dataset" : "XXX.XXX, "data" : { "commandIndex" : 0 }
          // }
          logContentTypeFailure(resp.jValue()) {
            JsonDecode.fromJValue[ErrorResponse](_) match {
              case Right(ErrorResponse(UpdateDatasetDoesNotExist, _)) => Left(DatasetDoesNotExist)
              case Right(response) => unexpectedResponse(404, response)
              case Left(e) => uninterpretableResponse(404, e)
            }}
        case 409 =>
          // come back later!
          Left(DataCoordinatorBusy) // no retry
        case other =>
          if (other == 500) {
            log.warn("Scream! 500 from data-coordinator! Going to retry; {}", serverErrorMessageExtra)
          }
          // force a retry
          throw new IOException(s"Unexpected result code $other from data-coordinator")
      }
    }
  }

  // Schema header of an export response: primary key plus (column id, type) pairs.
  case class Column(c: UserColumnId, t: JValue)
  case class Schema(pk: UserColumnId, schema: Seq[Column])
  implicit val coCodec = AutomaticJsonCodecBuilder[Column]
  implicit val scCodec = AutomaticJsonCodecBuilder[Schema]

  /**
   * Export the rows of a dataset for the given columns
   * @param columnSet must contain at least one column
   */
  def exportRows(columnSet: Seq[UserColumnId], cookie: CookieSchema, resourceScope: ResourceScope): Either[RequestFailure, Either[ColumnsDoNotExist, RowData[CV]]] = {
    require(columnSet.nonEmpty, "`columnSet` must be non-empty")
    log.info("Exporting rows for columns: {}", columnSet)

    val endpoint = datasetEndpoint.getOrElse(return Left(FailedToDiscoverDataCoordinator))
    // Comma-joined column ids for the "c" query parameter.
    val columns = columnSet.tail.foldLeft(columnSet.head.underlying) { (str, id) => str + "," + id.underlying }
    val builder = RequestBuilder(new java.net.URI(endpoint)).addParameter(("c", columns))

    // First array element is the Schema; the rest are row arrays, decoded
    // lazily into frozen column-id maps as the iterator is consumed.
    def successHandle(response: Iterator[JValue]): Either[UnexpectedError, Either[ColumnsDoNotExist, RowData[CV]]] = {
      if (response.isEmpty) return unexpectedError("Response from data-coordinator was an empty array.")
      JsonDecode.fromJValue[Schema](response.next()) match {
        case Right(Schema(pk, schema)) =>
          val columns = schema.map { col =>
            (cookie.columnIdMap(col.c), typeFromJValue(col.t).getOrElse {
              return unexpectedError(s"Could not derive column type for column ${col.c} from value: ${col.t}")
            })
          }.toArray
          val rows = response.map {
            JsonDecode.fromJValue[JArray](_) match {
              case Right(row) =>
                assert(row.size == columns.length)
                val colIdMap = new MutableColumnIdMap[CV]
                row.iterator.zipWithIndex.foreach { case (jVal, index) =>
                  val (colId, typ) = columns(index)
                  colIdMap += ((colId, fromJValueFunc(typ)(jVal).getOrElse {
                    return unexpectedError(s"Could not interpret column value of type $typ from value: $jVal")
                  }))
                }
                colIdMap.freeze()
              case Left(e) => return unexpectedErrorForResponse("Row data was not an array", 200, e.english)
            }
          }
          Right(Right(resourceScope.openUnmanaged(RowData(pk, rows), transitiveClose = List(response))))
        case Left(e) => unexpectedErrorForResponse("Response did not start with expected column schema", 200, e.english)
      }
    }

    // 400 with "req.export.unknown-columns" and a non-empty column set is the
    // one recoverable case; everything else is unexpected/uninterpretable.
    def badRequestHandle(response: ErrorResponse): Either[UnexpectedError, Either[ColumnsDoNotExist, RowData[CV]]] = response match {
      case ErrorResponse(ReqExportUnknownColumns, data) =>
        JsonDecode.fromJValue[Set[UserColumnId]](data.getOrElse("columns", return uninterpretableResponse(400, response))) match {
          case Right(unknown) if unknown.nonEmpty => Right(Left(ColumnsDoNotExist(unknown)))
          case _ => uninterpretableResponse(400, response)
        }
      case _ => unexpectedResponse(400, response)
    }

    doRequest(
      request = () => builder.get,
      message = s"Got row data for columns $columns",
      successHandle,
      badRequestHandle,
      serverErrorMessageExtra = s"my parameters where: c=$columns",
      resourceScope
    )
  }

  // Per-command results of a mutation script.
  sealed abstract class Response
  case class Upsert(typ: String, id: JValue, ver: String) extends Response
  case class NonfatalError(typ: String, err: String, id: Option[JValue]) extends Response
  implicit val upCodec = AutomaticJsonCodecBuilder[Upsert]
  implicit val neCodec = AutomaticJsonCodecBuilder[NonfatalError]
  implicit val reCodec = SimpleHierarchyCodecBuilder[Response](NoTag).branch[Upsert].branch[NonfatalError].build

  override def postMutationScript(script: JArray, cookie: CookieSchema): Option[Either[RequestFailure, UpdateSchemaFailure]] = {
    val endpoint = datasetEndpoint.getOrElse(return Some(Left(FailedToDiscoverDataCoordinator)))
    val builder = RequestBuilder(new java.net.URI(endpoint))

    // The script's first two elements are control commands; each remaining
    // element is a row update and must have exactly one result in the reply.
    def successHandle(response: Iterator[JValue]): Either[UnexpectedError, Option[UpdateSchemaFailure]] = {
      try {
        assert(response.hasNext, "Response contains no elements")
        JsonDecode.fromJValue[JArray](response.next()) match {
          case Right(results) =>
            assert(!response.hasNext, "Response contains more than one element")
            assert(results.elems.length == script.elems.length - 2, "Did not get one result for each upsert command that was sent")
            val rows = script.elems.slice(2, script.elems.length)
            results.elems.zip(rows).foreach { case (result, row) =>
              JsonDecode.fromJValue[Response](result) match {
                case Right(Upsert("update", _, _)) => // yay!
                case Right(NonfatalError("error", nonfatalError, Some(_))) if nonfatalError == "insert_in_update_only" || nonfatalError == "no_such_row_to_update" =>
                  // the row has been deleted, nothing more to do here
                case Right(other) => unexpectedErrorForResponse("Unexpected response in array", 200, other)
                case Left(e) => unexpectedErrorForResponse("Unable to interpret result in array", 200, e.english)
              }
            }
            Right(None)
          case Left(e) => unexpectedErrorForResponse("Row data was not an array", 200, e.english)
        }
      } catch {
        case e: JsonLexException => unexpectedErrorForResponse("Unable to parse response as JSON", 200, e.message, e)
        case e: ElementDecodeException => unexpectedErrorForResponse("Unable to parse an element of the response as JSON", 200, e.position, e)
      }
    }

    def badRequestHandle(response: ErrorResponse): Either[UnexpectedError, Option[UpdateSchemaFailure]] = {
      response.errorCode match {
        // { "errorCode" : "update.row.unknown-column"
        // , "data" : { "commandIndex" : 1, "commandSubIndex" : XXX, "dataset" : "XXX.XXX", "column" : "XXXX-XXXX"
        // }
        case UpdateRowUnknownColumn =>
          response.data.get("column") match {
            case Some(JString(id)) if id != cookie.systemId.underlying => Right(Some(TargetColumnDoesNotExist(new UserColumnId(id))))
            case _ => uninterpretableResponse(400, response)
          }
        case _ => unexpectedResponse(400, response)
      }
    }

    val requestResult = using(new ResourceScope("post mutation script")) { resourceScope =>
      doRequest(
        request = () => builder.json(JValueEventIterator(script)),
        message = s"Posted mutation script with ${script.length - 2} row updates",
        successHandle,
        badRequestHandle,
        serverErrorMessageExtra = s"my mutation script was: ${JsonUtil.renderJson(script, pretty = false)}",
        resourceScope
      ) match {
        case Left(failure) => Some(Left(failure))
        case Right(Some(failure)) => Some(Right(failure))
        case Right(None) => None
      }
    }
    requestResult
  }
}
socrata-platform/data-coordinator
secondarylib-feedback/src/main/scala/com/socrata/datacoordinator/secondary/feedback/DataCoordinatorClient.scala
Scala
apache-2.0
14,718
/*
 * DecisionVariableElimination.scala
 * Variable elimination for Decisions algorithm.
 *
 * Created By:      Brian Ruttenberg (bruttenberg@cra.com)
 * Creation Date:   Oct 1, 2012
 *
 * Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc.
 * See http://www.cra.com or email figaro@cra.com for information.
 *
 * See http://www.github.com/p2t2/figaro for a copy of the software license.
 */

package com.cra.figaro.algorithm.decision

import com.cra.figaro.algorithm._
import com.cra.figaro.algorithm.factored._
import com.cra.figaro.algorithm.sampling._
import com.cra.figaro.language._
import com.cra.figaro.library.decision._
import com.cra.figaro.util._
import com.cra.figaro.algorithm.lazyfactored.Extended
import annotation.tailrec
import scala.collection.mutable.{ Map, Set }
import scala.language.existentials

/* Trait only extends for double utilities. User needs to provide another trait or convert utilities to double
 * in order to use
 */
/**
 * Trait for Decision based Variable Elimination. This implementation is hardcoded to use
 * Double utilities. Factor entries are (probability, expected utility) pairs
 * combined with the sum-product-utility semiring.
 */
trait ProbabilisticVariableEliminationDecision extends VariableElimination[(Double, Double)] {
  /** Retrieve utility nodes in the model */
  /* Implementations must define this */
  def getUtilityNodes: List[Element[_]]

  /**
   * Semiring for Decisions uses a sum-product-utility semiring
   */
  override val semiring = SumProductUtilitySemiring

  /**
   * Makes a utility factor an element designated as a utility. This is factor of a tuple (Double, Double)
   * where the first value is 1.0 and the second is a possible utility of the element
   */
  def makeUtilFactor(e: Element[_]): Factor[(Double, Double)] = {
    val f = new Factor[(Double, Double)](List(Variable(e)))
    f.fillByRule((l: List[Any]) => (1.0, l.asInstanceOf[List[Extended[Double]]](0).value))
    f
  }

  /* Even though utility nodes are eliminated, we need to create factors for them and anything they use. */
  override def starterElements = getUtilityNodes ::: targetElements

  /**
   * Create the factors for decision factors. Each factor is hardcoded as a tuple of (Double, Double),
   * where the first value is the probability and the second is the utility.
   */
  def getFactors(neededElements: List[Element[_]], targetElements: List[Element[_]], upper: Boolean = false): List[Factor[(Double, Double)]] = {
    if (debug) {
      println("Elements (other than utilities) appearing in factors and their ranges:")
      for { element <- neededElements } {
        println(Variable(element).id + "(" + element.name.string + "@" + element.hashCode + ")" + ": " + element + ": " + Variable(element).range.mkString(","))
      }
    }

    val thisUniverseFactorsExceptUtil = neededElements flatMap (ProbFactor.make(_))
    // Make special utility factors for utility elements
    val thisUniverseFactorsUtil = getUtilityNodes map (makeUtilFactor(_))

    val dependentUniverseFactors =
      for { (dependentUniverse, evidence) <- dependentUniverses }
        yield ProbFactor.makeDependentFactor(universe, dependentUniverse, dependentAlgorithm(dependentUniverse, evidence))

    // Convert all non-utility factors from standard factors to decision factors, ie, factors are now tuples of (Double, _)
    val thisUniverseFactorsExceptUtil_conv = thisUniverseFactorsExceptUtil.map(s => convert(s, false))
    val thisUniverseFactorsUtil_conv = thisUniverseFactorsUtil
    val dependentUniverseFactors_conv = dependentUniverseFactors.map(s => convert(s, false))

    dependentUniverseFactors_conv ::: thisUniverseFactorsExceptUtil_conv ::: thisUniverseFactorsUtil_conv
  }

  /*
   * Converts a factor created by ProbFactor into a tuple of (Prob, E[Utility]), where E[Utility] is zero for
   * all non-utility nodes, and Prob is 1 for all utility nodes
   */
  private def convert(f: Factor[Double], utility: Boolean): Factor[(Double, Double)] = {
    val factor = new Factor[(Double, Double)](f.variables)
    val allIndices = f.allIndices

    allIndices.foreach { k: List[Int] =>
      val p = f.get(k)
      // A utility factor must be over exactly one variable; its range value is the utility.
      val v = if (utility) {
        if (f.variables.length > 1) throw new IllegalUtilityNodeException
        f.variables(0).range(k(0)).asInstanceOf[Double]
      } else {
        0.0
      }
      factor.set(k, (p, v))
    }
    factor
  }
}

/**
 * Decision VariableElimination algorithm that computes the expected utility of decision elements using the default
 * elimination order.
 */
class ProbQueryVariableEliminationDecision[T, U](override val universe: Universe, utilityNodes: List[Element[_]], target: Element[_])(
  val showTiming: Boolean,
  val dependentUniverses: List[(Universe, List[NamedEvidence[_]])],
  val dependentAlgorithm: (Universe, List[NamedEvidence[_]]) => () => Double)
  extends OneTimeProbQuery
  with ProbabilisticVariableEliminationDecision
  with DecisionAlgorithm[T, U] {
  lazy val queryTargets = List(target)

  /**
   * The variable elimination eliminates all variables except on all decision nodes and their parents.
   * Thus the target elements is both the decision element and the parent element
   */
  val targetElements = List(target, target.args(0))

  def getUtilityNodes = utilityNodes

  // Final (un-marginalized) factor over the decision and its parent, set by finish().
  private var finalFactors: Factor[(Double, Double)] = new Factor[(Double, Double)](List[Variable[_]]())

  /* Marginalizes the final factor using the semiring for decisions */
  private def marginalizeToTarget(factor: Factor[(Double, Double)], target: Element[_]): Unit = {
    val unnormalizedTargetFactor = factor.marginalizeTo(semiring, Variable(target))
    // Normalization constant is the total probability mass (utilities are not normalized here).
    val z = unnormalizedTargetFactor.foldLeft(semiring.zero, (x: (Double, Double), y: (Double, Double)) => (x._1 + y._1, 0.0))
    val targetFactor = new Factor[(Double, Double)](unnormalizedTargetFactor.variables)
    unnormalizedTargetFactor.mapTo((d: (Double, Double)) => (d._1 / z._1, d._2), targetFactor)
    targetFactors += target -> targetFactor
  }

  private def marginalize(resultFactor: Factor[(Double, Double)]) =
    queryTargets foreach (marginalizeToTarget(resultFactor, _))

  private def makeResultFactor(factorsAfterElimination: Set[Factor[(Double, Double)]]): Factor[(Double, Double)] = {
    // It is possible that there are no factors (this will happen if there are no decisions or utilities).
    // Therefore, we start with the unit factor and use foldLeft, instead of simply reducing the factorsAfterElimination.
    factorsAfterElimination.foldLeft(Factor.unit(semiring))(_.product(_, semiring))
  }

  def finish(factorsAfterElimination: Set[Factor[(Double, Double)]], eliminationOrder: List[Variable[_]]) =
    finalFactors = makeResultFactor(factorsAfterElimination)

  /**
   * Returns distribution of the target, ignoring utilities
   */
  def computeDistribution[T](target: Element[T]): Stream[(Double, T)] = {
    val factor = targetFactors(target)
    val targetVar = Variable(target)
    val dist = targetVar.range.filter(_.isRegular).map(_.value).zipWithIndex map (pair => (factor.get(List(pair._2))._1, pair._1))
    // normalization is unnecessary here because it is done in marginalizeTo
    dist.toStream
  }

  /**
   * Returns expectation of the target, ignoring utilities
   */
  def computeExpectation[T](target: Element[T], function: T => Double): Double = {
    def get(pair: (Double, T)) = pair._1 * function(pair._2)
    (0.0 /: computeDistribution(target))(_ + get(_))
  }

  /**
   * Returns the computed utility of all parent/decision tuple values. For VE, these are not samples
   * but the actual computed expected utility for all combinations of the parent and decision
   */
  def computeUtility(): scala.collection.immutable.Map[(T, U), DecisionSample] = computeStrategy(finalFactors)

  /*
   * Converts the final factor into a map of parent/decision values and expected utility
   */
  private def computeStrategy(factor: Factor[(Double, Double)]) = {
    val strat = Map[(T, U), DecisionSample]()

    //find the variable associated with the decision
    val decisionVariable = factor.variables.filter(_.asInstanceOf[ElementVariable[_]].element == target)(0)

    // find the variables of the parents.
    val parentVariable = factor.variables.filterNot(_ == decisionVariable)(0)

    // index of the decision variable
    val indexOfDecision = indices(factor.variables, decisionVariable)
    val indexOParent = indices(factor.variables, parentVariable)

    for { indices <- factor.allIndices } {

      /* for each index in the list of indices, strip out the decision variable index,
       * and retrieve the map entry for the parents. If the factor value is greater than
       * what is currently stored in the strategy map, replace the decision with the new one from the factor
       */
      val parent = parentVariable.range(indices(indexOParent(0))).value.asInstanceOf[T]
      val decision = decisionVariable.range(indices(indexOfDecision(0))).value.asInstanceOf[U]
      val utility = factor.get(indices)._2
      strat += (parent, decision) -> DecisionSample(utility, 1.0)
    }
    strat.toMap
  }
}

object DecisionVariableElimination {

  /* Checks conditions of Decision Usage
   * 1. Double utilities
   */
  private[decision] def usageCheck(utilityNodes: List[Element[_]], target: Decision[_, _]): Unit = {
    utilityNodes.foreach { u =>
      u.value match {
        case d: Double => 1
        case _ => throw new IllegalArgumentException("Only double utilities are allowed")
      }
    }
  }

  /**
   * Create a decision variable elimination instance with the given decision variables and indicated utility
   * nodes
   */
  def apply[T, U](utilityNodes: List[Element[_]], target: Decision[T, U])(implicit universe: Universe) = {
    utilityNodes.foreach(_.generate()) // need initial values for the utility nodes before the usage check
    usageCheck(utilityNodes, target)
    new ProbQueryVariableEliminationDecision[T, U](universe, utilityNodes, target)(
      false,
      List(),
      (u: Universe, e: List[NamedEvidence[_]]) => () => ProbEvidenceSampler.computeProbEvidence(10000, e)(u))
  }

  /**
   * Create a decision variable elimination algorithm with the given decision variables and indicated utility
   * nodes and using the given dependent universes in the current default universe.
   */
  def apply[T, U](dependentUniverses: List[(Universe, List[NamedEvidence[_]])], utilityNodes: List[Element[_]], target: Decision[T, U])(implicit universe: Universe) = {
    utilityNodes.foreach(_.generate()) // need initial values for the utility nodes before the usage check
    usageCheck(utilityNodes, target)
    new ProbQueryVariableEliminationDecision[T, U](universe, utilityNodes, target)(
      false,
      dependentUniverses,
      (u: Universe, e: List[NamedEvidence[_]]) => () => ProbEvidenceSampler.computeProbEvidence(10000, e)(u))
  }

  /**
   * Create a decision variable elimination algorithm with the given decision variables and indicated utility
   * nodes and using the given dependent universes in the current default universe. Use the given dependent
   * algorithm function to determine the algorithm to use to compute probability of evidence in each dependent universe.
   */
  def apply[T, U](
    dependentUniverses: List[(Universe, List[NamedEvidence[_]])],
    dependentAlgorithm: (Universe, List[NamedEvidence[_]]) => () => Double,
    utilityNodes: List[Element[_]],
    target: Decision[T, U])(implicit universe: Universe) = {
    utilityNodes.foreach(_.generate()) // need initial values for the utility nodes before the usage check
    usageCheck(utilityNodes, target)
    new ProbQueryVariableEliminationDecision[T, U](universe, utilityNodes, target)(
      false,
      dependentUniverses,
      dependentAlgorithm)
  }
}
wkretschmer/figaro
Figaro/src/main/scala/com/cra/figaro/algorithm/decision/DecisionVariableElimination.scala
Scala
bsd-3-clause
11,770
// Negative compiler test: B presumably extends a non-public trait/class A (e.g. a
// package-private Java type declared elsewhere in this test's sources — TODO confirm
// against the sibling A source). Calls to foo() are legal through the concrete class C,
// but resolving foo() through B's interface requires emitting a reference to A, which
// is not accessible outside package pkg — hence the expected errors below.
package pkg {
  trait B extends A
  class C extends B
}

object Test {
  // Same pattern inside a method body: call through C is fine, call through B is not.
  def test1: Unit = {
    val c = new pkg.C
    c.foo() // OK
    val b: pkg.B = c
    b.foo() // error: Unable to emit reference to method foo in class A, class A is not accessible in object Test
  }

  // And the same pattern at object-initializer level.
  val c2 = new pkg.C
  c2.foo() // OK
  val b2: pkg.B = c2
  b2.foo() // error: Unable to emit reference to method foo in class A, class A is not accessible in object Test
}
dotty-staging/dotty
tests/neg/java-trait-access/B.scala
Scala
apache-2.0
447
import org.junit._
import Assert._

import tu.coreservice.spellcorrector.{SpellCorrectorGoogle, SpellCorrector}

/**
 * JUnit tests for the sentence-level spell correctors: the default compound
 * corrector obtained via the companion factory and the Google-backed one.
 * Both are expected to repair spelling ("wrld") and agreement ("are" -> "is").
 */
@Test
class CorrectorTest {

  /** The compound corrector fixes a misspelled word and a verb-agreement error. */
  @Test
  def testCompoundCorrector() = {
    // "Hello world"-style smoke test
    val corrector = SpellCorrector.apply()
    val corrected = corrector.correctSentence("The wrld are red")
    assertTrue(corrected == "The world is red")
  }

  /** The Google-backed corrector yields the same correction for the same input. */
  @Test
  def testGoogle() = {
    val googleCorrector = new SpellCorrectorGoogle()
    val corrected = googleCorrector.correctSentence("The wrld are red")
    assertTrue(corrected == "The world is red")
  }

  /*@Test
  def testWorkingSentence() = {
    //Seems to CatiaV5 aren?t installed correcly
    var corrector = SpellCorrector.apply()
    var test = corrector.correctSentence("Seems to CatiaV5 aren?t installed correcly")
    assertTrue(test == "Seems to CatiaV5 aren't installed correctly")
    test = corrector.correctSentence("user is not able to use the wirelsess function on his laptop")
    assertTrue(test == "user is not able to use the wireless function on his laptop")
  }
  @Test
  def testAdditional() = {
    //It?s a Vista-PC
    var corrector = SpellCorrector.apply()
    var test = corrector.correctSentence("It?s a Vista-PC")
    assertTrue(test == "It?s a Vista-PC")
  }
  */

  // @Test
  // def testKO() = assertTrue(false)
}
keskival/2
coreservice.spellcorrector/src/test/scala/CorrectorTest.scala
Scala
gpl-3.0
1,305
package com.phaller.rasync.npv

/**
 * Accumulates simple statistics — a running mean and a fixed-width histogram —
 * over a stream of Double observations expected to fall in [min, max).
 *
 * @param min        lower bound of the histogram range
 * @param max        upper bound of the histogram range
 * @param numBuckets number of equally sized histogram buckets
 */
class StatsCollector(min: Double, max: Double, numBuckets: Int) {

  // How many collector instances have been merged into this one (via combine).
  var instances: Int = 1

  private val range: Double = max - min
  private var mean: Double = 0
  private var numObs: Int = 0

  // Bucket i counts observations in [min + i*range/numBuckets, min + (i+1)*range/numBuckets).
  val buckets: Array[Int] = Array.ofDim[Int](numBuckets)

  /**
   * Records one observation: updates the running mean incrementally and
   * increments the matching histogram bucket.
   *
   * Observations on or outside the [min, max) range are clamped into the
   * first/last bucket; previously `addObs(max)` computed bucket == numBuckets
   * and threw ArrayIndexOutOfBoundsException.
   */
  def addObs(obs: Double): Unit = {
    mean = (obs + (numObs * mean)) / (numObs + 1)
    numObs += 1
    val raw: Int = Math.floor(numBuckets * (obs - min) / range).toInt
    val bucket: Int = Math.max(0, Math.min(numBuckets - 1, raw))
    buckets(bucket) = buckets(bucket) + 1
  }

  /**
   * Merges another collector's statistics into this one. The merged mean is
   * the observation-count-weighted average of the two means.
   */
  def combine(collector: StatsCollector): Unit = {
    instances += collector.instances
    val totalObs = numObs + collector.numObs
    // Bug fix: the original read `... / numObs + collector.numObs`, i.e. it
    // divided by this collector's count only and then ADDED the other count,
    // yielding a wrong merged mean. The denominator must be the total count.
    // Guard totalObs == 0 so merging two empty collectors keeps mean 0
    // instead of producing NaN (0.0 / 0).
    if (totalObs > 0) {
      mean = ((numObs * mean) + (collector.numObs * collector.mean)) / totalObs
    }
    numObs = totalObs
    for (i <- 0 until numBuckets) {
      buckets(i) = buckets(i) + collector.buckets(i)
    }
  }

  /** Renders instance count, mean, observation count and the histogram. */
  override def toString(): String = {
    val sb = new StringBuilder()
    sb.append("Collected Statistics")
    sb.append(System.lineSeparator())
    sb.append("--------------------")
    sb.append(System.lineSeparator())
    sb.append(System.lineSeparator())
    sb.append(f"Number of instances: $instances%d")
    sb.append(System.lineSeparator())
    sb.append(f"Mean: $mean%2f")
    sb.append(System.lineSeparator())
    sb.append(f"Number of observations: $numObs%d")
    sb.append(System.lineSeparator())
    sb.append("Histogram")
    sb.append(System.lineSeparator())
    for (i <- 0 until numBuckets) {
      sb.append(f"  ${i + 1}%3d ${buckets(i)}%d")
      sb.append(System.lineSeparator())
    }
    sb.append(System.lineSeparator())
    sb.toString()
  }
}
phaller/reactive-async
monte-carlo-npv/src/main/scala/com/phaller/rasync/npv/StatsCollector.scala
Scala
bsd-2-clause
1,605
import org.scalatest._
import scala.xml.transform._
import scala.xml._

/**
 * Exercises scala-xml querying and in-place rewriting: projection operators on a
 * small literal document, then a RewriteRule/RuleTransformer pass that replaces
 * the text and an attribute of the <sub> element.
 */
class XMLSpec extends FlatSpec {

  // Fixture: a single <sub> child carrying a "horse" attribute and text content.
  val xml = <root><sub horse="Steed">Gumby</sub></root>

  "our xml" should "have a gumby attribute" in {
    // Query the original document: element text and attribute value.
    assert((xml \\ "sub").text == "Gumby")
    assert((xml \\\\ "@horse").text == "Steed")

    // Rule that rewrites any <sub> element: new text child and a replaced
    // "horse" attribute; all other nodes pass through unchanged.
    object ChangeGumby extends RewriteRule {
      override def transform(node : Node):Node = node match {
        case elem: Elem if elem.label == "sub" =>
          elem.copy(
            child = Seq(Text("Great Gumby")),
            attributes = Attribute(null, "horse", "Nightshade", elem.attributes)
          )
        case whatever => whatever
      }
    }
    object xf extends RuleTransformer(ChangeGumby)

    // Transform and verify both the attribute and the text were rewritten.
    val changed = xf(xml)
    assert((changed \\\\ "@horse").text == "Nightshade")
    assert((changed \\ "sub").text == "Great Gumby")
  }
}
delving/play-oscr
test/XMLSpec.scala
Scala
apache-2.0
859
/*  _____                    _
 * |  ___| __ __ _ _ __ ___ (_) __ _ _ __
 * | |_ | '__/ _` | '_ ` _ \\| |/ _` | '_ \\
 * |  _|| | | (_| | | | | | | | (_| | | | |
 * |_|  |_|  \\__,_|_| |_| |_|_|\\__,_|_| |_|
 *
 * Copyright 2014 Pellucid Analytics
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package framian

import scala.language.implicitConversions
import scala.reflect.ClassTag

import spire.algebra.Order

import framian.column._

/**
 * Pairs a value with its runtime ClassTag so the erased element type can be
 * recovered later (used by [[Rec.apply]] to build typed columns).
 */
final class TypeWitness[A](val value: A)(implicit val classTag: ClassTag[A])

object TypeWitness {
  // Implicitly lifts any value into a TypeWitness, capturing its ClassTag.
  implicit def lift[A: ClassTag](a: A) = new TypeWitness[A](a)
}

/**
 * A `Rec` is an untyped sequence of values - usually corresponding to a row or
 * column in a Frame. It is a view over a series of untyped columns at a fixed
 * row index; individual values are recovered by casting a column via `get`.
 */
final class Rec[K](cols: Series[K, UntypedColumn], row: Int) {

  /** Returns the value under key `col`, cast to `A`; NA/NM cells propagate. */
  def get[A: ColumnTyper](col: K): Cell[A] = cols(col) flatMap (_.cast[A].apply(row))

  /** All key/value pairs of this record, with values widened to `Any`. */
  def values: Iterable[(K, Cell[Any])] = cols.to[Vector] map { case (k, colCell) =>
    val value = for {
      col <- colCell
      a <- col.cast[Any].apply(row)
    } yield a
    k -> value
  }

  // Renders like: Rec(k1 -> v1, k2 -> na, ...); NA prints as "na", NM as "nm".
  override def toString: String =
    values.map { case (k, value) =>
      s"""$k -> ${value.fold("na", "nm")(_.toString)}"""
    }.mkString("Rec(", ", ", ")")

  // Equality and hashing are defined structurally on the materialized values,
  // not on the backing column series.
  override def equals(that: Any): Boolean = that match {
    case (that: Rec[_]) => this.values == that.values
    case _ => false
  }

  override def hashCode: Int = this.values.hashCode * 23
}

object Rec {

  /**
   * Builds a single-row Rec from key/value pairs; each value's TypeWitness
   * supplies the ClassTag needed to construct a typed single-value column.
   */
  def apply[K: Order: ClassTag](kvs: (K, TypeWitness[_])*): Rec[K] = {
    val cols: Series[K, UntypedColumn] = Series(kvs.map { case (k, w: TypeWitness[a]) =>
      k -> TypedColumn[a](Column.value(w.value))(w.classTag)
    }: _*)
    new Rec(cols, 0)
  }

  /** Views row `row` of `frame` as a Rec keyed by the frame's column keys. */
  def fromRow[K](frame: Frame[_, K])(row: Int): Rec[K] =
    new Rec(frame.columnsAsSeries, row)

  /** Views column `col` of `frame` as a Rec keyed by the frame's row keys. */
  def fromCol[K](frame: Frame[K, _])(col: Int): Rec[K] =
    new Rec(frame.rowsAsSeries, col)

  // RowExtractor instance so a Rec can be extracted for any subset of keys:
  // `prepare` narrows the columns to the requested keys, `extract` wraps the
  // narrowed series at the given row.
  implicit def RecRowExtractor[K]: RowExtractor[Rec[K], K, Variable] = new RowExtractor[Rec[K], K, Variable] {
    type P = Series[K, UntypedColumn]

    def prepare(cols: Series[K, UntypedColumn], keys: List[K]): Option[P] = {
      import cols.index.{ order, classTag }
      Some(Series.fromCells(keys map { k => k -> cols(k) }: _*))
    }

    def extract(row: Int, cols: P): Cell[Rec[K]] =
      Value(new Rec(cols, row))
  }
}
codeaudit/framian
framian/src/main/scala/framian/Rec.scala
Scala
apache-2.0
2,823
import javax.inject._
import play.api._
import play.api.http.HttpFilters
import play.filters.csrf.CSRFFilter

/**
 * Declares the HTTP filter chain Play applies to every request.
 *
 * Play discovers this class automatically when it is named `Filters` and lives
 * in the root package; an alternative class can be configured via the
 * `play.http.filters` key in `application.conf`.
 *
 * @param env        basic environment settings for the current application
 * @param csrfFilter the [[CSRFFilter]] that adds a header to each response
 */
@Singleton
class Filters @Inject() (env: Environment, csrfFilter: CSRFFilter) extends HttpFilters {
  // Currently only CSRF protection is enabled.
  override val filters = Seq(csrfFilter)
}
orendain/trucking-iot
web-application/backend/app/Filters.scala
Scala
apache-2.0
826
/*
 * Copyright 2016 The BigDL Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.intel.analytics.bigdl.dllib.nn

import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.dllib.tensor.{Storage, Tensor}
import com.intel.analytics.bigdl.dllib.utils.RandomGenerator._
import com.intel.analytics.bigdl.dllib.utils.serializer.ModuleSerializationTest
import org.scalatest.{FlatSpec, Matchers}

import scala.util.Random

/**
 * Tests SpatialWithinChannelLRN (within-channel local response normalization,
 * size=5, alpha=5e-4, beta=0.75): a forward pass against a precomputed
 * reference output, and a numerical gradient check.
 */
class SpatialWithinChannelLRNSpec extends FlatSpec with Matchers{

  "forward" should "work" in {
    val layer = new SpatialWithinChannelLRN[Float](5, 5e-4, 0.75)
    // Input fixture: a 1 x 4 x 7 x 6 tensor of fixed literal values.
    val input = Tensor(Storage(Array(-0.5629349351, 0.1707911491, -0.6980619431,
      0.7621926665, 0.5278353691, -0.3644135892, -0.7721814513, 0.1747673303,
      -0.0607845485, -0.6372885108, -0.8762480021, -0.1419942826, 0.1868573427,
      0.9514402151, -1.3372359276, -2.2525136471, 0.9049832225, -0.5155935884,
      1.7007764578, 0.1731968075, -0.8370018601, 1.0927723646, -1.9021866322,
      1.7037003040, -0.4785003364, 2.9398944378, 1.0938369036, -1.1450070143,
      -0.4233447611, -0.0770315304, 0.3048466742, -0.3763152659, -0.4508697689,
      -1.5802454948, -0.2301389128, -0.0077587459, 1.3115756512, -0.8771016598,
      0.2766959071, 0.9347394705, -0.3583759665, -0.0234385580, -0.8723886013,
      1.0725305080, -0.5435923934, -1.2200671434, 1.1134344339, -0.3956952393,
      1.2290611267, -0.0300420318, -0.6727856994, -1.2889897823, 0.8753286004,
      1.6655322313, 0.0144331316, 0.1103931740, -1.3110619783, -0.0822269097,
      -1.0805166960, -1.7619274855, 0.6417796016, -0.1190131158, 0.2828691006,
      0.2091746032, 0.2575667202, 0.0919162408, -0.4426752627, -0.8307037950,
      -0.4315507710, 0.6092016101, 0.1071894467, 0.3243490458, 0.2481118143,
      -0.9265315533, -1.1380627155, 0.2642920315, 0.4124689102, 0.0631492808,
      0.2114779800, -0.8970818520, 1.3339444399, -1.9507933855, -2.0102806091,
      -0.4132987857, 1.7795655727, 0.7749545574, 0.1243268624, 0.2292353660,
      -0.2117905766, -0.7013441324, -0.1906698346, 0.7260479927, -0.1027618498,
      0.4159305096, -0.9893618822, 0.9117889404, -0.4076909125, -0.5034028888,
      0.2496513128, 1.1489638090, 0.0799294263, -1.1163944006, 1.0516333580,
      0.4432829320, 1.4964165688, 0.4551719129, 1.2555737495, -0.7395679355,
      0.8859211802, 1.4474883080, -1.8880417347, 0.2351293117, -1.4667420387,
      -1.3699244261, -1.9512387514, -0.2791400254, 0.1369622797, -0.8618115783,
      0.5658946037, -1.0372757912, 0.8631318808, -0.2621137798, 1.1617847681,
      0.8746930361, -0.3064057231, -1.6209356785, -1.2230005264, 0.3100630939,
      0.8659827113, 0.6965786815, 1.9879816771, -0.4788473845, 0.9765572548,
      -0.1230701804, 0.0977652520, -0.5547776818, 0.2019653022, 1.4806493521,
      -0.6643462181, -0.2502589226, -0.3229486048, -0.3250016570, -0.8827708960,
      -0.3872688115, -1.3991631269, -0.1658344567, 0.9363160133, -1.0897105932,
      0.0178568624, 0.6493149996, -0.7701879740, -0.5503393412, 0.2017845362,
      -1.2033561468, 0.3187648654, -1.8862437010, -1.0454750061, 0.0067651863,
      -0.6173903942, -1.6129561663, -0.0125843389, -0.0205810498, 0.0797127113,
      0.5228791833, 0.0640839785, -0.2918730080, 0.4691511989,
      -0.4645946920).map(_.toFloat)))
      .resize(1, 4, 7, 6)
    // Reference output for the same 1 x 4 x 7 x 6 input; presumably generated
    // by an external reference implementation — TODO confirm its provenance.
    val expected = Tensor(Storage(Array(-0.5628995895, 0.1707648933, -0.6979351044,
      0.7620602250, 0.5277513266, -0.3643679619, -0.7720909715, 0.1747278422,
      -0.0607658178, -0.6370970607, -0.8759979010, -0.1419600844, 0.1868072301,
      0.9510628581, -1.3365921974, -2.2514543533, 0.9046882987, -0.5154578686,
      1.7003303766, 0.1731241792, -0.8365827203, 1.0922356844, -1.9015303850,
      1.7032119036, -0.4783609509, 2.9385557175, 1.0932601690, -1.1444340944,
      -0.4231994748, -0.0770096704, 0.3047703505, -0.3761879206, -0.4506902993,
      -1.5796643496, -0.2300873548, -0.0077572609, 1.3113185167, -0.8768681288,
      0.2766207457, 0.9345140457, -0.3583408892, -0.0234367829, -0.8723114133,
      1.0723847151, -0.5434926748, -1.2197744846, 1.1131868362, -0.3956218362,
      1.2289429903, -0.0300377030, -0.6726560593, -1.2886765003, 0.8751313090,
      1.6652204990, 0.0144315129, 0.1103748828, -1.3107807636, -0.0822052434,
      -1.0802621841, -1.7615847588, 0.6417075396, -0.1189959049, 0.2828189433,
      0.2091255486, 0.2575123906, 0.0919020027, -0.4426211119, -0.8305486441,
      -0.4314348996, 0.6090135574, 0.1071601808, 0.3242850900, 0.2480878979,
      -0.9263827205, -1.1378066540, 0.2642342746, 0.4123934209, 0.0631408766,
      0.2114592046, -0.8969451189, 1.3336566687, -1.9503731728, -2.0099184513,
      -0.4132445455, 1.7794363499, 0.7748803496, 0.1243130341, 0.2292126119,
      -0.2117739469, -0.7012900114, -0.1906458735, 0.7259376645, -0.1027422175,
      0.4158595204, -0.9892163873, 0.9116867185, -0.4076002836, -0.5032773018,
      0.2495712340, 1.1485998631, 0.0799085051, -1.1162009239, 1.0513975620,
      0.4431669712, 1.4959123135, 0.4550207257, 1.2552160025, -0.7394225597,
      0.8857015371, 1.4470622540, -1.8873686790, 0.2350427955, -1.4662616253,
      -1.3696243763, -1.9507690668, -0.2790654302, 0.1369171739, -0.8615318537,
      0.5657315850, -1.0370885134, 0.8629699349, -0.2620584667, 1.1614948511,
      0.8744715452, -0.3063384593, -1.6206996441, -1.2229285240, 0.3100406528,
      0.8658581376, 0.6964817047, 1.9877101183, -0.4787881970, 0.9764578342,
      -0.1230537966, 0.0977452397, -0.5546795130, 0.2019301802, 1.4804306030,
      -0.6642692685, -0.2502166629, -0.3228704631, -0.3249176145, -0.8825492859,
      -0.3871819377, -1.3990192413, -0.1658033431, 0.9361273646, -1.0894701481,
      0.0178530309, 0.6491894722, -0.7701167464, -0.5502436757, 0.2017461210,
      -1.2031224966, 0.3187061250, -1.8859360218, -1.0453878641, 0.0067640827,
      -0.6172866821, -1.6126719713, -0.0125822360, -0.0205780119, 0.0797094926,
      0.5228255987, 0.0640771016, -0.2918325961, 0.4690902829,
      -0.4645373523).map(_.toFloat)))
      .resize(1, 4, 7, 6)
    val out = layer.forward(input)
    // Element-wise comparison with an absolute tolerance of 1e-6.
    out.map(expected, (a, b) => {
      assert(Math.abs(a - b) < 1e-6);
      a
    })
  }

  "gradient check" should "pass" in {
    val layer = SpatialWithinChannelLRN[Double](5, 5e-4, 0.75)
    // Fixed seed so the random input (and hence the check) is reproducible.
    val seed = 100
    RNG.setSeed(seed)
    val input = Tensor[Double](4, 4, 4, 6).apply1(e => Random.nextDouble())
    // Numerical gradient check: perturbation 1e-4, tolerance 1e-3.
    val checker = new GradientChecker(1e-4)
    checker.checkLayer[Double](layer, input, 1e-3) should be(true)
  }
}

/** Round-trips a configured SpatialWithinChannelLRN through module serialization. */
class SpatialWithinChannelLRNSerialTest extends ModuleSerializationTest {
  override def test(): Unit = {
    val spatialWithinChannelLRN = new SpatialWithinChannelLRN[Float](5, 5e-4, 0.75).
      setName("spatialWithinChannelLRN")
    val input = Tensor[Float](1, 4, 7, 6).apply1( e => Random.nextFloat())
    runSerializationTest(spatialWithinChannelLRN, input)
  }
}
intel-analytics/BigDL
scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/nn/SpatialWithinChannelLRNSpec.scala
Scala
apache-2.0
7,495
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.catalyst.expressions

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
import org.apache.spark.sql.catalyst.util.IntervalUtils
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String

/**
 * Tests MutableProjection over fixed-length and variable-length data types,
 * in both codegen and interpreted modes, including projection into an
 * UnsafeRow target buffer and the unsupported-type error path.
 */
class MutableProjectionSuite extends SparkFunSuite with ExpressionEvalHelper {

  // Types whose values have a fixed physical width and so can back an UnsafeRow.
  val fixedLengthTypes = Array[DataType](
    BooleanType, ByteType, ShortType, IntegerType, LongType, FloatType, DoubleType,
    DateType, TimestampType)

  // Variable-width / complex types (not valid for a mutable UnsafeRow target).
  val variableLengthTypes = Array(
    StringType, DecimalType.defaultConcreteType, CalendarIntervalType, BinaryType,
    ArrayType(StringType), MapType(IntegerType, StringType),
    StructType.fromDDL("a INT, b STRING"), ObjectType(classOf[java.lang.Integer]))

  /** Builds a projection of one nullable BoundReference per input data type. */
  def createMutableProjection(dataTypes: Array[DataType]): MutableProjection = {
    MutableProjection.create(dataTypes.zipWithIndex.map(x => BoundReference(x._2, x._1, true)))
  }

  // Projecting fixed-length values should reproduce the input row unchanged.
  testBothCodegenAndInterpreted("fixed-length types") {
    val inputRow = InternalRow.fromSeq(Seq(true, 3.toByte, 15.toShort, -83, 129L, 1.0f, 5.0, 1, 2L))
    val proj = createMutableProjection(fixedLengthTypes)
    assert(proj(inputRow) === inputRow)
  }

  // Projecting into a pre-allocated UnsafeRow buffer must round-trip the values.
  testBothCodegenAndInterpreted("unsafe buffer") {
    val inputRow = InternalRow.fromSeq(Seq(false, 1.toByte, 9.toShort, -18, 53L, 3.2f, 7.8, 4, 9L))
    val numBytes = UnsafeRow.calculateBitSetWidthInBytes(fixedLengthTypes.length)
    val unsafeBuffer = UnsafeRow.createFromByteArray(numBytes, fixedLengthTypes.length)
    val proj = createMutableProjection(fixedLengthTypes)
    val projUnsafeRow = proj.target(unsafeBuffer)(inputRow)
    assert(SafeProjection.create(fixedLengthTypes)(projUnsafeRow) === inputRow)
  }

  // Variable-length values are compared after converting back to Scala values,
  // since their Catalyst representations are not directly comparable.
  testBothCodegenAndInterpreted("variable-length types") {
    val proj = createMutableProjection(variableLengthTypes)
    val scalaValues = Seq("abc", BigDecimal(10),
      IntervalUtils.stringToInterval(UTF8String.fromString("interval 1 day")),
      Array[Byte](1, 2), Array("123", "456"), Map(1 -> "a", 2 -> "b"), Row(1, "a"),
      new java.lang.Integer(5))
    val inputRow = InternalRow.fromSeq(scalaValues.zip(variableLengthTypes).map {
      case (v, dataType) => CatalystTypeConverters.createToCatalystConverter(dataType)(v)
    })
    val projRow = proj(inputRow)
    variableLengthTypes.zipWithIndex.foreach { case (dataType, index) =>
      val toScala = CatalystTypeConverters.createToScalaConverter(dataType)
      assert(toScala(projRow.get(index, dataType)) === toScala(inputRow.get(index, dataType)))
    }
  }

  // Targeting an UnsafeRow with a variable-length type must fail fast with a
  // descriptive IllegalArgumentException (checked in interpreted mode only).
  test("unsupported types for unsafe buffer") {
    withSQLConf(SQLConf.CODEGEN_FACTORY_MODE.key -> CodegenObjectFactoryMode.NO_CODEGEN.toString) {
      val proj = createMutableProjection(Array(StringType))
      val errMsg = intercept[IllegalArgumentException] {
        proj.target(new UnsafeRow(1))
      }.getMessage
      assert(errMsg.contains("MutableProjection cannot use UnsafeRow for output data types:"))
    }
  }
}
jkbradley/spark
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MutableProjectionSuite.scala
Scala
apache-2.0
3,906
package mesosphere.marathon

import java.io.FileInputStream

import com.google.protobuf.ByteString
import org.apache.mesos.Protos.{Credential, FrameworkID, FrameworkInfo}
import org.apache.mesos.{MesosSchedulerDriver, Scheduler, SchedulerDriver}
import FrameworkInfo.Capability
import com.typesafe.scalalogging.StrictLogging

/** Factory for the Mesos scheduler driver Marathon registers with the master. */
object MarathonSchedulerDriver extends StrictLogging {

  /**
   * Builds a MesosSchedulerDriver from Marathon's configuration: assembles the
   * FrameworkInfo (name, failover timeout, user, checkpointing, hostname, web UI
   * URL, role, framework ID, capabilities) and optional authentication
   * credentials, then constructs the driver with explicit acknowledgements.
   *
   * @param config      Marathon configuration backing most FrameworkInfo fields
   * @param httpConfig  HTTP settings used to derive the web UI URL when none is configured
   * @param newScheduler the Scheduler callback implementation to attach
   * @param frameworkId previously persisted framework ID, if re-registering
   * @return a fully configured (not yet started) SchedulerDriver
   */
  def newDriver(
    config: MarathonConf,
    httpConfig: HttpConf,
    newScheduler: Scheduler,
    frameworkId: Option[FrameworkID]): SchedulerDriver = {

    logger.info(s"Create new Scheduler Driver with frameworkId: $frameworkId and scheduler $newScheduler")

    val frameworkInfoBuilder = FrameworkInfo.newBuilder()
      .setName(config.frameworkName())
      .setFailoverTimeout(config.mesosFailoverTimeout().toDouble)
      .setUser(config.mesosUser())
      .setCheckpoint(config.checkpoint())
      .setHostname(config.hostname())

    // Set the role, if provided. (@silent suppresses the deprecation warning on setRole.)
    config.mesosRole.foreach(frameworkInfoBuilder.setRole: @silent)

    // Set the ID, if provided
    frameworkId.foreach(frameworkInfoBuilder.setId)

    // Web UI URL: explicit config wins; otherwise derive scheme from whether an
    // SSL keystore is configured.
    if (config.webuiUrl.isSupplied) {
      frameworkInfoBuilder.setWebuiUrl(config.webuiUrl())
    } else if (httpConfig.sslKeystorePath.isDefined) {
      // ssl enabled, use https
      frameworkInfoBuilder.setWebuiUrl(s"https://${config.hostname()}:${httpConfig.httpsPort()}")
    } else {
      // ssl disabled, use http
      frameworkInfoBuilder.setWebuiUrl(s"http://${config.hostname()}:${httpConfig.httpPort()}")
    }

    // set the authentication principal, if provided
    config.mesosAuthenticationPrincipal.foreach(frameworkInfoBuilder.setPrincipal)

    // Credentials are only built when authentication is enabled; the secret may
    // come from config directly or be read from a secret file.
    val credential: Option[Credential] = {
      def secretFileContent = config.mesosAuthenticationSecretFile.toOption.map { secretFile =>
        ByteString.readFrom(new FileInputStream(secretFile)).toStringUtf8
      }
      def credentials = config.mesosAuthenticationPrincipal.toOption.map { principal =>
        val credentials = Credential.newBuilder().setPrincipal(principal)
        //secret is optional
        config.mesosAuthenticationSecret.toOption.orElse(secretFileContent).foreach(credentials.setSecret)
        credentials.build()
      }
      if (config.mesosAuthentication()) credentials else None
    }
    credential.foreach(c => logger.info(s"Authenticate with Mesos as ${c.getPrincipal}"))

    // Task Killing Behavior enables a dedicated task update (TASK_KILLING) from mesos before a task is killed.
    // In Marathon this task update is currently ignored.
    // It makes sense to enable this feature, to support other tools that parse the mesos state, even if
    // Marathon does not use it in the moment.
    // Mesos will implement a custom kill behavior, so this state can be used by Marathon as well.
    if (config.features().contains(Features.TASK_KILLING)) {
      frameworkInfoBuilder.addCapabilities(Capability.newBuilder().setType(Capability.Type.TASK_KILLING_STATE))
      logger.info("TASK_KILLING feature enabled.")
    }

    // GPU Resources allows Marathon to get offers from Mesos agents with GPUs. For details, see MESOS-5634.
    if (config.features().contains(Features.GPU_RESOURCES)) {
      frameworkInfoBuilder.addCapabilities(Capability.newBuilder().setType(Capability.Type.GPU_RESOURCES))
      logger.info("GPU_RESOURCES feature enabled.")
    }

    // Enables partition awareness in Mesos to receive TASK_UNREACHABLE status updates when a task is partitioned
    // instead of a more general TASK_LOST. See also Mesos documentation.
    // Note: This feature is available since Mesos 1.1 and Marathon 1.4 requires Mesos 1.1
    frameworkInfoBuilder.addCapabilities(Capability.newBuilder().setType(Capability.Type.PARTITION_AWARE))
    logger.info("PARTITION_AWARE feature enabled.")

    // Enables region awareness in Mesos to receive offers from other regions
    frameworkInfoBuilder.addCapabilities(Capability.newBuilder().setType(Capability.Type.REGION_AWARE))
    logger.info("REGION_AWARE feature enabled")

    val frameworkInfo = frameworkInfoBuilder.build()

    logger.debug("Start creating new driver")
    // Explicit (not implicit) status-update acknowledgements are used.
    val implicitAcknowledgements = false
    val newDriver: MesosSchedulerDriver = credential match {
      case Some(cred) =>
        new MesosSchedulerDriver(newScheduler, frameworkInfo, config.mesosMaster().unredactedConnectionString,
          implicitAcknowledgements, cred)

      case None =>
        new MesosSchedulerDriver(newScheduler, frameworkInfo, config.mesosMaster().unredactedConnectionString,
          implicitAcknowledgements)
    }
    logger.debug("Finished creating new driver", newDriver)
    newDriver
  }
}
gsantovena/marathon
src/main/scala/mesosphere/marathon/MarathonSchedulerDriver.scala
Scala
apache-2.0
4,669
package com.p44.models

import play.api.libs.json._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

/** A fish with a display name and its weight in pounds. */
case class Fish(name: String, pounds: Double)
object Fish {
  implicit val jsonWriter = Json.writes[Fish] // Json.toJson(obj): JsValue
  implicit val jsonReader = Json.reads[Fish] // Json.fromJson[T](jsval): JsResult[T] .asOpt Option[T]
  def toJsArray(objs: List[Fish]): JsArray = JsArray(objs.map(Json.toJson(_)).toSeq)
}

/** A free-form message paired with a formatted timestamp string. */
case class MessageTime(message: String, time: String)
object MessageTime {
  implicit val jsonWriter = Json.writes[MessageTime] // Json.toJson(obj): JsValue
  implicit val jsonReader = Json.reads[MessageTime] // Json.fromJson[T](jsval): JsResult[T] .asOpt Option[T]
  def toJsArray(objs: List[MessageTime]): JsArray = JsArray(objs.map(Json.toJson(_)).toSeq)
}

/** A fish that was dropped, with an accompanying exclamation. */
case class DroppedFish(fish: Fish, exclamation: String)
object DroppedFish {
  implicit val jsonWriter = Json.writes[DroppedFish] // Json.toJson(obj): JsValue
  implicit val jsonReader = Json.reads[DroppedFish] // Json.fromJson[T](jsval): JsResult[T] .asOpt Option[T]
  def toJsArray(objs: List[DroppedFish]): JsArray = JsArray(objs.map(Json.toJson(_)).toSeq)
}

/** Receipt for a delivered shipment of fish. */
case class DeliveryReceipt(id: Long, fishCount: Int, totalWeight: Double, payment: Double, time: String, message: String)
object DeliveryReceipt {
  implicit val jsonWriter = Json.writes[DeliveryReceipt] // Json.toJson(obj): JsValue
  implicit val jsonReader = Json.reads[DeliveryReceipt] // Json.fromJson[T](jsval): JsResult[T] .asOpt Option[T]
  def toJsArray(objs: List[DeliveryReceipt]): JsArray = JsArray(objs.map(Json.toJson(_)).toSeq)
}

/** A whale sighting comment or other general comment */
case class Comment(comment: String)
object Comment {
  implicit val jsonWriter = Json.writes[Comment] // Json.toJson(obj): JsValue
  implicit val jsonReader = Json.reads[Comment] // Json.fromJson[T](jsval): JsResult[T] .asOpt Option[T]
  def toJsArray(objs: List[Comment]): JsArray = JsArray(objs.map(Json.toJson(_)).toSeq)
}

import com.typesafe.config.ConfigFactory

/**
 * Model helpers for the fish store: configuration constants, random fish
 * generation, and JSON rendering helpers. Note: several methods run on the
 * global ExecutionContext imported above.
 */
object FishStoreModels {

  val CONF = ConfigFactory.load
  val FISHSTORE_TWO_DB_NAME = CONF.getString("store.two.mongodb.database").trim
  val FISHSTORE_TWO_DB_HOSTS_AS_STRING = CONF.getString("store.two.mongodb.hosts").trim
  val FISHSTORE_TWO_DB_HOSTS: List[String] = List(FISHSTORE_TWO_DB_HOSTS_AS_STRING)

  val JSON_ARRAY_EMPTY = "[]"

  /** (name, min lbs, max lbs) */
  val possibleFish: Seq[(String, Int, Int)] = Seq(("sea bass", 5, 10), ("blue-green snapper", 4, 18),
    ("pink snapper", 2, 18), ("red snapper", 2, 18), ("wahoo", 8, 30), ("skipjack tuna", 4, 30))

  /** Picks a random species and gives it a random weight within its range. */
  def getRandomFish: Fish = {
    val pf: (String, Int, Int) = getRandomPossibleFish
    getFishFromPossibleFish(pf)
  }
  /** Uniformly samples one (name, min, max) entry from possibleFish. */
  def getRandomPossibleFish: (String, Int, Int) = {
    possibleFish(scala.util.Random.nextInt(possibleFish.size))
  }
  /** Materializes a Fish from a (name, min, max) species entry. */
  def getFishFromPossibleFish(pf: (String, Int, Int)): Fish = {
    Fish(pf._1, getFishWeight(pf._2, pf._3))
  }
  // Whole pounds uniformly in [min, max], plus a random tenth-of-a-pound
  // fraction (so the result can exceed max by up to 0.9 lbs).
  def getFishWeight(min: Int, max: Int): Double = {
    val lbs: Double = (min + scala.util.Random.nextInt(max - min + 1)).toDouble
    val frac: Double = (scala.util.Random.nextInt(10)).toDouble
    lbs + frac / 10.0
  }

  /** Pretty-prints a future shipment of fish as a JSON array string. */
  def aBunchOfFishToJson(futureShipment: Future[List[Fish]]): Future[String] = {
    futureShipment.map { shipment => Json.prettyPrint(Fish.toJsArray(shipment)) }
  }

  /** Generates a list of fish of size specified by count, returns as future */
  def generateFish(count: Int): Future[List[Fish]] = {
    Future { generateFishImpl(count) }
  }
  /** Generates a list of fish of size specified by count */
  def generateFishImpl(count: Int): List[Fish] = {
    val fish = for (i <- 0 until count) yield { getRandomFish }
    fish.toList
  }

  import org.joda.time.DateTime
  import org.joda.time.format._

  // US-style timestamp format used for MessageTime rendering.
  val DATE_FORMATTER_USA: DateTimeFormatter = DateTimeFormat.forPattern("MM/dd/yyyy HH:mm:ss")

  /** Async wrapper around makeMessageTimeJsonImpl. */
  def makeMessageTimeJson(msg: String, ts: Long): Future[String] = {
    Future { makeMessageTimeJsonImpl(msg, ts) }
  }
  /** Renders a MessageTime (message + formatted millis timestamp) as pretty JSON. */
  def makeMessageTimeJsonImpl(msg: String, ts: Long): String = {
    val mt = MessageTime(msg, formatTimstampMillis(ts, DATE_FORMATTER_USA))
    Json.prettyPrint(Json.toJson(mt))
  }
  /** Formats epoch millis with the given Joda formatter. */
  def formatTimstampMillis(ts: Long, formatter: DateTimeFormatter): String = {
    formatter.print(new DateTime(ts))
  }
}
p44/FishStore
app/com/p44/models/FishStoreModels.scala
Scala
apache-2.0
4,365
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.mllib.tree.model import scala.collection.mutable import org.json4s._ import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ import org.apache.spark.{Logging, SparkContext} import org.apache.spark.annotation.{Experimental, Since} import org.apache.spark.api.java.JavaRDD import org.apache.spark.mllib.linalg.Vector import org.apache.spark.mllib.tree.configuration.{Algo, FeatureType} import org.apache.spark.mllib.tree.configuration.Algo._ import org.apache.spark.mllib.util.{Loader, Saveable} import org.apache.spark.rdd.RDD import org.apache.spark.sql.{DataFrame, Row, SQLContext} import org.apache.spark.util.Utils /** * :: Experimental :: * Decision tree model for classification or regression. * This model stores the decision tree structure and parameters. * @param topNode root node * @param algo algorithm type -- classification or regression */ @Since("1.0.0") @Experimental class LambdaDecisionTreeModel @Since("1.0.0") ( @Since("1.0.0") val topNode: MNode, @Since("1.0.0") val algo: Algo) extends Serializable with Saveable { /** * Predict values for a single data point using the model trained. 
* * @param features array representing a single data point * @return Double prediction from the trained model */ @Since("1.0.0") def predict(features: Vector): Double = { topNode.predict(features) } /** * Predict values for the given data set using the model trained. * * @param features RDD representing data points to be predicted * @return RDD of predictions for each of the given data points */ @Since("1.0.0") def predict(features: RDD[Vector]): RDD[Double] = { features.map(x => predict(x)) } /** * Predict values for the given data set using the model trained. * * @param features JavaRDD representing data points to be predicted * @return JavaRDD of predictions for each of the given data points */ @Since("1.2.0") def predict(features: JavaRDD[Vector]): JavaRDD[Double] = { predict(features.rdd) } /** * Get number of nodes in tree, including leaf nodes. */ @Since("1.1.0") def numNodes: Int = { 1 + topNode.numDescendants } /** * Get depth of tree. * E.g.: Depth 0 means 1 leaf node. Depth 1 means 1 internal node and 2 leaf nodes. */ @Since("1.1.0") def depth: Int = { topNode.subtreeDepth } /** * Print a summary of the model. */ override def toString: String = algo match { case Classification => s"LambdaDecisionTreeModel classifier of depth $depth with $numNodes nodes" case Regression => s"LambdaDecisionTreeModel regressor of depth $depth with $numNodes nodes" case _ => throw new IllegalArgumentException( s"LambdaDecisionTreeModel given unknown algo parameter: $algo.") } /** * Print the full model to a string. */ @Since("1.2.0") def toDebugString: String = { val header = toString + "\\n" header + topNode.subtreeToString(2) } /** * @param sc Spark context used to save model data. * @param path Path specifying the directory in which to save this model. * If the directory already exists, this method throws an exception. 
*/ @Since("1.3.0") override def save(sc: SparkContext, path: String): Unit = { LambdaDecisionTreeModel.SaveLoadV1_0.save(sc, path, this) } override protected def formatVersion: String = LambdaDecisionTreeModel.formatVersion } @Since("1.3.0") object LambdaDecisionTreeModel extends Loader[LambdaDecisionTreeModel] with Logging { private[spark] def formatVersion: String = "1.0" private[tree] object SaveLoadV1_0 { def thisFormatVersion: String = "1.0" // Hard-code class name string in case it changes in the future def thisClassName: String = "org.apache.spark.mllib.tree.LambdaDecisionTreeModel" case class PredictData(predict: Double, prob: Double) { def toPredict: Predict = new Predict(predict, prob) } object PredictData { def apply(p: Predict): PredictData = PredictData(p.predict, p.prob) def apply(r: Row): PredictData = PredictData(r.getDouble(0), r.getDouble(1)) } case class SplitData( feature: Int, threshold: Double, featureType: Int, categories: Seq[Double]) { // TODO: Change to List once SPARK-3365 is fixed def toSplit: Split = { new Split(feature, threshold, FeatureType(featureType), categories.toList) } } object SplitData { def apply(s: Split): SplitData = { SplitData(s.feature, s.threshold, s.featureType.id, s.categories) } def apply(r: Row): SplitData = { SplitData(r.getInt(0), r.getDouble(1), r.getInt(2), r.getAs[Seq[Double]](3)) } } /** Model data for model import/export */ case class NodeData( treeId: Int, nodeId: Int, predict: PredictData, impurity: Double, isLeaf: Boolean, split: Option[SplitData], leftNodeId: Option[Int], rightNodeId: Option[Int], infoGain: Option[Double]) object NodeData { def apply(treeId: Int, n: MNode): NodeData = { NodeData(treeId, n.id, PredictData(n.predict), n.impurity, n.isLeaf, n.split.map(SplitData.apply), n.leftNode.map(_.id), n.rightNode.map(_.id), n.stats.map(_.gain)) } def apply(r: Row): NodeData = { val split = if (r.isNullAt(5)) None else Some(SplitData(r.getStruct(5))) val leftNodeId = if (r.isNullAt(6)) None else 
Some(r.getInt(6)) val rightNodeId = if (r.isNullAt(7)) None else Some(r.getInt(7)) val infoGain = if (r.isNullAt(8)) None else Some(r.getDouble(8)) NodeData(r.getInt(0), r.getInt(1), PredictData(r.getStruct(2)), r.getDouble(3), r.getBoolean(4), split, leftNodeId, rightNodeId, infoGain) } } def save(sc: SparkContext, path: String, model: LambdaDecisionTreeModel): Unit = { val sqlContext = new SQLContext(sc) import sqlContext.implicits._ // SPARK-6120: We do a hacky check here so users understand why save() is failing // when they run the ML guide example. // TODO: Fix this issue for real. val memThreshold = 768 if (sc.isLocal) { val driverMemory = sc.getConf.getOption("spark.driver.memory") .orElse(Option(System.getenv("SPARK_DRIVER_MEMORY"))) .map(Utils.memoryStringToMb) .getOrElse(Utils.DEFAULT_DRIVER_MEM_MB) if (driverMemory <= memThreshold) { logWarning(s"$thisClassName.save() was called, but it may fail because of too little" + s" driver memory (${driverMemory}m)." + s" If failure occurs, try setting driver-memory ${memThreshold}m (or larger).") } } else { if (sc.executorMemory <= memThreshold) { logWarning(s"$thisClassName.save() was called, but it may fail because of too little" + s" executor memory (${sc.executorMemory}m)." + s" If failure occurs try setting executor-memory ${memThreshold}m (or larger).") } } // Create JSON metadata. val metadata = compact(render( ("class" -> thisClassName) ~ ("version" -> thisFormatVersion) ~ ("algo" -> model.algo.toString) ~ ("numNodes" -> model.numNodes))) sc.parallelize(Seq(metadata), 1).saveAsTextFile(Loader.metadataPath(path)) // Create Parquet data. val nodes = model.topNode.subtreeIterator.toSeq val dataRDD: DataFrame = sc.parallelize(nodes) .map(NodeData.apply(0, _)) .toDF() dataRDD.write.parquet(Loader.dataPath(path)) } def load(sc: SparkContext, path: String, algo: String, numNodes: Int): LambdaDecisionTreeModel = { val datapath = Loader.dataPath(path) val sqlContext = new SQLContext(sc) // Load Parquet data. 
val dataRDD = sqlContext.read.parquet(datapath) // Check schema explicitly since erasure makes it hard to use match-case for checking. Loader.checkSchema[NodeData](dataRDD.schema) val nodes = dataRDD.map(NodeData.apply) // Build node data into a tree. val trees = constructTrees(nodes) assert(trees.size == 1, "Decision tree should contain exactly one tree but got ${trees.size} trees.") val model = new LambdaDecisionTreeModel(trees(0), Algo.fromString(algo)) assert(model.numNodes == numNodes, s"Unable to load LambdaDecisionTreeModel data from: $datapath." + s" Expected $numNodes nodes but found ${model.numNodes}") model } def constructTrees(nodes: RDD[NodeData]): Array[MNode] = { val trees = nodes .groupBy(_.treeId) .mapValues(_.toArray) .collect() .map { case (treeId, data) => (treeId, constructTree(data)) }.sortBy(_._1) val numTrees = trees.size val treeIndices = trees.map(_._1).toSeq assert(treeIndices == (0 until numTrees), s"Tree indices must start from 0 and increment by 1, but we found $treeIndices.") trees.map(_._2) } /** * Given a list of nodes from a tree, construct the tree. * @param data array of all node data in a tree. */ def constructTree(data: Array[NodeData]): MNode = { val dataMap: Map[Int, NodeData] = data.map(n => n.nodeId -> n).toMap assert(dataMap.contains(1), s"LambdaDecisionTree missing root node (id = 1).") constructNode(1, dataMap, mutable.Map.empty) } /** * Builds a node from the node data map and adds new nodes to the input nodes map. 
*/ private def constructNode( id: Int, dataMap: Map[Int, NodeData], nodes: mutable.Map[Int, MNode]): MNode = { if (nodes.contains(id)) { return nodes(id) } val data = dataMap(id) val node = if (data.isLeaf) { MNode(data.nodeId, data.predict.toPredict, data.impurity, data.isLeaf) } else { val leftNode = constructNode(data.leftNodeId.get, dataMap, nodes) val rightNode = constructNode(data.rightNodeId.get, dataMap, nodes) val stats = new InformationGainStats(data.infoGain.get, data.impurity, leftNode.impurity, rightNode.impurity, leftNode.predict, rightNode.predict) new MNode(data.nodeId, data.predict.toPredict, data.impurity, data.isLeaf, data.split.map(_.toSplit), Some(leftNode), Some(rightNode), Some(stats)) } nodes += node.id -> node node } } /** * * @param sc Spark context used for loading model files. * @param path Path specifying the directory to which the model was saved. * @return Model instance */ @Since("1.3.0") override def load(sc: SparkContext, path: String): LambdaDecisionTreeModel = { implicit val formats = DefaultFormats val (loadedClassName, version, metadata) = Loader.loadMetadata(sc, path) val algo = (metadata \\ "algo").extract[String] val numNodes = (metadata \\ "numNodes").extract[Int] val classNameV1_0 = SaveLoadV1_0.thisClassName (loadedClassName, version) match { case (className, "1.0") if className == classNameV1_0 => SaveLoadV1_0.load(sc, path, algo, numNodes) case _ => throw new Exception( s"LambdaDecisionTreeModel.load did not recognize model with (className, format version):" + s"($loadedClassName, $version). Supported:\\n" + s" ($classNameV1_0, 1.0)") } } }
hu17889/mllib_subpackage
src/main/scala/malgo/tree/model/LambdaDecisionTreeModel.scala
Scala
apache-2.0
12,322
package org.scalaide.debug.internal.breakpoints

import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.atomic.AtomicReference

import scala.collection.JavaConverters.mapAsScalaConcurrentMapConverter
import scala.collection.Seq
import scala.collection.concurrent
import scala.concurrent.ExecutionContext
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.concurrent.Promise

import org.eclipse.core.resources.IMarkerDelta
import org.eclipse.debug.core.DebugPlugin
import org.eclipse.debug.core.IBreakpointListener
import org.eclipse.debug.core.model.IBreakpoint
import org.eclipse.jdt.internal.debug.core.breakpoints.JavaLineBreakpoint
import org.scalaide.debug.internal.model.ScalaDebugTarget

object ScalaDebugBreakpointManager {
  /**
   * A debug message used to know if the event request associated to the passed `breakpoint` is enabled.
   * @note Use this for test purposes only!
   */
  case class GetBreakpointRequestState(breakpoint: IBreakpoint)

  /** Creates a manager (and the subordinate doing the actual work) bound to the given debug target. */
  def apply(debugTarget: ScalaDebugTarget): ScalaDebugBreakpointManager = {
    import scala.concurrent.ExecutionContext.Implicits.global
    val subordinate = new ScalaDebugBreakpointSubordinate(debugTarget)
    new ScalaDebugBreakpointManager(subordinate)
  }
}

/**
 * Setup the initial breakpoints, and listen to breakpoint changes, for the given ScalaDebugTarget.
 *
 * @note All breakpoint-event related methods in this class are asynchronous, by delegating to the subordinate.
 */
class ScalaDebugBreakpointManager private (
    /*public field only for testing purposes */ val subordinate: ScalaDebugBreakpointSubordinate)
  extends IBreakpointListener {

  /**
   * Used to wait until all required `Future` messages have been processed.
   *
   * @note Use this for test purposes only!
   */
  private val waitForAllCurrentFutures: AtomicReference[Future[Unit]] = new AtomicReference(Future.successful {})

  // Chains the completion of `b` onto the tail of `waitForAllCurrentFutures`, so that
  // `waitForAllCurrentEvents` can await every event processed so far.
  // NOTE(review): the `get` followed by `getAndSet` is not an atomic read-modify-write;
  // two concurrent callers could drop each other's future from the chain. Presumably all
  // callers run on the Eclipse breakpoint-notification thread — confirm before relying on it.
  private def entangleFutures[T](b: => Future[T])(implicit ec: ExecutionContext): Unit = {
    val p = Promise[Unit]
    // Complete the promise whether `b` succeeded or failed — the chain only tracks "done".
    b.onComplete { t =>
      p.success {};
    }
    waitForAllCurrentFutures.getAndSet(waitForAllCurrentFutures.get.flatMap { _ =>
      p.future
    })
  }

  override def breakpointChanged(breakpoint: IBreakpoint, delta: IMarkerDelta): Unit = entangleFutures {
    subordinate.breakpointChanged(breakpoint, delta)
  }

  override def breakpointRemoved(breakpoint: IBreakpoint, delta: IMarkerDelta): Unit = entangleFutures {
    subordinate.breakpointRemoved(breakpoint)
  }

  override def breakpointAdded(breakpoint: IBreakpoint): Unit = entangleFutures {
    subordinate.breakpointAdded(breakpoint)
  }

  /**
   * Intended to ensure that we'll hit already defined and enabled breakpoints after performing hcr.
   *
   * @param changedClassesNames fully qualified names of types
   */
  def reenableBreakpointsInClasses(changedClassesNames: Seq[String]): Future[Unit] = {
    // Note: not routed through entangleFutures — the caller gets the future directly.
    subordinate.reenableBreakpointsAfterHcr(changedClassesNames)
  }

  // ------------

  /** Installs supports for all existing JDT breakpoints, then starts listening for changes. */
  def init(): Unit = {
    subordinate.initialize()
    DebugPlugin.getDefault.getBreakpointManager.addBreakpointListener(this)
  }

  /** Stops listening, resets the test-support future chain, and tears down the subordinate. */
  def dispose(): Unit = {
    DebugPlugin.getDefault.getBreakpointManager.removeBreakpointListener(this)
    waitForAllCurrentFutures.getAndSet(Future.successful {})
    subordinate.exit()
  }

  /**
   * Wait for all `Future`s to be processed.
   *
   * @note Use this for test purposes only!
   */
  protected[debug] def waitForAllCurrentEvents(): Unit = {
    // Busy-wait; acceptable only because this is test-only support.
    while (!waitForAllCurrentFutures.get.isCompleted) {}
  }

  /**
   * Check if the event request associated to the passed `breakpoint` is enabled/disabled.
   *
   * @return None if the `breakpoint` isn't registered. Otherwise, the enabled state of the associated request is returned, wrapped in a `Some`.
   * @note Use this for test purposes only!
   */
  protected[debug] def getBreakpointRequestState(breakpoint: IBreakpoint): Option[Boolean] =
    subordinate.breakpointRequestState(breakpoint)
}

/**
 * Performs the actual breakpoint bookkeeping on behalf of [[ScalaDebugBreakpointManager]].
 * Mutating operations return `Future`s executed on the supplied execution context.
 */
private[debug] class ScalaDebugBreakpointSubordinate(debugTarget: ScalaDebugTarget)(implicit ec: ExecutionContext) {
  private final val JdtDebugUID = "org.eclipse.jdt.debug"

  import scala.collection._

  // Registry of breakpoints currently installed on this debug target.
  private val breakpoints: concurrent.Map[IBreakpoint, BreakpointSupportSubordinate] = {
    import scala.collection.JavaConverters._
    new ConcurrentHashMap[IBreakpoint, BreakpointSupportSubordinate].asScala
  }

  // Unknown breakpoints are ignored (completed future with no action).
  def breakpointChanged(breakpoint: IBreakpoint, delta: IMarkerDelta): Future[Unit] =
    breakpoints.get(breakpoint).map { breakpointSupport =>
      breakpointSupport.changed(delta)
    }.getOrElse(Future.successful {})

  def breakpointRemoved(breakpoint: IBreakpoint): Future[Unit] = Future {
    breakpoints.get(breakpoint).map { breakpointSupport =>
      // Tear down the support first, then forget the breakpoint.
      breakpointSupport.exit()
      breakpoints -= breakpoint
    }
  }

  /**
   * There might be a situation when breakpoint is not found in map. This is only possible if the message was sent
   * between when the InitializeExistingBreakpoints message was sent and when the list of the current breakpoint
   * was fetched. Nothing to do, everything is already in the right state.
   */
  def breakpointAdded(breakpoint: IBreakpoint): Future[Unit] = Future {
    // putIfAbsent keeps the existing support if initialize() already registered this breakpoint.
    breakpoints.putIfAbsent(breakpoint, BreakpointSupport(breakpoint, debugTarget))
  }

  /** Registers supports for every JDT breakpoint that already exists at startup. */
  def initialize(): Unit = {
    def createBreakpointSupport(breakpoint: IBreakpoint): Unit = {
      breakpoints += (breakpoint -> BreakpointSupport(breakpoint, debugTarget))
    }
    DebugPlugin.getDefault.getBreakpointManager.getBreakpoints(JdtDebugUID).foreach(createBreakpointSupport)
  }

  /** @return None if the breakpoint isn't registered, otherwise the request's enabled state. */
  private[debug] def breakpointRequestState(breakpoint: IBreakpoint): Option[Boolean] = {
    breakpoints.get(breakpoint).flatMap { breakpointSupport =>
      Some(breakpointSupport.breakpointRequestState())
    }
  }

  def reenableBreakpointsAfterHcr(changedClassesNames: Seq[String]): Future[Unit] = Future {
    /*
     * We need to prepare names of changed classes and these taken from breakpoints because
     * for some reasons they differ. We need to change them slightly as:
     *
     * Type names used in breakpoints have double intermediate dollars,
     * e.g. debug.Foo$$x$$Bar instead of debug.Foo$x$Bar, debug.Foo$$x$ instead of debug.Foo$x$.
     *
     * There are also anonymous types which really should have double dollars but anyway
     * breakpoints for such types have currently set type like
     * com.test.debug.Foo$$x$$Bar$java.lang.Object$java.lang.Object
     * instead of
     * debug.Foo$x$Bar$$anon$2$$anon$1
     */
    val anonTypePattern = """\\$anon\\$[1-9][0-9]*"""
    // Normalize anon-class suffixes on the HCR side to match the breakpoint-side naming quirk.
    val namesToCompareWithOnesFromBreakpoints =
      changedClassesNames.map(_.replaceAll(anonTypePattern, "java.lang.Object"))

    // Collapse the doubled dollars used by breakpoint type names before comparing.
    def isChanged(typeName: String): Boolean =
      namesToCompareWithOnesFromBreakpoints.contains(typeName.replace("$$", "$"))

    val affectedBreakpoints = breakpoints.keys.collect {
      case bp: JavaLineBreakpoint if isChanged(bp.getTypeName) => bp
    }

    affectedBreakpoints.foreach { breakpoint =>
      breakpoints(breakpoint).reenableBreakpointRequestsAfterHcr()
    }
  }

  /** Tears down every registered support and empties the registry. */
  def exit(): Future[Unit] = Future {
    breakpoints.values.foreach(_.exit())
    breakpoints.clear()
  }
}
stephenh/scala-ide
org.scala-ide.sdt.debug/src/org/scalaide/debug/internal/breakpoints/ScalaDebugBreakpointManager.scala
Scala
bsd-3-clause
7,248
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package kafka.server

import java.net.InetAddress
import java.util.Locale
import java.util.concurrent.TimeUnit

import kafka.api.ApiVersion
import kafka.cluster.EndPoint
import kafka.metrics.KafkaMetricsGroup
import kafka.utils._
import org.I0Itec.zkclient.IZkStateListener
import org.apache.kafka.common.protocol.SecurityProtocol
import org.apache.zookeeper.Watcher.Event.KeeperState

/**
 * This class registers the broker in zookeeper to allow
 * other brokers and consumers to detect failures. It uses an ephemeral znode with the path:
 *   /brokers/ids/[0...N] --> advertisedHost:advertisedPort
 *
 * Right now our definition of health is fairly naive. If we register in zk we are healthy, otherwise
 * we are dead.
 *
 * @param brokerId this broker's numeric id
 * @param advertisedEndpoints endpoints advertised to clients; empty hosts are filled in at registration time
 * @param zkUtils ZooKeeper access helper used for registration and state subscriptions
 * @param rack optional rack identifier included in the registration data
 * @param interBrokerProtocolVersion protocol version recorded in the broker registration
 */
class KafkaHealthcheck(brokerId: Int,
                       advertisedEndpoints: Seq[EndPoint],
                       zkUtils: ZkUtils,
                       rack: Option[String],
                       interBrokerProtocolVersion: ApiVersion) extends Logging {

  // Re-registers the broker whenever the ZooKeeper session is re-established.
  private[server] val sessionExpireListener = new SessionExpireListener

  /** Subscribes to ZooKeeper state changes and performs the initial registration. */
  def startup() {
    zkUtils.subscribeStateChanges(sessionExpireListener)
    register()
  }

  /**
   * Register this broker as "alive" in zookeeper
   */
  def register() {
    // -1 means "no JMX port configured"; the property is set by the JVM when remote JMX is enabled.
    val jmxPort = System.getProperty("com.sun.management.jmxremote.port", "-1").toInt
    // Fill in blank hosts with this machine's canonical host name so clients get a usable address.
    val updatedEndpoints = advertisedEndpoints.map(endpoint =>
      if (endpoint.host == null || endpoint.host.trim.isEmpty)
        endpoint.copy(host = InetAddress.getLocalHost.getCanonicalHostName)
      else
        endpoint
    )

    // the default host and port are here for compatibility with older clients that only support PLAINTEXT
    // we choose the first plaintext port, if there is one
    // or we register an empty endpoint, which means that older clients will not be able to connect
    val plaintextEndpoint = updatedEndpoints.find(_.securityProtocol == SecurityProtocol.PLAINTEXT).getOrElse(
      new EndPoint(null, -1, null, null))
    zkUtils.registerBrokerInZk(brokerId, plaintextEndpoint.host, plaintextEndpoint.port, updatedEndpoints, jmxPort, rack,
      interBrokerProtocolVersion)
  }

  /**
   * When we get a SessionExpired event, it means that we have lost all ephemeral nodes and ZKClient has re-established
   * a connection for us. We need to re-register this broker in the broker registry. We rely on `handleStateChanged`
   * to record ZooKeeper connection state metrics.
   */
  class SessionExpireListener extends IZkStateListener with KafkaMetricsGroup {

    // One meter per ZooKeeper connection state so dashboards can track transition rates.
    private[server] val stateToMeterMap = {
      import KeeperState._
      val stateToEventTypeMap = Map(
        Disconnected -> "Disconnects",
        SyncConnected -> "SyncConnects",
        AuthFailed -> "AuthFailures",
        ConnectedReadOnly -> "ReadOnlyConnects",
        SaslAuthenticated -> "SaslAuthentications",
        Expired -> "Expires"
      )
      stateToEventTypeMap.map { case (state, eventType) =>
        state -> newMeter(s"ZooKeeper${eventType}PerSec", eventType.toLowerCase(Locale.ROOT), TimeUnit.SECONDS)
      }
    }

    @throws[Exception]
    override def handleStateChanged(state: KeeperState) {
      // Unknown states (not in the map) are silently ignored.
      stateToMeterMap.get(state).foreach(_.mark())
    }

    @throws[Exception]
    override def handleNewSession() {
      info("re-registering broker info in ZK for broker " + brokerId)
      register()
      info("done re-registering broker")
      info("Subscribing to %s path to watch for new topics".format(ZkUtils.BrokerTopicsPath))
    }

    override def handleSessionEstablishmentError(error: Throwable) {
      // Fatal: without a ZK session the broker cannot advertise itself.
      fatal("Could not establish session with zookeeper", error)
    }

  }

}
ErikKringen/kafka
core/src/main/scala/kafka/server/KafkaHealthcheck.scala
Scala
apache-2.0
4,460
package integrationtest

import _root_.controller.Controllers
import service._
import org.scalatra.test.scalatest._
import skinny._
import skinny.test.SkinnyTestSupport

/** Integration spec verifying that the Mustache controller echoes its query parameter. */
class MustacheControllerSpec extends ScalatraFlatSpec with SkinnyTestSupport {
  addFilter(Controllers.mustache, "/*")

  it should "show top page" in {
    get("/mustache?echo=abcdEFG") {
      // The Mustache view occasionally blows up on Travis CI; in that case dump the
      // response body for diagnosis instead of failing the build.
      status match {
        case 500 =>
          println(body)
        case _ =>
          status should equal(200)
          body should include("abcdEFG")
      }
    }
  }
}
BlackPrincess/skinny-framework
example/src/test/scala/integrationtest/MustacheControllerSpec.scala
Scala
mit
533
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package vta.core

import chisel3._
import chisel3.util._
import chisel3.experimental._
import vta.util.config._
import scala.math.pow

/**
 * Pipelined multiply and accumulate.
 *
 * One register stage on each input; computes y = c + a * b one cycle after
 * the inputs are presented.
 */
class MAC(dataBits: Int = 8, cBits: Int = 16, outBits: Int = 17) extends Module {
  require (cBits >= dataBits * 2)
  require (outBits >= dataBits * 2)
  val io = IO(new Bundle {
    val a = Input(SInt(dataBits.W))
    val b = Input(SInt(dataBits.W))
    val c = Input(SInt(cBits.W))
    val y = Output(SInt(outBits.W))
  })
  val mult = Wire(SInt(cBits.W))
  val add = Wire(SInt(outBits.W))
  // Register all three operands: the MAC has a latency of one cycle.
  val rA = RegNext(io.a)
  val rB = RegNext(io.b)
  val rC = RegNext(io.c)
  mult := rA * rB
  add := rC + mult
  io.y := add
}

/**
 * Pipelined adder.
 *
 * One register stage on each input; computes y = a + b one cycle later.
 */
class Adder(dataBits: Int = 8, outBits: Int = 17) extends Module {
  require (outBits >= dataBits)
  val io = IO(new Bundle {
    val a = Input(SInt(dataBits.W))
    val b = Input(SInt(dataBits.W))
    val y = Output(SInt(outBits.W))
  })
  val add = Wire(SInt(outBits.W))
  val rA = RegNext(io.a)
  val rB = RegNext(io.b)
  add := rA + rB
  io.y := add
}

/**
 * Pipelined DotProduct based on MAC and Adder.
 *
 * Two MAC layers feed a log2(size)-deep adder reduction tree; the output bit
 * width grows by one bit per reduction level to avoid overflow.
 */
class DotProduct(dataBits: Int = 8, size: Int = 16) extends Module {
  val errMsg = s"\n\n[VTA] [DotProduct] size must be greater than 4 and a power of 2\n\n"
  require(size >= 4 && isPow2(size), errMsg)
  val b = dataBits * 2
  // Extra bits: one per tree level plus one for the final accumulation.
  val outBits = b + log2Ceil(size) + 1
  val io = IO(new Bundle {
    val a = Input(Vec(size, SInt(dataBits.W)))
    val b = Input(Vec(size, SInt(dataBits.W)))
    val y = Output(SInt(outBits.W))
  })
  val p = log2Ceil(size/2)
  // s(i): number of units at reduction level i (size/2, size/4, ..., 1).
  val s = Seq.tabulate(log2Ceil(size))(i => pow(2, p - i).toInt)
  // Delay the second half of the inputs one cycle so they line up with the
  // output of the first MAC layer.
  val da = Seq.tabulate(s(0))(i => RegNext(io.a(s(0) + i)))
  val db = Seq.tabulate(s(0))(i => RegNext(io.b(s(0) + i)))
  // Two MAC layers: layer 0 multiplies the first half, layer 1 multiplies the
  // delayed second half while accumulating layer 0's result via the c input.
  val m = Seq.tabulate(2)(i =>
    Seq.fill(s(0))(Module(new MAC(dataBits = dataBits, cBits = b + i, outBits = b + i + 1))))
  // Adder tree; widths grow with each level.
  val a = Seq.tabulate(p)(i =>
    Seq.fill(s(i + 1))(Module(new Adder(dataBits = b + i + 2, outBits = b + i + 3))))

  for (i <- 0 until log2Ceil(size)) {
    for (j <- 0 until s(i)) {
      if (i == 0) {
        // First MAC layer consumes raw inputs; second layer chains on its result.
        m(i)(j).io.a := io.a(j)
        m(i)(j).io.b := io.b(j)
        m(i)(j).io.c := 0.S
        m(i + 1)(j).io.a := da(j)
        m(i + 1)(j).io.b := db(j)
        m(i + 1)(j).io.c := m(i)(j).io.y
      } else if (i == 1) {
        // First adder level reduces pairs of second-layer MAC outputs.
        a(i - 1)(j).io.a := m(i)(2*j).io.y
        a(i - 1)(j).io.b := m(i)(2*j + 1).io.y
      } else {
        // Remaining levels reduce pairs from the previous adder level.
        a(i - 1)(j).io.a := a(i - 2)(2*j).io.y
        a(i - 1)(j).io.b := a(i - 2)(2*j + 1).io.y
      }
    }
  }
  // Root of the adder tree is the dot product.
  io.y := a(p-1)(0).io.y
}

/** Perform matric-vector-multiplication based on DotProduct */
class MatrixVectorCore(implicit p: Parameters) extends Module {
  val accBits = p(CoreKey).accBits
  val size = p(CoreKey).blockOut
  val dataBits = p(CoreKey).inpBits
  val io = IO(new Bundle{
    val reset = Input(Bool()) // FIXME: reset should be replaced by a load-acc instr
    val inp = new TensorMasterData(tensorType = "inp")
    val wgt = new TensorMasterData(tensorType = "wgt")
    val acc_i = new TensorMasterData(tensorType = "acc")
    val acc_o = new TensorClientData(tensorType = "acc")
    val out = new TensorClientData(tensorType = "out")
  })
  // One dot-product unit per output element.
  val dot = Seq.fill(size)(Module(new DotProduct(dataBits, size)))
  // Delay the incoming accumulator values by the dot-product latency so the
  // addition below lines up cycle-for-cycle.
  val acc = Seq.fill(size)(Module(new Pipe(UInt(accBits.W), latency = log2Ceil(size) + 1)))
  val add = Seq.fill(size)(Wire(SInt(accBits.W)))
  val vld = Wire(Vec(size, Bool()))

  for (i <- 0 until size) {
    // Only enqueue when all three operands are valid and we're not clearing.
    acc(i).io.enq.valid := io.inp.data.valid &
      io.wgt.data.valid &
      io.acc_i.data.valid &
      ~io.reset
    acc(i).io.enq.bits := io.acc_i.data.bits(0)(i)
    for (j <- 0 until size) {
      dot(i).io.a(j) := io.inp.data.bits(0)(j).asSInt
      dot(i).io.b(j) := io.wgt.data.bits(i)(j).asSInt
    }
    // acc += inp . wgt_row(i)
    add(i) := acc(i).io.deq.bits.asSInt + dot(i).io.y
    // reset forces the accumulator output to zero (acc-scratchpad clear).
    io.acc_o.data.bits(0)(i) := Mux(io.reset, 0.U, add(i).asUInt)
    io.out.data.bits(0)(i) := add(i).asUInt
    vld(i) := acc(i).io.deq.valid
  }
  io.acc_o.data.valid := vld.asUInt.andR | io.reset
  io.out.data.valid := vld.asUInt.andR
}

/** TensorGemm.
 *
 * This unit instantiate the MatrixVectorCore and go over the
 * micro-ops (uops) which are used to read inputs, weights and biases,
 * and writes results back to the acc and out scratchpads.
 *
 * Also, the TensorGemm uses the reset field in the Gemm instruction to
 * clear or zero-out the acc-scratchpad locations based on the micro-ops.
 */
class TensorGemm(debug: Boolean = false)(implicit p: Parameters) extends Module {
  val io = IO(new Bundle {
    val start = Input(Bool())
    val done = Output(Bool())
    val inst = Input(UInt(INST_BITS.W))
    val uop = new UopMaster
    val inp = new TensorMaster(tensorType = "inp")
    val wgt = new TensorMaster(tensorType = "wgt")
    val acc = new TensorMaster(tensorType = "acc")
    val out = new TensorMaster(tensorType = "out")
  })
  // FSM: fetch uop -> compute indices -> read tensors -> execute, looping over
  // the two nested loops (lp_0 outer, lp_1 inner) of the GEMM instruction;
  // sWait drains in-flight results before signalling done.
  val sIdle :: sReadUop :: sComputeIdx :: sReadTensor :: sExe :: sWait :: Nil = Enum(6)
  val state = RegInit(sIdle)
  val mvc = Module(new MatrixVectorCore)
  val dec = io.inst.asTypeOf(new GemmDecode)
  val uop_idx = Reg(chiselTypeOf(dec.uop_end))
  val uop_end = dec.uop_end
  // Effective scratchpad indices after adding loop offsets to the uop fields.
  val uop_acc = Reg(chiselTypeOf(dec.uop_end))
  val uop_inp = Reg(chiselTypeOf(dec.uop_end))
  val uop_wgt = Reg(chiselTypeOf(dec.uop_end))
  // Outer-loop counter and its accumulated offsets.
  val cnt_o = Reg(chiselTypeOf(dec.lp_0))
  val acc_o = Reg(chiselTypeOf(dec.uop_end))
  val inp_o = Reg(chiselTypeOf(dec.uop_end))
  val wgt_o = Reg(chiselTypeOf(dec.uop_end))
  // Inner-loop counter and its accumulated offsets.
  val cnt_i = Reg(chiselTypeOf(dec.lp_1))
  val acc_i = Reg(chiselTypeOf(dec.uop_end))
  val inp_i = Reg(chiselTypeOf(dec.uop_end))
  val wgt_i = Reg(chiselTypeOf(dec.uop_end))
  // inflight counts issued-but-not-written results; pBits sized to the MVC latency.
  val pBits = log2Ceil(p(CoreKey).blockOut) + 1
  val inflight = Reg(UInt(pBits.W))
  // Pipeline the acc write index so it arrives together with the MVC result.
  val wrpipe = Module(new Pipe(chiselTypeOf(dec.uop_end), latency = pBits))
  // Done when nothing is in flight and we're at the last iteration (or draining).
  // NOTE(review): `inflight === 0.U` appears both outside and inside the
  // parenthesized term — the inner occurrence looks redundant; confirm before touching.
  val done = inflight === 0.U &
    ((state === sExe &
      cnt_o === dec.lp_0 - 1.U &
      cnt_i === dec.lp_1 - 1.U &
      uop_idx === uop_end - 1.U &
      inflight === 0.U) |
      state === sWait)

  switch (state) {
    is (sIdle) {
      when (io.start) {
        state := sReadUop
      }
    }
    is (sReadUop) {
      state := sComputeIdx
    }
    is (sComputeIdx) {
      state := sReadTensor
    }
    is (sReadTensor) {
      state := sExe
    }
    is (sExe) {
      // Last uop of the last inner and outer iteration: finish (or drain first).
      when ((cnt_o === dec.lp_0 - 1.U) &&
            (cnt_i === dec.lp_1 - 1.U) &&
            (uop_idx === uop_end - 1.U)) {
        when (inflight =/= 0.U) {
          state := sWait
        } .otherwise {
          state := sIdle
        }
      } .otherwise {
        state := sReadUop
      }
    }
    is (sWait) {
      when (inflight === 0.U) {
        state := sIdle
      }
    }
  }

  // In-flight accounting (skipped entirely for reset/clearing instructions).
  when (state === sIdle) {
    inflight := 0.U
  } .elsewhen (!dec.reset) {
    when (state === sExe && inflight =/= ((1 << pBits) - 1).asUInt) { // overflow check
      inflight := inflight + 1.U
    } .elsewhen (mvc.io.acc_o.data.valid && inflight =/= 0.U) { // underflow check
      inflight := inflight - 1.U
    }
  }

  // uop index: sweep [uop_begin, uop_end) and wrap for the next iteration.
  when (state === sIdle || (state === sExe && uop_idx === uop_end - 1.U)) {
    uop_idx := dec.uop_begin
  } .elsewhen (state === sExe) {
    uop_idx := uop_idx + 1.U
  }

  // Outer loop: advance counters/offsets when the inner loop completes.
  when (state === sIdle) {
    cnt_o := 0.U
    acc_o := 0.U
    inp_o := 0.U
    wgt_o := 0.U
  } .elsewhen (state === sExe && uop_idx === uop_end - 1.U && cnt_i === dec.lp_1 - 1.U) {
    cnt_o := cnt_o + 1.U
    acc_o := acc_o + dec.acc_0
    inp_o := inp_o + dec.inp_0
    wgt_o := wgt_o + dec.wgt_0
  }

  // Inner loop: reload from the outer offsets when it wraps, otherwise step.
  when (state === sIdle) {
    cnt_i := 0.U
    acc_i := 0.U
    inp_i := 0.U
    wgt_i := 0.U
  } .elsewhen (state === sReadUop && cnt_i === dec.lp_1) {
    cnt_i := 0.U
    acc_i := acc_o
    inp_i := inp_o
    wgt_i := wgt_o
  } .elsewhen (state === sExe && uop_idx === uop_end - 1.U) {
    cnt_i := cnt_i + 1.U
    acc_i := acc_i + dec.acc_1
    inp_i := inp_i + dec.inp_1
    wgt_i := wgt_i + dec.wgt_1
  }

  // Effective scratchpad addresses = uop fields + current loop offsets.
  when (state === sComputeIdx && io.uop.data.valid) {
    uop_acc := io.uop.data.bits.u0 + acc_i
    uop_inp := io.uop.data.bits.u1 + inp_i
    uop_wgt := io.uop.data.bits.u2 + wgt_i
  }

  // Feed the acc write index into the delay pipe alongside each issued compute.
  wrpipe.io.enq.valid := state === sExe & ~dec.reset
  wrpipe.io.enq.bits := uop_acc

  // uop
  io.uop.idx.valid := state === sReadUop
  io.uop.idx.bits := uop_idx

  // inp
  io.inp.rd.idx.valid := state === sReadTensor
  io.inp.rd.idx.bits := uop_inp
  io.inp.tieoffWrite() // read-only

  // wgt
  io.wgt.rd.idx.valid := state === sReadTensor
  io.wgt.rd.idx.bits := uop_wgt
  io.wgt.tieoffWrite() // read-only

  // acc_i
  io.acc.rd.idx.valid := state === sReadTensor
  io.acc.rd.idx.bits := uop_acc

  // mvc
  mvc.io.reset := dec.reset & state === sExe
  mvc.io.inp.data <> io.inp.rd.data
  mvc.io.wgt.data <> io.wgt.rd.data
  mvc.io.acc_i.data <> io.acc.rd.data

  // acc_o: on reset (clear) write immediately at uop_acc; otherwise use the
  // delayed index matching the MVC result.
  io.acc.wr.valid := mvc.io.acc_o.data.valid &
    Mux(dec.reset, true.B, wrpipe.io.deq.valid)
  io.acc.wr.bits.idx := Mux(dec.reset, uop_acc, wrpipe.io.deq.bits)
  io.acc.wr.bits.data <> mvc.io.acc_o.data.bits

  // out
  io.out.wr.valid := mvc.io.out.data.valid & wrpipe.io.deq.valid
  io.out.wr.bits.idx := wrpipe.io.deq.bits
  io.out.wr.bits.data <> mvc.io.out.data.bits
  io.out.tieoffRead() // write-only

  io.done := done

  if (debug) {
    // Simulation-only tracing of uop fetches, tensor reads and results.
    when (state === sReadUop && ~dec.reset) {
      printf("[TensorGemm] [uop] idx:%x\n", uop_idx)
    }
    when (state === sReadTensor && ~dec.reset) {
      printf("[TensorGemm] [uop] acc:%x inp:%x wgt:%x\n", uop_acc, uop_inp, uop_wgt)
    }
    io.inp.rd.data.bits.zipWithIndex.foreach { case(r, i) =>
      when (io.inp.rd.data.valid && ~dec.reset) {
        printf("[TensorGemm] [inp] i:%x val:%x\n", i.U, r.asUInt)
      }
    }
    io.wgt.rd.data.bits.zipWithIndex.foreach { case(r, i) =>
      when (io.wgt.rd.data.valid && ~dec.reset) {
        printf("[TensorGemm] [wgt] i:%x val:%x\n", i.U, r.asUInt)
      }
    }
    io.acc.rd.data.bits.foreach { tensor =>
      tensor.zipWithIndex.foreach { case(elem, i) =>
        when (io.acc.rd.data.valid && ~dec.reset) {
          printf("[TensorGemm] [acc_i] i:%x val:%x\n", i.U, elem)
        }
      }
    }
    mvc.io.acc_o.data.bits.foreach { tensor =>
      tensor.zipWithIndex.foreach { case(elem, i) =>
        when (mvc.io.acc_o.data.valid && ~dec.reset) {
          printf("[TensorGemm] [acc_o] i:%x val:%x\n", i.U, elem)
        }
      }
    }
    mvc.io.out.data.bits.foreach { tensor =>
      tensor.zipWithIndex.foreach { case(elem, i) =>
        when (mvc.io.out.data.valid && ~dec.reset) {
          printf("[TensorGemm] [out] i:%x val:%x\n", i.U, elem)
        }
      }
    }
  }
}
mlperf/training_results_v0.7
Fujitsu/benchmarks/resnet/implementations/implementation_open/mxnet/3rdparty/tvm/vta/hardware/chisel/src/main/scala/core/TensorGemm.scala
Scala
apache-2.0
11,435
package com.twitter.summingbird.online

import com.twitter.summingbird.memory.Memory
import com.twitter.summingbird.planner.StripNamedNode
import com.twitter.summingbird.{ Dependants, Producer, OptionMappedProducer, NamedProducer, Source, Summer }
import org.scalatest.FunSuite
import scala.collection.mutable.{ Map => MMap }

/**
 * Tests for [[StripNamedNode]]: verifies that removing `NamedProducer` nodes
 * from a producer graph (a) leaves no named nodes behind and (b) returns a map
 * that associates each surviving node with the names that covered it.
 *
 * Each test builds a small Memory-platform graph, records the names visible
 * from each node via [[Dependants.namesOf]], strips the names, and then checks
 * the name map on the stripped graph.
 */
class StripNameTest extends FunSuite {
  test("simple name test") {
    /*
     * Here are the irreducible items
     */
    val store = MMap[Int, Int]()
    val input = List(1, 2, 4)
    val fn = { k: Int => Some((k % 2, k * k)) }

    // Linear graph: source -> optionMap -> sumByKey, a name after each stage.
    val src = Producer.source[Memory, Int](input)
    val mapped = src
      .name("source")
      .optionMap(fn)
    val summed = mapped
      .name("map")
      .sumByKey(store)
    val graph = summed
      .name("sumByKey")

    // Names accumulate downstream: a node sees its own name plus all later ones.
    val deps = Dependants(graph)
    assert(deps.namesOf(src).map(_.id).toSet == Set("source", "map", "sumByKey"))
    assert(deps.namesOf(mapped).map(_.id).toSet == Set("map", "sumByKey"))
    assert(deps.namesOf(summed).map(_.id).toSet == Set("sumByKey"))

    val (nameMap, stripped) = StripNamedNode(graph)
    val strippedDeps = Dependants(stripped)

    // Finds exactly one stripped node matching `p` and checks its name set.
    def assertName(names: Set[String])(p: PartialFunction[Producer[Memory, Any], Producer[Memory, Any]]): Unit = {
      val nodes = strippedDeps.nodes.collect(p)
      assert(nodes.size == 1) // Only one node
      assert(nameMap(nodes(0)).toSet == names, s"checking ${names}")
    }
    assertName(Set("source", "map", "sumByKey")) { case p @ Source(l) if l == input => p }
    assertName(Set("map", "sumByKey")) { case p @ OptionMappedProducer(_, f) if f == fn => p }
    assertName(Set("sumByKey")) { case p @ Summer(_, str, _) if str == store => p }
    // The final stripped has no names:
    assert(strippedDeps.nodes.collect { case NamedProducer(_, _) => 1 }.sum == 0)
  }

  test("merge name test") {
    /*
     * Here are the irreducible items
     */
    val store = MMap[Int, Int]()
    val input0 = List(1, 2, 4)
    val input1 = List("100", "200", "400")
    val fn0 = { k: Int => Some((k % 2, k * k)) }
    val fn1 = { kstr: String => val k = kstr.toInt; Some((k % 2, k * k)) }

    // Two independent branches merged with ++ before a single sumByKey.
    val src0 = Producer.source[Memory, Int](input0)
    val mapped0 = src0
      .name("source0")
      .optionMap(fn0)
    val named0 = mapped0.name("map0")
    val src1 = Producer.source[Memory, String](input1)
    val mapped1 = src1
      .name("source1")
      .optionMap(fn1)
    val named1 = mapped1.name("map1")
    val summed = (named0 ++ named1).sumByKey(store)
    val graph = summed
      .name("sumByKey")

    // Each branch only sees its own names plus the shared downstream name.
    val deps = Dependants(graph)
    def assertInitName(n: Producer[Memory, Any], s: List[String]) =
      assert(deps.namesOf(n).map(_.id) == s)
    assertInitName(src0, List("source0", "map0", "sumByKey"))
    assertInitName(src1, List("source1", "map1", "sumByKey"))
    assertInitName(mapped0, List("map0", "sumByKey"))
    assertInitName(mapped1, List("map1", "sumByKey"))
    assertInitName(summed, List("sumByKey"))

    val (nameMap, stripped) = StripNamedNode(graph)
    val strippedDeps = Dependants(stripped)

    // Here the name lists are totally ordered, so compare lists exactly.
    def assertName(names: List[String])(p: PartialFunction[Producer[Memory, Any], Producer[Memory, Any]]): Unit = {
      val nodes = strippedDeps.nodes.collect(p)
      assert(nodes.size == 1) // Only one node
      assert(nameMap(nodes(0)) == names, s"checking ${names}")
    }
    assertName(List("source0", "map0", "sumByKey")) { case p @ Source(l) if l == input0 => p }
    assertName(List("map0", "sumByKey")) { case p @ OptionMappedProducer(_, f) if f == fn0 => p }
    assertName(List("source1", "map1", "sumByKey")) { case p @ Source(l) if l == input1 => p }
    assertName(List("map1", "sumByKey")) { case p @ OptionMappedProducer(_, f) if f == fn1 => p }
    assertName(List("sumByKey")) { case p @ Summer(_, str, _) if str == store => p }
    // The final stripped has no names:
    assert(strippedDeps.nodes.collect { case NamedProducer(_, _) => 1 }.sum == 0)
  }

  test("Fan-out name test") {
    /*
     * Here are the irreducible items
     */
    val store0 = MMap[Int, Int]()
    val store1 = MMap[Int, Int]()
    val input = List(1, 2, 4)
    val fn0 = { k: Int => Some((k % 2, k * k)) }
    val fn1 = { k: Int => Some((k % 3, k * k * k)) }
    // Here is the graph: one source fanned out into two sumByKey branches,
    // joined with `also`.
    val src = Producer.source[Memory, Int](input)
    val nameSrc = src.name("source")
    // branch1
    val mapped0 = nameSrc.optionMap(fn0)
    val summed0 = mapped0.name("map0").sumByKey(store0)
    // branch2
    val mapped1 = nameSrc.optionMap(fn1)
    val summed1 = mapped1.name("map1").name("map1.1").sumByKey(store1)
    val graph = summed0.name("sumByKey0").also(summed1.name("sumByKey1"))
    val namedG = graph.name("also")
    val deps = Dependants(namedG)
    /*
     * With fan-out, a total order on the lists is not defined.
     * so we check that the given list is in sorted order where
     * the partial ordering is defined.
     */
    def assertInitName(n: Producer[Memory, Any], s: List[String]) = {
      val ordering = deps.namesOf(n).map(_.id).zipWithIndex.toMap
      val order = Ordering.by(ordering)
      assert(s.sorted(order) == s, s"not sorted: $s != ${s.sorted(order)}")
    }
    assertInitName(src, List("source", "map0", "sumByKey0"))
    assertInitName(src, List("source", "map1", "sumByKey1", "also"))
    assertInitName(src, List("source", "map1", "map1.1", "sumByKey1", "also"))
    assertInitName(src, List("also"))
    // the "also" name only goes up the right hand side
    // because the output of the also only depends on the right hand side
    assertInitName(mapped0, List("map0", "sumByKey0"))
    assertInitName(mapped1, List("map1", "map1.1", "sumByKey1", "also"))
    assertInitName(summed0, List("sumByKey0"))
    assertInitName(summed1, List("sumByKey1", "also"))
    assertInitName(graph, List("also"))

    val (nameMap, stripped) = StripNamedNode(namedG)
    val strippedDeps = Dependants(stripped)

    // Same partial-order check, but against the stripped graph's name map.
    def assertName(names: List[String])(p: PartialFunction[Producer[Memory, Any], Producer[Memory, Any]]): Unit = {
      val nodes = strippedDeps.nodes.collect(p)
      assert(nodes.size == 1) // Only one node
      val ordering = nameMap(nodes(0)).zipWithIndex.toMap
      val order = Ordering.by(ordering)
      assert(names.sorted(order) == names, s"not sorted: $names != ${names.sorted(order)}")
    }
    assertName(List("source", "map0", "sumByKey0")) { case p @ Source(l) if l == input => p }
    assertName(List("source", "map1", "map1.1", "sumByKey1", "also")) { case p @ Source(l) if l == input => p }
    assertName(List("map0", "sumByKey0")) { case p @ OptionMappedProducer(_, f) if f == fn0 => p }
    assertName(List("map1", "sumByKey1", "also")) { case p @ OptionMappedProducer(_, f) if f == fn1 => p }
    // `eq` (reference equality) distinguishes the two structurally-equal stores.
    assertName(List("sumByKey0")) { case p @ Summer(_, str, _) if str eq store0 => p }
    assertName(List("sumByKey1", "also")) { case p @ Summer(_, str, _) if str eq store1 => p }
    // The final stripped has no names:
    assert(strippedDeps.nodes.collect { case NamedProducer(_, _) => 1 }.sum == 0)
  }
}
nabarunnag/Summingbird_dev
summingbird-online/src/test/scala/com/twitter/summingbird/online/StripNameTest.scala
Scala
apache-2.0
7,059
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.spark.scheduler.cluster.k8s

import java.io.File

import io.fabric8.kubernetes.client.Config
import io.fabric8.kubernetes.client.KubernetesClient

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.deploy.k8s.{KubernetesConf, KubernetesUtils, SparkKubernetesClientFactory}
import org.apache.spark.deploy.k8s.Config._
import org.apache.spark.deploy.k8s.Constants.DEFAULT_EXECUTOR_CONTAINER_NAME
import org.apache.spark.internal.Logging
import org.apache.spark.scheduler.{ExternalClusterManager, SchedulerBackend, TaskScheduler, TaskSchedulerImpl}
import org.apache.spark.util.{Clock, SystemClock, ThreadUtils, Utils}

/**
 * [[ExternalClusterManager]] for Kubernetes master URLs (`k8s://...`).
 * Builds the Kubernetes API client and all the executor-pod bookkeeping
 * components, then wires them into a [[KubernetesClusterSchedulerBackend]].
 */
private[spark] class KubernetesClusterManager extends ExternalClusterManager with Logging {

  // Spark selects this manager for any master URL starting with "k8s".
  override def canCreate(masterURL: String): Boolean = masterURL.startsWith("k8s")

  override def createTaskScheduler(sc: SparkContext, masterURL: String): TaskScheduler = {
    new TaskSchedulerImpl(sc)
  }

  override def createSchedulerBackend(
      sc: SparkContext,
      masterURL: String,
      scheduler: TaskScheduler): SchedulerBackend = {
    // Cluster mode (driver launched inside a pod by spark-submit) authenticates
    // with the in-pod service-account token/CA; client mode talks to the API
    // server from outside using client-mode auth config.
    val wasSparkSubmittedInClusterMode = sc.conf.get(KUBERNETES_DRIVER_SUBMIT_CHECK)
    val (authConfPrefix,
      apiServerUri,
      defaultServiceAccountToken,
      defaultServiceAccountCaCrt) = if (wasSparkSubmittedInClusterMode) {
      require(sc.conf.get(KUBERNETES_DRIVER_POD_NAME).isDefined,
        "If the application is deployed using spark-submit in cluster mode, the driver pod name " +
          "must be provided.")
      // Only pass the mounted credential files if they actually exist in the pod.
      val serviceAccountToken =
        Some(new File(Config.KUBERNETES_SERVICE_ACCOUNT_TOKEN_PATH)).filter(_.exists)
      val serviceAccountCaCrt =
        Some(new File(Config.KUBERNETES_SERVICE_ACCOUNT_CA_CRT_PATH)).filter(_.exists)
      (KUBERNETES_AUTH_DRIVER_MOUNTED_CONF_PREFIX,
        sc.conf.get(KUBERNETES_DRIVER_MASTER_URL),
        serviceAccountToken,
        serviceAccountCaCrt)
    } else {
      (KUBERNETES_AUTH_CLIENT_MODE_PREFIX,
        KubernetesUtils.parseMasterUrl(masterURL),
        None,
        None)
    }

    // If KUBERNETES_EXECUTOR_POD_NAME_PREFIX is not set, initialize it so that all executors have
    // the same prefix. This is needed for client mode, where the feature steps code that sets this
    // configuration is not used.
    //
    // If/when feature steps are executed in client mode, they should instead take care of this,
    // and this code should be removed.
    if (!sc.conf.contains(KUBERNETES_EXECUTOR_POD_NAME_PREFIX)) {
      sc.conf.set(KUBERNETES_EXECUTOR_POD_NAME_PREFIX,
        KubernetesConf.getResourceNamePrefix(sc.conf.get("spark.app.name")))
    }

    val kubernetesClient = SparkKubernetesClientFactory.createKubernetesClient(
      apiServerUri,
      Some(sc.conf.get(KUBERNETES_NAMESPACE)),
      authConfPrefix,
      SparkKubernetesClientFactory.ClientType.Driver,
      sc.conf,
      defaultServiceAccountToken,
      defaultServiceAccountCaCrt)

    // Eagerly validate the executor pod template (if configured) so a bad
    // template fails at startup rather than at first executor launch.
    if (sc.conf.get(KUBERNETES_EXECUTOR_PODTEMPLATE_FILE).isDefined) {
      KubernetesUtils.loadPodFromTemplate(
        kubernetesClient,
        sc.conf.get(KUBERNETES_EXECUTOR_PODTEMPLATE_FILE).get,
        sc.conf.get(KUBERNETES_EXECUTOR_PODTEMPLATE_CONTAINER_NAME),
        sc.conf)
    }

    val schedulerExecutorService = ThreadUtils.newDaemonSingleThreadScheduledExecutor(
      "kubernetes-executor-maintenance")

    ExecutorPodsSnapshot.setShouldCheckAllContainers(
      sc.conf.get(KUBERNETES_EXECUTOR_CHECK_ALL_CONTAINERS))
    // Which container in the executor pod is the Spark container (pod templates
    // may add sidecars); falls back to the default name.
    val sparkContainerName = sc.conf.get(KUBERNETES_EXECUTOR_PODTEMPLATE_CONTAINER_NAME)
      .getOrElse(DEFAULT_EXECUTOR_CONTAINER_NAME)
    ExecutorPodsSnapshot.setSparkContainerName(sparkContainerName)

    val subscribersExecutor = ThreadUtils
      .newDaemonThreadPoolScheduledExecutor(
        "kubernetes-executor-snapshots-subscribers", 2)
    val snapshotsStore = new ExecutorPodsSnapshotsStoreImpl(subscribersExecutor)

    val executorPodsLifecycleEventHandler = new ExecutorPodsLifecycleManager(
      sc.conf,
      kubernetesClient,
      snapshotsStore)

    val executorPodsAllocator = makeExecutorPodsAllocator(sc, kubernetesClient, snapshotsStore)

    // Two snapshot sources feed the store: a watch (push) and a poll (pull,
    // as a safety net for missed watch events).
    val podsWatchEventSource = new ExecutorPodsWatchSnapshotSource(
      snapshotsStore,
      kubernetesClient)

    val eventsPollingExecutor = ThreadUtils.newDaemonSingleThreadScheduledExecutor(
      "kubernetes-executor-pod-polling-sync")
    val podsPollingEventSource = new ExecutorPodsPollingSnapshotSource(
      sc.conf, kubernetesClient, snapshotsStore, eventsPollingExecutor)

    new KubernetesClusterSchedulerBackend(
      scheduler.asInstanceOf[TaskSchedulerImpl],
      sc,
      kubernetesClient,
      schedulerExecutorService,
      snapshotsStore,
      executorPodsAllocator,
      executorPodsLifecycleEventHandler,
      podsWatchEventSource,
      podsPollingEventSource)
  }

  /**
   * Instantiates the configured pods allocator via reflection. The config value
   * may be the shorthand "statefulset"/"direct" or a fully-qualified class name
   * of an [[AbstractPodsAllocator]] subclass with the expected 6-arg constructor.
   */
  private[k8s] def makeExecutorPodsAllocator(sc: SparkContext, kubernetesClient: KubernetesClient,
      snapshotsStore: ExecutorPodsSnapshotsStore) = {
    val executorPodsAllocatorName = sc.conf.get(KUBERNETES_ALLOCATION_PODS_ALLOCATOR) match {
      case "statefulset" =>
        classOf[StatefulsetPodsAllocator].getName
      case "direct" =>
        classOf[ExecutorPodsAllocator].getName
      case fullClass =>
        fullClass
    }

    val cls = Utils.classForName[AbstractPodsAllocator](executorPodsAllocatorName)
    val cstr = cls.getConstructor(
      classOf[SparkConf], classOf[org.apache.spark.SecurityManager],
      classOf[KubernetesExecutorBuilder], classOf[KubernetesClient],
      classOf[ExecutorPodsSnapshotsStore], classOf[Clock])
    cstr.newInstance(
      sc.conf,
      sc.env.securityManager,
      new KubernetesExecutorBuilder(),
      kubernetesClient,
      snapshotsStore,
      new SystemClock())
  }

  override def initialize(scheduler: TaskScheduler, backend: SchedulerBackend): Unit = {
    scheduler.asInstanceOf[TaskSchedulerImpl].initialize(backend)
  }
}
ueshin/apache-spark
resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/KubernetesClusterManager.scala
Scala
apache-2.0
6,780
/*
 * Copyright 2009-2011 WorldWide Conferencing, LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.liftweb
package webapptest

import org.specs2.mutable.Specification

import common._
import util._
import http._

/**
 * Shared fixtures for the spec below. Lazy vals / objects so the sessions
 * and memo containers are created once and shared across examples — several
 * examples deliberately depend on state left behind by earlier ones.
 */
object SessionInfo {
  // Two distinct sessions, used to show memoized state does not leak between sessions.
  lazy val session1 = new LiftSession("/", Helpers.randomString(20), Empty)
  lazy val session2 = new LiftSession("/", Helpers.randomString(20), Empty)

  // Session-scoped and request-scoped memo caches under test.
  object sessionMemo extends SessionMemoize[Int, Int]
  object requestMemo extends RequestMemoize[Int, Int]
}

/**
 * System under specification for Memoize.
 */
object MemoizeSpec extends Specification {
  "Memoize Specification".title
  // Examples share mutable memo state, so they must run in declaration order.
  sequential

  import SessionInfo._

  "Memoize" should {
    "Session memo should default to empty" >> {
      S.initIfUninitted(session1) {
        sessionMemo.get(3) must_== Empty
      }
    }

    "Session memo should be settable" >> {
      S.initIfUninitted(session1) {
        // get(key, default) stores the default and returns it ...
        sessionMemo.get(3, 8) must_== 8

        // ... and subsequent lookups see the stored value.
        sessionMemo.get(3) must_== Full(8)
      }
    }

    "Session memo should survive across calls" >> {
      // New S context, same session: the value set above is still there.
      S.initIfUninitted(session1) {
        sessionMemo.get(3) must_== Full(8)
      }
    }

    "Session memo should not float across sessions" >> {
      // Different session: memoized value from session1 must not be visible.
      S.initIfUninitted(session2) {
        sessionMemo.get(3) must_== Empty
      }
    }

    "Request memo should work in the same request" >> {
      S.initIfUninitted(session1) {
        requestMemo(3) must_== Empty
        requestMemo(3, 44) must_== Full(44)
        requestMemo(3) must_== Full(44)
      }
    }

    "Request memo should not span requests" >> {
      // Each initIfUninitted block is a fresh request scope, so the cache is gone.
      S.initIfUninitted(session1) {
        requestMemo(3) must_== Empty
      }
    }
  }
}
pbrant/framework
web/webkit/src/test/scala/net/liftweb/webapptest/MemoizeSpec.scala
Scala
apache-2.0
2,200
/*******************************************************************************
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
 *
 * Copyright (c) 2013,2014 by Peter Pilgrim, Addiscombe, Surrey, XeNoNiQUe UK
 *
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the GNU GPL v3.0
 * which accompanies this distribution, and is available at:
 * http://www.gnu.org/licenses/gpl-3.0.txt
 *
 * Developers:
 * Peter Pilgrim -- design, development and implementation
 *               -- Blog: http://www.xenonique.co.uk/blog/
 *               -- Twitter: @peter_pilgrim
 *
 * Contributors:
 *
 *******************************************************************************/
package uk.co.xenonique.digitalone.simple

import java.util.Date

import com.fasterxml.jackson.annotation.{JsonIgnoreProperties, JsonProperty}

import scala.beans.BeanProperty

/**
 * The type Traveller
 *
 * A mutable, Jackson-mapped value: `@BeanProperty` generates JavaBean
 * getters/setters and `@JsonProperty` binds each field to the given JSON key.
 * "id" and "computedDeliveryDateTime" are ignored on deserialization.
 *
 * NOTE(review): `docNo` is a Scala `Option` — plain Jackson does not handle
 * `Option` without the jackson-module-scala registered; verify the mapper setup.
 *
 * @author Peter Pilgrim
 */
@JsonIgnoreProperties(Array("id", "computedDeliveryDateTime"))
case class Traveller3(
  @BeanProperty @JsonProperty("givenName") var givenName: String,
  @BeanProperty @JsonProperty("familyName") var familyName: String,
  @BeanProperty @JsonProperty("dateOfBirth") var dateOfBirth: Date ,
  // Scala field `docNo` maps to the JSON key "documentNo".
  @BeanProperty @JsonProperty("documentNo") var docNo: Option[String],
  // Not part of the JSON contract (ignored above); defaults to construction time.
  @BeanProperty var computedDeliveryDateTime: Date = new Date()) {
}
peterpilgrim/digital-scala-javaone-2014
src/main/scala/uk/co/xenonique/digitalone/simple/Traveller3.scala
Scala
gpl-3.0
1,428
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.ml.feature

import org.apache.spark.annotation.Since
import org.apache.spark.ml.Transformer
import org.apache.spark.ml.attribute.{Attribute, AttributeGroup}
import org.apache.spark.ml.linalg._
import org.apache.spark.ml.param.{IntArrayParam, ParamMap, StringArrayParam}
import org.apache.spark.ml.param.shared.{HasInputCol, HasOutputCol}
import org.apache.spark.ml.util._
import org.apache.spark.sql.{DataFrame, Dataset}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.StructType

/**
 * This class takes a feature vector and outputs a new feature vector with a subarray of the
 * original features.
 *
 * The subset of features can be specified with either indices (`setIndices()`)
 * or names (`setNames()`). At least one feature must be selected. Duplicate features
 * are not allowed, so there can be no overlap between selected indices and names.
 *
 * The output vector will order features with the selected indices first (in the order given),
 * followed by the selected names (in the order given).
 */
@Since("1.5.0")
final class VectorSlicer @Since("1.5.0") (@Since("1.5.0") override val uid: String)
  extends Transformer with HasInputCol with HasOutputCol with DefaultParamsWritable {

  @Since("1.5.0")
  def this() = this(Identifiable.randomUID("vectorSlicer"))

  /**
   * An array of indices to select features from a vector column.
   * There can be no overlap with [[names]].
   * Default: Empty array
   * @group param
   */
  @Since("1.5.0")
  val indices = new IntArrayParam(this, "indices",
    "An array of indices to select features from a vector column." +
      " There can be no overlap with names.", VectorSlicer.validIndices)

  setDefault(indices -> Array.empty[Int])

  /** @group getParam */
  @Since("1.5.0")
  def getIndices: Array[Int] = $(indices)

  /** @group setParam */
  @Since("1.5.0")
  def setIndices(value: Array[Int]): this.type = set(indices, value)

  /**
   * An array of feature names to select features from a vector column.
   * These names must be specified by ML [[org.apache.spark.ml.attribute.Attribute]]s.
   * There can be no overlap with [[indices]].
   * Default: Empty Array
   * @group param
   */
  @Since("1.5.0")
  val names = new StringArrayParam(this, "names",
    "An array of feature names to select features from a vector column."
      + " There can be no overlap with indices.", VectorSlicer.validNames)

  setDefault(names -> Array.empty[String])

  /** @group getParam */
  @Since("1.5.0")
  def getNames: Array[String] = $(names)

  /** @group setParam */
  @Since("1.5.0")
  def setNames(value: Array[String]): this.type = set(names, value)

  /** @group setParam */
  @Since("1.5.0")
  def setInputCol(value: String): this.type = set(inputCol, value)

  /** @group setParam */
  @Since("1.5.0")
  def setOutputCol(value: String): this.type = set(outputCol, value)

  @Since("2.0.0")
  override def transform(dataset: Dataset[_]): DataFrame = {
    // Validity checks
    transformSchema(dataset.schema)
    val inputAttr = AttributeGroup.fromStructField(dataset.schema($(inputCol)))
    // Index bounds can only be validated when the column metadata declares a size.
    inputAttr.numAttributes.foreach { numFeatures =>
      val maxIndex = $(indices).max
      require(maxIndex < numFeatures,
        s"Selected feature index $maxIndex invalid for only $numFeatures input features.")
    }

    // Prepare output attributes: carry over per-feature metadata when available,
    // otherwise just record the output size.
    val inds = getSelectedFeatureIndices(dataset.schema)
    val selectedAttrs: Option[Array[Attribute]] = inputAttr.attributes.map { attrs =>
      inds.map(index => attrs(index))
    }
    val outputAttr = selectedAttrs match {
      case Some(attrs) => new AttributeGroup($(outputCol), attrs)
      case None => new AttributeGroup($(outputCol), inds.length)
    }

    // Select features: dense vectors are gathered by index, sparse vectors use
    // the sparse-aware slice to avoid densifying.
    val slicer = udf { vec: Vector =>
      vec match {
        case features: DenseVector => Vectors.dense(inds.map(features.apply))
        case features: SparseVector => features.slice(inds)
      }
    }
    dataset.withColumn($(outputCol), slicer(dataset($(inputCol))), outputAttr.toMetadata())
  }

  /** Get the feature indices in order: indices, names */
  private def getSelectedFeatureIndices(schema: StructType): Array[Int] = {
    val nameFeatures = MetadataUtils.getFeatureIndicesFromNames(schema($(inputCol)), $(names))
    val indFeatures = $(indices)
    // If the two selections overlap, the distinct count shrinks — that is the
    // disjointness check below.
    val numDistinctFeatures = (nameFeatures ++ indFeatures).distinct.length
    lazy val errMsg = "VectorSlicer requires indices and names to be disjoint" +
      s" sets of features, but they overlap." +
      s" indices: ${indFeatures.mkString("[", ",", "]")}." +
      s" names: " +
      nameFeatures.zip($(names)).map { case (i, n) => s"$i:$n" }.mkString("[", ",", "]")
    require(nameFeatures.length + indFeatures.length == numDistinctFeatures, errMsg)
    // Index-selected features come first, then name-selected ones.
    indFeatures ++ nameFeatures
  }

  @Since("1.5.0")
  override def transformSchema(schema: StructType): StructType = {
    require($(indices).length > 0 || $(names).length > 0,
      s"VectorSlicer requires that at least one feature be selected.")
    SchemaUtils.checkColumnType(schema, $(inputCol), new VectorUDT)

    if (schema.fieldNames.contains($(outputCol))) {
      throw new IllegalArgumentException(s"Output column ${$(outputCol)} already exists.")
    }
    val numFeaturesSelected = $(indices).length + $(names).length
    val outputAttr = new AttributeGroup($(outputCol), numFeaturesSelected)
    val outputFields = schema.fields :+ outputAttr.toStructField()
    StructType(outputFields)
  }

  @Since("1.5.0")
  override def copy(extra: ParamMap): VectorSlicer = defaultCopy(extra)

  @Since("3.0.0")
  override def toString: String = {
    s"VectorSlicer: uid=$uid" +
      get(indices).map(i => s", numSelectedFeatures=${i.length}").getOrElse("")
  }
}

@Since("1.6.0")
object VectorSlicer extends DefaultParamsReadable[VectorSlicer] {

  /** Return true if given feature indices are valid */
  private[feature] def validIndices(indices: Array[Int]): Boolean = {
    if (indices.isEmpty) {
      true
    } else {
      // Indices must be non-negative and contain no duplicates.
      indices.length == indices.distinct.length && indices.forall(_ >= 0)
    }
  }

  /** Return true if given feature names are valid */
  private[feature] def validNames(names: Array[String]): Boolean = {
    names.forall(_.nonEmpty) && names.length == names.distinct.length
  }

  @Since("1.6.0")
  override def load(path: String): VectorSlicer = super.load(path)
}
caneGuy/spark
mllib/src/main/scala/org/apache/spark/ml/feature/VectorSlicer.scala
Scala
apache-2.0
7,193
/**
 * Copyright (C) 2013 Carnegie Mellon University
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package tdb.list

import akka.actor.ActorRef
import java.io.Serializable
import scala.collection.mutable.Buffer

import tdb.{Context, Mod, Mutator}

/**
 * An [[AdjustableList]] made of aggregator partitions. Only the meta operation
 * `toBuffer` is implemented; every adjustable transformation is unsupported
 * and throws `NotImplementedError` via `???`.
 */
class PartitionedAggregatorList[T, U]
    (partitions: Buffer[AggregatorList[T, U]], conf: ListConf)
  extends AdjustableList[T, U] with Serializable {

  // Adjustable transformations are not supported for this list flavor.
  def filter(pred: ((T, U)) => Boolean)
    (implicit c: Context): AdjustableList[T, U] = ???

  def flatMap[V, W](f: ((T, U)) => Iterable[(V, W)])
    (implicit c: Context): AdjustableList[V, W] = ???

  def join[V]
    (that: AdjustableList[T, V], condition: ((T, V), (T, U)) => Boolean)
    (implicit c: Context): AdjustableList[T, (U, V)] = ???

  def map[V, W](f: ((T, U)) => (V, W))
    (implicit c: Context): AdjustableList[V, W] = ???

  def reduce(f: ((T, U), (T, U)) => (T, U))
    (implicit c: Context): Mod[(T, U)] = ???

  /* Meta functions */

  /** Collects the contents of every partition, in partition order, into one buffer. */
  def toBuffer(mutator: Mutator): Buffer[(T, U)] =
    partitions.foldLeft(Buffer[(T, U)]()) { (collected, partition) =>
      collected ++= partition.toBuffer(mutator)
    }
}
twmarshall/tdb
core/src/main/scala/tdb/list/PartitionedAggregatorList.scala
Scala
apache-2.0
1,674
/*                     __                                               *\
**     ________ ___   / /  ___     Scala API                            **
**    / __/ __// _ | / /  / _ |    (c) 2002-2010, LAMP/EPFL             **
**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
** /____/\___/_/ |_/____/_/ | |                                         **
**                          |/                                          **
\*                                                                      */

package scala.xml
package parsing

/**
 * A no-op [[MarkupHandler]]: every parser callback discards its input and
 * yields `NodeSeq.Empty`. Subclasses override only the events they need.
 */
abstract class DefaultMarkupHandler extends MarkupHandler {

  /** An element was parsed; ignore it. */
  def elem(pos: Int, pre: String, label: String, attrs: MetaData,
           scope: NamespaceBinding, args: NodeSeq): NodeSeq = NodeSeq.Empty

  /** A processing instruction was parsed; ignore it. */
  def procInstr(pos: Int, target: String, txt: String): NodeSeq = NodeSeq.Empty

  /** A comment was parsed; ignore it. */
  def comment(pos: Int, comment: String): NodeSeq = NodeSeq.Empty

  /** An entity reference was parsed; ignore it. */
  def entityRef(pos: Int, n: String): NodeSeq = NodeSeq.Empty

  /** Character data was parsed; ignore it. */
  def text(pos: Int, txt: String): NodeSeq = NodeSeq.Empty
}
cran/rkafkajars
java/scala/xml/parsing/DefaultMarkupHandler.scala
Scala
apache-2.0
1,081
/*
 * DecisionMetropolisHastings.scala
 * Decision Metropolis-Hastings sampler.
 *
 * Created By:      Brian Ruttenberg (bruttenberg@cra.com)
 * Creation Date:   Oct 1, 2012
 *
 * Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc.
 * See http://www.cra.com or email figaro@cra.com for information.
 *
 * See http://www.github.com/p2t2/figaro for a copy of the software license.
 */

package com.cra.figaro.algorithm.decision

import com.cra.figaro.algorithm._
import com.cra.figaro.algorithm.sampling._
import com.cra.figaro.language._
import com.cra.figaro.library.compound._
import com.cra.figaro.library.decision._
import com.cra.figaro.library.decision.DecisionUtil._
import com.cra.figaro.util._
import scala.collection.mutable.Map
import scala.language.existentials
import scala.math.log
import scala.annotation.tailrec

/**
 * Metropolis-Hastings Decision sampler. Almost the exact same as normal MH except that it keeps
 * track of utilities and probabilities (to compute expected utility) and it implements
 * DecisionAlgorithm trait
 */
abstract class DecisionMetropolisHastings[T, U] private (universe: Universe, proposalScheme: ProposalScheme, burnIn: Int, interval: Int,
  utilityNodes: List[Element[_]], decisionTarget: Decision[T, U], dummyTarget: Element[_])
  extends MetropolisHastings(universe, proposalScheme, burnIn, interval, dummyTarget) with DecisionAlgorithm[T, U] {

  // Public constructor: builds the dummy (parent, decision) tuple element used
  // to track expected utilities during sampling.
  def this(universe: Universe, proposalScheme: ProposalScheme, burnIn: Int, interval: Int, utilityNodes: List[Element[_]],
    decisionTarget: Decision[T, U]) =
    this(universe, proposalScheme, burnIn, interval, utilityNodes, decisionTarget, createDecisionDummy(decisionTarget))

  import MetropolisHastings._

  // Per-target map from sampled value to accumulated (utility) weight.
  protected type WeightSeen[T] = (Element[T], Map[T, Double])
  protected def newWeightSeen[T](target: Element[T]): WeightSeen[T] = (target, Map())

  /**
   * Contains all of the sample data (decision values, utilities) for a given target decision
   */
  private var allUtilitiesSeen: List[WeightSeen[_]] = _

  // Sum of the current values of all utility nodes; UsageCheck in the companion
  // guarantees these values are Doubles, so the cast is safe by construction.
  private def utilitySum = (0.0 /: utilityNodes)((s: Double, n: Element[_]) => s + n.value.asInstanceOf[Double])

  /**
   * Cleans up the temporary elements created during sampling
   */
  def cleanup() = universe.deactivate(queryTargets)

  /* Overrides DecisionAlgorithm Trait */
  // Combines, per (parent, decision) value, the accumulated utility weight with
  // the number of times that value was seen, to yield expected-utility samples.
  def computeUtility(): scala.collection.immutable.Map[(T, U), DecisionSample] = {
    val TimesSeen = allTimesSeen.find(_._1 == dummyTarget).get._2.asInstanceOf[Map[(T, U), Int]]
    val utilitySeen = allUtilitiesSeen.find(_._1 == dummyTarget).get._2.asInstanceOf[Map[(T, U), Double]]
    (utilitySeen.map(v => (v._1, DecisionSample(v._2, TimesSeen(v._1).toDouble)))).toMap
  }

  // override reset so we can reset the local utilities
  override protected def resetCounts() = {
    allUtilitiesSeen = queryTargets.toList map (newWeightSeen(_))
    super.resetCounts()
  }

  // Adds `weight` to the accumulated weight for `value` in this target's map.
  protected def updateWeightSeenWithValue[T](value: T, weight: Double, weightSeen: WeightSeen[T]): Unit =
    weightSeen._2 += value -> (weightSeen._2.getOrElse(value, 0.0) + weight)

  // Extracts this target's sampled value from the state map and records the weight.
  protected def updateWeightSeenForTarget[T](sample: (Double, Map[Element[_], Any]), weightSeen: WeightSeen[T]): Unit = {
    val (weight, values) = sample
    val value = values(weightSeen._1).asInstanceOf[T]
    updateWeightSeenWithValue(value, weight, weightSeen)
  }

  /**
   * Produce a single sample. In decision MH, we always update the target (parent and decision)
   * since the utilities mights have changed
   */
  override def sample(): (Boolean, Sample) = {
    mhStep()
    if (dissatisfied.isEmpty) {
      val values = queryTargets map (target => target -> target.value)
      (true, Map(values: _*))
    } else {
      (false, Map())
    }
  }

  /*
   * In the decision version of importance sampling, we keep track of both the sampled utility
   * and the state probability, so we have to maintain two data structures (allUtilitiesSeen and allTimesSeen)
   */
  protected override final def doSample() = {
    // Thinning: run interval-1 extra MH steps between recorded samples.
    for { i <- 1 to interval - 1 } { mhStep() }
    if (sampleCount == 0) {
      initUpdates
    }
    val s = sample()
    if (s._1) {
      // Record the current utility sum against each target's sampled value,
      // then count the state occurrence.
      allUtilitiesSeen foreach (updateWeightSeenForTarget((utilitySum, s._2), _))
      sampleCount += 1
      s._2 foreach (t => updateTimesSeenForTarget(t._1.asInstanceOf[Element[t._1.Value]], t._2.asInstanceOf[t._1.Value]))
    }
  }

  protected override def update(): Unit = {
    super.update
    // sampleCount is bumped then restored so the weight update sees the
    // post-update count without permanently double-counting this sample.
    sampleCount += 1
    allUtilitiesSeen foreach (updateWeightSeenForTarget((utilitySum, Map[Element[_], Any](dummyTarget -> dummyTarget.value)), _))
    sampleCount -= 1
  }
}

/**
 * Anytime Decision Metropolis-Hastings sampler.
 */
class AnytimeDecisionMetropolisHastings[T, U](universe: Universe, scheme: ProposalScheme, burnIn: Int, interval: Int,
  utilityNodes: List[Element[_]], decisionTarget: Decision[T, U])
  extends DecisionMetropolisHastings(universe, scheme, burnIn, interval, utilityNodes, decisionTarget)
  with UnweightedSampler with AnytimeProbQuerySampler {

  /**
   * Initialize the sampler.
   */
  override def initialize(): Unit = {
    super.initialize()
    doInitialize()
  }

  /**
   * Clean up the sampler, freeing memory.
   */
  override def cleanUp(): Unit = {
    universe.clearTemporaries()
    super.cleanUp()
  }
}

/**
 * One-time Decision Metropolis-Hastings sampler.
 */
class OneTimeDecisionMetropolisHastings[T, U](universe: Universe, myNumSamples: Int, scheme: ProposalScheme, burnIn: Int,
  interval: Int, utilityNodes: List[Element[_]], decisionTarget: Decision[T, U])
  extends DecisionMetropolisHastings(universe, scheme, burnIn, interval, utilityNodes, decisionTarget)
  with UnweightedSampler with OneTimeProbQuerySampler {

  /**
   * Number of samples to take.
   */
  val numSamples = myNumSamples

  /**
   * Run the algorithm, performing its computation to completion.
   */
  override def run(): Unit = {
    doInitialize()
    super.run()
    update
  }
}

object DecisionMetropolisHastings {

  /* Checks conditions of Decision Usage
   * 1. Double utilities
   */
  // Precondition check shared by all factory overloads; assumes utility nodes
  // have been generated so `value` is populated before matching.
  private def UsageCheck(utilityNodes: List[Element[_]], target: Decision[_, _]) = {
    utilityNodes.foreach { u =>
      u.value match {
        case d: Double => 1
        case _ => {
          throw new IllegalArgumentException("Only double utilities are allowed")
        }
      }
    }
  }

  /**
   * Create an Anytime DecisionMetropolis-Hastings sampler using the given proposal scheme with the given
   * decision.
   */
  /*
   * For decisions, we will create a dummy Element that is a tuple of the decision node and its parents. This will be used
   * to track expected utilities during the sampling
   *
   */
  def apply[T, U](scheme: ProposalScheme, utilityNodes: List[Element[_]],
    target: Decision[T, U])(implicit universe: Universe) = {
    utilityNodes.foreach(_.generate)
    UsageCheck(utilityNodes, target)
    new AnytimeDecisionMetropolisHastings[T, U](universe, scheme, 0, 1, utilityNodes, target)
  }

  /**
   * Create a OneTime DecisionMetropolis-Hastings sampler using the given number of samples and proposal
   * scheme with the given decision.
   */
  def apply[T, U](numSamples: Int, scheme: ProposalScheme, utilityNodes: List[Element[_]],
    target: Decision[T, U])(implicit universe: Universe) = {
    utilityNodes.foreach(_.generate)
    UsageCheck(utilityNodes, target)
    new OneTimeDecisionMetropolisHastings[T, U](universe, numSamples, scheme, 0, 1, utilityNodes, target)
  }

  /**
   * Create an Anytime DecisionMetropolis-Hastings sampler using the given proposal scheme and number
   * of burn-in samples with the given decision.
   */
  def apply[T, U](scheme: ProposalScheme, burnIn: Int, utilityNodes: List[Element[_]],
    target: Decision[T, U])(implicit universe: Universe) = {
    utilityNodes.foreach(_.generate)
    UsageCheck(utilityNodes, target)
    new AnytimeDecisionMetropolisHastings[T, U](universe, scheme, burnIn, 1, utilityNodes, target)
  }

  /**
   * Create a OneTime DecisionMetropolis-Hastings sampler using the given number of samples, proposal scheme, and
   * number of burn-in samples with the given decision.
   */
  def apply[T, U](numSamples: Int, scheme: ProposalScheme, burnIn: Int, utilityNodes: List[Element[_]],
    target: Decision[T, U])(implicit universe: Universe) = {
    utilityNodes.foreach(_.generate)
    UsageCheck(utilityNodes, target)
    new OneTimeDecisionMetropolisHastings[T, U](universe, numSamples, scheme, burnIn, 1, utilityNodes, target)
  }

  /**
   * Create an Anytime DecisionMetropolis-Hastings sampler using the given proposal scheme, number of burn-in
   * samples, and interval between samples with the given decision.
   */
  def apply[T, U](scheme: ProposalScheme, burnIn: Int, interval: Int, utilityNodes: List[Element[_]],
    target: Decision[T, U])(implicit universe: Universe) = {
    utilityNodes.foreach(_.generate)
    UsageCheck(utilityNodes, target)
    new AnytimeDecisionMetropolisHastings[T, U](universe, scheme, burnIn, interval, utilityNodes, target)
  }

  /**
   * Create a OneTime DecisionMetropolis-Hastings sampler using the given number of samples, proposal scheme,
   * number of burn-in samples, and interval between samples with the given decision.
   */
  def apply[T, U](numSamples: Int, scheme: ProposalScheme, burnIn: Int, interval: Int, utilityNodes: List[Element[_]],
    target: Decision[T, U])(implicit universe: Universe) = {
    utilityNodes.foreach(_.generate)
    UsageCheck(utilityNodes, target)
    new OneTimeDecisionMetropolisHastings[T, U](universe, numSamples, scheme, burnIn, interval: Int, utilityNodes, target)
  }

  // Snapshot of MH state used to accept/reject a proposal.
  private[figaro] case class State(oldValues: Map[Element[_], Any],
    oldRandomness: Map[Element[_], Any],
    proposalProb: Double,
    modelProb: Double,
    dissatisfied: scala.collection.mutable.Set[Element[_]])
}
jyuhuan/figaro
Figaro/src/main/scala/com/cra/figaro/algorithm/decision/DecisionMetropolisHastings.scala
Scala
bsd-3-clause
9,842
/* sbt -- Simple Build Tool * Copyright 2011 Mark Harrah */ package sbt import java.io.File import java.net.URI import Def.{ displayFull, ScopedKey, ScopeLocal, Setting } import Attributed.data import BuildPaths.outputDirectory import Scope.GlobalScope import BuildStreams.Streams import Path._ final class BuildStructure(val units: Map[URI, LoadedBuildUnit], val root: URI, val settings: Seq[Setting[_]], val data: Settings[Scope], val index: StructureIndex, val streams: State => Streams, val delegates: Scope => Seq[Scope], val scopeLocal: ScopeLocal) { val rootProject: URI => String = Load getRootProject units def allProjects: Seq[ResolvedProject] = units.values.flatMap(_.defined.values).toSeq def allProjects(build: URI): Seq[ResolvedProject] = units.get(build).toList.flatMap(_.defined.values) def allProjectRefs: Seq[ProjectRef] = units.toSeq flatMap { case (build, unit) => refs(build, unit.defined.values.toSeq) } def allProjectRefs(build: URI): Seq[ProjectRef] = refs(build, allProjects(build)) val extra: BuildUtil[ResolvedProject] = BuildUtil(root, units, index.keyIndex, data) private[this] def refs(build: URI, projects: Seq[ResolvedProject]): Seq[ProjectRef] = projects.map { p => ProjectRef(build, p.id) } } // information that is not original, but can be reconstructed from the rest of BuildStructure final class StructureIndex( val keyMap: Map[String, AttributeKey[_]], val taskToKey: Map[Task[_], ScopedKey[Task[_]]], val triggers: Triggers[Task], val keyIndex: KeyIndex, val aggregateKeyIndex: KeyIndex) /** * A resolved build unit. (`ResolvedBuildUnit` would be a better name to distinguish it from the loaded, but unresolved `BuildUnit`.) * @param unit The loaded, but unresolved [[BuildUnit]] this was resolved from. * @param defined The definitive map from project IDs to resolved projects. * These projects have had [[Reference]]s resolved and [[AutoPlugin]]s evaluated. * @param rootProjects The list of project IDs for the projects considered roots of this build. 
* The first root project is used as the default in several situations where a project is not otherwise selected. */ final class LoadedBuildUnit(val unit: BuildUnit, val defined: Map[String, ResolvedProject], val rootProjects: Seq[String], val buildSettings: Seq[Setting[_]]) extends BuildUnitBase { assert(!rootProjects.isEmpty, "No root projects defined for build unit " + unit) /** * The project to use as the default when one is not otherwise selected. * [[LocalRootProject]] resolves to this from within the same build. */ val root = rootProjects.head /** The base directory of the build unit (not the build definition).*/ def localBase = unit.localBase /** * The classpath to use when compiling against this build unit's publicly visible code. * It includes build definition and plugin classes, but not classes for .sbt file statements and expressions. */ def classpath: Seq[File] = unit.definitions.target ++ unit.plugins.classpath /** * The class loader to use for this build unit's publicly visible code. * It includes build definition and plugin classes, but not classes for .sbt file statements and expressions. */ def loader = unit.definitions.loader /** The imports to use for .sbt files, `consoleProject` and other contexts that use code from the build definition. */ def imports = BuildUtil.getImports(unit) override def toString = unit.toString } // TODO: figure out how to deprecate and drop buildNames /** * The built and loaded build definition, including loaded but unresolved [[Project]]s, for a build unit (for a single URI). * * @param base The base directory of the build definition, typically `<build base>/project/`. * @param loader The ClassLoader containing all classes and plugins for the build definition project. * Note that this does not include classes for .sbt files. * @param builds The list of [[Build]]s for the build unit. * In addition to auto-discovered [[Build]]s, this includes any auto-generated default [[Build]]s. 
* @param projects The list of all [[Project]]s from all [[Build]]s. * These projects have not yet been resolved, but they have had auto-plugins applied. * In particular, each [[Project]]'s `autoPlugins` field is populated according to their configured `plugins` * and their `settings` and `configurations` updated as appropriate. * @param buildNames No longer used and will be deprecated once feasible. */ final class LoadedDefinitions(val base: File, val target: Seq[File], val loader: ClassLoader, val builds: Seq[Build], val projects: Seq[Project], val buildNames: Seq[String]) /** Auto-detected top-level modules (as in `object X`) of type `T` paired with their source names. */ final class DetectedModules[T](val modules: Seq[(String, T)]) { /** * The source names of the modules. This is "X" in `object X`, as opposed to the implementation class name "X$". * The names are returned in a stable order such that `names zip values` pairs a name with the actual module. */ def names: Seq[String] = modules.map(_._1) /** * The singleton value of the module. * The values are returned in a stable order such that `names zip values` pairs a name with the actual module. */ def values: Seq[T] = modules.map(_._2) } /** Auto-detected auto plugin. */ case class DetectedAutoPlugin(val name: String, val value: AutoPlugin, val hasAutoImport: Boolean) /** * Auto-discovered modules for the build definition project. These include modules defined in build definition sources * as well as modules in binary dependencies. * * @param builds The [[Build]]s detected in the build definition. This does not include the default [[Build]] that sbt creates if none is defined. */ final class DetectedPlugins(val plugins: DetectedModules[Plugin], val autoPlugins: Seq[DetectedAutoPlugin], val builds: DetectedModules[Build]) { /** Sequence of import expressions for the build definition. This includes the names of the [[Plugin]], [[Build]], and [[AutoImport]] modules, but not the [[AutoPlugin]] modules. 
*/ lazy val imports: Seq[String] = BuildUtil.getImports(plugins.names ++ builds.names ++ (autoPlugins flatMap { case DetectedAutoPlugin(name, ap, hasAutoImport) => if (hasAutoImport) Some(name + ".autoImport") else None })) ++ BuildUtil.importNamesRoot(autoPlugins map { _.name }) /** A function to select the right [[AutoPlugin]]s from [[autoPlugins]] for a [[Project]]. */ lazy val deducePlugins: (Plugins, Logger) => Seq[AutoPlugin] = Plugins.deducer(autoPlugins.toList map { _.value }) } /** * The built and loaded build definition project. * @param base The base directory for the build definition project (not the base of the project itself). * @param pluginData Evaluated tasks/settings from the build definition for later use. * This is necessary because the build definition project is discarded. * @param loader The class loader for the build definition project, notably excluding classes used for .sbt files. * @param detected Auto-detected modules in the build definition. */ final class LoadedPlugins(val base: File, val pluginData: PluginData, val loader: ClassLoader, val detected: DetectedPlugins) { @deprecated("Use the primary constructor.", "0.13.2") def this(base: File, pluginData: PluginData, loader: ClassLoader, plugins: Seq[Plugin], pluginNames: Seq[String]) = this(base, pluginData, loader, new DetectedPlugins(new DetectedModules(pluginNames zip plugins), Nil, new DetectedModules(Nil)) ) @deprecated("Use detected.plugins.values.", "0.13.2") val plugins: Seq[Plugin] = detected.plugins.values @deprecated("Use detected.plugins.names.", "0.13.2") val pluginNames: Seq[String] = detected.plugins.names def fullClasspath: Seq[Attributed[File]] = pluginData.classpath def classpath = data(fullClasspath) } /** * The loaded, but unresolved build unit. * @param uri The uniquely identifying URI for the build. * @param localBase The working location of the build on the filesystem. 
* For local URIs, this is the same as `uri`, but for remote URIs, this is the local copy or workspace allocated for the build. */ final class BuildUnit(val uri: URI, val localBase: File, val definitions: LoadedDefinitions, val plugins: LoadedPlugins) { override def toString = if (uri.getScheme == "file") localBase.toString else (uri + " (locally: " + localBase + ")") } final class LoadedBuild(val root: URI, val units: Map[URI, LoadedBuildUnit]) { BuildUtil.checkCycles(units) def allProjectRefs: Seq[(ProjectRef, ResolvedProject)] = for ((uri, unit) <- units.toSeq; (id, proj) <- unit.defined) yield ProjectRef(uri, id) -> proj def extra(data: Settings[Scope])(keyIndex: KeyIndex): BuildUtil[ResolvedProject] = BuildUtil(root, units, keyIndex, data) private[sbt] def autos = GroupedAutoPlugins(units) } final class PartBuild(val root: URI, val units: Map[URI, PartBuildUnit]) sealed trait BuildUnitBase { def rootProjects: Seq[String]; def buildSettings: Seq[Setting[_]] } final class PartBuildUnit(val unit: BuildUnit, val defined: Map[String, Project], val rootProjects: Seq[String], val buildSettings: Seq[Setting[_]]) extends BuildUnitBase { def resolve(f: Project => ResolvedProject): LoadedBuildUnit = new LoadedBuildUnit(unit, defined mapValues f toMap, rootProjects, buildSettings) def resolveRefs(f: ProjectReference => ProjectRef): LoadedBuildUnit = resolve(_ resolve f) } object BuildStreams { type Streams = std.Streams[ScopedKey[_]] final val GlobalPath = "$global" final val BuildUnitPath = "$build" final val StreamsDirectory = "streams" def mkStreams(units: Map[URI, LoadedBuildUnit], root: URI, data: Settings[Scope]): State => Streams = s => s get Keys.stateStreams getOrElse std.Streams(path(units, root, data), displayFull, LogManager.construct(data, s)) def path(units: Map[URI, LoadedBuildUnit], root: URI, data: Settings[Scope])(scoped: ScopedKey[_]): File = resolvePath(projectPath(units, root, scoped, data), nonProjectPath(scoped)) def resolvePath(base: File, 
components: Seq[String]): File = (base /: components)((b, p) => new File(b, p)) def pathComponent[T](axis: ScopeAxis[T], scoped: ScopedKey[_], label: String)(show: T => String): String = axis match { case Global => GlobalPath case This => sys.error("Unresolved This reference for " + label + " in " + displayFull(scoped)) case Select(t) => show(t) } def nonProjectPath[T](scoped: ScopedKey[T]): Seq[String] = { val scope = scoped.scope pathComponent(scope.config, scoped, "config")(_.name) :: pathComponent(scope.task, scoped, "task")(_.label) :: pathComponent(scope.extra, scoped, "extra")(showAMap) :: scoped.key.label :: Nil } def showAMap(a: AttributeMap): String = a.entries.toSeq.sortBy(_.key.label).map { case AttributeEntry(key, value) => key.label + "=" + value.toString } mkString (" ") def projectPath(units: Map[URI, LoadedBuildUnit], root: URI, scoped: ScopedKey[_], data: Settings[Scope]): File = scoped.scope.project match { case Global => refTarget(GlobalScope, units(root).localBase, data) / GlobalPath case Select(br @ BuildRef(uri)) => refTarget(br, units(uri).localBase, data) / BuildUnitPath case Select(pr @ ProjectRef(uri, id)) => refTarget(pr, units(uri).defined(id).base, data) case Select(pr) => sys.error("Unresolved project reference (" + pr + ") in " + displayFull(scoped)) case This => sys.error("Unresolved project reference (This) in " + displayFull(scoped)) } def refTarget(ref: ResolvedReference, fallbackBase: File, data: Settings[Scope]): File = refTarget(GlobalScope.copy(project = Select(ref)), fallbackBase, data) def refTarget(scope: Scope, fallbackBase: File, data: Settings[Scope]): File = (Keys.target in scope get data getOrElse outputDirectory(fallbackBase).asFile) / StreamsDirectory }
xeno-by/old-scalameta-sbt
main/src/main/scala/sbt/BuildStructure.scala
Scala
bsd-3-clause
12,210
package com.gilt.play.json.controllers import com.gilt.play.json.templates.JsonFormat import play.api.libs.concurrent.Execution.Implicits.defaultContext import play.api.libs.json.{Json, Writes} import play.api.mvc._ import play.mvc.Http import scala.concurrent.Future trait PaginatedController { /** Should generated links be https (as opposed to http) */ def https: Boolean def uri(call: Call)(implicit request: Request[_]): String = call.absoluteURL(https) sealed trait IPagination { def link(url: String, dir: String) = s"""<$url>; rel="$dir"""" def nextLink(url: String) = link(url, "next") def prevLink(url: String) = link(url, "previous") def links(implicit request: Request[_]): Option[(String, String)] = { this match { case Pagination(Some(prev), Some(next)) => Some(("Link", Seq(prevLink(uri(prev)), nextLink(uri(next))).mkString(", "))) case Pagination(Some(prev), None) => Some(("Link", prevLink(uri(prev)))) case Pagination(None, Some(next)) => Some(("Link", nextLink(uri(next)))) case _ => None } } } object NoPagination extends IPagination case class Pagination(prev: Option[Call], next: Option[Call]) extends IPagination def paginate(items: Iterable[_], limit: Int, offset: Int)(callCapture: (Int, Int) => Call): IPagination = { val prev = if (offset <= 0) None else Some(callCapture(limit, math.max(0, offset - limit))) val next = if (items.isEmpty || items.size < limit) None else Some(callCapture(limit, offset + limit)) (prev, next) match { case (None, None) => NoPagination case _ => Pagination(prev, next) } } } trait JsonController extends Controller with PaginatedController { import scala.language.reflectiveCalls def errorView: {def apply(msgs: String*): JsonFormat.Appendable} override val BadRequest = new Status(Http.Status.BAD_REQUEST) { def apply(messages: String*): Result = Results.BadRequest(errorView(messages: _*)) } override val Ok = new Status(Http.Status.OK) { def apply[T](obj: T)(implicit writes: Writes[T], request: Request[_], pagination: IPagination = 
NoPagination): Result = { pagination.links.fold(Results.Ok(Json.toJson(obj))) { case links => Results.Ok(Json.toJson(obj)).withHeaders(links) } } } override val Created = new Status(Http.Status.CREATED) { def apply[T](obj: T)(implicit writes: Writes[T], request: Request[_], call: Call): Result = { Results.Created(Json.toJson(obj)).withHeaders(("Location", uri(call))) } } override val NotFound = new Status(Http.Status.NOT_FOUND) { def apply(messages: String*): Result = Results.NotFound(errorView(messages: _*)) } val OkOption = new Status(Http.Status.OK) { def apply[T](opt: Option[T], notFoundMessage: String = "") (implicit writes: Writes[T], request: Request[_], pagination: IPagination = NoPagination): Result = { opt.fold(NotFound(notFoundMessage))(Ok(_)) } } val OkFuture = new Status(Http.Status.OK) { def apply[T](fo: Future[T]) (implicit writes: Writes[T], request: Request[_], pagination: Future[IPagination] = Future.successful(NoPagination)): Future[Result] = { pagination flatMap { implicit p => fo map (Ok(_)) } } } val OkFutureOption = new Status(Http.Status.OK) { def apply[T](fo: Future[Option[T]], notFoundMessage: String = "") (implicit writes: Writes[T], request: Request[_], pagination: Future[IPagination] = Future.successful(NoPagination)): Future[Result] = { pagination flatMap { implicit p => fo map (_.fold(NotFound(notFoundMessage))(Ok(_))) } } } }
gilt/play-json-service-lib
play-2.3/src/main/scala/com/gilt/play/json/controllers/JsonController.scala
Scala
mit
3,808
package org.apache.spark.streaming.talos.perfcounter import org.apache.spark.streaming.talos.perfcounter.CounterType.CounterType import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ import scala.collection.mutable /** * Created by jiasheng on 19/12/2016. */ case class PerfBean( endpoint: String, metric: String, timestamp: Long, step: Int, value: Long, counterType: CounterType, tags: mutable.LinkedHashMap[String, String] ) { def toJsonStr: String = { val json = ("endpoint" -> endpoint) ~ ("metric" -> metric) ~ ("timestamp" -> timestamp) ~ ("step" -> step) ~ ("value" -> value) ~ ("counterType" -> counterType.toString) ~ ("tags" -> tags.map(t => s"${t._1}=${t._2}").mkString(",")) compact(render(json)) } } object CounterType extends Enumeration { type CounterType = Value val COUNTER = Value("COUNTER") val GAUGE = Value("GAUGE") }
XiaoMi/galaxy-sdk-java
galaxy-talos-client/galaxy-talos-spark/src/main/scala/org/apache/spark/streaming/talos/perfcounter/PerfBean.scala
Scala
apache-2.0
929
/* * Copyright (C) 2015 Stratio (http://stratio.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.crossdata.daos import com.stratio.common.utils.components.config.impl.{MapConfigComponent, TypesafeConfigComponent} import com.stratio.common.utils.components.dao.GenericDAOComponent import com.stratio.common.utils.components.logger.impl.SparkLoggerComponent import org.apache.spark.sql.crossdata.daos.DAOConstants._ import org.apache.spark.sql.crossdata.models.EphemeralStatusModel import org.apache.spark.sql.crossdata.serializers.CrossdataSerializer trait EphemeralTableStatusMapDAO extends GenericDAOComponent[EphemeralStatusModel] with MapConfigComponent with SparkLoggerComponent with CrossdataSerializer with PrefixedDAO { override implicit val formats = json4sJacksonFormats override val dao: DAO = new GenericDAO(Option(s"$BaseZKPath/$prefix$EphemeralTableStatusPath")) }
Stratio/crossdata
core/src/main/scala/org/apache/spark/sql/crossdata/daos/EphemeralTableStatusMapDAO.scala
Scala
apache-2.0
1,434
package com.github.jeanadrien.evrythng.scala.rest import com.github.jeanadrien.evrythng.scala.json.EvtJsonProtocol._ import com.github.jeanadrien.evrythng.scala.json.{Collection, Ref, Thng} import com.typesafe.scalalogging.LazyLogging /** * */ class CollectionContext(collectionId : Ref, val apiKey : String, projectScope : Option[Ref] = None) extends Environment with AuthorizedEnvironment with LazyLogging { self => override def defaultQueryParams : Seq[(String, String)] = super .defaultQueryParams ++ projectScope.map("project" -> _.toString) object thngs { def read = self.getPage[Thng](s"/collections/${collectionId}/thngs") def add(thngIds : List[Ref]) : EvtPutRequest[List[Ref], Collection] = self.put[List[Ref], Collection](s"/collections/${collectionId}/thngs", thngIds) def remove(thngId : Ref) = self.delete(s"/collections/${collectionId}/thngs/${thngId}") def removeAll() = self.delete(s"/collections/${collectionId}/thngs") } object collections { def read = self.getPage[Collection](s"/collections/${collectionId}/collections") def add(collectionIds : List[Ref]) = self.postAndForget[List[Ref]](s"/collections/${collectionId}/collections", collectionIds) def remove(toRemoveCollectionId : Ref) = self .delete(s"/collections/${collectionId}/collections/${toRemoveCollectionId}") def removeAll() = self.delete(s"/collections/${collectionId}/collections") } val actions = new ContextWithActions { override def apply( actionType : String ) : ActionContext = new ActionContext(s"/collections/${collectionId}", actionType, apiKey, projectScope) } }
jeanadrien/evrythng-scala-sdk
src/main/scala/com/github/jeanadrien/evrythng/scala/rest/CollectionContext.scala
Scala
apache-2.0
1,751
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sparklinedata.druid.client.test import org.apache.spark.sql.hive.test.sparklinedata.TestHive._ class HLLTest extends QueryExtTest { test("basicHLL", "select approx_count_distinct(city) " + "from zipCodes", 1, true, true, false, Seq(hasHLLAgg _) ) test("basicApproxCardinality", "select approx_count_distinct(zip_code) " + "from zipCodes", 1, true, true, false, Seq(hasCardinalityAgg _) ) test("hllWithFilter", "select approx_count_distinct(city) " + "from zipCodes where state = 'NY'", 1, true, true, false, Seq(hasHLLAgg _) ) test("hllWithJSFilter", "select approx_count_distinct(city) " + "from zipCodes where substring(state,1,1) = 'N'", 1, true, true, false, Seq(hasHLLAgg _) ) test("hllWithJSFilterOnFull", "select approx_count_distinct(city) " + "from zipCodesFull where substring(state,1,1) = 'N'", 1, true, true, false, Seq(hasHLLAgg _) ) test("hllSelect", "select city " + "from zipCodes where substring(state,1,1) = 'N'", 1, true, true, false ) test("spatialSelect", "select latitude, longitude " + "from zipCodes " + "where latitude > 42.5 and longitude is not null " + "limit 10000", 1, true, true, false, Seq(hasSpatialFilter _) ) test("spatialFilOnAgg", "select approx_count_distinct(city) " 
+ "from zipCodes " + "where latitude > 42.5 and longitude is not null ", 1, true, true, false, Seq(hasHLLAgg _, hasSpatialFilter _) ) test("combineSpatialFilter1", "select approx_count_distinct(city) " + "from zipCodes " + "where latitude > 0 and longitude is not null " + "and latitude < 18 and longitude > -80 and longitude < 10", 1, true, true, false, Seq(hasHLLAgg _, hasSpatialFilter _) ) test("combineSpatialFilter2", "select approx_count_distinct(city) " + "from zipCodes " + "where latitude > 0 and longitude is not null " + "and latitude < 18 or (longitude > -80 and longitude < 10)", 1, true, true, false, Seq(hasHLLAgg _, hasSpatialFilter _) ) test("combineSpatialFilter3", "select approx_count_distinct(city) " + "from zipCodes " + "where latitude > 0 and substring(state,1,1) = 'N' " + "and latitude < 18 and (longitude > -80 and longitude < 10)", 1, true, true, false, Seq(hasHLLAgg _, hasSpatialFilter _) ) test("combineSpatialFilter4", "select approx_count_distinct(city) " + "from zipCodes " + "where latitude > 0 and substring(state,1,1) = 'N' " + "and latitude < 18 or (longitude > -80 and longitude < 10)", 1, true, true, false, Seq(hasHLLAgg _, hasSpatialFilter _) ) }
SparklineData/spark-druid-olap
src/test/scala/org/sparklinedata/druid/client/test/HLLTest.scala
Scala
apache-2.0
3,708
import quoted.* class Foo { class Bar def foo()(using Quotes) = { Type[Bar] // error } }
dotty-staging/dotty
tests/neg-macros/i7013b.scala
Scala
apache-2.0
100
package delta.hazelcast import scala.concurrent.duration.DurationInt import com.hazelcast.core.IAtomicLong import scuff.LamportClock import scuff.LamportClock.CASLong import scuff.concurrent.ScuffScalaFuture import delta.EventSource import delta.LamportTicker object AtomicLongLamportTicker { def apply(al: IAtomicLong, es: EventSource[_, _]): LamportTicker = { val cas = new AtomicCAS(al) val lc = new LamportClock(cas) es.maxTick.await(11.seconds).foreach(lc.sync) LamportTicker(lc) } } private class AtomicCAS(al: IAtomicLong) extends CASLong { def value: Long = al.get def compAndSwap(expected: Long, update: Long): Boolean = al.compareAndSet(expected, update) def incrAndGet(): Long = al.incrementAndGet() }
nilskp/delta
delta-hazelcast/src/main/scala/delta/hazelcast/AtomicLongLamportTicker.scala
Scala
mit
744
/*********************************************************************** * Copyright (c) 2013-2020 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. ***********************************************************************/ package org.locationtech.geomesa.features.kryo.json import com.typesafe.scalalogging.LazyLogging import org.geotools.filter.FunctionExpressionImpl import org.geotools.filter.capability.FunctionNameImpl import org.geotools.filter.capability.FunctionNameImpl.parameter import org.locationtech.geomesa.utils.geotools.{SimpleFeaturePropertyAccessor, SimpleFeatureTypes} import org.opengis.feature.simple.SimpleFeature import org.opengis.filter.expression.VolatileFunction class JsonPathFilterFunction extends FunctionExpressionImpl( new FunctionNameImpl("jsonPath", parameter("value", classOf[String]), parameter("path", classOf[String])) ) with LazyLogging with VolatileFunction { override def evaluate(obj: java.lang.Object): AnyRef = { val sf = try { obj.asInstanceOf[SimpleFeature] } catch { case e: Exception => throw new IllegalArgumentException(s"Expected SimpleFeature, Received ${obj.getClass}. " + s"Only simple features are supported. ${obj.toString}", e) } val path = getExpression(0).evaluate(null).asInstanceOf[String] SimpleFeaturePropertyAccessor.getAccessor(sf, path) match { case Some(a) => a.get(sf, path, classOf[AnyRef]) case None => throw new RuntimeException(s"Can't handle property '$name' for feature type " + s"${sf.getFeatureType.getTypeName} ${SimpleFeatureTypes.encodeType(sf.getFeatureType)}") } } }
aheyne/geomesa
geomesa-features/geomesa-feature-kryo/src/main/scala/org/locationtech/geomesa/features/kryo/json/JsonPathFilterFunction.scala
Scala
apache-2.0
1,877
package org.scalaide.core.internal.builder.zinc import java.util.Optional import sbt.internal.inc.Analysis import xsbti.compile.AnalysisContents import xsbti.compile.CompileAnalysis import xsbti.compile.MiniSetup object AnalysisStore { import xsbti.compile.{ AnalysisStore => SbtAnalysisStore } def materializeLazy(backing: SbtAnalysisStore): SbtAnalysisStore = new SbtAnalysisStore { private def materializeApis(analysis: CompileAnalysis, setup: MiniSetup): AnalysisContents = { if (setup.storeApis()) { val apis = analysis match { case a: Analysis => a.apis } apis.internal.foreach { case (_, v) => v.api } apis.external.foreach { case (_, v) => v.api } } AnalysisContents.create(analysis, setup) } def set(contents: AnalysisContents): Unit = { backing.set(materializeApis(contents.getAnalysis, contents.getMiniSetup)) } def get(): Optional[AnalysisContents] = backing.get() } }
scala-ide/scala-ide
org.scala-ide.sdt.core/src/org/scalaide/core/internal/builder/zinc/AnalysisStore.scala
Scala
bsd-3-clause
957
/* * Copyright (c) 2014-2021 by The Monix Project Developers. * See the project homepage at: https://monix.io * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monix.eval import monix.execution.exceptions.DummyException import scala.util.{Failure, Success, Try} object CoevalRunSuite extends BaseTestSuite { def testRun(build: (() => Int) => Coeval[Int]): Unit = { val fa1 = build(() => 10 + 20) val eager1 = fa1.run() assertEquals(eager1, Coeval.Now(30)) assertEquals(eager1.toTry, Success(30)) assertEquals(eager1.toEither, Right(30)) assertEquals(eager1.run(), eager1) assertEquals(eager1.value(), 30) assertEquals(eager1(), 30) assert(eager1.isSuccess, "eager1.isSuccess") assert(!eager1.isError, "!eager1.isError") assertEquals(fa1.runAttempt(), Right(30)) assertEquals(fa1.runTry(), Success(30)) assertEquals(fa1.value(), 30) val dummy = DummyException("dummy") val fa2 = build(() => throw dummy) val eager2 = fa2.run() assertEquals(eager2, Coeval.Error(dummy)) assertEquals(eager2.toTry, Failure(dummy)) assertEquals(eager2.toEither, Left(dummy)) assertEquals(eager2.run(), eager2) intercept[DummyException] { eager2.value(); () } intercept[DummyException] { eager2(); () } assert(!eager2.isSuccess, "!eager2.isSuccess") assert(eager2.isError, "!eager2.isSuccess") assertEquals(fa2.runAttempt(), Left(dummy)) assertEquals(fa2.runTry(), Failure(dummy)) intercept[DummyException] { fa2.value(); () } () } test("Coeval.Always") { _ => testRun(f => Coeval.eval(f())) } test("Coeval.FlatMap") { 
_ => testRun(f => Coeval.eval(f()).flatMap(Coeval.pure)) } test("Coeval.Once") { _ => testRun(f => Coeval.evalOnce(f())) } test("Coeval.Eager") { _ => testRun(f => Coeval.fromTry(Try(f()))) } test("Eager(f)") { _ => testRun(f => Coeval.Eager(f())) } }
monix/monix
monix-eval/shared/src/test/scala/monix/eval/CoevalRunSuite.scala
Scala
apache-2.0
2,440
package com.twitter.finagle.memcached.stress import com.twitter.finagle.Service import com.twitter.finagle.builder.ClientBuilder import com.twitter.finagle.memcached.integration.InProcessMemcached import com.twitter.finagle.memcached.protocol._ import com.twitter.finagle.memcached.protocol.text.Memcached import com.twitter.finagle.memcached.util.ChannelBufferUtils._ import com.twitter.util.{Await, Time} import java.net.{InetAddress, InetSocketAddress} import org.junit.runner.RunWith import org.scalatest.junit.JUnitRunner import org.scalatest.{BeforeAndAfter, FunSuite} @RunWith(classOf[JUnitRunner]) class InterpreterServiceTest extends FunSuite with BeforeAndAfter { var server: InProcessMemcached = null var client: Service[Command, Response] = null before { server = new InProcessMemcached(new InetSocketAddress(InetAddress.getLoopbackAddress, 0)) val address = server.start().localAddress client = ClientBuilder() .hosts(address) .codec(new Memcached) .hostConnectionLimit(1) .build() } after { server.stop() } test("set & get") { val _key = "key" val value = "value" val zero = "0" val start = System.currentTimeMillis (0 until 100) map { i => val key = _key + i Await.result(client(Delete(key))) Await.result(client(Set(key, 0, Time.epoch, value))) assert(Await.result(client(Get(Seq(key)))) === Values(Seq(Value(key, value, None, Some(zero))))) } val end = System.currentTimeMillis // println("%d ms".format(end - start)) } }
kristofa/finagle
finagle-memcached/src/test/scala/com/twitter/finagle/memcached/stress/InterpreterServiceTest.scala
Scala
apache-2.0
1,560
package wop package object game { type Point = (Int, Int) }
ognick/wizards-of-portal
core/src/main/scala/wop/game/package.scala
Scala
gpl-3.0
63
package rere.sasl.util import org.scalatest.FlatSpec import org.scalatest.Matchers._ class NoCommaStringTest extends FlatSpec { behavior of "NoCommaString" it should "provide correct equals implementation" in { new NoCommaString("abc") shouldBe new NoCommaString("abc") new NoCommaString("abc").equals("abc") shouldBe false } it should "provide correct hashCode implementation" in { new NoCommaString("abc").hashCode() shouldBe new NoCommaString("abc").hashCode() } it should "return original string as result of toString method" in { new NoCommaString("abc").toString shouldBe "abc" } }
pbaun/rere
modules/sasl/src/test/scala/rere/sasl/util/NoCommaStringTest.scala
Scala
apache-2.0
624
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kafka.utils import scala.collection.mutable.PriorityQueue import java.util.concurrent.{Delayed, ScheduledFuture, TimeUnit} import org.apache.kafka.common.utils.Time /** * A mock scheduler that executes tasks synchronously using a mock time instance. Tasks are executed synchronously when * the time is advanced. This class is meant to be used in conjunction with MockTime. * * Example usage * <code> * val time = new MockTime * time.scheduler.schedule("a task", println("hello world: " + time.milliseconds), delay = 1000) * time.sleep(1001) // this should cause our scheduled task to fire * </code> * * Incrementing the time to the exact next execution time of a task will result in that task executing (it as if execution itself takes no time). */ class MockScheduler(val time: Time) extends Scheduler { /* a priority queue of tasks ordered by next execution time */ private val tasks = new PriorityQueue[MockTask]() def isStarted = true def startup(): Unit = {} def shutdown(): Unit = { this synchronized { tasks.foreach(_.fun()) tasks.clear() } } /** * Check for any tasks that need to execute. 
Since this is a mock scheduler this check only occurs * when this method is called and the execution happens synchronously in the calling thread. * If you are using the scheduler associated with a MockTime instance this call be triggered automatically. */ def tick(): Unit = { this synchronized { val now = time.milliseconds while(tasks.nonEmpty && tasks.head.nextExecution <= now) { /* pop and execute the task with the lowest next execution time */ val curr = tasks.dequeue curr.fun() /* if the task is periodic, reschedule it and re-enqueue */ if(curr.periodic) { curr.nextExecution += curr.period this.tasks += curr } } } } def schedule(name: String, fun: () => Unit, delay: Long = 0, period: Long = -1, unit: TimeUnit = TimeUnit.MILLISECONDS): ScheduledFuture[Unit] = { var task : MockTask = null this synchronized { task = MockTask(name, fun, time.milliseconds + delay, period = period, time=time) tasks += task tick() } task } def clear(): Unit = { this synchronized { tasks.clear() } } } case class MockTask(name: String, fun: () => Unit, var nextExecution: Long, period: Long, time: Time) extends ScheduledFuture[Unit] { def periodic = period >= 0 def compare(t: MockTask): Int = { if(t.nextExecution == nextExecution) 0 else if (t.nextExecution < nextExecution) -1 else 1 } /** * Not used, so not not fully implemented */ def cancel(mayInterruptIfRunning: Boolean) : Boolean = { false } def get(): Unit = { } def get(timeout: Long, unit: TimeUnit): Unit = { } def isCancelled: Boolean = { false } def isDone: Boolean = { false } def getDelay(unit: TimeUnit): Long = { this synchronized { time.milliseconds - nextExecution } } def compareTo(o : Delayed) : Int = { this.getDelay(TimeUnit.MILLISECONDS).compareTo(o.getDelay(TimeUnit.MILLISECONDS)) } } object MockTask { implicit def MockTaskOrdering : Ordering[MockTask] = new Ordering[MockTask] { def compare(x: MockTask, y: MockTask): Int = { x.compare(y) } } }
noslowerdna/kafka
core/src/test/scala/unit/kafka/utils/MockScheduler.scala
Scala
apache-2.0
4,205
package notebook.io import scala.concurrent.Future import scala.util.{Failure, Success, Try} object FutureUtil { // remove when scala.version >= 2.11 def tryToFuture[T](t:Try[T]):Future[T] = t match { case Success(s) => Future.successful(s) case Failure(f) => Future.failed(f) } implicit class TryToFutureConverter[T](t: Try[T]) { def toFuture: Future[T] = { tryToFuture(t) } } }
andypetrella/spark-notebook
modules/core/src/main/scala/notebook/io/FutureUtil.scala
Scala
apache-2.0
417
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.catalyst.analysis import org.apache.spark.SparkFunSuite import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.catalyst.dsl.expressions._ import org.apache.spark.sql.catalyst.dsl.plans._ import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.expressions.aggregate._ import org.apache.spark.sql.catalyst.plans.logical.LocalRelation import org.apache.spark.sql.types._ class ExpressionTypeCheckingSuite extends SparkFunSuite { val testRelation = LocalRelation( Symbol("intField").int, Symbol("stringField").string, Symbol("booleanField").boolean, Symbol("decimalField").decimal(8, 0), Symbol("arrayField").array(StringType), Symbol("mapField").map(StringType, LongType)) def assertError(expr: Expression, errorMessage: String): Unit = { val e = intercept[AnalysisException] { assertSuccess(expr) } assert(e.getMessage.contains( s"cannot resolve '${expr.sql}' due to data type mismatch:")) assert(e.getMessage.contains(errorMessage)) } def assertSuccess(expr: Expression): Unit = { val analyzed = testRelation.select(expr.as("c")).analyze SimpleAnalyzer.checkAnalysis(analyzed) } def assertErrorForDifferingTypes(expr: Expression): Unit = { 
assertError(expr, s"differing types in '${expr.sql}'") } test("check types for unary arithmetic") { assertError(BitwiseNot(Symbol("stringField")), "requires integral type") } test("check types for binary arithmetic") { // We will cast String to Double for binary arithmetic assertSuccess(Add(Symbol("intField"), Symbol("stringField"))) assertSuccess(Subtract(Symbol("intField"), Symbol("stringField"))) assertSuccess(Multiply(Symbol("intField"), Symbol("stringField"))) assertSuccess(Divide(Symbol("intField"), Symbol("stringField"))) assertSuccess(Remainder(Symbol("intField"), Symbol("stringField"))) // checkAnalysis(BitwiseAnd(Symbol("intField"), Symbol("stringField"))) assertErrorForDifferingTypes(Add(Symbol("intField"), Symbol("booleanField"))) assertErrorForDifferingTypes(Subtract(Symbol("intField"), Symbol("booleanField"))) assertErrorForDifferingTypes(Multiply(Symbol("intField"), Symbol("booleanField"))) assertErrorForDifferingTypes(Divide(Symbol("intField"), Symbol("booleanField"))) assertErrorForDifferingTypes(Remainder(Symbol("intField"), Symbol("booleanField"))) assertErrorForDifferingTypes(BitwiseAnd(Symbol("intField"), Symbol("booleanField"))) assertErrorForDifferingTypes(BitwiseOr(Symbol("intField"), Symbol("booleanField"))) assertErrorForDifferingTypes(BitwiseXor(Symbol("intField"), Symbol("booleanField"))) assertError(Add(Symbol("booleanField"), Symbol("booleanField")), "requires (numeric or interval or interval day to second or interval year to month) type") assertError(Subtract(Symbol("booleanField"), Symbol("booleanField")), "requires (numeric or interval or interval day to second or interval year to month) type") assertError(Multiply(Symbol("booleanField"), Symbol("booleanField")), "requires numeric type") assertError(Divide(Symbol("booleanField"), Symbol("booleanField")), "requires (double or decimal) type") assertError(Remainder(Symbol("booleanField"), Symbol("booleanField")), "requires numeric type") assertError(BitwiseAnd(Symbol("booleanField"), 
Symbol("booleanField")), "requires integral type") assertError(BitwiseOr(Symbol("booleanField"), Symbol("booleanField")), "requires integral type") assertError(BitwiseXor(Symbol("booleanField"), Symbol("booleanField")), "requires integral type") } test("check types for predicates") { // We will cast String to Double for binary comparison assertSuccess(EqualTo(Symbol("intField"), Symbol("stringField"))) assertSuccess(EqualNullSafe(Symbol("intField"), Symbol("stringField"))) assertSuccess(LessThan(Symbol("intField"), Symbol("stringField"))) assertSuccess(LessThanOrEqual(Symbol("intField"), Symbol("stringField"))) assertSuccess(GreaterThan(Symbol("intField"), Symbol("stringField"))) assertSuccess(GreaterThanOrEqual(Symbol("intField"), Symbol("stringField"))) // We will transform EqualTo with numeric and boolean types to CaseKeyWhen assertSuccess(EqualTo(Symbol("intField"), Symbol("booleanField"))) assertSuccess(EqualNullSafe(Symbol("intField"), Symbol("booleanField"))) assertErrorForDifferingTypes(EqualTo(Symbol("intField"), Symbol("mapField"))) assertErrorForDifferingTypes(EqualNullSafe(Symbol("intField"), Symbol("mapField"))) assertErrorForDifferingTypes(LessThan(Symbol("intField"), Symbol("booleanField"))) assertErrorForDifferingTypes(LessThanOrEqual(Symbol("intField"), Symbol("booleanField"))) assertErrorForDifferingTypes(GreaterThan(Symbol("intField"), Symbol("booleanField"))) assertErrorForDifferingTypes(GreaterThanOrEqual(Symbol("intField"), Symbol("booleanField"))) assertError(EqualTo(Symbol("mapField"), Symbol("mapField")), "EqualTo does not support ordering on type map") assertError(EqualNullSafe(Symbol("mapField"), Symbol("mapField")), "EqualNullSafe does not support ordering on type map") assertError(LessThan(Symbol("mapField"), Symbol("mapField")), "LessThan does not support ordering on type map") assertError(LessThanOrEqual(Symbol("mapField"), Symbol("mapField")), "LessThanOrEqual does not support ordering on type map") 
assertError(GreaterThan(Symbol("mapField"), Symbol("mapField")), "GreaterThan does not support ordering on type map") assertError(GreaterThanOrEqual(Symbol("mapField"), Symbol("mapField")), "GreaterThanOrEqual does not support ordering on type map") assertError(If(Symbol("intField"), Symbol("stringField"), Symbol("stringField")), "type of predicate expression in If should be boolean") assertErrorForDifferingTypes( If(Symbol("booleanField"), Symbol("intField"), Symbol("booleanField"))) assertError( CaseWhen(Seq((Symbol("booleanField").attr, Symbol("intField").attr), (Symbol("booleanField").attr, Symbol("mapField").attr))), "THEN and ELSE expressions should all be same type or coercible to a common type") assertError( CaseKeyWhen(Symbol("intField"), Seq(Symbol("intField"), Symbol("stringField"), Symbol("intField"), Symbol("mapField"))), "THEN and ELSE expressions should all be same type or coercible to a common type") assertError( CaseWhen(Seq((Symbol("booleanField").attr, Symbol("intField").attr), (Symbol("intField").attr, Symbol("intField").attr))), "WHEN expressions in CaseWhen should all be boolean type") } test("check types for aggregates") { // We use AggregateFunction directly at here because the error will be thrown from it // instead of from AggregateExpression, which is the wrapper of an AggregateFunction. 
// We will cast String to Double for sum and average assertSuccess(Sum(Symbol("stringField"))) assertSuccess(Average(Symbol("stringField"))) assertSuccess(Min(Symbol("arrayField"))) assertSuccess(new BoolAnd(Symbol("booleanField"))) assertSuccess(new BoolOr(Symbol("booleanField"))) assertError(Min(Symbol("mapField")), "min does not support ordering on type") assertError(Max(Symbol("mapField")), "max does not support ordering on type") assertError(Sum(Symbol("booleanField")), "function sum requires numeric or interval types") assertError(Average(Symbol("booleanField")), "function average requires numeric or interval types") } test("check types for others") { assertError(CreateArray(Seq(Symbol("intField"), Symbol("booleanField"))), "input to function array should all be the same type") assertError(Coalesce(Seq(Symbol("intField"), Symbol("booleanField"))), "input to function coalesce should all be the same type") assertError(Coalesce(Nil), "function coalesce requires at least one argument") assertError(new Murmur3Hash(Nil), "function hash requires at least one argument") assertError(new XxHash64(Nil), "function xxhash64 requires at least one argument") assertError(Explode(Symbol("intField")), "input to function explode should be array or map type") assertError(PosExplode(Symbol("intField")), "input to function explode should be array or map type") } test("check types for CreateNamedStruct") { assertError( CreateNamedStruct(Seq("a", "b", 2.0)), "even number of arguments") assertError( CreateNamedStruct(Seq(1, "a", "b", 2.0)), "Only foldable string expressions are allowed to appear at odd position") assertError( CreateNamedStruct(Seq(Symbol("a").string.at(0), "a", "b", 2.0)), "Only foldable string expressions are allowed to appear at odd position") assertError( CreateNamedStruct(Seq(Literal.create(null, StringType), "a")), "Field name should not be null") } test("check types for CreateMap") { assertError(CreateMap(Seq("a", "b", 2.0)), "even number of arguments") 
assertError( CreateMap(Seq(Symbol("intField"), Symbol("stringField"), Symbol("booleanField"), Symbol("stringField"))), "keys of function map should all be the same type") assertError( CreateMap(Seq(Symbol("stringField"), Symbol("intField"), Symbol("stringField"), Symbol("booleanField"))), "values of function map should all be the same type") } test("check types for ROUND/BROUND") { assertSuccess(Round(Literal(null), Literal(null))) assertSuccess(Round(Symbol("intField"), Literal(1))) assertError(Round(Symbol("intField"), Symbol("intField")), "Only foldable Expression is allowed") assertError(Round(Symbol("intField"), Symbol("booleanField")), "requires int type") assertError(Round(Symbol("intField"), Symbol("mapField")), "requires int type") assertError(Round(Symbol("booleanField"), Symbol("intField")), "requires numeric type") assertSuccess(BRound(Literal(null), Literal(null))) assertSuccess(BRound(Symbol("intField"), Literal(1))) assertError(BRound(Symbol("intField"), Symbol("intField")), "Only foldable Expression is allowed") assertError(BRound(Symbol("intField"), Symbol("booleanField")), "requires int type") assertError(BRound(Symbol("intField"), Symbol("mapField")), "requires int type") assertError(BRound(Symbol("booleanField"), Symbol("intField")), "requires numeric type") } test("check types for Greatest/Least") { for (operator <- Seq[(Seq[Expression] => Expression)](Greatest, Least)) { assertError(operator(Seq(Symbol("booleanField"))), "requires at least two arguments") assertError(operator(Seq(Symbol("intField"), Symbol("stringField"))), "should all have the same type") assertError(operator(Seq(Symbol("mapField"), Symbol("mapField"))), "does not support ordering") } } }
wangmiao1981/spark
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
Scala
apache-2.0
11,827
package com.github.symcal.dp import scala.annotation.tailrec import scala.reflect.ClassTag import spire.syntax.cfor._ /** Potentially very large ( > 2^31) array with fractal structure. * New memory allocations are limited to arrays of fixed base length. **/ final class Massif[@specialized(Int, Long) T: ClassTag](init_length: Long, fill_value: T) { private[dp] var container_massif: ContainerMassif[T] = new BaseMassif(math.min(init_length, Massif.base_array_length).toInt, fill_value) def apply(i: Long): T = container_massif.apply(i) def update(i: Long, new_value: T): Unit = container_massif.update(i, new_value) def length: Long = container_massif.length def fill(x: T): Unit = container_massif.fill(x) @tailrec def realloc(new_size: Long, fill_value: T): Unit = { if (new_size > container_massif.max_capacity) { // Need to upgrade our level by create an upper-level `OverMassif`. container_massif.realloc(container_massif.max_capacity, fill_value) // Make sure it is full before inserting it into an upper-level `OverMassif`. val new_scale = container_massif.max_capacity val new_container: OverMassif[T] = new OverMassif(new_scale, fill_value) new_container.set_first(container_massif) container_massif = new_container realloc(new_size, fill_value) // We may need to repeat this process. } else if (new_size < container_massif.max_capacity / Massif.branch_array_length) { // We may need to downgrade our level. if (length > Massif.base_array_length) { // We have an `OverMassif`. Downgrade one level and realloc recursively. container_massif = container_massif.get_first realloc(new_size, fill_value) } else { // We have a `BaseMassif`. container_massif.realloc(new_size, fill_value) } } else { // We do not need to upgrade or downgrade our level. Let the container handle the rest of the realloc. 
container_massif.realloc(new_size, fill_value) } } realloc(init_length, fill_value) } private[dp] sealed trait ContainerMassif[@specialized(Int, Long) T] { def apply(i: Long): T def update(i: Long, new_value: T): Unit def length: Long def max_capacity: Long private[dp] def realloc(new_size: Long, fill_value: T): Unit def fill(x: T): Unit private[dp] def get_first: ContainerMassif[T] } private[dp] final class BaseMassif[@specialized(Int, Long) T: ClassTag](init_length: Int, fill_value: T) extends ContainerMassif[T] { def apply(i: Long): T = mantissa.apply(i.toInt) override def update(i: Long, new_value: T): Unit = mantissa.update(i.toInt, new_value) /** The length of the `mantissa` array that is currently being used. * */ private var used_length: Int = init_length private val mantissa: Array[T] = new Array(Massif.base_array_length) // It seems that writing `Array[T]` here will introduce boxing. override def max_capacity: Long = Massif.base_array_length override def length: Long = used_length private[dp] override def realloc(new_size: Long, fill_value: T): Unit = { val effective_size = math.min(new_size, max_capacity).toInt // Fill with zeros if we are increasing the length. if (effective_size > used_length) { fill(fill_value, used_length, effective_size - used_length) } used_length = effective_size } def fill(x: T, init_offset: Int, count: Int): Unit = cfor(init_offset)(_ < init_offset + count, _ + 1) { i ⇒ mantissa.update(i, x) } override def fill(x: T): Unit = fill(x, 0, used_length) private[dp] override def get_first: ContainerMassif[T] = this fill(fill_value) // This is perhaps unnecessary, but for some reason, heap space is exceeded if this is not done! } private[dp] final class OverMassif[@specialized(Int, Long) T: ClassTag](scale: Long, fill_value: T) extends ContainerMassif[T] { def apply(i: Long): T = mantissa.apply((i / scale).toInt).apply(i % scale) /** The length of the `mantissa` array that is currently being used. 
* */ private var used_length: Int = 0 // Need to investigate: This line, while unused, causes an IllegalAccessError. Can a specialized class have `val`s? // val total_capacity = scale * Massif.branch_array_length override def max_capacity: Long = scale * Massif.branch_array_length private val mantissa: Array[ContainerMassif[T]] = new Array(Massif.branch_array_length) override def length: Long = (used_length - 1) * scale + mantissa(used_length - 1).length override def update(i: Long, new_value: T): Unit = mantissa.apply((i / scale).toInt).update(i % scale, new_value) private[dp] def set_first(m: ContainerMassif[T]): Unit = { mantissa.update(0, m) used_length = 1 } override def fill(x: T): Unit = cfor(0)(_ < used_length, _ + 1) { i ⇒ mantissa(i).fill(x) } private val new_subarray: () ⇒ ContainerMassif[T] = { if (scale > Massif.base_array_length) () ⇒ new OverMassif(scale / Massif.branch_array_length, fill_value) else () ⇒ new BaseMassif(Massif.base_array_length, fill_value) } private[dp] override def realloc(new_size: Long, fill_value: T): Unit = { val new_used_length = ((new_size + scale - 1) / scale).toInt val elements_differ = new_used_length - used_length if (elements_differ > 0) { // We need to allocate more elements of `mantissa`. cfor(used_length)(_ < new_used_length, _ + 1) { i ⇒ mantissa(i) = new_subarray() if (i < new_used_length - 1) mantissa(i).realloc(scale, fill_value) // Make them full until last-but-one. } } else if (elements_differ < 0) { // We need to use fewer elements of `mantissa`. cfor(new_used_length)(_ < used_length, _ + 1) { i ⇒ mantissa(i) = null } } else { // We need to realloc just one element of `mantissa`. // Nothing to do here since `used_length` remains unchanged. } used_length = new_used_length // At this time, `used_length` has been already updated and points to the last used element after all reallocations. 
mantissa(used_length - 1).realloc(new_size - scale * (new_used_length - 1), fill_value) } private[dp] override def get_first: ContainerMassif[T] = mantissa(0) } object Massif { final val base_array_length: Int = 1 << 12 final val branch_array_length: Int = 1 << 11 }
winitzki/dpnum
dpint/src/main/scala/com/github/symcal/dp/Massif.scala
Scala
apache-2.0
6,364
package mesosphere.marathon.upgrade import akka.actor.{ ActorRef, ActorSystem, Props } import akka.event.EventStream import akka.pattern.ask import akka.testkit.TestActor.{ AutoPilot, NoAutoPilot } import akka.testkit.{ TestActorRef, TestKit, TestProbe } import akka.util.Timeout import mesosphere.marathon.io.storage.StorageProvider import mesosphere.marathon.state.PathId._ import mesosphere.marathon.state.{ AppDefinition, AppRepository, Group, MarathonStore } import mesosphere.marathon.tasks.{ TaskQueue, TaskTracker } import mesosphere.marathon.upgrade.DeploymentActor.Cancel import mesosphere.marathon.upgrade.DeploymentManager.{ CancelDeployment, PerformDeployment } import mesosphere.marathon.{ MarathonConf, SchedulerActions } import org.apache.mesos.SchedulerDriver import org.apache.mesos.state.InMemoryState import org.scalatest.mock.MockitoSugar import org.scalatest.{ BeforeAndAfter, BeforeAndAfterAll, FunSuiteLike, Matchers } import scala.concurrent.Await import scala.concurrent.duration._ class DeploymentManagerTest extends TestKit(ActorSystem("System")) with FunSuiteLike with Matchers with BeforeAndAfter with BeforeAndAfterAll with MockitoSugar { override protected def afterAll(): Unit = { super.afterAll() system.shutdown() } var driver: SchedulerDriver = _ var eventBus: EventStream = _ var taskQueue: TaskQueue = _ var config: MarathonConf = _ var taskTracker: TaskTracker = _ var scheduler: SchedulerActions = _ var appRepo: AppRepository = _ var storage: StorageProvider = _ before { driver = mock[SchedulerDriver] eventBus = mock[EventStream] taskQueue = mock[TaskQueue] config = mock[MarathonConf] taskTracker = new TaskTracker(new InMemoryState, config) scheduler = mock[SchedulerActions] storage = mock[StorageProvider] appRepo = new AppRepository(new MarathonStore[AppDefinition](new InMemoryState, () => AppDefinition())) } test("deploy") { val manager = TestActorRef[DeploymentManager](Props(classOf[DeploymentManager], appRepo, taskTracker, taskQueue, scheduler, 
storage, eventBus)) val app = AppDefinition("app".toRootPath) val oldGroup = Group("/".toRootPath) val newGroup = Group("/".toRootPath, Set(app)) val plan = DeploymentPlan(oldGroup, newGroup) manager ! PerformDeployment(driver, plan) awaitCond( manager.underlyingActor.runningDeployments.contains(plan.id), 5.seconds ) } test("StopActor") { val manager = TestActorRef[DeploymentManager](Props(classOf[DeploymentManager], appRepo, taskTracker, taskQueue, scheduler, storage, eventBus)) val probe = TestProbe() probe.setAutoPilot(new AutoPilot { override def run(sender: ActorRef, msg: Any): AutoPilot = msg match { case Cancel(_) => system.stop(probe.ref) NoAutoPilot } }) val ex = new Exception val res = manager.underlyingActor.stopActor(probe.ref, ex) Await.result(res, 5.seconds) should be(true) } test("Cancel deployment") { val manager = TestActorRef[DeploymentManager](Props(classOf[DeploymentManager], appRepo, taskTracker, taskQueue, scheduler, storage, eventBus)) implicit val timeout = Timeout(1.minute) val app = AppDefinition("app".toRootPath) val oldGroup = Group("/".toRootPath) val newGroup = Group("/".toRootPath, Set(app)) val plan = DeploymentPlan(oldGroup, newGroup) val res = manager ? PerformDeployment(driver, plan) manager ! CancelDeployment(plan.id, new Exception) intercept[Exception] { Await.result(res, 5.seconds) } } }
tnachen/marathon
src/test/scala/mesosphere/marathon/upgrade/DeploymentManagerTest.scala
Scala
apache-2.0
3,627
package cats.examples.typeclasses.semigroupsandmonads import cats._ import cats.implicits._ /** * Monoid extends Semigroup, adding an empty method, that returns a value that * can be combined with other values without modifying the other value. * * For example Monoid[String] may define empty as "", Monoid[Int] 0 etc... * * See http://typelevel.org/cats/typeclasses/monoid.html */ object MonoidExample extends App { // Examples of empty values assert(Monoid[String].empty == "") assert(Monoid[Int].empty == 0) assert(Monoid[Option[Int]].empty == None) // The empty value provides a default value allowing us to combine elements // of a collection that could be empty. If the collection is empty then we // can use the empty value as a fallback rather. assert(Monoid[String].combineAll(List("a", "b", "c")) == "abc") assert(Monoid[String].combineAll(List.empty[String]) == "") // Again, the advantage here is composition. Monoids can be composed to // operate on more complex types. assert { val combined = Monoid[Map[String,Int]].combineAll(List( Map("a" -> 1, "b" -> 2), Map("a" -> 3) )) combined == Map("a" -> 4, "b" -> 2) } assert(Monoid[Map[String,Int]].combineAll(List.empty) == Map.empty) // Monoids can also be used to combine values of a given type, where a // monoid exists for the type, for example by using foldMap from Foldable, // which combines results using the monoid available for the type being // mapped over. val l = List(1,2,3,4,5) assert(l.foldMap(identity) == 15) assert(l.foldMap(i => i.toString) == "12345") // Cats also provides support for a mapping function that returns a tuple. assert(l.foldMap(i => (i, i.toString)) == (15, "12345")) }
carwynellis/cats-examples
src/main/scala/cats/examples/typeclasses/semigroupsandmonads/MonoidExample.scala
Scala
mit
1,761
package dotty.tools
package dotc
package printing

import core._
import Texts._, ast.Trees._
import Types.{Type, SingletonType}, Symbols.Symbol, Scopes.Scope, Constants.Constant, Names.Name, Denotations._, Annotations.Annotation
import typer.Implicits.SearchResult
import util.SourcePosition
import typer.ImportInfo
import scala.annotation.internal.sharable

/** The base class of all printers */
abstract class Printer {

  // Current precedence context; temporarily mutated by atPrec/changePrec
  // and always restored afterwards.
  private var prec: Precedence = GlobalPrec

  /** The current precedence level.
   *  When pretty-printing arguments of operator `op`, `currentPrecedence` must equal `op`'s precedence level,
   *  so that pretty-printing expressions using lower-precedence operators can insert parentheses automatically
   *  by calling `changePrec`.
   */
  def currentPrecedence: Precedence = prec

  /** Generate text using `op`, assuming a given precedence level `prec`.
   *
   *  ### `atPrec` vs `changePrec`
   *
   *  This is to be used when changing precedence inside some sort of parentheses:
   *  for instance, to print `T[A]` use
   *  `toText(T) ~ '[' ~ atPrec(GlobalPrec) { toText(A) } ~ ']'`.
   *
   *  If the presence of the parentheses depends on precedence, inserting them manually is most certainly a bug.
   *  Use `changePrec` instead to generate them exactly when needed.
   */
  def atPrec(prec: Precedence)(op: => Text): Text = {
    val outerPrec = this.prec
    this.prec = prec
    try op finally this.prec = outerPrec
  }

  /** Generate text using `op`, assuming a given precedence level `prec`.
   *  If new level `prec` is lower than previous level, put text in parentheses.
   *
   *  ### `atPrec` vs `changePrec`
   *
   *  To pretty-print `A op B`, you need something like
   *  `changePrec(parsing.precedence(op, isType)) { toText(a) ~ op ~ toText(b) }` // BUGGY
   *  that will insert parentheses around `A op B` if, for instance, the
   *  preceding operator has higher precedence.
   *
   *  But that does not handle infix operators with left- or right- associativity.
   *
   *  If op and op' have the same precedence and associativity,
   *  A op B op' C parses as (A op B) op' C if op and op' are left-associative, and as
   *  A op (B op' C) if they're right-associative, so we need respectively
   *  ```scala
   *  val isType = ??? // is this a term or type operator?
   *  val prec = parsing.precedence(op, isType)
   *  // either:
   *  changePrec(prec) { toText(a) ~ op ~ atPrec(prec + 1) { toText(b) } } // for left-associative op and op'
   *  // or:
   *  changePrec(prec) { atPrec(prec + 1) { toText(a) } ~ op ~ toText(b) } // for right-associative op and op'
   *  ```
   */
  def changePrec(prec: Precedence)(op: => Text): Text =
    if (prec < this.prec) atPrec(prec) ("(" ~ op ~ ")") else atPrec(prec)(op)

  /** The name, possibly with namespace suffix if debugNames is set:
   *  /L for local names, /V for other term names, /T for type names
   */
  def nameString(name: Name): String

  /** The name of the given symbol.
   *  If !settings.debug, the original name where
   *  expansions of operators are translated back to operator symbol.
   *  E.g. $eq => =.
   *  If settings.uniqid, adds id.
   */
  def nameString(sym: Symbol): String

  /** The fully qualified name of the symbol */
  def fullNameString(sym: Symbol): String

  /** The kind of the symbol */
  def kindString(sym: Symbol): String

  /** The name as a text */
  def toText(name: Name): Text

  /** Textual representation, including symbol's kind e.g., "class Foo", "method Bar".
   *  If hasMeaninglessName is true, uses the owner's name to disambiguate identity.
   */
  def toText(sym: Symbol): Text

  /** Textual representation of singleton type reference */
  def toTextRef(tp: SingletonType): Text

  /** Textual representation of symbol's declaration */
  def dclText(sym: Symbol): Text

  /** Textual representation of single denotation's declaration */
  def dclText(sd: SingleDenotation): Text

  /** If symbol's owner is a printable class C, the text "in C", otherwise "" */
  def locationText(sym: Symbol): Text

  /** Textual representation of symbol and its location */
  def locatedText(sym: Symbol): Text

  /** A description of sym's location */
  def extendedLocationText(sym: Symbol): Text

  /** Textual representation of denotation */
  def toText(denot: Denotation): Text

  /** Textual representation of constant */
  def toText(const: Constant): Text

  /** Textual representation of annotation */
  def toText(annot: Annotation): Text

  /** Textual representation of type */
  def toText(tp: Type): Text

  /** Textual representation of all symbols in given list,
   *  using `dclText` for displaying each.
   */
  // NOTE(review): the default separator is literally backslash-n ("\\n") as
  // written here; a real newline ("\n") looks intended — confirm upstream.
  def dclsText(syms: List[Symbol], sep: String = "\\n"): Text

  /** Textual representation of all definitions in a scope using `dclText` for each */
  def toText(sc: Scope): Text

  /** Textual representation of tree */
  def toText[T >: Untyped](tree: Tree[T]): Text

  /** Textual representation of source position */
  def toText(pos: SourcePosition): Text

  /** Textual representation of implicit search result */
  def toText(result: SearchResult): Text

  /** Textual representation of info relating to an import clause */
  def toText(result: ImportInfo): Text

  /** Render element within highest precedence */
  def toTextLocal(elem: Showable): Text =
    atPrec(DotPrec) { elem.toText(this) }

  /** Render element within lowest precedence */
  def toTextGlobal(elem: Showable): Text =
    atPrec(GlobalPrec) { elem.toText(this) }

  /** Render elements alternating with `sep` string */
  def toText(elems: Traversable[Showable], sep: String): Text =
    Text(elems map (_ toText this), sep)

  /** Render elements within highest precedence */
  def toTextLocal(elems: Traversable[Showable], sep: String): Text =
    atPrec(DotPrec) { toText(elems, sep) }

  /** Render elements within lowest precedence */
  def toTextGlobal(elems: Traversable[Showable], sep: String): Text =
    atPrec(GlobalPrec) { toText(elems, sep) }

  /** A plain printer without any embellishments */
  def plain: Printer
}

object Printer {
  /** Debug hook; set to true if you want to see unique ids but cannot run with option
   *  -uniqid. A typical use case is for further exploration after a -Ytest-pickler failure.
   */
  @sharable var debugPrintUnique: Boolean = false
}
som-snytt/dotty
compiler/src/dotty/tools/dotc/printing/Printer.scala
Scala
apache-2.0
6,361
package scala_sample

/** Immutable RGB colour value with one integer component per channel. */
case class Color(red: Int, green: Int, blue: Int)

/** Demonstrates case-class construction by printing the three primary colours. */
object SimpleSample extends App {
  private val r = Color(255, 0, 0)
  private val g = Color(0, 255, 0)
  private val b = Color(0, 0, 255)

  // Case-class toString renders each value as Color(r,g,b).
  println(s"red = $r, green = $g, blue = $b")
}
j5ik2o/scala-first-study
src/main/scala/scala_sample/SimpleSample.scala
Scala
mit
257
package org.jetbrains.plugins.scala
package codeInsight.intentions

import com.intellij.codeInsight.intention.IntentionAction
import org.junit.Assert
import java.util
import extensions._
import scala.Some
import com.intellij.psi.codeStyle.CodeStyleManager
import base.ScalaLightCodeInsightFixtureTestAdapter
import scala.collection.JavaConversions._

/**
 * Base class for intention-action tests: finds an intention by its family
 * name in a fixture configured with the given text, invokes it, reformats
 * the file and compares the result with the expected text.
 *
 * @author Ksenia.Sautina
 * @since 4/11/12
 */
abstract class ScalaIntentionTestBase extends ScalaLightCodeInsightFixtureTestAdapter {

  /** Family name of the intention under test, provided by subclasses. */
  def familyName: String

  /** Invokes the intention on `text` and checks the reformatted file equals `resultText`. */
  def doTest(text: String, resultText: String, familyName: String = this.familyName) {
    val found = intentionByFamilyName(text, familyName)
    found match {
      case None =>
        Assert.fail("Intention is not found")
      case Some(action) =>
        startCommand(getProject, "Test Intention") {
          action.invoke(myFixture.getProject, myFixture.getEditor, myFixture.getFile)
        }
    }

    startCommand(getProject, "Test Intention Formatting") {
      CodeStyleManager.getInstance(getProject).reformat(myFixture.getFile)
      myFixture.checkResult(groom(resultText))
    }
  }

  /** Asserts that no intention with the given family name is offered for `text`. */
  def checkIntentionIsNotAvailable(text: String, familyName: String = this.familyName) {
    assert(intentionByFamilyName(text, familyName).isEmpty, "Intention is found")
  }

  /** Asserts that an intention with the given family name is offered for `text`. */
  def checkIntentionIsAvailable(text: String, familyName: String = this.familyName) {
    assert(intentionByFamilyName(text, familyName).isDefined, "Intention is not found")
  }

  /** Configures the fixture with `text` and looks up an available intention by family name. */
  def intentionByFamilyName(text: String, familyName: String): Option[IntentionAction] = {
    myFixture.configureByText(ScalaFileType.SCALA_FILE_TYPE, groom(text))
    val available: util.List[IntentionAction] = myFixture.getAvailableIntentions
    available.find(_.getFamilyName == familyName)
  }

  // Strips the margin and carriage returns so tests can use readable
  // triple-quoted literals regardless of how the expectation was written.
  protected def groom(text: String) = text.stripMargin.replace("\\r", "").trim
}
consulo/consulo-scala
test/org/jetbrains/plugins/scala/codeInsight/intentions/ScalaIntentionTestBase.scala
Scala
apache-2.0
1,843
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.samza.container

import java.util

import org.apache.samza.diagnostics.DiagnosticsExceptionEvent
import org.apache.samza.metrics.{Gauge, ReadableMetricsRegistry, MetricsRegistryMap, MetricsHelper}

/**
 * Container-level metrics: counters, timers and gauges registered against
 * the given registry under the given source name via MetricsHelper.
 */
class SamzaContainerMetrics(
  val source: String = "unknown",
  val registry: ReadableMetricsRegistry = new MetricsRegistryMap) extends MetricsHelper {

  // Counters for container event-loop callbacks and processed envelopes.
  val commits = newCounter("commit-calls")
  val windows = newCounter("window-calls")
  val timers = newCounter("timer-calls")
  val processes = newCounter("process-calls")
  val envelopes = newCounter("process-envelopes")
  val nullEnvelopes = newCounter("process-null-envelopes")

  // Timers for each event-loop phase (metric names carry an "-ns" suffix).
  val chooseNs = newTimer("choose-ns")
  val windowNs = newTimer("window-ns")
  val timerNs = newTimer("timer-ns")
  val processNs = newTimer("process-ns")
  val commitNs = newTimer("commit-ns")
  val blockNs = newTimer("block-ns")
  val containerStartupTime = newTimer("container-startup-time")

  // Gauges describing current container state and resource usage.
  val utilization = newGauge("event-loop-utilization", 0.0F)
  val diskUsageBytes = newGauge("disk-usage-bytes", 0L)
  val diskQuotaBytes = newGauge("disk-quota-bytes", Long.MaxValue)
  val executorWorkFactor = newGauge("executor-work-factor", 1.0)
  val physicalMemoryMb = newGauge[Double]("physical-memory-mb", 0.0F)

  // Gauges added on demand per task store restoration, keyed by task name.
  // NOTE(review): the map key is the taskName only, so registering a second
  // store for the same task replaces the previous gauge in this map (the
  // gauge itself remains registered) — confirm that is intended.
  val taskStoreRestorationMetrics: util.Map[TaskName, Gauge[Long]] = new util.HashMap[TaskName, Gauge[Long]]()

  // List gauge collecting exception events for diagnostics reporting.
  val exceptions = newListGauge[DiagnosticsExceptionEvent]("exceptions")

  /** Registers a restore-time gauge (initialised to -1) for the given task/store pair. */
  def addStoreRestorationGauge(taskName: TaskName, storeName: String) {
    taskStoreRestorationMetrics.put(taskName, newGauge("%s-%s-restore-time" format(taskName.toString, storeName), -1L))
  }
}
bharathkk/samza
samza-core/src/main/scala/org/apache/samza/container/SamzaContainerMetrics.scala
Scala
apache-2.0
2,463
/*
 * Copyright (c) 2016 BBC Design and Engineering
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package bbc.schedulerplus.system

import bbc.schedulerplus.persistence.{Cache, RedisCache}

/**
 * Connects the RedisCache implementation together with the JobManagerEngine
 * to implement the JobManager interface.
 */
object RedisJobManager extends JobManager with JobManagerEngine {
  // Supplies the Redis-backed cache to the engine's `cache` dependency.
  val cache: Cache = RedisCache
}
bbc/scheduler-plus
src/main/scala/bbc/schedulerplus/system/RedisJobManager.scala
Scala
mit
1,449
/*
 *************************************************************************************
 * Copyright 2011 Normation SAS
 *************************************************************************************
 *
 * This file is part of Rudder.
 *
 * Rudder is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * In accordance with the terms of section 7 (7. Additional Terms.) of
 * the GNU General Public License version 3, the copyright holders add
 * the following Additional permissions:
 * Notwithstanding to the terms of section 5 (5. Conveying Modified Source
 * Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU General
 * Public License version 3, when you create a Related Module, this
 * Related Module is not considered as a part of the work and may be
 * distributed under the license agreement of your choice.
 * A "Related Module" means a set of sources files including their
 * documentation that, without modification of the Source Code, enables
 * supplementary functions or services in addition to those offered by
 * the Software.
 *
 * Rudder is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with Rudder. If not, see <http://www.gnu.org/licenses/>.
 *
 *************************************************************************************
 */

package com.normation.rudder.domain

import com.normation.eventlog.EventActor

package object eventlog {
  /** EventActor used to attribute event logs generated by Rudder itself (actor name "rudder"). */
  val RudderEventActor = EventActor("rudder")
}
armeniaca/rudder
rudder-core/src/main/scala/com/normation/rudder/domain/eventlog/Package.scala
Scala
gpl-3.0
1,846
package mesosphere.marathon
package integration

import mesosphere.AkkaIntegrationTest
import mesosphere.marathon.integration.facades.ITEnrichedTask
import mesosphere.marathon.integration.facades.MarathonFacade._
import mesosphere.marathon.integration.facades.MesosFacade.{ ITMesosState, ITResources }
import mesosphere.marathon.integration.setup.{ EmbeddedMarathonTest, RestResult }
import mesosphere.marathon.raml.{ App, AppPersistentVolume, AppResidency, AppUpdate, AppVolume, Container, EngineType, PersistentVolume, PortDefinition, ReadMode, UnreachableDisabled, UpgradeStrategy }
import mesosphere.marathon.state.PathId
import org.slf4j.LoggerFactory

import scala.collection.immutable.Seq
import scala.concurrent.duration._
import scala.util.Try

/**
 * Integration tests for resident tasks (apps backed by persistent volumes):
 * deployment, volume re-attachment, reservation accounting, scaling,
 * restart and config changes, all against an embedded Marathon.
 */
@IntegrationTest
class ResidentTaskIntegrationTest extends AkkaIntegrationTest with EmbeddedMarathonTest {

  import Fixture._

  private[this] val log = LoggerFactory.getLogger(getClass)

  //clean up state before running the test case
  before(cleanUp())

  // Any test in this suite that restarts an existing task can fail because of: https://issues.apache.org/jira/browse/MESOS-7752
  // TL;DR: we are reusing taskIds for resident task, which triggers a race condition in mesos by reusing the executor of the
  // previous task. Though reusing taskIds is discouraged it should be possible for tasks after a terminal task status.
  // Solution: either mesos fixes the bug or we walk away from reusing taskIds which is somewhat non-trivial on our side.
  "ResidentTaskIntegrationTest" should {
    "resident task can be deployed and write to persistent volume" in new Fixture {
      Given("An app that writes into a persistent volume")
      val containerPath = "persistent-volume"
      val app = residentApp(
        id = appId("resident-task-can-be-deployed-and-write-to-persistent-volume"),
        containerPath = containerPath,
        cmd = s"""echo "data" > $containerPath/data""")

      When("A task is launched")
      val result = createAsynchronously(app)

      Then("It writes successfully to the persistent volume and finishes")
      waitForStatusUpdates(StatusUpdate.TASK_RUNNING)
      waitForDeployment(result)
      waitForStatusUpdates(StatusUpdate.TASK_FINISHED)
    }

    "resident task can be deployed along with constraints" in new Fixture {
      // background: Reserved tasks may not be considered while making sure constraints are met, because they
      // would prevent launching a task because there `is` already a task (although not launched)
      Given("A resident app that uses a hostname:UNIQUE constraints")
      val containerPath = "persistent-volume"
      val unique = raml.Constraints("hostname" -> "UNIQUE")
      val app = residentApp(
        id = appId("resident-task-that-uses-hostname-unique"),
        containerPath = containerPath,
        cmd = """sleep 1""",
        constraints = unique)

      When("A task is launched")
      val result = createAsynchronously(app)

      Then("It it successfully launched")
      waitForStatusUpdates(StatusUpdate.TASK_RUNNING)
      waitForDeployment(result)
    }

    "persistent volume will be re-attached and keep state" in new Fixture {
      Given("An app that writes into a persistent volume")
      val containerPath = "persistent-volume"
      val app = residentApp(
        id = appId("resident-task-with-persistent-volumen-will-be-reattached-and-keep-state"),
        containerPath = containerPath,
        cmd = s"""echo data > $containerPath/data && sleep 1000""")

      When("a task is launched")
      val result = createAsynchronously(app)

      Then("it successfully writes to the persistent volume and then finishes")
      waitForStatusUpdates(StatusUpdate.TASK_RUNNING)
      waitForDeployment(result)

      When("the app is suspended")
      suspendSuccessfully(PathId(app.id))

      And("we wait for a while")
      // FIXME: we need to retry starting tasks since there is a race-condition in Mesos,
      // probably related to our recycling of the task ID (but unconfirmed)
      Thread.sleep(2000L)

      And("a new task is started that checks for the previously written file")
      // deploy a new version that checks for the data written the above step
      val update = marathon.updateApp(
        PathId(app.id),
        AppUpdate(
          instances = Some(1),
          cmd = Some(s"""test -e $containerPath/data && sleep 2""")
        )
      )
      update.code shouldBe 200
      // we do not wait for the deployment to finish here to get the task events

      waitForStatusUpdates(StatusUpdate.TASK_RUNNING)
      waitForDeployment(update)
      waitForStatusUpdates(StatusUpdate.TASK_FINISHED)
    }

    "resident task is launched completely on reserved resources" in new Fixture {
      Given("A resident app")
      val app = residentApp(
        id = appId("resident-task-is-launched-completely-on-reserved-resources"))

      When("A task is launched")
      createSuccessfully(app)

      Then("used and reserved resources correspond to the app")
      val state: RestResult[ITMesosState] = mesos.state

      withClue("used_resources") {
        state.value.agents.head.usedResources should equal(itMesosResources)
      }
      withClue("reserved_resources") {
        state.value.agents.head.reservedResourcesByRole.get("foo") should equal(Some(itMesosResources))
      }

      When("the app is suspended")
      suspendSuccessfully(PathId(app.id))

      Then("there are no used resources anymore but there are the same reserved resources")
      val state2: RestResult[ITMesosState] = mesos.state

      withClue("used_resources") {
        state2.value.agents.head.usedResources should be(empty)
      }
      withClue("reserved_resources") {
        state2.value.agents.head.reservedResourcesByRole.get("foo") should equal(Some(itMesosResources))
      }

      // we check for a blank slate of mesos reservations after each test
      // TODO: Once we wait for the unreserves before finishing the StopApplication deployment step,
      // we should test that here
    }

    "Scale Up" in new Fixture {
      Given("A resident app with 0 instances")
      val app = createSuccessfully(residentApp(
        id = appId("scale-up-resident-app-with-zero-instances"),
        instances = 0))

      When("We scale up to 5 instances")
      scaleToSuccessfully(PathId(app.id), 5)

      Then("exactly 5 tasks have been created")
      launchedTasks(PathId(app.id)).size shouldBe 5
    }

    "Scale Down" in new Fixture {
      Given("a resident app with 5 instances")
      val app = createSuccessfully(residentApp(
        id = appId("scale-down-resident-app-with-five-instances"),
        instances = 5))

      When("we scale down to 0 instances")
      suspendSuccessfully(PathId(app.id))

      Then("all tasks are suspended")
      val all = allTasks(PathId(app.id))
      all.size shouldBe 5
      all.count(_.launched) shouldBe 0
      all.count(_.suspended) shouldBe 5
    }

    "Restart" in new Fixture {
      Given("a resident app with 5 instances")
      val app = createSuccessfully(
        residentApp(
          id = appId("restart-resident-app-with-five-instances"),
          instances = 5,
          // FIXME: we need to retry starting tasks since there is a race-condition in Mesos,
          // probably related to our recycling of the task ID (but unconfirmed)
          backoffDuration = 300.milliseconds
        )
      )

      When("we restart the app")
      val newVersion = restartSuccessfully(app) withClue ("The app did not restart.")
      val all = allTasks(PathId(app.id))
      log.info("tasks after relaunch: {}", all.mkString(";"))

      Then("no extra task was created")
      all.size shouldBe 5

      And("exactly 5 instances are running")
      all.count(_.launched) shouldBe 5

      And("all 5 tasks are restarted and of the new version")
      all.map(_.version).forall(_.contains(newVersion)) shouldBe true
    }

    "Config Change" in new Fixture {
      Given("a resident app with 5 instances")
      val app = createSuccessfully(
        residentApp(
          id = appId("config-change-resident-app-with-five-instances"),
          instances = 5,
          // FIXME: we need to retry starting tasks since there is a race-condition in Mesos,
          // probably related to our recycling of the task ID (but unconfirmed)
          backoffDuration = 300.milliseconds
        )
      )

      When("we change the config")
      val newVersion = updateSuccessfully(PathId(app.id), AppUpdate(cmd = Some("sleep 1234"))).toString
      val all = allTasks(PathId(app.id))
      log.info("tasks after config change: {}", all.mkString(";"))

      Then("no extra task was created")
      all should have size 5

      And("exactly 5 instances are running")
      all.filter(_.launched) should have size 5

      And("all 5 tasks are of the new version")
      all.map(_.version).forall(_.contains(newVersion)) shouldBe true
    }

    /**
     * FIXME (3043): implement the following tests. TASK_LOST can be induced when launching a task with permission:
     *
     * When a framework launches a task, “run_tasks” ACLs are checked to see if the framework
     * (FrameworkInfo.principal) is authorized to run the task/executor as the given user. If not authorized,
     * the launch is rejected and the framework gets a TASK_LOST.
     *
     * (From http://mesos.apache.org/documentation/latest/authorization/)
     */
    "taskLostBehavior = RELAUNCH_AFTER_TIMEOUT, timeout = 10s" in {
      pending
      Given("A resident app with 1 instance")
      When("The task is lost")

      Then("The task is not relaunched within the timeout")
      And("The task is relaunched with a new Id after the timeout")
    }

    "taskLostBehavior = WAIT_FOREVER" in {
      pending
      Given("A resident app with 1 instance")
      When("The task is lost")

      Then("No timeout is scheduled") // can we easily verify this?
      And("The task is not relaunched") // can we verify this without waiting?
    }

    "relaunchEscalationTimeoutSeconds = 5s" in {
      pending
      Given("A resident app with 1 instance")
      When("The task terminates")
      And("We don't get an offer within the timeout")

      Then("We launch a new task on any matching offer")
    }
  }

  // Per-test fixture supplying resident-app builders and Marathon helpers.
  class Fixture {

    // Deliberately tiny resource footprint; see comment in residentApp below.
    val cpus: Double = 0.001
    val mem: Double = 1.0
    val disk: Double = 1.0
    val gpus: Double = 0.0
    val persistentVolumeSize = 2L

    // Expected Mesos-side resource totals for one resident task (disk
    // includes the persistent volume on top of the app's scratch disk).
    val itMesosResources = ITResources(
      "mem" -> mem,
      "cpus" -> cpus,
      "disk" -> (disk + persistentVolumeSize),
      "gpus" -> gpus
    )

    /** Builds a unique app id under the test base path. */
    def appId(suffix: String): PathId = PathId(s"/$testBasePath/app-$suffix")

    /** Builds a resident App definition with a single persistent volume. */
    def residentApp(
      id: PathId = PathId(s"/$testBasePath/app-${IdGenerator.generate()}"),
      containerPath: String = "persistent-volume",
      cmd: String = "sleep 1000",
      instances: Int = 1,
      backoffDuration: FiniteDuration = 1.hour,
      portDefinitions: Seq[PortDefinition] = Seq.empty, /* prevent problems by randomized port assignment */
      constraints: Set[Seq[String]] = Set.empty): App = {

      val persistentVolume: AppVolume = AppPersistentVolume(
        containerPath = containerPath,
        persistent = PersistentVolume(size = persistentVolumeSize),
        mode = ReadMode.Rw
      )

      val app = App(
        id.toString,
        instances = instances,
        residency = Some(AppResidency()),
        constraints = constraints,
        container = Some(Container(
          `type` = EngineType.Mesos,
          volumes = Seq(persistentVolume)
        )),
        cmd = Some(cmd),
        // cpus, mem and disk are really small because otherwise we'll soon run out of reservable resources
        cpus = cpus,
        mem = mem,
        disk = disk,
        portDefinitions = Some(portDefinitions),
        backoffSeconds = backoffDuration.toSeconds.toInt,
        upgradeStrategy = Some(UpgradeStrategy(minimumHealthCapacity = 0.5, maximumOverCapacity = 0.0)),
        unreachableStrategy = Some(UnreachableDisabled.DefaultValue)
      )

      app
    }

    /** Creates the app and blocks until its deployment finishes. */
    def createSuccessfully(app: App): App = {
      waitForDeployment(createAsynchronously(app))
      app
    }

    /** Creates the app and returns immediately after the REST call is accepted. */
    def createAsynchronously(app: App): RestResult[App] = {
      val result = marathon.createAppV2(app)
      result should be(Created)
      extractDeploymentIds(result) should have size 1
      result
    }

    /** Scales the app and waits for the deployment and the expected task count. */
    def scaleToSuccessfully(appId: PathId, instances: Int): Seq[ITEnrichedTask] = {
      val result = marathon.updateApp(appId, AppUpdate(instances = Some(instances)))
      result should be(OK)
      waitForDeployment(result)
      waitForTasks(appId, instances)
    }

    /** Scales the app down to zero instances. */
    def suspendSuccessfully(appId: PathId): Seq[ITEnrichedTask] = scaleToSuccessfully(appId, 0)

    /** Applies the update, waits for deployment, returns the new version string. */
    def updateSuccessfully(appId: PathId, update: AppUpdate): VersionString = {
      val result = marathon.updateApp(appId, update)
      result should be(OK)
      waitForDeployment(result)
      result.value.version.toString
    }

    /** Restarts the app, waits for deployment, returns the new version string. */
    def restartSuccessfully(app: App): VersionString = {
      val result = marathon.restartApp(PathId(app.id))
      result should be(OK)
      waitForDeployment(result)
      result.value.version.toString
    }

    // Swallows lookup failures and returns Nil, since an app may have no tasks.
    def allTasks(appId: PathId): Seq[ITEnrichedTask] = {
      Try(marathon.tasks(appId)).map(_.value).getOrElse(Nil)
    }

    def launchedTasks(appId: PathId): Seq[ITEnrichedTask] = allTasks(appId).filter(_.launched)

    def suspendedTasks(appId: PathId): Seq[ITEnrichedTask] = allTasks(appId).filter(_.suspended)
  }

  object Fixture {
    type VersionString = String

    // Mesos task status names the tests wait for.
    object StatusUpdate {
      val TASK_FINISHED = "TASK_FINISHED"
      val TASK_RUNNING = "TASK_RUNNING"
      val TASK_FAILED = "TASK_FAILED"
    }

    /**
     * Resident Tasks reside in the TaskTracker even after they terminate and after the associated app is deleted.
     * To prevent spurious state in the above test cases, each test case should use a unique appId.
     */
    object IdGenerator {
      private[this] var index: Int = 0
      def generate(): String = {
        index += 1
        index.toString
      }
    }
  }
}
Caerostris/marathon
src/test/scala/mesosphere/marathon/integration/ResidentTaskIntegrationTest.scala
Scala
apache-2.0
14,203
package scala.meta.trees import com.intellij.openapi.progress.ProgressManager import com.intellij.psi._ import org.jetbrains.plugins.scala.extensions.PsiMethodExt import org.jetbrains.plugins.scala.lang.psi.api.base._ import org.jetbrains.plugins.scala.lang.psi.api.base.types._ import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScParameter import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScFunction, ScFunctionDefinition, ScTypeAliasDeclaration} import org.jetbrains.plugins.scala.lang.psi.api.toplevel._ import org.jetbrains.plugins.scala.lang.psi.impl.base.types.ScInfixTypeElementImpl import org.jetbrains.plugins.scala.lang.psi.types.ScSubstitutor import org.jetbrains.plugins.scala.lang.psi.types.api.TypeParameterType import org.jetbrains.plugins.scala.lang.psi.types.result.{Failure, Success, TypeResult, TypingContext} import org.jetbrains.plugins.scala.lang.psi.{api => p, types => ptype} import org.jetbrains.plugins.scala.lang.refactoring.util.ScTypeUtil.AliasType import scala.collection.immutable.Seq import scala.language.postfixOps import scala.meta.collections._ import scala.meta.internal.{semantic => h} import scala.meta.trees.error._ import scala.{meta => m, Seq => _} trait TypeAdapter { self: TreeConverter => private val typeCache = TwoWayCache[ptype.ScType, m.Type]() private val typeElementCache = TwoWayCache[ScTypeElement, m.Type]() private val psiElementTypeChache = TwoWayCache[PsiElement, m.Type]() def toType(tp: ScTypeElement): m.Type = { ProgressManager.checkCanceled() typeElementCache.getOrElseUpdate(tp, { tp match { case t: ScSimpleTypeElement if dumbMode => t.reference match { case Some(ref) => ref.qualifier.map(qual=>m.Type.Select(getTypeQualifier(qual.asInstanceOf[ScReferenceElement]), toTypeName(ref))) .getOrElse(toTypeName(ref)) case None => m.Type.Name(t.getText) } case t: ScSimpleTypeElement => val s = ScSubstitutor(ScSubstitutor.cache.toMap) toType(s.subst(t.calcType)) // case t: ScReferenceElement if dumbMode => // 
t.qualifier.map(qual=>m.Type.Select(getTypeQualifier(qual.asInstanceOf[ScReferenceElement]), toTypeName(t))) // .getOrElse(toTypeName(t)) case t: ScFunctionalTypeElement => toType(t.paramTypeElement) match { case m.Type.Tuple(elements) => m.Type.Function(elements, toType(t.returnTypeElement.get)) case param => m.Type.Function(Seq(param), toType(t.returnTypeElement.get)) } case t: ScParameterizedTypeElement => m.Type.Apply(toType(t.typeElement), t.typeArgList.typeArgs.toStream.map(toType)) case t: ScInfixTypeElementImpl => m.Type.ApplyInfix(toType(t.leftTypeElement), m.Type.Name(t.reference.refName), toType(t.rightTypeElement.get)) case t: ScTupleTypeElement => m.Type.Tuple(Seq(t.components.map(toType): _*)) case t: ScWildcardTypeElement => m.Type.Placeholder(typeBounds(t)) case t: ScCompoundTypeElement => t.components .dropRight(1) .foldLeft(toType(t.components.last))((mtp, stp) => m.Type.With(toType(stp), mtp)) case t: ScParenthesisedTypeElement => t.typeElement match { case Some(t: ScReferenceableInfixTypeElement) => m.Type.ApplyInfix(toType(t.leftTypeElement), toTypeName(t.reference), toType(t.rightTypeElement.get)) case _ => unreachable } case t: ScTypeVariableTypeElement => die("i cannot into type variables") case t: ScExistentialTypeElement => val clauses = Seq(t.clause.declarations map { case tp: ScTypeAliasDeclaration => toTypeDecl(tp) case other => other ?! }: _*) val quantified = toType(t.quantified) m.Type.Existential(quantified, clauses) case other: ScTypeElement if dumbMode => m.Type.Name(other.getText) case other: ScTypeElement => LOG.warn(s"Using slow type conversion of type element ${other.getClass}: ${other.getText}") toType(other.getType()) case other => other ?! 
} }) } private def getTypeQualifier(ref: ScReferenceElement): m.Term.Ref = { ref.qualifier match { case Some(r: ScReferenceElement) => m.Term.Select(getTypeQualifier(r), toTermName(ref)) case None => toTermName(ref) case _ => unreachable } } def toType(tr: TypeResult[ptype.ScType]): m.Type = { import org.jetbrains.plugins.scala.lang.psi.types.result._ tr match { case Success(res, _) => toType(res) case Failure(cause, place) => throw new ScalaMetaTypeResultFailure(place, cause) } } def toType(elem: PsiElement): m.Type = { ProgressManager.checkCanceled() psiElementTypeChache.getOrElseUpdate(elem, { elem match { case t: typedef.ScTemplateDefinition if dumbMode => m.Type.Name(t.name) case t: typedef.ScTemplateDefinition => val s = ScSubstitutor(ScSubstitutor.cache.toMap) toType(s.subst(t.getType(TypingContext.empty).get)) // FIXME: what about typing context? case t: ScPackaging => m.Type.Singleton(toTermName(t.reference.get))//.setTypechecked case t: ScConstructor => ??? // m.Type.Method(toParams(Seq(t.arguments:_*)), toType(t.newTemplate.get.getType(TypingContext.empty))).setTypechecked case t: ScPrimaryConstructor => ??? // m.Type.Method(Seq(t.parameterList.clauses.map(convertParamClause):_*), toType(t.containingClass)).setTypechecked case t: ScFunctionDefinition => ??? 
// m.Type.Method(Seq(t.parameterList.clauses.map(convertParamClause):_*), toType(t.getTypeWithCachedSubst)).setTypechecked case t: ScFunction => m.Type.Function(Seq(t.parametersTypes.map(toType(_, t).asInstanceOf[m.Type.Arg]): _*), toType(t.returnType)) //.setTypechecked case t: ScParameter if dumbMode => m.Type.Name(t.getText) case t: ScParameter => val s = ScSubstitutor(ScSubstitutor.cache.toMap) toType(s.subst(t.typeElement.get.getType().get)) case t: ScTypedDefinition if dumbMode => m.Type.Name(t.name) case t: ScTypedDefinition => t.getTypeWithCachedSubst match { case Success(res, place) => toType(res) case Failure(cause, place) => unresolved(cause, place) } case t: ScReferenceElement if dumbMode => m.Type.Name(t.refName) case t: ScReferenceElement => t.bind() match { case Some(result) => toType(result.element) case None => m.Type.Placeholder(m.Type.Bounds(None, None)) } case t: PsiPackage if t.getName == null => m.Type.Singleton(std.rootPackageName)//.setTypechecked case t: PsiPackage => m.Type.Singleton(toTermName(t))//.setTypechecked case t: PsiClass => m.Type.Name(t.getName)//.withAttrsFor(t) case t: PsiMethod => t ??? // m.Type.Method(Seq(t.getParameterList.getParameters // .map(Compatibility.toParameter) // .map(i=> convertParam(i.paramInCode.get)) // .toStream), // toType(ScTypePsiTypeBridge.toScType(t.getReturnType, t.getProject))).setTypechecked case other => other ?! 
} }) } def toType(tp: ptype.ScType, pivot: PsiElement = null): m.Type = { ProgressManager.checkCanceled() typeCache.getOrElseUpdate(tp, { tp.isAliasType match { case Some(AliasType(ta, lower, upper)) => return toTypeName(ta) case _ => } tp match { case t: ptype.ScParameterizedType => m.Type.Apply(toType(t.designator), Seq(t.typeArguments.map(toType(_)): _*))//.setTypechecked case t: ptype.api.designator.ScThisType => toTypeName(t.element)//.setTypechecked case t: ptype.api.designator.ScProjectionType => t.projected match { case tt: ptype.api.designator.ScThisType => m.Type.Select(toTermName(tt.element), toTypeName(t.actualElement))//.setTypechecked case _ => m.Type.Project(toType(t.projected), toTypeName(t.actualElement))//.setTypechecked } case t: ptype.api.designator.ScDesignatorType => if (t.element.isSingletonType) toSingletonType(t.element) else toTypeName(t.element) case t: ptype.ScCompoundType if t.components.size < 2 => unreachable("Number of parts in compound type must be >= 2") case t: ptype.ScCompoundType => t.components .dropRight(1) .foldLeft(toType(t.components.last))((mtp, stp) => m.Type.With(toType(stp), mtp)) case t: ptype.ScExistentialType => val wcards = t.wildcards.map {wc => // val (name, args, lower, upper) = wc val ubound = if (wc.upper.isAny) None else Some(toType(wc.upper)) val lbound = if (wc.lower.isNothing) None else Some(toType(wc.lower)) m.Decl.Type(Nil, m.Type.Name(wc.name), //FIXME: pass actual prefix, when solution for recursive prefix computation is ready // .withAttrs(h.Denotation.Single(h.Prefix.None, toSymbolWtihParent(wc.name, pivot, h.ScalaSig.Type(wc.name)))) // .setTypechecked, Nil, m.Type.Bounds(lbound, ubound))//.setTypechecked } m.Type.Existential(toType(t.quantified), wcards)//.setTypechecked case t: ptype.api.StdType => toStdTypeName(t) case t: TypeParameterType => m.Type.Name(t.name)//.withAttrsFor(t.nameAndId._2) case t: ptype.ScType => LOG.warn(s"Unknown type: ${t.getClass} - ${t.canonicalText}") 
m.Type.Name(t.canonicalText)//.withAttrs(h.Denotation.None) } }) } def toSingletonType(elem: PsiElement): m.Type.Singleton = { m.Type.Singleton(toTermName(elem))//.setTypechecked } def toTypeParams(tp: TypeParameterType): m.Type.Param = { val ubound = if (tp.upperType.v.isAny) None else Some(toType(tp.upperType.v)) val lbound = if (tp.lowerType.v.isNothing) None else Some(toType(tp.lowerType.v)) m.Type.Param( if(tp.isCovariant) m.Mod.Covariant() :: Nil else if(tp.isContravariant) m.Mod.Contravariant() :: Nil else Nil, if (tp.name != "_") m.Type.Name(tp.name) else m.Name.Anonymous(),//.withAttrs(h.Denotation.None).setTypechecked, Seq(tp.arguments.map(toTypeParams):_*), m.Type.Bounds(lbound, ubound), Nil, Nil ) } def toTypeParams(tp: p.statements.params.ScTypeParam): m.Type.Param = { m.Type.Param( if(tp.isCovariant) m.Mod.Covariant() :: Nil else if(tp.isContravariant) m.Mod.Contravariant() :: Nil else Nil, if (tp.name != "_") toTypeName(tp) else m.Name.Anonymous(),//.withAttrsFor(tp), Seq(tp.typeParameters.map(toTypeParams):_*), typeBounds(tp), viewBounds(tp), contextBounds(tp) )//.setTypechecked } def toTypeParams(tp: PsiTypeParameter): m.Type.Param = { m.Type.Param( m.Mod.Covariant() :: Nil, toTypeName(tp), Seq(tp.getTypeParameters.map(toTypeParams):_*), m.Type.Bounds(None, None), Seq.empty, Seq.empty )//.setTypechecked } def viewBounds(tp: ScTypeBoundsOwner): Seq[m.Type] = { Seq(tp.viewTypeElement.map(toType):_*) } def contextBounds(tp: ScTypeBoundsOwner): Seq[m.Type] = { Seq(tp.contextBoundTypeElement.map(toType):_*) } def typeBounds(tp: ScTypeBoundsOwner): m.Type.Bounds = { m.Type.Bounds(tp.lowerTypeElement.map(toType), tp.upperTypeElement.map(toType))//.setTypechecked } def returnType(tr: ptype.result.TypeResult[ptype.ScType]): m.Type = { import ptype.result._ tr match { case Success(t, elem) => toType(t) case Failure(cause, place) => LOG.warn(s"Failed to infer return type($cause) at ${place.map(_.getText).getOrElse("UNKNOWN")}") 
m.Type.Name("Unit")//.setTypechecked } } def fromType(tpe: m.Type): ptype.ScType = ??? }
ilinum/intellij-scala
src/scala/meta/trees/TypeAdapter.scala
Scala
apache-2.0
12,226
package org.juanitodread.pitayafinch.nlp.tools.models.entities import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.Future class MoneyEntityModel extends FinderModel("/nlp/models/entities/en-ner-money.bin") { override def getName: String = "Money" } object MoneyEntityModel { private val model: MoneyEntityModel = new MoneyEntityModel() def apply(): MoneyEntityModel = model } object MoneyEntityModelAsync { def apply(): Future[MoneyEntityModel] = Future[MoneyEntityModel] { new MoneyEntityModel() } }
juanitodread/pitaya-finch
src/main/scala/org/juanitodread/pitayafinch/nlp/tools/models/entities/MoneyEntityModel.scala
Scala
apache-2.0
551
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.mllib.classification import java.lang.{Iterable => JIterable} import scala.collection.JavaConverters._ import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ import org.apache.spark.{Logging, SparkContext, SparkException} import org.apache.spark.annotation.Since import org.apache.spark.mllib.linalg.{BLAS, DenseMatrix, DenseVector, SparseVector, Vector} import org.apache.spark.mllib.regression.LabeledPoint import org.apache.spark.mllib.util.{Loader, Saveable} import org.apache.spark.rdd.RDD import org.apache.spark.sql.{DataFrame, SQLContext} /** * Model for Naive Bayes Classifiers. 
* * @param labels list of labels * @param pi log of class priors, whose dimension is C, number of labels * @param theta log of class conditional probabilities, whose dimension is C-by-D, * where D is number of features * @param modelType The type of NB model to fit can be "multinomial" or "bernoulli" */ @Since("0.9.0") class NaiveBayesModel private[spark] ( @Since("1.0.0") val labels: Array[Double], @Since("0.9.0") val pi: Array[Double], @Since("0.9.0") val theta: Array[Array[Double]], @Since("1.4.0") val modelType: String) extends ClassificationModel with Serializable with Saveable { import NaiveBayes.{Bernoulli, Multinomial, supportedModelTypes} private val piVector = new DenseVector(pi) private val thetaMatrix = new DenseMatrix(labels.length, theta(0).length, theta.flatten, true) private[mllib] def this(labels: Array[Double], pi: Array[Double], theta: Array[Array[Double]]) = this(labels, pi, theta, NaiveBayes.Multinomial) /** A Java-friendly constructor that takes three Iterable parameters. */ private[mllib] def this( labels: JIterable[Double], pi: JIterable[Double], theta: JIterable[JIterable[Double]]) = this(labels.asScala.toArray, pi.asScala.toArray, theta.asScala.toArray.map(_.asScala.toArray)) require(supportedModelTypes.contains(modelType), s"Invalid modelType $modelType. Supported modelTypes are $supportedModelTypes.") // Bernoulli scoring requires log(condprob) if 1, log(1-condprob) if 0. // This precomputes log(1.0 - exp(theta)) and its sum which are used for the linear algebra // application of this condition (in predict function). 
private val (thetaMinusNegTheta, negThetaSum) = modelType match { case Multinomial => (None, None) case Bernoulli => val negTheta = thetaMatrix.map(value => math.log(1.0 - math.exp(value))) val ones = new DenseVector(Array.fill(thetaMatrix.numCols){1.0}) val thetaMinusNegTheta = thetaMatrix.map { value => value - math.log(1.0 - math.exp(value)) } (Option(thetaMinusNegTheta), Option(negTheta.multiply(ones))) case _ => // This should never happen. throw new UnknownError(s"Invalid modelType: $modelType.") } @Since("1.0.0") override def predict(testData: RDD[Vector]): RDD[Double] = { val bcModel = testData.context.broadcast(this) testData.mapPartitions { iter => val model = bcModel.value iter.map(model.predict) } } @Since("1.0.0") override def predict(testData: Vector): Double = { modelType match { case Multinomial => labels(multinomialCalculation(testData).argmax) case Bernoulli => labels(bernoulliCalculation(testData).argmax) } } /** * Predict values for the given data set using the model trained. * * @param testData RDD representing data points to be predicted * @return an RDD[Vector] where each entry contains the predicted posterior class probabilities, * in the same order as class labels */ @Since("1.5.0") def predictProbabilities(testData: RDD[Vector]): RDD[Vector] = { val bcModel = testData.context.broadcast(this) testData.mapPartitions { iter => val model = bcModel.value iter.map(model.predictProbabilities) } } /** * Predict posterior class probabilities for a single data point using the model trained. 
* * @param testData array representing a single data point * @return predicted posterior class probabilities from the trained model, * in the same order as class labels */ @Since("1.5.0") def predictProbabilities(testData: Vector): Vector = { modelType match { case Multinomial => posteriorProbabilities(multinomialCalculation(testData)) case Bernoulli => posteriorProbabilities(bernoulliCalculation(testData)) } } private def multinomialCalculation(testData: Vector) = { val prob = thetaMatrix.multiply(testData) BLAS.axpy(1.0, piVector, prob) prob } private def bernoulliCalculation(testData: Vector) = { testData.foreachActive((_, value) => if (value != 0.0 && value != 1.0) { throw new SparkException( s"Bernoulli naive Bayes requires 0 or 1 feature values but found $testData.") } ) val prob = thetaMinusNegTheta.get.multiply(testData) BLAS.axpy(1.0, piVector, prob) BLAS.axpy(1.0, negThetaSum.get, prob) prob } private def posteriorProbabilities(logProb: DenseVector) = { val logProbArray = logProb.toArray val maxLog = logProbArray.max val scaledProbs = logProbArray.map(lp => math.exp(lp - maxLog)) val probSum = scaledProbs.sum new DenseVector(scaledProbs.map(_ / probSum)) } @Since("1.3.0") override def save(sc: SparkContext, path: String): Unit = { val data = NaiveBayesModel.SaveLoadV2_0.Data(labels, pi, theta, modelType) NaiveBayesModel.SaveLoadV2_0.save(sc, path, data) } override protected def formatVersion: String = "2.0" } @Since("1.3.0") object NaiveBayesModel extends Loader[NaiveBayesModel] { import org.apache.spark.mllib.util.Loader._ private[mllib] object SaveLoadV2_0 { def thisFormatVersion: String = "2.0" /** Hard-code class name string in case it changes in the future */ def thisClassName: String = "org.apache.spark.mllib.classification.NaiveBayesModel" /** Model data for model import/export */ case class Data( labels: Array[Double], pi: Array[Double], theta: Array[Array[Double]], modelType: String) def save(sc: SparkContext, path: String, data: Data): Unit = { 
val sqlContext = new SQLContext(sc) import sqlContext.implicits._ // Create JSON metadata. val metadata = compact(render( ("class" -> thisClassName) ~ ("version" -> thisFormatVersion) ~ ("numFeatures" -> data.theta(0).length) ~ ("numClasses" -> data.pi.length))) sc.parallelize(Seq(metadata), 1).saveAsTextFile(metadataPath(path)) // Create Parquet data. val dataRDD: DataFrame = sc.parallelize(Seq(data), 1).toDF() dataRDD.write.parquet(dataPath(path)) } @Since("1.3.0") def load(sc: SparkContext, path: String): NaiveBayesModel = { val sqlContext = new SQLContext(sc) // Load Parquet data. val dataRDD = sqlContext.read.parquet(dataPath(path)) // Check schema explicitly since erasure makes it hard to use match-case for checking. checkSchema[Data](dataRDD.schema) val dataArray = dataRDD.select("labels", "pi", "theta", "modelType").take(1) assert(dataArray.length == 1, s"Unable to load NaiveBayesModel data from: ${dataPath(path)}") val data = dataArray(0) val labels = data.getAs[Seq[Double]](0).toArray val pi = data.getAs[Seq[Double]](1).toArray val theta = data.getAs[Seq[Seq[Double]]](2).map(_.toArray).toArray val modelType = data.getString(3) new NaiveBayesModel(labels, pi, theta, modelType) } } private[mllib] object SaveLoadV1_0 { def thisFormatVersion: String = "1.0" /** Hard-code class name string in case it changes in the future */ def thisClassName: String = "org.apache.spark.mllib.classification.NaiveBayesModel" /** Model data for model import/export */ case class Data( labels: Array[Double], pi: Array[Double], theta: Array[Array[Double]]) def save(sc: SparkContext, path: String, data: Data): Unit = { val sqlContext = new SQLContext(sc) import sqlContext.implicits._ // Create JSON metadata. val metadata = compact(render( ("class" -> thisClassName) ~ ("version" -> thisFormatVersion) ~ ("numFeatures" -> data.theta(0).length) ~ ("numClasses" -> data.pi.length))) sc.parallelize(Seq(metadata), 1).saveAsTextFile(metadataPath(path)) // Create Parquet data. 
val dataRDD: DataFrame = sc.parallelize(Seq(data), 1).toDF() dataRDD.write.parquet(dataPath(path)) } def load(sc: SparkContext, path: String): NaiveBayesModel = { val sqlContext = new SQLContext(sc) // Load Parquet data. val dataRDD = sqlContext.read.parquet(dataPath(path)) // Check schema explicitly since erasure makes it hard to use match-case for checking. checkSchema[Data](dataRDD.schema) val dataArray = dataRDD.select("labels", "pi", "theta").take(1) assert(dataArray.length == 1, s"Unable to load NaiveBayesModel data from: ${dataPath(path)}") val data = dataArray(0) val labels = data.getAs[Seq[Double]](0).toArray val pi = data.getAs[Seq[Double]](1).toArray val theta = data.getAs[Seq[Seq[Double]]](2).map(_.toArray).toArray new NaiveBayesModel(labels, pi, theta) } } override def load(sc: SparkContext, path: String): NaiveBayesModel = { val (loadedClassName, version, metadata) = loadMetadata(sc, path) val classNameV1_0 = SaveLoadV1_0.thisClassName val classNameV2_0 = SaveLoadV2_0.thisClassName val (model, numFeatures, numClasses) = (loadedClassName, version) match { case (className, "1.0") if className == classNameV1_0 => val (numFeatures, numClasses) = ClassificationModel.getNumFeaturesClasses(metadata) val model = SaveLoadV1_0.load(sc, path) (model, numFeatures, numClasses) case (className, "2.0") if className == classNameV2_0 => val (numFeatures, numClasses) = ClassificationModel.getNumFeaturesClasses(metadata) val model = SaveLoadV2_0.load(sc, path) (model, numFeatures, numClasses) case _ => throw new Exception( s"NaiveBayesModel.load did not recognize model with (className, format version):" + s"($loadedClassName, $version). 
Supported:\\n" + s" ($classNameV1_0, 1.0)") } assert(model.pi.length == numClasses, s"NaiveBayesModel.load expected $numClasses classes," + s" but class priors vector pi had ${model.pi.length} elements") assert(model.theta.length == numClasses, s"NaiveBayesModel.load expected $numClasses classes," + s" but class conditionals array theta had ${model.theta.length} elements") assert(model.theta.forall(_.length == numFeatures), s"NaiveBayesModel.load expected $numFeatures features," + s" but class conditionals array theta had elements of size:" + s" ${model.theta.map(_.length).mkString(",")}") model } } /** * Trains a Naive Bayes model given an RDD of `(label, features)` pairs. * * This is the Multinomial NB ([[http://tinyurl.com/lsdw6p]]) which can handle all kinds of * discrete data. For example, by converting documents into TF-IDF vectors, it can be used for * document classification. By making every vector a 0-1 vector, it can also be used as * Bernoulli NB ([[http://tinyurl.com/p7c96j6]]). The input feature values must be nonnegative. */ @Since("0.9.0") class NaiveBayes private ( private var lambda: Double, private var modelType: String) extends Serializable with Logging { import NaiveBayes.{Bernoulli, Multinomial} @Since("1.4.0") def this(lambda: Double) = this(lambda, NaiveBayes.Multinomial) @Since("0.9.0") def this() = this(1.0, NaiveBayes.Multinomial) /** Set the smoothing parameter. Default: 1.0. */ @Since("0.9.0") def setLambda(lambda: Double): NaiveBayes = { this.lambda = lambda this } /** Get the smoothing parameter. */ @Since("1.4.0") def getLambda: Double = lambda /** * Set the model type using a string (case-sensitive). * Supported options: "multinomial" (default) and "bernoulli". */ @Since("1.4.0") def setModelType(modelType: String): NaiveBayes = { require(NaiveBayes.supportedModelTypes.contains(modelType), s"NaiveBayes was created with an unknown modelType: $modelType.") this.modelType = modelType this } /** Get the model type. 
*/ @Since("1.4.0") def getModelType: String = this.modelType /** * Run the algorithm with the configured parameters on an input RDD of LabeledPoint entries. * * @param data RDD of [[org.apache.spark.mllib.regression.LabeledPoint]]. */ @Since("0.9.0") def run(data: RDD[LabeledPoint]): NaiveBayesModel = { val requireNonnegativeValues: Vector => Unit = (v: Vector) => { val values = v match { case sv: SparseVector => sv.values case dv: DenseVector => dv.values } if (!values.forall(_ >= 0.0)) { throw new SparkException(s"Naive Bayes requires nonnegative feature values but found $v.") } } val requireZeroOneBernoulliValues: Vector => Unit = (v: Vector) => { val values = v match { case sv: SparseVector => sv.values case dv: DenseVector => dv.values } if (!values.forall(v => v == 0.0 || v == 1.0)) { throw new SparkException( s"Bernoulli naive Bayes requires 0 or 1 feature values but found $v.") } } // Aggregates term frequencies per label. // TODO: Calling combineByKey and collect creates two stages, we can implement something // TODO: similar to reduceByKeyLocally to save one stage. 
val aggregated = data.map(p => (p.label, p.features)).combineByKey[(Long, DenseVector)]( createCombiner = (v: Vector) => { if (modelType == Bernoulli) { requireZeroOneBernoulliValues(v) } else { requireNonnegativeValues(v) } (1L, v.copy.toDense) }, mergeValue = (c: (Long, DenseVector), v: Vector) => { requireNonnegativeValues(v) BLAS.axpy(1.0, v, c._2) (c._1 + 1L, c._2) }, mergeCombiners = (c1: (Long, DenseVector), c2: (Long, DenseVector)) => { BLAS.axpy(1.0, c2._2, c1._2) (c1._1 + c2._1, c1._2) } ).collect().sortBy(_._1) val numLabels = aggregated.length var numDocuments = 0L aggregated.foreach { case (_, (n, _)) => numDocuments += n } val numFeatures = aggregated.head match { case (_, (_, v)) => v.size } val labels = new Array[Double](numLabels) val pi = new Array[Double](numLabels) val theta = Array.fill(numLabels)(new Array[Double](numFeatures)) val piLogDenom = math.log(numDocuments + numLabels * lambda) var i = 0 aggregated.foreach { case (label, (n, sumTermFreqs)) => labels(i) = label pi(i) = math.log(n + lambda) - piLogDenom val thetaLogDenom = modelType match { case Multinomial => math.log(sumTermFreqs.values.sum + numFeatures * lambda) case Bernoulli => math.log(n + 2.0 * lambda) case _ => // This should never happen. throw new UnknownError(s"Invalid modelType: $modelType.") } var j = 0 while (j < numFeatures) { theta(i)(j) = math.log(sumTermFreqs(j) + lambda) - thetaLogDenom j += 1 } i += 1 } new NaiveBayesModel(labels, pi, theta, modelType) } } /** * Top-level methods for calling naive Bayes. */ @Since("0.9.0") object NaiveBayes { /** String name for multinomial model type. */ private[spark] val Multinomial: String = "multinomial" /** String name for Bernoulli model type. */ private[spark] val Bernoulli: String = "bernoulli" /* Set of modelTypes that NaiveBayes supports */ private[spark] val supportedModelTypes = Set(Multinomial, Bernoulli) /** * Trains a Naive Bayes model given an RDD of `(label, features)` pairs. 
* * This is the default Multinomial NB ([[http://tinyurl.com/lsdw6p]]) which can handle all * kinds of discrete data. For example, by converting documents into TF-IDF vectors, it * can be used for document classification. * * This version of the method uses a default smoothing parameter of 1.0. * * @param input RDD of `(label, array of features)` pairs. Every vector should be a frequency * vector or a count vector. */ @Since("0.9.0") def train(input: RDD[LabeledPoint]): NaiveBayesModel = { new NaiveBayes().run(input) } /** * Trains a Naive Bayes model given an RDD of `(label, features)` pairs. * * This is the default Multinomial NB ([[http://tinyurl.com/lsdw6p]]) which can handle all * kinds of discrete data. For example, by converting documents into TF-IDF vectors, it * can be used for document classification. * * @param input RDD of `(label, array of features)` pairs. Every vector should be a frequency * vector or a count vector. * @param lambda The smoothing parameter */ @Since("0.9.0") def train(input: RDD[LabeledPoint], lambda: Double): NaiveBayesModel = { new NaiveBayes(lambda, Multinomial).run(input) } /** * Trains a Naive Bayes model given an RDD of `(label, features)` pairs. * * The model type can be set to either Multinomial NB ([[http://tinyurl.com/lsdw6p]]) * or Bernoulli NB ([[http://tinyurl.com/p7c96j6]]). The Multinomial NB can handle * discrete count data and can be called by setting the model type to "multinomial". * For example, it can be used with word counts or TF_IDF vectors of documents. * The Bernoulli model fits presence or absence (0-1) counts. By making every vector a * 0-1 vector and setting the model type to "bernoulli", the fits and predicts as * Bernoulli NB. * * @param input RDD of `(label, array of features)` pairs. Every vector should be a frequency * vector or a count vector. 
* @param lambda The smoothing parameter * * @param modelType The type of NB model to fit from the enumeration NaiveBayesModels, can be * multinomial or bernoulli */ @Since("1.4.0") def train(input: RDD[LabeledPoint], lambda: Double, modelType: String): NaiveBayesModel = { require(supportedModelTypes.contains(modelType), s"NaiveBayes was created with an unknown modelType: $modelType.") new NaiveBayes(lambda, modelType).run(input) } }
pronix/spark
mllib/src/main/scala/org/apache/spark/mllib/classification/NaiveBayes.scala
Scala
apache-2.0
19,398
package embrace object `package` { implicit class EmbraceAny [ A ] ( val a : A ) extends AnyVal { @inline def $ [ Z ] ( f : A => Z ) = f(a) } implicit class EmbraceTuple2 [ A, B ] ( val a : (A, B) ) extends AnyVal { @inline def $$ [ Z ] ( f : (A, B) => Z ) = f.tupled(a) } implicit class EmbraceTuple3 [ A, B, C ] ( val a : (A, B, C) ) extends AnyVal { @inline def $$ [ Z ] ( f : (A, B, C) => Z ) = f.tupled(a) } implicit class EmbraceTuple4 [ A, B, C, D ] ( val a : (A, B, C, D) ) extends AnyVal { @inline def $$ [ Z ] ( f : (A, B, C, D) => Z ) = f.tupled(a) } }
nikita-volkov/embrace
src/main/scala/embrace/package.scala
Scala
mit
598
/** * Track the trackers * Copyright (C) 2015 Sebastian Schelter, Felix Neutatz * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package io.ssc.trackthetrackers.analysis.plots import java.io.{FileWriter, File} import io.ssc.trackthetrackers.analysis.algorithms.plots.ChoroplethMapAppTracker import org.apache.flink.shaded.com.google.common.io.Closeables import processing.core.PApplet import scala.collection.mutable import scala.io.Source import io.ssc.trackthetrackers.Config import io.ssc.trackthetrackers.analysis.statistics._ object ToplevelCountryMap extends App { val company = "Google" var writer: FileWriter = null try { writer = new FileWriter(Config.get("company.distribution.by.country"), true) val lines = Source.fromFile(new File(Config.get("topleveldomainByCountry.csv"))).getLines for (domainCountry <- lines) { val splits = domainCountry.split(",") CompanyDistribution.computeDistribution(Config.get("analysis.trackingraphsample.path"), Config.get("webdatacommons.pldfile.unzipped"), Config.get("analysis.results.path") + "companyDistribution", splits(0).toLowerCase, null, 0) for (file <- new File(Config.get("analysis.results.path") + "companyDistribution").listFiles) { val dataEntries = Source.fromFile(file).getLines for (dataEntry <- dataEntries) { val tokens = dataEntry.split("\\t") if (tokens(0).equals(company)) { writer.write(splits(0) + "," + splits(1) + "," + splits(2) + "," + 
company + "," + tokens(1) + "\\n") } } } writer.flush() } } finally { Closeables.close(writer, false) } val mapApp = new ChoroplethMapAppTracker() PApplet.main(Array("io.ssc.trackthetrackers.analysis.algorithms.plots.ChoroplethMapAppTracker")) }
HungUnicorn/trackthetrackers
analysis/src/main/scala/io/ssc/trackthetrackers/analysis/plots/ToplevelCountryMap.scala
Scala
gpl-3.0
2,414
/* * La Trobe University - Distributed Deep Learning System * Copyright 2015 Matthias Langer (t3l@threelights.de) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.latrobe.blaze package object modules { type SelectInput = Identity type SelectInputBuilder = IdentityBuilder final implicit class ModuleBuilderFunctions(mod: ModuleBuilder) { // TODO: Maybe should replace Sequence if having sequence layer?! def benchmark() : BenchmarkBuilder = benchmark("") def benchmark(caption: String) : BenchmarkBuilder = BenchmarkBuilder(caption, mod) /* def makeTransient() : TransientContainerBuilder = TransientContainerBuilder(mod) */ } }
bashimao/ltudl
blaze/src/main/scala/edu/latrobe/blaze/modules/package.scala
Scala
apache-2.0
1,212
package org.bitcoins.rpc.channels import org.bitcoins.core.channels._ import org.bitcoins.core.crypto.{ECPrivateKey, ECPublicKey} import org.bitcoins.core.currency.{CurrencyUnit, Satoshis} import org.bitcoins.core.number.Int64 import org.bitcoins.core.policy.Policy import org.bitcoins.core.protocol.script._ import org.bitcoins.core.protocol.transaction._ import org.bitcoins.core.protocol.{P2PKHAddress, P2SHAddress} import org.bitcoins.core.util.BitcoinSLogger import org.bitcoins.rpc.RPCClient import org.bitcoins.rpc.bitcoincore.wallet.ImportMultiRequest import scala.concurrent.{ExecutionContext, Future} /** * Created by chris on 5/10/17. */ sealed trait ChannelClient extends BitcoinSLogger { def client: RPCClient def channel: Channel def clientKey: ECPrivateKey def update(amount: CurrencyUnit): Future[(ChannelClient, WitnessTransaction)] = channel match { case _: ChannelAwaitingAnchorTx => Future.failed(new IllegalArgumentException("Cannot sign a payment channel awaiting the anchor transaction, need to provide clientSPK and serverSPK")) case inProgress: ChannelInProgress => val clientSigned = inProgress.clientSign(amount,clientKey) //TODO: This is hacky, but we want the current value to be of type ChannelInProgress, NOT ChannelInProgressClientSigned val ip = clientSigned.map(c => ChannelInProgress(inProgress.anchorTx,inProgress.lock,inProgress.clientChangeSPK,c.current,c.old)) val newClient = ip.map(c => (ChannelClient(client,c,clientKey),c.current.transaction)) Future.fromTry(newClient) case _: ChannelInProgressClientSigned => //TODO: Look at this case, is this right? 
Future.failed(new IllegalArgumentException("Channel was ChannelInProgressClientSigned")) case _: ChannelClosed => Future.failed(new IllegalArgumentException("Cannot update a payment channel that is already closed")) } /** Creates the first spending transaction in the payment channel, then signs it with the client's key */ def update(clientSPK: ScriptPubKey, amount: CurrencyUnit)(implicit ec: ExecutionContext): Future[(ChannelClient,WitnessTransaction)] = { val invariant = Future(require(channel.isInstanceOf[ChannelAwaitingAnchorTx], "Cannot create the first spending transaction for a payment channel if the type is NOT ChannelAwaitingAnchorTx")) val ch = invariant.map(_ => channel.asInstanceOf[ChannelAwaitingAnchorTx]) //get the amount of confs on the anchor tx val updatedConfs = ch.flatMap(c => client.getConfirmations(c.anchorTx.txId)) val newAwaiting = ch.flatMap { c => updatedConfs.flatMap(confs => Future.fromTry(ChannelAwaitingAnchorTx(c.anchorTx, c.lock, confs.getOrElse(c.confirmations)))) } val clientSigned = newAwaiting.flatMap(ch => Future.fromTry(ch.clientSign(clientSPK,amount,clientKey))) //TODO: This is hacky, but we want the current value to be of type ChannelInProgress, NOT ChannelInProgressClientSigned val ip = clientSigned.map(c => ChannelInProgress(c.anchorTx,c.lock,c.clientChangeSPK,c.current,c.old)) val newClient = ip.map(c => (ChannelClient(client,c,clientKey),c.current.transaction)) newClient } def closeWithTimeout(implicit ec: ExecutionContext): Future[Transaction] = { val invariant = Future(require(channel.lock.timeout.nestedScriptPubKey.isInstanceOf[P2PKHScriptPubKey])) val nestedTimeoutSPK = invariant.map(_ => channel.lock.timeout.nestedScriptPubKey.asInstanceOf[P2PKHScriptPubKey]) val timeoutKey = nestedTimeoutSPK.flatMap(spk => client.dumpPrivateKey(P2PKHAddress(spk,client.instance.network))) val closedWithTimeout = channel match { case awaiting: ChannelAwaitingAnchorTx => val address = client.getNewAddress //we don't have anything to 
estimate a fee based upon, so create the tx first with //one satoshis as the fee val txSigComponentNoFee = timeoutKey.flatMap { key => address.flatMap { addr => Future.fromTry(awaiting.closeWithTimeout(addr.scriptPubKey, key, Satoshis.one)) } } val closed = timeoutKey.flatMap { key => txSigComponentNoFee.flatMap { t => address.flatMap { addr => //re-estimate the fee now that we have an idea what the size of the tx actually is val fee = estimateFeeForTx(t.current.transaction) fee.flatMap{ f => Future.fromTry(awaiting.closeWithTimeout(addr.scriptPubKey, key,f)) } } } } closed case inProgress: ChannelInProgress => val estimatedFee = estimateFeeForTx(inProgress.current.transaction) val closed = timeoutKey.flatMap { key => estimatedFee.flatMap(f => Future.fromTry(inProgress.closeWithTimeout(key, f))) } closed case inProgress: ChannelInProgressClientSigned => val estimatedFee = estimateFeeForTx(inProgress.current.transaction) val closed = timeoutKey.flatMap { key => estimatedFee.flatMap(f => Future.fromTry(inProgress.closeWithTimeout(key, f))) } closed case _: ChannelClosed => Future.failed(new IllegalArgumentException("Cannot close a payment channel that has already been closed")) } val sendRawTx = closedWithTimeout.flatMap(c => client.sendRawTransaction(c.current.transaction)) sendRawTx.flatMap { txid => logger.info("Closed with timeout txid: " + txid) closedWithTimeout.map(_.current.transaction) } } private def estimateFeeForTx(tx: Transaction)(implicit ec: ExecutionContext): Future[CurrencyUnit] = { val estimateFeePerByte = client.estimateFee(Policy.confirmations.toInt) estimateFeePerByte.map(f => f * Satoshis(Int64(tx.bytes.size))) } } object ChannelClient extends BitcoinSLogger { private case class ChannelClientImpl(client: RPCClient, channel: Channel, clientKey: ECPrivateKey) extends ChannelClient /** Creates a [[org.bitcoins.core.channels.ChannelAwaitingAnchorTx]], * it also broadcasts the anchor tx to the network */ def apply(client: RPCClient, serverPublicKey: 
ECPublicKey, timeout: LockTimeScriptPubKey, depositAmount: CurrencyUnit)(implicit ec: ExecutionContext): Future[ChannelClient] = { require(timeout.nestedScriptPubKey.isInstanceOf[P2PKHScriptPubKey], "We only support P2PKHScriptPubKey's for timeout branches for a payment channel currently") val clientPrivKey = ECPrivateKey() val importPrivKey = client.importPrivateKey(clientPrivKey) val clientPubKey = clientPrivKey.publicKey val escrow = MultiSignatureScriptPubKey(2,Seq(clientPubKey, serverPublicKey)) val lock = EscrowTimeoutScriptPubKey(escrow,timeout) val witSPK = WitnessScriptPubKeyV0(lock) val p2sh = P2SHScriptPubKey(witSPK) val addr = P2SHAddress(p2sh,client.instance.network) val importMulti = importPrivKey.map { _ => val i = ImportMultiRequest(Right(addr),None,Some(lock),Seq(serverPublicKey,clientPubKey), Nil, false,true, client.instance.network) client.importMulti(i) } val outputs = Seq(TransactionOutput(depositAmount,p2sh)) val bTx = BaseTransaction(TransactionConstants.validLockVersion,Nil,outputs, TransactionConstants.lockTime) //fund, sign then broadcast the tx val funded: Future[(Transaction,CurrencyUnit,Int)] = importMulti.flatMap { i => client.fundRawTransaction(bTx,None) } val signed: Future[(Transaction,Boolean)] = funded.flatMap { f => client.signRawTransaction(f._1) } val sent = signed.flatMap { s => client.sendRawTransaction(s._1) } val anchorTx = sent.flatMap { _ => val tx = signed.map { t => logger.info("Anchor txid: " + t._1.txId) logger.info("Anchor transaction: " + t._1.hex) t._1 } tx } val channel = anchorTx.flatMap(aTx => Future.fromTry(ChannelAwaitingAnchorTx(aTx,lock))) channel.map(chan => ChannelClient(client,chan,clientPrivKey)) } def apply(client: RPCClient, channel: Channel, clientKey: ECPrivateKey): ChannelClient = { ChannelClientImpl(client,channel, clientKey) } }
bitcoin-s/bitcoin-s-rpc-client
src/main/scala/org/bitcoins/rpc/channels/ChannelClient.scala
Scala
mit
8,230
/* * Scala (https://www.scala-lang.org) * * Copyright EPFL and Lightbend, Inc. * * Licensed under Apache License 2.0 * (http://www.apache.org/licenses/LICENSE-2.0). * * See the NOTICE file distributed with this work for * additional information regarding copyright ownership. */ package scala package reflect package internal package pickling import java.io.IOException import java.lang.Float.intBitsToFloat import java.lang.Double.longBitsToDouble import Flags._ import PickleFormat._ import scala.collection.mutable import scala.collection.mutable.ListBuffer import scala.annotation.switch import scala.util.control.NonFatal abstract class UnPickler { val symbolTable: SymbolTable import symbolTable._ /** Unpickle symbol table information descending from a class and/or module root * from an array of bytes. * @param bytes bytearray from which we unpickle * @param offset offset from which unpickling starts * @param classRoot the top-level class which is unpickled * @param moduleRoot the top-level module which is unpickled * @param filename filename associated with bytearray, only used for error messages */ def unpickle(bytes: Array[Byte], offset: Int, classRoot: ClassSymbol, moduleRoot: ModuleSymbol, filename: String): Unit = { try { assert(classRoot != NoSymbol && moduleRoot != NoSymbol, s"The Unpickler expects a class and module symbol: $classRoot - $moduleRoot") new Scan(bytes, offset, classRoot, moduleRoot, filename).run() } catch { case NonFatal(ex) => /*if (settings.debug.value)*/ ex.printStackTrace() throw new RuntimeException("error reading Scala signature of "+filename+": "+ex.getMessage()) } } /** Keep track of the symbols pending to be initialized. * * Useful for reporting on stub errors and cyclic errors. 
*/ private[this] val completingStack = new mutable.ArrayBuffer[Symbol](24) class Scan(_bytes: Array[Byte], offset: Int, classRoot: ClassSymbol, moduleRoot: ModuleSymbol, filename: String) extends PickleBuffer(_bytes, offset, -1) { //println("unpickle " + classRoot + " and " + moduleRoot)//debug checkVersion() private[this] val loadingMirror = mirrorThatLoaded(classRoot) /** A map from entry numbers to array offsets */ private[this] val index = createIndex /** A map from entry numbers to symbols, types, or annotations */ private[this] val entries = new Array[AnyRef](index.length) /** A map from symbols to their associated `decls` scopes */ private[this] val symScopes = mutable.HashMap[Symbol, Scope]() private def expect(expected: Int, msg: => String): Unit = { val tag = readByte() if (tag != expected) errorBadSignature(s"$msg ($tag)") } //println("unpickled " + classRoot + ":" + classRoot.rawInfo + ", " + moduleRoot + ":" + moduleRoot.rawInfo);//debug @inline private def runAtIndex[T](i: Int)(body: => T): T = { val saved = readIndex readIndex = index(i) try body finally readIndex = saved } // Laboriously unrolled for performance. def run(): Unit = { var i = 0 while (i < index.length) { if (entries(i) == null && isSymbolEntry(i)) runAtIndex(i)(entries(i) = readSymbol()) i += 1 } // read children last, fix for #3951 i = 0 while (i < index.length) { if (entries(i) == null) { if (isSymbolAnnotationEntry(i)) runAtIndex(i)(readSymbolAnnotation()) else if (isChildrenEntry(i)) runAtIndex(i)(readChildren()) } i += 1 } } private def checkVersion(): Unit = { val major = readNat() val minor = readNat() if (major != MajorVersion || minor > MinorVersion) throw new IOException("Scala signature " + classRoot.decodedName + " has wrong version\n expected: " + MajorVersion + "." + MinorVersion + "\n found: " + major + "." 
+ minor + " in "+filename) } /** The `decls` scope associated with given symbol */ protected def symScope(sym: Symbol) = symScopes.getOrElseUpdate(sym, newScope) /** Does entry represent an (internal) symbol */ protected def isSymbolEntry(i: Int): Boolean = { val tag = bytes(index(i)).toInt (firstSymTag <= tag && tag <= lastSymTag && (tag != CLASSsym || !isRefinementSymbolEntry(i))) } /** Does entry represent an (internal or external) symbol */ protected def isSymbolRef(i: Int): Boolean = { val tag = bytes(index(i)) (firstSymTag <= tag && tag <= lastExtSymTag) } /** Does entry represent a name? */ protected def isNameEntry(i: Int): Boolean = { val tag = bytes(index(i)).toInt tag == TERMname || tag == TYPEname } /** Does entry represent a symbol annotation? */ protected def isSymbolAnnotationEntry(i: Int): Boolean = { val tag = bytes(index(i)).toInt tag == SYMANNOT } /** Does the entry represent children of a symbol? */ protected def isChildrenEntry(i: Int): Boolean = { val tag = bytes(index(i)).toInt tag == CHILDREN } /** Does entry represent a refinement symbol? * pre: Entry is a class symbol */ protected def isRefinementSymbolEntry(i: Int): Boolean = { val savedIndex = readIndex readIndex = index(i) val tag = readByte().toInt assert(tag == CLASSsym, "Entry must be a class symbol") readNat(); // read length val result = readNameRef() == tpnme.REFINE_CLASS_NAME readIndex = savedIndex result } /** If entry at `i` is undefined, define it by performing * operation `op` with `readIndex` at start of i'th * entry. Restore `readIndex` afterwards. 
*/ protected def at[T <: AnyRef](i: Int, op: () => T): T = { var r = entries(i) if (r eq null) { val savedIndex = readIndex readIndex = index(i) r = op() assert(entries(i) eq null, entries(i)) entries(i) = r readIndex = savedIndex } r.asInstanceOf[T] } /** Read a name */ protected def readName(): Name = { val tag = readByte() val len = readNat() tag match { case TERMname => newTermName(bytes, readIndex, len) case TYPEname => newTypeName(bytes, readIndex, len) case _ => errorBadSignature("bad name tag: " + tag) } } private def readEnd() = readNat() + readIndex /** Read a symbol */ protected def readSymbol(): Symbol = { val tag = readByte() val end = readEnd() def atEnd = readIndex == end def readExtSymbol(): Symbol = { val name = readNameRef() val owner = if (atEnd) loadingMirror.RootClass else readSymbolRef() def adjust(sym: Symbol) = if (tag == EXTref) sym else sym.moduleClass def fromName(name: Name) = name.toTermName match { case nme.ROOT => loadingMirror.RootClass case nme.ROOTPKG => loadingMirror.RootPackage case _ => val decl = owner match { case stub: StubSymbol => NoSymbol // scala/bug#8502 Don't call .info and fail the stub case _ => owner.info.decl(name) } adjust(decl) } def moduleAdvice(missing: String): String = { val module = if (missing.startsWith("scala.xml")) Some(("org.scala-lang.modules", "scala-xml")) else if (missing.startsWith("scala.util.parsing")) Some(("org.scala-lang.modules", "scala-parser-combinators")) else if (missing.startsWith("scala.swing")) Some(("org.scala-lang.modules", "scala-swing")) else if (missing.startsWith("scala.collection.parallel")) Some(("org.scala-lang.modules", "scala-parallel-collections")) else None (module map { case (group, art) => s"""\n(NOTE: It looks like the $art module is missing; try adding a dependency on "$group" : "$art". 
| See https://docs.scala-lang.org/overviews/ for more information.)""".stripMargin } getOrElse "") } def localDummy = { if (nme.isLocalDummyName(name)) owner.newLocalDummy(NoPosition) else NoSymbol } if (owner == definitions.ScalaPackageClass && name == tpnme.AnyRef) return definitions.AnyRefClass // (1) Try name. localDummy orElse fromName(name) orElse { // (2) Try with expanded name. Can happen if references to private // symbols are read from outside: for instance when checking the children // of a class. See #1722. fromName(nme.expandedName(name.toTermName, owner)) orElse { // (3) Call the mirror's "missing" hook. adjust(mirrorThatLoaded(owner).missingHook(owner, name)) orElse { // (4) Create a stub symbol to defer hard failure a little longer. val advice = moduleAdvice(s"${owner.fullName}.$name") val lazyCompletingSymbol = if (completingStack.isEmpty) NoSymbol else completingStack.apply(completingStack.length - 1) val missingMessage = s"""|Symbol '${name.nameKind} ${owner.fullName}.$name' is missing from the classpath. |This symbol is required by '${lazyCompletingSymbol.kindString} ${lazyCompletingSymbol.fullName}'. |Make sure that ${name.longString} is in your classpath and check for conflicting dependencies with `-Ylog-classpath`. 
|A full rebuild may help if '$filename' was compiled against an incompatible version of ${owner.fullName}.$advice""".stripMargin val stubName = if (tag == EXTref) name else name.toTypeName // The position of the error message is set by `newStubSymbol` owner.newStubSymbol(stubName, missingMessage) } } } } tag match { case NONEsym => return NoSymbol case EXTref | EXTMODCLASSref => return readExtSymbol() case _ => () } // symbols that were pickled with Pickler.writeSymInfo val nameref = readNat() val name = at(nameref, () => readName()) val owner = readSymbolRef() val flags = pickledToRawFlags(readLongNat()) var privateWithin: Symbol = null var inforef: Int = 0 readNat() match { case index if isSymbolRef(index) => privateWithin = at(index, () => readSymbol()) inforef = readNat() case index => privateWithin = NoSymbol inforef = index } def isModuleFlag = (flags & MODULE) != 0L def isClassRoot = (name == classRoot.name) && (owner == classRoot.owner) def isModuleRoot = (name == moduleRoot.name) && (owner == moduleRoot.owner) def isModuleClassRoot = (name == moduleRoot.name.toTypeName) && (owner == moduleRoot.owner) def pflags = flags & PickledFlags def finishSym(sym: Symbol): Symbol = { /* * member symbols (symbols owned by a class) are added to the class's scope, with a number * of exceptions: * * (.) ... * (1) `local child` represents local child classes, see comment in Pickler.putSymbol. * Since it is not a member, it should not be entered in the owner's scope. 
* (2) Similarly, we ignore local dummy symbols, as seen in scala/bug#8868 */ def shouldEnterInOwnerScope = { sym.owner.isClass && sym != classRoot && sym != moduleRoot && !sym.isModuleClass && !sym.isRefinementClass && !sym.isTypeParameter && !sym.isExistentiallyBound && sym.rawname != tpnme.LOCAL_CHILD && // (1) !nme.isLocalDummyName(sym.rawname) // (2) } markFlagsCompleted(sym)(mask = AllFlags) sym.privateWithin = privateWithin sym.info = ( if (atEnd) { assert(!sym.isSuperAccessor, sym) newLazyTypeRef(inforef) } else { assert(sym.isSuperAccessor || sym.isParamAccessor, sym) newLazyTypeRefAndAlias(inforef, readNat()) } ) if (shouldEnterInOwnerScope) symScope(sym.owner) enter sym sym } finishSym(tag match { case TYPEsym | ALIASsym => owner.newNonClassSymbol(name.toTypeName, NoPosition, pflags) case CLASSsym => val sym = { if (isModuleFlag && isModuleClassRoot) moduleRoot.moduleClass setFlag pflags else if (!isModuleFlag && isClassRoot) classRoot setFlag pflags else owner.newClassSymbol(name.toTypeName, NoPosition, pflags) } if (!atEnd) sym.typeOfThis = newLazyTypeRef(readNat()) sym case MODULEsym => val moduleClass = at(inforef, () => readType()).typeSymbol if (isModuleRoot) moduleRoot setFlag pflags else owner.newLinkedModule(moduleClass, pflags) case VALsym => if (isModuleRoot) { abort(s"VALsym at module root: owner = $owner, name = $name") } else owner.newTermSymbol(name.toTermName, NoPosition, pflags) case _ => errorBadSignature("bad symbol tag: " + tag) }) } protected def readType(): Type = { val tag = readByte() val end = readEnd() @inline def all[T](body: => T): List[T] = until(end, () => body) def readTypes() = all(readTypeRef()) def readSymbols() = all(readSymbolRef()) def readAnnots() = all(readAnnotationRef()) // if the method is overloaded, the params cannot be determined (see readSymbol) => return NoType. // Only happen for trees, "case Apply" in readTree() takes care of selecting the correct // alternative after parsing the arguments. 
def MethodTypeRef(restpe: Type, params: List[Symbol]): Type = ( if (restpe == NoType || (params contains NoSymbol)) NoType else MethodType(params, restpe) ) def PolyOrNullaryType(restpe: Type, tparams: List[Symbol]): Type = tparams match { case Nil => NullaryMethodType(restpe) case _ => PolyType(tparams, restpe) } def CompoundType(clazz: Symbol, parents: List[Type]): Type = tag match { case REFINEDtpe => RefinedType(parents, symScope(clazz), clazz) case CLASSINFOtpe => ClassInfoType(parents, symScope(clazz), clazz) } def readThisType(): Type = { val sym = readSymbolRef() match { case stub: StubSymbol => stub.setFlag(PACKAGE | MODULE) case sym => sym } ThisType(sym) } def mkTypeRef(pre: Type, sym: Symbol, args: List[Type]): Type = { if (classRoot.isJava && (sym == definitions.ObjectClass)) { definitions.ObjectTpeJava } else TypeRef(pre, sym, args) } // We're stuck with the order types are pickled in, but with judicious use // of named parameters we can recapture a declarative flavor in a few cases. // But it's still a rat's nest of ad-hockery. 
(tag: @switch) match { case NOtpe => NoType case NOPREFIXtpe => NoPrefix case THIStpe => readThisType() case SINGLEtpe => SingleType(readTypeRef(), readSymbolRef().filter(_.isStable)) // scala/bug#7596 account for overloading case SUPERtpe => SuperType(readTypeRef(), readTypeRef()) case CONSTANTtpe => ConstantType(readConstantRef()) case TYPEREFtpe => mkTypeRef(readTypeRef(), readSymbolRef(), readTypes()) case TYPEBOUNDStpe => TypeBounds(readTypeRef(), readTypeRef()) case REFINEDtpe | CLASSINFOtpe => CompoundType(readSymbolRef(), readTypes()) case METHODtpe => MethodTypeRef(readTypeRef(), readSymbols()) case POLYtpe => PolyOrNullaryType(readTypeRef(), readSymbols()) case EXISTENTIALtpe => ExistentialType(underlying = readTypeRef(), quantified = readSymbols()) case ANNOTATEDtpe => AnnotatedType(underlying = readTypeRef(), annotations = readAnnots()) } } def noSuchTypeTag(tag: Int, end: Int): Type = errorBadSignature("bad type tag: " + tag) /** Read a constant */ protected def readConstant(): Constant = { val tag = readByte().toInt val len = readNat() (tag: @switch) match { case LITERALunit => Constant(()) case LITERALboolean => Constant(readLong(len) != 0L) case LITERALbyte => Constant(readLong(len).toByte) case LITERALshort => Constant(readLong(len).toShort) case LITERALchar => Constant(readLong(len).toChar) case LITERALint => Constant(readLong(len).toInt) case LITERALlong => Constant(readLong(len)) case LITERALfloat => Constant(intBitsToFloat(readLong(len).toInt)) case LITERALdouble => Constant(longBitsToDouble(readLong(len))) case LITERALstring => Constant(readNameRef().toString) case LITERALnull => Constant(null) case LITERALclass => Constant(readTypeRef()) case LITERALenum => Constant(readSymbolRef()) case LITERALsymbol => Constant(null) // TODO: needed until we have a STARR that does not emit it. 
case _ => noSuchConstantTag(tag, len) } } def noSuchConstantTag(tag: Int, len: Int): Constant = errorBadSignature("bad constant tag: " + tag) /** Read children and store them into the corresponding symbol. */ protected def readChildren(): Unit = { val tag = readByte() assert(tag == CHILDREN, "Entry must be children") val end = readEnd() val target = readSymbolRef() while (readIndex != end) target addChild readSymbolRef() } /** Read an annotation argument, which is pickled either * as a Constant or a Tree. */ protected def readAnnotArg(i: Int): Tree = bytes(index(i)) match { case TREE => at(i, () => readTree()) case _ => val const = at(i, () => readConstant()) Literal(const) setType const.tpe } /** Read a ClassfileAnnotArg (argument to a classfile annotation) */ private def readArrayAnnot() = { readByte() // skip the `annotargarray` tag val end = readEnd() until(end, () => readClassfileAnnotArg(readNat())).toArray } protected def readClassfileAnnotArg(i: Int): ClassfileAnnotArg = bytes(index(i)) match { case ANNOTINFO => NestedAnnotArg(at(i, () => readAnnotation())) case ANNOTARGARRAY => at(i, () => ArrayAnnotArg(readArrayAnnot())) case _ => LiteralAnnotArg(at(i, () => readConstant())) } /** Read an AnnotationInfo. Not to be called directly, use * readAnnotation or readSymbolAnnotation */ protected def readAnnotationInfo(end: Int): AnnotationInfo = { val atp = readTypeRef() val args = new ListBuffer[Tree] val assocs = new ListBuffer[(Name, ClassfileAnnotArg)] while (readIndex != end) { val argref = readNat() if (isNameEntry(argref)) { val name = at(argref, () => readName()) val arg = readClassfileAnnotArg(readNat()) assocs += ((name, arg)) } else args += readAnnotArg(argref) } AnnotationInfo(atp, args.toList, assocs.toList) } /** Read an annotation and as a side effect store it into * the symbol it requests. Called at top-level, for all * (symbol, annotInfo) entries. 
*/ protected def readSymbolAnnotation(): Unit = { expect(SYMANNOT, "symbol annotation expected") val end = readEnd() val target = readSymbolRef() target.addAnnotation(readAnnotationInfo(end)) } /** Read an annotation and return it. Used when unpickling * an ANNOTATED(WSELF)tpe or a NestedAnnotArg */ protected def readAnnotation(): AnnotationInfo = { val tag = readByte() if (tag != ANNOTINFO) errorBadSignature("annotation expected (" + tag + ")") val end = readEnd() readAnnotationInfo(end) } private def readNonEmptyTree(tag: Int, end: Int): Tree = { @inline def all[T](body: => T): List[T] = until(end, () => body) @inline def rep[T](body: => T): List[T] = times(readNat(), () => body) // !!! What is this doing here? def fixApply(tree: Apply, tpe: Type): Apply = { val Apply(fun, args) = tree if (fun.symbol.isOverloaded) { fun setType fun.symbol.info inferMethodAlternative(fun, args map (_.tpe), tpe) } tree } def ref() = readTreeRef() def caseRef() = readCaseDefRef() def modsRef() = readModifiersRef() def implRef() = readTemplateRef() def nameRef() = readNameRef() def tparamRef() = readTypeDefRef() def vparamRef() = readValDefRef() def memberRef() = readMemberDefRef() def constRef() = readConstantRef() def idRef() = readIdentRef() def termNameRef() = readNameRef().toTermName def typeNameRef() = readNameRef().toTypeName def refTreeRef() = ref() match { case t: RefTree => t case t => errorBadSignature("RefTree expected, found " + t.shortClass) } def selectorsRef() = all(ImportSelector(nameRef(), -1, nameRef(), -1)) /* A few of the most popular trees have been pulled to the top for * switch efficiency purposes. */ def readTree(tpe: Type): Tree = (tag: @switch) match { case IDENTtree => Ident(nameRef()) case SELECTtree => Select(ref(), nameRef()) case APPLYtree => fixApply(Apply(ref(), all(ref())), tpe) // !!! 
case BINDtree => Bind(nameRef(), ref()) case BLOCKtree => all(ref()) match { case stats :+ expr => Block(stats, expr) case x => throw new MatchError(x) } case IFtree => If(ref(), ref(), ref()) case LITERALtree => Literal(constRef()) case TYPEAPPLYtree => TypeApply(ref(), all(ref())) case TYPEDtree => Typed(ref(), ref()) case ALTERNATIVEtree => Alternative(all(ref())) case ANNOTATEDtree => Annotated(ref(), ref()) case APPLIEDTYPEtree => AppliedTypeTree(ref(), all(ref())) case APPLYDYNAMICtree => ApplyDynamic(ref(), all(ref())) case ARRAYVALUEtree => ArrayValue(ref(), all(ref())) case ASSIGNtree => Assign(ref(), ref()) case CASEtree => CaseDef(ref(), ref(), ref()) case CLASStree => ClassDef(modsRef(), typeNameRef(), rep(tparamRef()), implRef()) case COMPOUNDTYPEtree => CompoundTypeTree(implRef()) case DEFDEFtree => DefDef(modsRef(), termNameRef(), rep(tparamRef()), rep(rep(vparamRef())), ref(), ref()) case EXISTENTIALTYPEtree => ExistentialTypeTree(ref(), all(memberRef())) case FUNCTIONtree => Function(rep(vparamRef()), ref()) case IMPORTtree => Import(ref(), selectorsRef()) case LABELtree => LabelDef(termNameRef(), rep(idRef()), ref()) case MATCHtree => Match(ref(), all(caseRef())) case MODULEtree => ModuleDef(modsRef(), termNameRef(), implRef()) case NEWtree => New(ref()) case PACKAGEtree => PackageDef(refTreeRef(), all(ref())) case RETURNtree => Return(ref()) case SELECTFROMTYPEtree => SelectFromTypeTree(ref(), typeNameRef()) case SINGLETONTYPEtree => SingletonTypeTree(ref()) case STARtree => Star(ref()) case SUPERtree => Super(ref(), typeNameRef()) case TEMPLATEtree => Template(rep(ref()), vparamRef(), all(ref())) case THIStree => This(typeNameRef()) case THROWtree => Throw(ref()) case TREtree => Try(ref(), rep(caseRef()), ref()) case TYPEBOUNDStree => TypeBoundsTree(ref(), ref()) case TYPEDEFtree => TypeDef(modsRef(), typeNameRef(), rep(tparamRef()), ref()) case TYPEtree => TypeTree() case UNAPPLYtree => UnApply(ref(), all(ref())) case VALDEFtree => 
ValDef(modsRef(), termNameRef(), ref(), ref()) case _ => noSuchTreeTag(tag, end) } val tpe = readTypeRef() val sym = if (isTreeSymbolPickled(tag)) readSymbolRef() else null val result = readTree(tpe) if (sym ne null) result setSymbol sym result setType tpe } /* Read an abstract syntax tree */ protected def readTree(): Tree = { expect(TREE, "tree expected") val end = readEnd() readByte() match { case EMPTYtree => EmptyTree case tag => readNonEmptyTree(tag, end) } } def noSuchTreeTag(tag: Int, end: Int) = errorBadSignature("unknown tree type (" + tag + ")") def readModifiers(): Modifiers = { val tag = readNat() if (tag != MODIFIERS) errorBadSignature("expected a modifiers tag (" + tag + ")") readEnd() val pflagsHi = readNat() val pflagsLo = readNat() val pflags = (pflagsHi.toLong << 32) + pflagsLo val flags = pickledToRawFlags(pflags) val privateWithin = readNameRef() Modifiers(flags, privateWithin, Nil) } /* Read a reference to a pickled item */ protected def readSymbolRef(): Symbol = {//OPT inlined from: at(readNat(), readSymbol) to save on closure creation val i = readNat() var r = entries(i) if (r eq null) { val savedIndex = readIndex readIndex = index(i) r = readSymbol() assert(entries(i) eq null, entries(i)) entries(i) = r readIndex = savedIndex } r.asInstanceOf[Symbol] } protected def readNameRef(): Name = at(readNat(), () => readName()) protected def readTypeRef(): Type = at(readNat(), () => readType()) protected def readConstantRef(): Constant = at(readNat(), () => readConstant()) protected def readAnnotationRef(): AnnotationInfo = at(readNat(), () => readAnnotation()) protected def readModifiersRef(): Modifiers = at(readNat(), () => readModifiers()) protected def readTreeRef(): Tree = at(readNat(), () => readTree()) protected def readTypeNameRef(): TypeName = readNameRef().toTypeName protected def readTemplateRef(): Template = readTreeRef() match { case templ:Template => templ case other => errorBadSignature("expected a template (" + other + ")") } 
protected def readCaseDefRef(): CaseDef = readTreeRef() match { case tree:CaseDef => tree case other => errorBadSignature("expected a case def (" + other + ")") } protected def readValDefRef(): ValDef = readTreeRef() match { case tree:ValDef => tree case other => errorBadSignature("expected a ValDef (" + other + ")") } protected def readIdentRef(): Ident = readTreeRef() match { case tree:Ident => tree case other => errorBadSignature("expected an Ident (" + other + ")") } protected def readTypeDefRef(): TypeDef = readTreeRef() match { case tree:TypeDef => tree case other => errorBadSignature("expected an TypeDef (" + other + ")") } protected def readMemberDefRef(): MemberDef = readTreeRef() match { case tree:MemberDef => tree case other => errorBadSignature("expected an MemberDef (" + other + ")") } protected def errorBadSignature(msg: String) = throw new RuntimeException("malformed Scala signature of " + classRoot.name + " at " + readIndex + "; " + msg) def inferMethodAlternative(fun: Tree, argtpes: List[Type], restpe: Type): Unit = {} // can't do it; need a compiler for that. def newLazyTypeRef(i: Int): LazyType = new LazyTypeRef(i) def newLazyTypeRefAndAlias(i: Int, j: Int): LazyType = new LazyTypeRefAndAlias(i, j) /** Convert to a type error, that is printed gracefully instead of crashing. * * Similar in intent to what SymbolLoader does (but here we don't have access to * error reporting, so we rely on the typechecker to report the error). */ def toTypeError(e: MissingRequirementError) = { new TypeError(e.msg) } /** A lazy type which when completed returns type at index `i`. 
*/ private class LazyTypeRef(i: Int) extends LazyType with FlagAgnosticCompleter { private[this] val definedAtRunId = currentRunId private[this] val p = phase protected def completeInternal(sym: Symbol) : Unit = try { completingStack += sym val tp = at(i, () => readType()) if (p ne null) { slowButSafeEnteringPhase(p)(sym setInfo tp) } if (currentRunId != definedAtRunId) sym.setInfo(adaptToNewRunMap(tp)) } catch { case e: MissingRequirementError => throw toTypeError(e) } finally { completingStack.remove(completingStack.length - 1) } override def complete(sym: Symbol) : Unit = { completeInternal(sym) if (!isCompilerUniverse) markAllCompleted(sym) } override def load(sym: Symbol): Unit = { complete(sym) } } /** A lazy type which when completed returns type at index `i` and sets alias * of completed symbol to symbol at index `j`. */ private class LazyTypeRefAndAlias(i: Int, j: Int) extends LazyTypeRef(i) { override def completeInternal(sym: Symbol) = try { super.completeInternal(sym) var alias = at(j, () => readSymbol()) if (alias.isOverloaded) { alias = slowButSafeEnteringPhase(picklerPhase)(alias suchThat { alt => if (sym.isParamAccessor) alt.isParamAccessor else sym.tpe =:= sym.owner.thisType.memberType(alt) }) } sym.asInstanceOf[TermSymbol].setAlias(alias) } catch { case e: MissingRequirementError => throw toTypeError(e) } } } }
scala/scala
src/reflect/scala/reflect/internal/pickling/UnPickler.scala
Scala
apache-2.0
30,980
package io.buoyant.telemetry.statsd import com.timgroup.statsd.StatsDClient import com.twitter.finagle.stats.{Counter => FCounter, Stat => FStat} private[statsd] object Metric { // stats (timing/histograms) only send when Math.random() <= sampleRate class Counter(statsDClient: StatsDClient, name: String, sampleRate: Double) extends FCounter { def incr(delta: Int): Unit = statsDClient.count(name, delta, sampleRate) } // gauges simply evaluate on send class Gauge(statsDClient: StatsDClient, name: String, f: => Float) { def send: Unit = statsDClient.recordGaugeValue(name, f) } // stats (timing/histograms) only send when Math.random() <= sampleRate class Stat(statsDClient: StatsDClient, name: String, sampleRate: Double) extends FStat { def add(value: Float): Unit = // would prefer `recordHistogramValue`, but that's an extension, supported by Datadog and InfluxDB statsDClient.recordExecutionTime(name, value.toLong, sampleRate) } }
denverwilliams/linkerd
telemetry/statsd/src/main/scala/io/buoyant/telemetry/statsd/Metric.scala
Scala
apache-2.0
987
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package vta.core import chisel3._ import chisel3.util._ import vta.util.config._ import vta.shell._ /** UopMaster. * * Uop interface used by a master module, i.e. TensorAlu or TensorGemm, * to request a micro-op (uop) from the uop-scratchpad. The index (idx) is * used as an address to find the uop in the uop-scratchpad. */ class UopMaster(implicit p: Parameters) extends Bundle { val addrBits = log2Ceil(p(CoreKey).uopMemDepth) val idx = ValidIO(UInt(addrBits.W)) val data = Flipped(ValidIO(new UopDecode)) override def cloneType = new UopMaster().asInstanceOf[this.type] } /** UopClient. * * Uop interface used by a client module, i.e. LoadUop, to receive * a request from a master module, i.e. TensorAlu or TensorGemm. * The index (idx) is used as an address to find the uop in the uop-scratchpad. */ class UopClient(implicit p: Parameters) extends Bundle { val addrBits = log2Ceil(p(CoreKey).uopMemDepth) val idx = Flipped(ValidIO(UInt(addrBits.W))) val data = ValidIO(new UopDecode) override def cloneType = new UopClient().asInstanceOf[this.type] } /** LoadUop. * * Load micro-ops (uops) from memory, i.e. DRAM, and store them in the * uop-scratchpad. 
Currently, micro-ops are 32-bit wide and loaded in * group of 2 given the fact that the DRAM payload is 8-bytes. This module * should be modified later on to support different DRAM sizes efficiently. */ class LoadUop(debug: Boolean = false)(implicit p: Parameters) extends Module { val mp = p(ShellKey).memParams val io = IO(new Bundle { val start = Input(Bool()) val done = Output(Bool()) val inst = Input(UInt(INST_BITS.W)) val baddr = Input(UInt(mp.addrBits.W)) val vme_rd = new VMEReadMaster val uop = new UopClient }) val numUop = 2 // store two uops per sram word val uopBits = p(CoreKey).uopBits val uopBytes = uopBits / 8 val uopDepth = p(CoreKey).uopMemDepth / numUop val dec = io.inst.asTypeOf(new MemDecode) val raddr = Reg(chiselTypeOf(io.vme_rd.cmd.bits.addr)) val xcnt = Reg(chiselTypeOf(io.vme_rd.cmd.bits.len)) val xlen = Reg(chiselTypeOf(io.vme_rd.cmd.bits.len)) val xrem = Reg(chiselTypeOf(dec.xsize)) val xsize = (dec.xsize >> log2Ceil(numUop)) + dec.xsize(0) + (dec.sram_offset % 2.U) - 1.U val xmax = (1 << mp.lenBits).U val xmax_bytes = ((1 << mp.lenBits) * mp.dataBits / 8).U val offsetIsEven = (dec.sram_offset % 2.U) === 0.U val sizeIsEven = (dec.xsize % 2.U) === 0.U val sIdle :: sReadCmd :: sReadData :: Nil = Enum(3) val state = RegInit(sIdle) // control switch(state) { is(sIdle) { when(io.start) { state := sReadCmd when(xsize < xmax) { xlen := xsize xrem := 0.U }.otherwise { xlen := xmax - 1.U xrem := xsize - xmax } } } is(sReadCmd) { when(io.vme_rd.cmd.ready) { state := sReadData } } is(sReadData) { when(io.vme_rd.data.valid) { when(xcnt === xlen) { when(xrem === 0.U) { state := sIdle }.elsewhen(xrem < xmax) { state := sReadCmd xlen := xrem xrem := 0.U } .otherwise { state := sReadCmd xlen := xmax - 1.U xrem := xrem - xmax } } } } } // read-from-dram val maskOffset = VecInit(Seq.fill(M_DRAM_OFFSET_BITS)(true.B)).asUInt when(state === sIdle) { when(offsetIsEven) { raddr := io.baddr | (maskOffset & (dec.dram_offset << log2Ceil(uopBytes))) }.otherwise { 
raddr := (io.baddr | (maskOffset & (dec.dram_offset << log2Ceil( uopBytes)))) - uopBytes.U } }.elsewhen(state === sReadData && xcnt === xlen && xrem =/= 0.U) { raddr := raddr + xmax_bytes } io.vme_rd.cmd.valid := state === sReadCmd io.vme_rd.cmd.bits.addr := raddr io.vme_rd.cmd.bits.len := xlen io.vme_rd.data.ready := state === sReadData when(state =/= sReadData) { xcnt := 0.U }.elsewhen(io.vme_rd.data.fire()) { xcnt := xcnt + 1.U } val waddr = Reg(UInt(log2Ceil(uopDepth).W)) when(state === sIdle) { waddr := dec.sram_offset >> log2Ceil(numUop) }.elsewhen(io.vme_rd.data.fire()) { waddr := waddr + 1.U } val wdata = Wire(Vec(numUop, UInt(uopBits.W))) val mem = SyncReadMem(uopDepth, chiselTypeOf(wdata)) val wmask = Reg(Vec(numUop, Bool())) when(offsetIsEven) { when(sizeIsEven) { wmask := "b_11".U.asTypeOf(wmask) }.elsewhen(io.vme_rd.cmd.fire()) { when(dec.xsize === 1.U) { wmask := "b_01".U.asTypeOf(wmask) }.otherwise { wmask := "b_11".U.asTypeOf(wmask) } } .elsewhen(io.vme_rd.data.fire()) { when(xcnt === xlen - 1.U) { wmask := "b_01".U.asTypeOf(wmask) }.otherwise { wmask := "b_11".U.asTypeOf(wmask) } } }.otherwise { when(io.vme_rd.cmd.fire()) { wmask := "b_10".U.asTypeOf(wmask) }.elsewhen(io.vme_rd.data.fire()) { when(sizeIsEven && xcnt === xlen - 1.U) { wmask := "b_01".U.asTypeOf(wmask) }.otherwise { wmask := "b_11".U.asTypeOf(wmask) } } } wdata := io.vme_rd.data.bits.asTypeOf(wdata) when(io.vme_rd.data.fire()) { mem.write(waddr, wdata, wmask) } // read-from-sram io.uop.data.valid := RegNext(io.uop.idx.valid) val sIdx = io.uop.idx.bits % numUop.U val rIdx = io.uop.idx.bits >> log2Ceil(numUop) val memRead = mem.read(rIdx, io.uop.idx.valid) val sWord = memRead.asUInt.asTypeOf(wdata) val sUop = sWord(sIdx).asTypeOf(io.uop.data.bits) io.uop.data.bits <> sUop // done io.done := state === sReadData & io.vme_rd.data.valid & xcnt === xlen & xrem === 0.U // debug if (debug) { when(io.vme_rd.cmd.fire()) { printf("[LoadUop] cmd addr:%x len:%x rem:%x\n", raddr, xlen, xrem) } } }
Huyuwei/tvm
vta/hardware/chisel/src/main/scala/core/LoadUop.scala
Scala
apache-2.0
6,725
package io.circe.benchmark import org.scalatest.FlatSpec class ParsingBenchmarkSpec extends FlatSpec { val benchmark: ParsingBenchmark = new ParsingBenchmark import benchmark._ "The parsing benchmark" should "correctly parse integers using Circe" in { assert(parseIntsC === intsC) } it should "correctly parse integers using Argonaut" in { assert(parseIntsA === intsA) } it should "correctly parse integers using Play JSON" in { assert(parseIntsP === intsP) } it should "correctly parse integers using Spray JSON" in { assert(parseIntsS === intsS) } it should "correctly parse case classes using Circe" in { assert(parseFoosC === foosC) } it should "correctly parse case classes using Argonaut" in { assert(parseFoosA === foosA) } it should "correctly parse case classes using Play JSON" in { assert(parseFoosP === foosP) } it should "correctly parse case classes using Spray JSON" in { assert(parseFoosS === foosS) } }
alexarchambault/circe
benchmark/src/test/scala/io/circe/benchmark/ParsingBenchmarkSpec.scala
Scala
apache-2.0
999
import java.nio.file._ import scala.util._ object Main extends App { println("Hello world") val path = Paths get "/data/test01" val result = Try(Files createFile path) println(result) }
benmccann/sbt-native-packager
test-project-docker/src/main/scala/Main.scala
Scala
bsd-2-clause
201
/* * Copyright 2016 The BigDL Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intel.analytics.bigdl.integration.torch import com.intel.analytics.bigdl._ import com.intel.analytics.bigdl.nn.{GradientChecker, LogSoftMax} import com.intel.analytics.bigdl.tensor.Tensor import com.intel.analytics.bigdl.utils.RandomGenerator._ import scala.util.Random @com.intel.analytics.bigdl.tags.Serial class LogSoftMaxSpec extends TorchSpec { "A LogSoftMax Module " should "generate correct output and grad with input 2D" in { torchCheck() val module = new LogSoftMax[Double]() Random.setSeed(100) val input = Tensor[Double](4, 10).apply1(e => Random.nextDouble()) val data = Tensor[Double](4, 20).randn() val gradOutput = data.narrow(2, 1, 10) val start = System.nanoTime() val output = module.forward(input) val gradInput = module.backward(input, gradOutput) val end = System.nanoTime() val scalaTime = end - start val code = "module = nn.LogSoftMax()\\n" + "output1 = module:forward(input)\\n " + "output2 = module:backward(input, gradOutput)" val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput), Array("output1", "output2")) val luaOutput = torchResult("output1").asInstanceOf[Tensor[Double]] val luaGradInput = torchResult("output2").asInstanceOf[Tensor[Double]] luaOutput should be(output) luaGradInput should be(gradInput) println("Test case : LogSoft, Torch : " + luaTime + " s, Scala : " + scalaTime / 1e9 + " s") } "A LogSoftMax Module " should 
"generate correct output and grad with input 1D" in { torchCheck() val module = new LogSoftMax[Double]() Random.setSeed(100) val input = Tensor[Double](10).apply1(e => Random.nextDouble()) val data = Tensor[Double](20).randn() val gradOutput = data.narrow(1, 1, 10) val start = System.nanoTime() val output = module.forward(input) val gradInput = module.backward(input, gradOutput) val end = System.nanoTime() val scalaTime = end - start val code = "module = nn.LogSoftMax()\\n" + "output1 = module:forward(input)\\n " + "output2 = module:backward(input, gradOutput)" val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput), Array("output1", "output2")) val luaOutput = torchResult("output1").asInstanceOf[Tensor[Double]] val luaGradInput = torchResult("output2").asInstanceOf[Tensor[Double]] luaOutput should be(output) luaGradInput should be(gradInput) println("Test case : LogSoft, Torch : " + luaTime + " s, Scala : " + scalaTime / 1e9 + " s") } "A LogSoftMax Module " should "generate correct output and grad tiwh input 1*N" in { torchCheck() val module = new LogSoftMax[Double]() Random.setSeed(100) val input = Tensor[Double](1, 10).apply1(e => Random.nextDouble()) val data = Tensor[Double](1, 20).randn() val gradOutput = data.narrow(2, 1, 10) val start = System.nanoTime() val output = module.forward(input) val gradInput = module.backward(input, gradOutput) val end = System.nanoTime() val scalaTime = end - start val code = "module = nn.LogSoftMax()\\n" + "output1 = module:forward(input)\\n " + "output2 = module:backward(input, gradOutput)" val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput), Array("output1", "output2")) val luaOutput = torchResult("output1").asInstanceOf[Tensor[Double]] val luaGradInput = torchResult("output2").asInstanceOf[Tensor[Double]] luaOutput should be(output) luaGradInput should be(gradInput) println("Test case : LogSoft, Torch : " + luaTime + " s, Scala : " + scalaTime / 1e9 
+ " s") } "LogSoftMax module" should "be good in gradient check for input" in { torchCheck() val seed = 100 RNG.setSeed(seed) val layer = new LogSoftMax[Double]() val input = Tensor[Double](4, 10).apply1(e => Random.nextDouble()) val checker = new GradientChecker(1e-4) checker.checkLayer[Double](layer, input, 1e-3) should be(true) } "LogSoftMax float module" should "return good result" in { torchCheck() val module = new LogSoftMax[Float]() Random.setSeed(100) val input = Tensor[Float](2, 5).apply1(e => Random.nextFloat() + 10) val gradOutput = Tensor[Float](2, 5).apply1(e => Random.nextFloat() + 10) val start = System.nanoTime() val output = module.forward(input) val gradInput = module.backward(input, gradOutput) val end = System.nanoTime() val scalaTime = end - start val code = "torch.setdefaulttensortype('torch.FloatTensor')" + "module = nn.LogSoftMax()\\n" + "output1 = module:forward(input)\\n " + "output2 = module:backward(input, gradOutput)" val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput), Array("output1", "output2")) val luaOutput = torchResult("output1").asInstanceOf[Tensor[Float]] val luaGradInput = torchResult("output2").asInstanceOf[Tensor[Float]] luaOutput should be(output) luaGradInput should be(gradInput) } }
zhangxiaoli73/BigDL
spark/dl/src/test/scala/com/intel/analytics/bigdl/integration/torch/LogSoftMaxSpec.scala
Scala
apache-2.0
5,730
package polynomial import integer.IntegersModP import org.scalatest.{FunSuite, Matchers} import polynomial.Predef.X import core.InfixOps._ class PolynomialRingOverFieldTest extends FunSuite with Matchers { implicit val intsMod5 = IntegersModP(5) implicit val polyRing = PolynomialRingOverField(intsMod5, X) val zero = intsMod5.classOf(0) val one = intsMod5.classOf(1) val two = intsMod5.classOf(2) val three = intsMod5.classOf(3) val four = intsMod5.classOf(4) test("Addition works") { Polynomial(three) + Polynomial(four) should be (Polynomial(two)) Polynomial(one, three) + Polynomial(three) should be (Polynomial(one, one)) Polynomial(two, three) + Polynomial(three, one) should be (Polynomial(four)) Polynomial(three, zero) + polyRing.zero should be (Polynomial(three, zero)) } test("Subtraction works") { Polynomial(three) - Polynomial(two) should be (Polynomial(one)) Polynomial(one, two) - Polynomial(three) should be (Polynomial(one, four)) Polynomial(two, three) - Polynomial(two, two) should be (Polynomial(one)) Polynomial(three, zero) - polyRing.zero should be (Polynomial(three, zero)) } test("Multiplication works") { Polynomial(three) * Polynomial(two) should be (Polynomial(one)) Polynomial(one, two) * Polynomial(three) should be (Polynomial(three, one)) Polynomial(one, three, two) * Polynomial(three, one) should be (Polynomial(three, zero, four, two)) Polynomial(one, two, three) * polyRing.zero should be (polyRing.zero) } test("Exponentiation works") { Polynomial(one, three) ^ 0 should be (polyRing.one) Polynomial(one, three) ^ 1 should be (Polynomial(one, three)) Polynomial(one, three) ^ 2 should be (Polynomial(one, one, four)) Polynomial(one, three) ^ 3 should be (Polynomial(one, four, two, two)) } test("Negation works") { Polynomial(one, three, three).negate should be (Polynomial(four, two, two)) } test("A polynomial ring over a field has a zero and a one") { polyRing.zero should be (Polynomial(zero)) polyRing.one should be (Polynomial(one)) } test("The quotient 
of a constant divided by a constant is a constant") { val constant1 = Polynomial(three) val constant2 = Polynomial(two) polyRing.quot(constant1, constant2) should be (Polynomial(four)) } test("The quotient of zero divided by a constant is a zero") { val constant = Polynomial(three) polyRing.quot(polyRing.zero, constant) should be (polyRing.zero) } test("The quotient of any non-zero polynomial divided by zero should throw an exception") { val constant = Polynomial(three) val p = Polynomial(one, two) intercept[ArithmeticException](polyRing.quot(constant, polyRing.zero)) intercept[ArithmeticException](polyRing.quot(p, polyRing.zero)) intercept[ArithmeticException](polyRing.quot(polyRing.zero, polyRing.zero)) } test("The quotient of a polynomial divided by a polynomial of higher degree should be zero") { val p1 = Polynomial(one, two) val p2 = Polynomial(one, two, three) polyRing.quot(p1, p2) should be (polyRing.zero) } test("The quotient of a polynomial divided by a polynomial of the same degree should have degree 0") { val p1 = Polynomial(one, two, four) val p2 = Polynomial(one, two, three) polyRing.quot(p1, p2).degree.toInt should be (0) } test("The quotient of a polynomial divided by a polynomial of lesser degree should have degree equal the difference") { val p1 = Polynomial(one) val p2 = Polynomial(one, two) val p3 = Polynomial(one, two, three) polyRing.quot(p2, p1).degree.toInt should be (1) polyRing.quot(p3, p1).degree.toInt should be (2) polyRing.quot(p3, p2).degree.toInt should be (1) } test("The quotient for polynomials should work when they divide evenly") { val p1 = Polynomial(three) val p2 = Polynomial(one, two) val p3 = Polynomial(three, two) val p4 = Polynomial(two, one, two) polyRing.quot(p1 * p2, p1) should be (p2) polyRing.quot(p1 * p2, p2) should be (p1) polyRing.quot(p2 * p3, p3) should be (p2) polyRing.quot(p2 * p3, p2) should be (p3) polyRing.quot(p2 * p3 * p4, p4) should be (p2 * p3) polyRing.quot(p2 * p3 * p4, p2 * p3) should be (p4) } test("The 
quotient for polynomials should work when they don't divide evenly") { val p1 = Polynomial(three) val p2 = Polynomial(one, two) val p3 = Polynomial(three, two) val p4 = Polynomial(two, one, two) polyRing.quot(polyRing.plus(p2 * p3, p1), p3) should be (p2) polyRing.quot(p2 * p3 * p4 + p1, p4) should be (p2 * p3) polyRing.quot(p2 * p3 * p4 + p2, p4) should be (p2 * p3) polyRing.quot(p2 * p3 * p4 + p3, p4) should be (p2 * p3) polyRing.quot(p2 * p3 * p4 + p1, p2 * p3) should be (p4) polyRing.quot(p2 * p3 * p4 + p2, p2 * p3) should be (p4) polyRing.quot(p2 * p3 * p4 + p3, p2 * p3) should be (p4) } test("The mod of a constant divided by a constant is zero") { val constant1 = Polynomial(three) val constant2 = Polynomial(two) polyRing.mod(constant1, constant2) should be (polyRing.zero) } test("The mod of zero divided by a constant is a zero") { val constant = Polynomial(three) polyRing.mod(polyRing.zero, constant) should be (polyRing.zero) } test("The mod of any non-zero polynomial divided by zero should throw an exception") { val constant = Polynomial(three) val p = Polynomial(one, two) intercept[ArithmeticException](polyRing.mod(constant, polyRing.zero)) intercept[ArithmeticException](polyRing.mod(p, polyRing.zero)) intercept[ArithmeticException](polyRing.mod(polyRing.zero, polyRing.zero)) } test("The mod of a polynomial divided by a polynomial of higher degree should be itself") { val p1 = Polynomial(one, two) val p2 = Polynomial(one, two, three) polyRing.mod(p1, p2) should be (p1) } test("The mod of a polynomial divided by a polynomial of the same degree should have degree 0") { val p1 = Polynomial(one, two) val p2 = Polynomial(one, three) polyRing.quot(p1, p2).degree.toInt should be (0) } test("The mod of a polynomial divided by a polynomial of lesser degree should have degree <= the degree of the numerator") { val p1 = Polynomial(one, two, three) val p2 = Polynomial(one, three) assert(polyRing.quot(p1, p2).degree <= p1.degree) } test("The mod for polynomials that 
divide evenly should be zero") { val p1 = Polynomial(three) val p2 = Polynomial(one, two) val p3 = Polynomial(three, two) val p4 = Polynomial(two, one, two) polyRing.mod(p1 * p2, p1) should be (polyRing.zero) polyRing.mod(p1 * p2, p2) should be (polyRing.zero) polyRing.mod(p2 * p3, p3) should be (polyRing.zero) polyRing.mod(p2 * p3, p2) should be (polyRing.zero) polyRing.mod(p2 * p3 * p4, p4) should be (polyRing.zero) polyRing.mod(p2 * p3 * p4, p2 * p3) should be (polyRing.zero) } test("The mod for polynomials should work when they don't divide evenly") { val p1 = Polynomial(three) val p2 = Polynomial(one, two) val p3 = Polynomial(three, two) val p4 = Polynomial(two, one, two) polyRing.mod(p2 * p3 + p1, p3) should be (p1) polyRing.mod(p2 * p3 * p4 + p1, p4) should be (p1) polyRing.mod(p2 * p3 * p4 + p2, p4) should be (p2) polyRing.mod(p2 * p3 * p4 + p3, p4) should be (p3) polyRing.mod(p2 * p3 * p4 + p1, p2 * p3) should be (p1) polyRing.mod(p2 * p3 * p4 + p2, p2 * p3) should be (p2) polyRing.mod(p2 * p3 * p4 + p3, p2 * p3) should be (p3) } }
dkettlestrings/thunder
src/test/scala/polynomial/PolynomialRingOverFieldTest.scala
Scala
gpl-3.0
7,633
/* * Copyright (c) 2013 Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see http://www.gnu.org/licenses/agpl.html. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package optiml package macros import lancet.api._ import lancet.interpreter._ import lancet.core._ import scala.virtualization.lms.internal.{GenericFatCodegen} import scala.virtualization.lms.common._ import scala.reflect.SourceContext import ppl.delite.framework.DeliteApplication import ppl.delite.framework.Config //import ppl.dsl.optiml.{Vector,DenseVector,DenseVectorView,DenseMatrix,RangeVector,IndexVectorRange} import ppl.dsl.optiml.{OptiMLApplication, OptiMLApplicationRunner} import ppl.dsl.optiml.{OptiMLCodeGenScala,OptiMLExp} import optiml.library.OptiMLCompanion import OptiMLLancetRunner._ object OptiMLMacros extends OptiMLRunner.ClassMacros { val targets = List(classOf[OptiMLCompanion]) import OptiMLRunner._ //{Rep,reflect,mtr,infix_relax} def readMatrix(self: Rep[OptiMLCompanion], path: Rep[String]): Rep[DenseMatrix[Double]] = { Console.println("catch readMatrix") OptiMLRunner.readMatrix(ensureType(path)) } def readVector(self: Rep[OptiMLCompanion], path: Rep[String]): Rep[DenseVector[Double]] = { 
Console.println("catch readVector") OptiMLRunner.readVector(ensureType(path)) } def tic(self: Rep[OptiMLCompanion], dep1: Rep[Any], dep2: Rep[Any]): Rep[Unit] = { Console.println("catch tic") OptiMLRunner.profile_start(OptiMLRunner.strToRepStr("app"),scala.Seq(dep1,dep2)) } def toc(self: Rep[OptiMLCompanion], dep: Rep[Any]): Rep[Unit] = { Console.println("catch toc") OptiMLRunner.profile_stop(OptiMLRunner.strToRepStr("app"),scala.Seq(dep)) } def index_new(self: Rep[OptiMLCompanion], start: Rep[Int], end: Rep[Int]): Rep[IndexVectorRange] = { Console.println("catch index_new") OptiMLRunner.indexvector_range(start,end) } def indexvector_hashreduce(self: Rep[OptiMLCompanion], x: Rep[IndexVectorRange], f: Rep[Int => Int], map: Rep[Int => DenseVector[Double]], reduce: Rep[(DenseVector[Double],DenseVector[Double]) => DenseVector[Double]]): Rep[DenseVector[DenseVector[Double]]] = { try { Console.println("catch indexvector_hashreduce") implicit val mf = manifest[DenseVector[Double]] //FIXME: generic types implicit val a = OptiMLRunner.denseVectorArith[Double] val block1 = decompileFun(f)(intType,intType) val vType = TypeRep("generated.scala.DenseVectorDouble")(mf) val block2 = decompileFun(map,1)(intType,vType) // val tvType = TypeRep("(generated.scala.DenseVectorDouble,generated.scala.DenseVectorDouble)")(manifest[(V,V)]) val block3 = decompileFun2(reduce,2)(vType,vType,vType) OptiMLRunner.indexvector_hashreduce(OptiMLRunner.indexVecRangeToInterface(x),block1,block2,block3) } catch { case e => e.printStackTrace; throw e } } def indexvector_hashreduce2(self: Rep[OptiMLCompanion], x: Rep[IndexVectorRange], f: Rep[Int => Int], map: Rep[Int => Int], reduce: Rep[(Int,Int) => Int]): Rep[DenseVector[Int]] = { try { Console.println("catch indexvector_hashreduce2") // implicit val mf = manifest[Int] //FIXME: generic types // implicit val a = OptiMLRunner.intArith val block1 = decompileFun(f)(intType,intType) val block2 = decompileFun(map,1)(intType,intType) val block3 = 
decompileFun2(reduce,2)(intType,intType,intType) OptiMLRunner.indexvector_hashreduce(OptiMLRunner.indexVecRangeToInterface(x),block1,block2,block3) } catch { case e => e.printStackTrace; throw e } } def exp(self: Rep[OptiMLCompanion], x: Rep[Double]): Rep[Double] = { Console.println("catch exp") OptiMLRunner.math_exp(x) } def dist[T](self: Rep[OptiMLCompanion], x: Rep[DenseVectorView[T]], y: Rep[DenseVectorView[T]]): Rep[T] = { Console.println("catch dist") implicit val mf = manifest[Double].asInstanceOf[Manifest[T]] //FIXME: generic types implicit val a = OptiMLRunner.doubleArith.asInstanceOf[OptiMLRunner.Arith[T]] OptiMLRunner.optila_vector_dist_square(OptiMLRunner.denseViewToInterface(x),OptiMLRunner.denseViewToInterface(y)) } def dist2[T](self: Rep[OptiMLCompanion], x: Rep[DenseVector[T]], y: Rep[DenseVector[T]]): Rep[T] = { Console.println("catch dist2") implicit val mf = manifest[Double].asInstanceOf[Manifest[T]] //FIXME: generic types implicit val a = OptiMLRunner.doubleArith.asInstanceOf[OptiMLRunner.Arith[T]] OptiMLRunner.optila_vector_dist_square(OptiMLRunner.denseVecToInterface(x),OptiMLRunner.denseVecToInterface(y)) } def dist3[T](self: Rep[OptiMLCompanion], x: Rep[DenseMatrix[T]], y: Rep[DenseMatrix[T]]): Rep[T] = { Console.println("catch dist3") implicit val mf = manifest[Double].asInstanceOf[Manifest[T]] //FIXME: generic types implicit val a = OptiMLRunner.doubleArith.asInstanceOf[OptiMLRunner.Arith[T]] OptiMLRunner.optila_matrix_dist_square(OptiMLRunner.denseMatToInterface(x),OptiMLRunner.denseMatToInterface(y)) } def sum(self: Rep[OptiMLCompanion], start: Rep[Int], end: Rep[Int], size: Rep[Int], block: Rep[Int => DenseVector[Double]]): Rep[DenseVector[Double]] = { try{ Console.println("catch sum") implicit val mf = manifest[Double] implicit val cl = OptiMLRunner.vectorCloneable[Double,DenseVector[Double]] implicit val ar = OptiMLRunner.denseVectorArith[Double] val tpe = TypeRep("generated.scala.DenseVectorDouble")(manifest[DenseVector[Double]]) val 
block1 = decompileFun(block)(intType,tpe) OptiMLRunner.optiml_sum[DenseVector[Double]](start,end,block1) } catch { case e => e.printStackTrace; throw e } } // somehow Delite is generating the entire untilconverged as a singletask, when it should be a DeliteOpWhile that gets unrolled.. def untilconverged[T](self: Rep[OptiMLCompanion], x: Rep[DenseMatrix[T]], tol: Rep[Double], maxIter: Rep[Int], block: Rep[DenseMatrix[T] => DenseMatrix[T]]): Rep[DenseMatrix[T]] = { try { Console.println("catch untilconverged") implicit val mf = manifest[Double].asInstanceOf[Manifest[T]] //FIXME: generic types //implicit val matOps = OptiMLRunner.repToDenseMatOps(x).asInstanceOf[Rep[DenseMatrix[Double]] => OptiMLRunner.MatOpsCls[Double]] implicit val cl = OptiMLRunner.matrixCloneable[Double,DenseMatrix[Double]].asInstanceOf[OptiMLRunner.Cloneable[DenseMatrix[T]]] implicit val ar = OptiMLRunner.doubleArith.asInstanceOf[OptiMLRunner.Arith[T]] implicit val diff = (a: Rep[DenseMatrix[T]], b: Rep[DenseMatrix[T]]) => (OptiMLRunner.optila_matrix_dist_square(OptiMLRunner.denseMatToInterface(a),OptiMLRunner.denseMatToInterface(b))(mf,ar,implicitly[SourceContext])).asInstanceOf[Rep[Double]] // somehow the default string output is just ppl.dsl.optila.DenseMatrix (without type param) val tpe = TypeRep("generated.scala.DenseMatrixDouble")(manifest[DenseMatrix[T]]) val block1 = decompileFun(block)(tpe,tpe) OptiMLRunner.optiml_untilconverged[DenseMatrix[T]](x,(a: Rep[DenseMatrix[T]]) => tol,maxIter,OptiMLRunner.unit(true),block1,diff) } catch { case e => e.printStackTrace; throw e } } def untilconverged2[T](self: Rep[OptiMLCompanion], x: Rep[DenseVector[T]], tol: Rep[Double], maxIter: Rep[Int], block: Rep[DenseVector[T] => DenseVector[T]]): Rep[DenseVector[T]] = { try { Console.println("catch untilconverged2") implicit val mf = manifest[Double].asInstanceOf[Manifest[T]] //FIXME: generic types implicit val cl = 
OptiMLRunner.vectorCloneable[Double,DenseVector[Double]].asInstanceOf[OptiMLRunner.Cloneable[DenseVector[T]]] implicit val ar = OptiMLRunner.doubleArith.asInstanceOf[OptiMLRunner.Arith[T]] implicit val diff = (a: Rep[DenseVector[T]], b: Rep[DenseVector[T]]) => (OptiMLRunner.optila_vector_dist_square(OptiMLRunner.denseVecToInterface(a),OptiMLRunner.denseVecToInterface(b))(mf,ar,implicitly[SourceContext])).asInstanceOf[Rep[Double]] // somehow the default string output is just ppl.dsl.optila.DenseVector (without type param) val tpe = TypeRep("generated.scala.DenseVectorDouble")(manifest[DenseVector[T]]) val block1 = decompileFun(block)(tpe,tpe) OptiMLRunner.optiml_untilconverged[DenseVector[T]](x,(a: Rep[DenseVector[T]]) => tol,maxIter,OptiMLRunner.unit(true),block1,diff) } catch { case e => e.printStackTrace; throw e } } }
TiarkRompf/lancet
src/main/scala/optiml/macros/OptiMLMacros.scala
Scala
agpl-3.0
9,308
/* * Copyright 2011-2017 Chris de Vreeze * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package eu.cdevreeze.yaidom.integrationtest import java.{io => jio} import java.{util => jutil} import eu.cdevreeze.yaidom.core.EName import eu.cdevreeze.yaidom.core.ENameProvider import eu.cdevreeze.yaidom.core.QNameProvider import eu.cdevreeze.yaidom.core.jvm.CaffeineENameProvider import eu.cdevreeze.yaidom.core.jvm.CaffeineQNameProvider import eu.cdevreeze.yaidom.parse.DocumentParserUsingDom import eu.cdevreeze.yaidom.parse.DocumentParserUsingDomLS import eu.cdevreeze.yaidom.parse.DocumentParserUsingSax import eu.cdevreeze.yaidom.parse.DocumentParserUsingStax import eu.cdevreeze.yaidom.queryapi.ClarkElemLike import eu.cdevreeze.yaidom.resolved import org.scalatest.BeforeAndAfterAll import org.scalatest.funsuite.AnyFunSuite /** * Large XML parsing test case. * * Acknowledgments: The large XML files come from http://javakata6425.appspot.com/#!goToPageIIIarticleIIIOptimally%20parse%20humongous%20XML%20files%20with%20vanilla%20Java. 
* * @author Chris de Vreeze */ class LargeXmlParsingTest extends AnyFunSuite with BeforeAndAfterAll { private val logger: jutil.logging.Logger = jutil.logging.Logger.getLogger("eu.cdevreeze.yaidom.integrationtest") @volatile private var xmlBytes: Array[Byte] = _ ENameProvider.globalENameProvider.become(CaffeineENameProvider.fromMaximumCacheSize(50)) QNameProvider.globalQNameProvider.become(CaffeineQNameProvider.fromMaximumCacheSize(50)) protected override def beforeAll(): Unit = { val zipFileUrl = classOf[LargeXmlParsingTest].getResource("veryBigFile.zip") val zipFile = new jutil.zip.ZipFile(new jio.File(zipFileUrl.toURI)) val zipEntries = zipFile.entries() require(zipEntries.hasMoreElements()) val zipEntry: jutil.zip.ZipEntry = zipEntries.nextElement() val is = new jio.BufferedInputStream(zipFile.getInputStream(zipEntry)) val bos = new jio.ByteArrayOutputStream var b: Int = -1 while ( { b = is.read(); b >= 0 }) { bos.write(b) } is.close() this.xmlBytes = bos.toByteArray } test("testParseLargeXmlUsingSax") { val parser = DocumentParserUsingSax.newInstance() val startMs = System.currentTimeMillis() val doc = parser.parse(new jio.ByteArrayInputStream(xmlBytes)) val endMs = System.currentTimeMillis() logger.info(s"[testParseLargeXmlUsingSax] Parsing (into a Document) took ${endMs - startMs} ms") doQueryTest(doc.documentElement) } test("testParseLargeXmlIntoResolvedElemUsingSax") { val parser = DocumentParserUsingSax.newInstance() val startMs = System.currentTimeMillis() val doc = parser.parse(new jio.ByteArrayInputStream(xmlBytes)) val endMs = System.currentTimeMillis() logger.info(s"[testParseLargeXmlIntoResolvedElemUsingSax] Parsing (into a Document) took ${endMs - startMs} ms") val resolvedRoot = resolved.Elem.from(doc.documentElement) doQueryTest(resolvedRoot) val emailElms = resolvedRoot findTopmostElems { e => e.localName == "email" } take (10) assertResult(10) { emailElms.size } assertResult(doc.documentElement.findAllElemsOrSelf.size) { 
resolvedRoot.findAllElemsOrSelf.size } } test("testParseLargeXmlUsingStax") { val parser = DocumentParserUsingStax.newInstance() val startMs = System.currentTimeMillis() val doc = parser.parse(new jio.ByteArrayInputStream(xmlBytes)) val endMs = System.currentTimeMillis() logger.info(s"[testParseLargeXmlUsingStax] Parsing (into a Document) took ${endMs - startMs} ms") doQueryTest(doc.documentElement) } test("testParseLargeXmlUsingDom") { val parser = DocumentParserUsingDom.newInstance() val startMs = System.currentTimeMillis() val doc = parser.parse(new jio.ByteArrayInputStream(xmlBytes)) val endMs = System.currentTimeMillis() logger.info(s"[testParseLargeXmlUsingDom] Parsing (into a Document) took ${endMs - startMs} ms") doQueryTest(doc.documentElement) } test("testParseLargeXmlUsingDomLS") { val parser = DocumentParserUsingDomLS.newInstance() val startMs = System.currentTimeMillis() val doc = parser.parse(new jio.ByteArrayInputStream(xmlBytes)) val endMs = System.currentTimeMillis() logger.info(s"[testParseLargeXmlUsingDomLS] Parsing (into a Document) took ${endMs - startMs} ms") doQueryTest(doc.documentElement) } private def doQueryTest[E <: ClarkElemLike.Aux[E]](elm: E): Unit = { val startMs = System.currentTimeMillis() assert(elm.findAllElemsOrSelf.size >= 100000, "Expected at least 100000 elements in the XML") assertResult(Set(EName("contacts"), EName("contact"), EName("firstName"), EName("lastName"), EName("email"), EName("phone"))) { val result = elm.findAllElemsOrSelf map { e => e.resolvedName } result.toSet } val s = "b" * (2000 + 46) val elms1 = elm \\\\ { e => e.resolvedName == EName("phone") && e.trimmedText == s } assert(elms1.size >= 1, s"Expected at least one phone element with text value '${s}'") val endMs = System.currentTimeMillis() logger.info(s"The test (invoking findAllElemsOrSelf twice, and filterElemsOrSelf once) took ${endMs - startMs} ms") } }
dvreeze/yaidom
jvm/src/test/scala/eu/cdevreeze/yaidom/integrationtest/LargeXmlParsingTest.scala
Scala
apache-2.0
5,767
package spoiwo.examples.features import spoiwo.model._ import spoiwo.model.enums.HyperLinkType import spoiwo.natures.streaming.xlsx.Model2XlsxConversions.XlsxSheet object HyperlinkExample { val hyperlinksSheet: Sheet = Sheet(name = "Hyperlinks") .withRows( Row().withCellValues("PERSON", "EMAIL"), Row().withCellValues( HyperLink(text = "Bill Gates", address = "https://pl.wikipedia.org/wiki/Bill_Gates"), HyperLink( text = "bill.gates@microsoft.com", address = "mailto:bill.gates@microsoft.com", linkType = HyperLinkType.Email ) ) ) def main(args: Array[String]): Unit = { hyperlinksSheet.saveAsXlsx(args(0)) } }
norbert-radyk/spoiwo
examples/src/main/scala/spoiwo/examples/features/HyperlinkExample.scala
Scala
mit
709
package at.hazm.webserver import java.io.File import java.net.{InetSocketAddress, URI, URL, URLClassLoader} import java.nio.charset.StandardCharsets import java.util.regex.Pattern import com.twitter.util.StorageUnit import com.typesafe.config._ import org.slf4j.LoggerFactory import play.api.libs.json.{JsObject, JsString, Json} import scala.collection.JavaConverters._ import scala.util.{Failure, Success, Try} class Config(val source:URL, val config:com.typesafe.config.Config) { private[this] val logger = LoggerFactory.getLogger(getClass) private[this] def getByFQN[T](key:String, default: => T)(implicit converter:String => Either[String, T]):T = { Try(config.getString(key)) match { case Success(value) => converter(value) match { case Right(result) => logger.debug(s"$key = $result") result case Left(message) => logger.warn(s"$key = $value; $message (apply $default)") default } case Failure(ex) => logger.debug(s"$key = '$default' (default); $ex") default } } private[this] def getArrayByFQN[T](key:String, default: => T)(implicit converter:String => Either[String, T]):Seq[T] = { Try(config.getStringList(key)) match { case Success(list) => val converted = list.asScala.map(value => converter(value)) val results = list.asScala.map { value => converter(value) match { case Right(result) => logger.debug(s"$key = $result") result case Left(message) => logger.warn(s"$key = $value; $message (apply $default)") default } } val log:String => Unit = if(converted.exists(_.isLeft)) logger.warn else logger.debug log(s"$key = ${ results.zip(converted).zip(list.asScala).map { case ((result, Right(_)), _) => s"$result" case ((result, Left(message)), value) => s"$value; $message (apply $result)" }.mkString("[", ", ", "]") }") results case Failure(ex) => logger.debug(s"$key = [] (default); $ex") Seq.empty } } private[this] def getMap(key:String):Map[String, String] = Try(config.getConfig(key)).toOption.map { cs => cs.entrySet().asScala.map(x => x.getKey).toSeq.map { name => 
(ConfigUtil.splitPath(name).get(0), cs.getString(name)) }.toMap }.getOrElse(Map.empty) private[this] def resolve(path:String):File = { (if(source.getProtocol == "file") { new File(source.toURI).getParentFile } else new File(".")).getAbsoluteFile.toPath.resolve(path).toFile } private[this] class ExceptableConverter[T](converter:String => T) extends Function[String, Either[String, T]] { override def apply(value:String):Either[String, T] = Try(converter(value)) match { case Success(result) => Right(result) case Failure(ex) => Left(ex.toString) } } private[this] implicit val _StringConverter:String => Right[Nothing, String] = { value:String => Right(value) } private[this] implicit object _BooleanConverter extends ExceptableConverter[Boolean](_.toBoolean) private[this] implicit object _IntConverter extends ExceptableConverter[Int](_.toInt) private[this] implicit object _LongConverter extends ExceptableConverter[Long](_.toLong) private[this] implicit val _StorageUnitConverter:String => Either[String, StorageUnit] = Config.storageSize object server { private[this] def get[T](key:String, default:T)(implicit converter:String => Either[String, T]):T = getByFQN[T](s"server.$key", default)(converter) val DefaultPort = 8089 val requestTimeout:Long = get("request-timeout", 30) * 1000L val compressionLevel:Int = get("compression-level", 0) val maxRequestSize:StorageUnit = get("max-request-size", StorageUnit.fromKilobytes(500)) val bindAddress:InetSocketAddress = get("bind-address", new InetSocketAddress(DefaultPort)) { value:String => val HostPort = "(.*):(\\\\d+)".r try { value match { case HostPort("*", port) => Right(new InetSocketAddress(port.toInt)) case HostPort(host, port) => Right(new InetSocketAddress(host, port.toInt)) case host => Right(new InetSocketAddress(host, DefaultPort)) } } catch { case _:NumberFormatException => Left(s"invalid port number") } } val docroot:File = get("docroot", resolve(".")) { path => Right(resolve(path)) } val sendBufferSize:Int = 
get("send-buffer-size", 4 * 1024) } object template { private[this] def get[T](key:String, default:T)(implicit converter:String => Either[String, T]):T = getByFQN[T](s"template.$key", default)(converter) val updateCheckInterval:Long = get("update-check-interval", 2) * 1000L } /** * 外部コマンド/シェルスクリプト実行環境の設定。 */ object shell { /** Map[ドット付き拡張子, インタープリタ] */ val interpreters:Map[String, String] = { val result = Try(config.getObject("shell.interpreters")).toOption.map { obj => obj.entrySet().asScala.collect { case e if e.getValue.valueType() == ConfigValueType.STRING => (e.getKey, e.getValue.unwrapped().toString) }.toMap }.getOrElse(Map.empty) logger.debug(s"shell.interpreters = ${Json.stringify(JsObject(result.mapValues(s => JsString(s))))}") result } } /** * 外部実行スクリプトの設定。 */ object cgi { private[this] def get[T](key:String, default:T)(implicit converter:String => Either[String, T]):T = getByFQN[T](s"cgi.$key", default)(converter) val enabled:Boolean = get("enabled", false) val timeout:Long = get("timeout", 10 * 1000L) val prefix:String = get("prefix", "/api/") } object script { private[this] def fqn(key:String):String = s"script.$key" private[this] def get[T](key:String, default:T)(implicit converter:String => Either[String, T]):T = getByFQN[T](fqn(key), default)(converter) private[this] def getArray[T](key:String, default:String)(implicit converter:String => Either[String, T]):Seq[T] = { get(key, default).split(",").filter(_.nonEmpty).map(converter).collect { case Right(value) => value } } val timeout:Long = get("timeout", 10 * 1000L) val extensions:Seq[String] = getArray("extensions", ".xjs") val javaExtensions:Seq[String] = getArray("extensions-java", ".java") def libs(root:File):ClassLoader = { val urls = getArrayByFQN(fqn("libs"), "").filter(_.nonEmpty).map { d => val dir = new File(d) if(dir.isAbsolute) dir else new File(root, d) }.flatMap { base => def findJars(dir:File):Seq[URL] = { dir.listFiles().filter(f => f.isFile && f.getName.endsWith(".jar")).map { file 
=> logger.debug(s"add script library: ${base.getName}/${base.toURI.relativize(file.toURI)}") file.toURI.toURL } ++ dir.listFiles().filter(_.isDirectory).flatMap(findJars) } if(base.isDirectory) findJars(base) else { logger.warn(s"script library directory is not exist: ${base.getAbsolutePath}") Seq.empty } }.toArray val defaultLoader = Thread.currentThread().getContextClassLoader if(urls.isEmpty) defaultLoader else new URLClassLoader(urls, defaultLoader) } } /** * リダイレクト URI のパターンとそのリダイレクト先。 */ val redirect:Seq[(Pattern, String)] = getMap("redirect").toSeq.map { case (pattern, url) => (Pattern.compile(pattern), url) } /** * エラーの発生したパスと対応するテンプレート (XSL ファイル)。 */ val error:Seq[(Pattern, String)] = getMap("error").toSeq.map { case (pattern, path) => (Pattern.compile(pattern), path) } } object Config { object Builder extends at.hazm.util.Cache.Builder[Config] { override def compile(uri:URI, binary:Option[Array[Byte]]):Config = { val base = binary match { case Some(b) => ConfigFactory.parseString(new String(b, StandardCharsets.UTF_8)) case None => ConfigFactory.load() } new Config(uri.toURL, base) } } def storageSize(value:String):Either[String, StorageUnit] = { val unitSize = Seq("", "k", "M", "G", "T", "P").zipWithIndex.map { case (u, exp) => (u.toLowerCase, 1024 * exp) }.toMap val pattern = "(\\\\d+)([a-z]?)".r value.trim().toLowerCase match { case pattern(size, unit) if Try { size.toLong }.isSuccess && unitSize.contains(unit) => Right(StorageUnit.fromBytes(size.toLong * unitSize(unit))) case _ => Left(s"invalid size or unit: one of ${unitSize.keys.mkString(",")} can use for unit") } } }
torao/hazmat-webserver
src/main/scala/at/hazm/webserver/Config.scala
Scala
apache-2.0
9,073
/* Copyright 2009-2021 EPFL, Lausanne */ package stainless package extraction package oo class AdtSpecialization(override val s: Trees, override val t: Trees) (using override val context: inox.Context) extends CachingPhase with SimpleFunctions with SimpleSorts with SimpleTypeDefs with utils.SyntheticSorts { self => private[this] def root(id: Identifier)(using symbols: s.Symbols): Identifier = { symbols.getClass(id).parents.map(ct => root(ct.id)).headOption.getOrElse(id) } private[this] def isCandidate(id: Identifier)(using symbols: s.Symbols): Boolean = { import s._ val cd = symbols.getClass(id) cd.parents match { case Nil => def rec(cd: s.ClassDef): Boolean = { val cs = cd.children (cd.parents.size <= 1) && (cd.typeMembers.isEmpty) && (cs forall rec) && (cd.parents forall (_.tps == cd.typeArgs)) && ((cd.flags contains IsAbstract) || cs.isEmpty) && (!(cd.flags contains IsAbstract) || cd.fields.isEmpty) && (cd.typeArgs forall (tp => tp.isInvariant && !tp.flags.exists { case Bounds(_, _) => true case _ => false })) } rec(cd) case _ => isCandidate(root(cd.id)) } } private[this] val constructorCache = new utils.ConcurrentCached[Identifier, Identifier](_.freshen) private[this] def constructorID(id: Identifier)(using symbols: s.Symbols): Identifier = symbols.lookupClass(id).map { cd => if (cd.parents.isEmpty && !(cd.flags contains s.IsAbstract)) constructorCache(cd.id) else cd.id }.get private[this] def constructors(id: Identifier)(using symbols: s.Symbols): Seq[Identifier] = { val cd = symbols.getClass(id) val classes = cd +: cd.descendants classes.filterNot(_.flags contains s.IsAbstract).map(_.id) } private[this] def isCaseObject(id: Identifier)(using symbols: s.Symbols): Boolean = isCandidate(id) && (symbols.getClass(id).flags contains s.IsCaseObject) private[this] val caseObject = new utils.ConcurrentCached[Identifier, Identifier](_.freshen) private[this] val unapplyID = new utils.ConcurrentCached[Identifier, Identifier](_.freshen) override protected final def 
getContext(symbols: s.Symbols) = new TransformerContext(self.s, self.t)(using symbols) protected final class TransformerContext(override val s: self.s.type, override val t: self.t.type) (using val symbols: s.Symbols) extends oo.ConcreteTreeTransformer(s, t) { protected given givenContext: TransformerContext = this override def transform(flag: s.Flag): t.Flag = flag match { case s.ClassParamInit(cid) if isCandidate(cid) => t.ClassParamInit(constructorID(cid)) case _ => super.transform(flag) } override def transform(e: s.Expr): t.Expr = e match { case s.ClassSelector(expr, selector) => expr.getType match { case s.ClassType(id, tps) if isCandidate(id) => t.ADTSelector(transform(expr), selector).copiedFrom(e) case _ => super.transform(e) } case s.ClassConstructor(s.ClassType(id, Seq()), Seq()) if isCaseObject(id) => t.FunctionInvocation(caseObject(id), Seq(), Seq()).copiedFrom(e) case s.ClassConstructor(s.ClassType(id, tps), args) if isCandidate(id) => t.ADT(constructorID(id), tps map transform, args map transform).copiedFrom(e) case _ => super.transform(e) } override def transform(pat: s.Pattern): t.Pattern = pat match { case s.ClassPattern(binder, s.ClassType(id, tps), subs) if isCandidate(id) => t.ADTPattern(binder map transform, constructorID(id), tps map transform, subs map transform).copiedFrom(pat) case iop @ s.InstanceOfPattern(ob, tpe @ s.ClassType(id, tps)) if isCandidate(id) => if (constructors(id) == Seq(id)) { val subs = tpe.tcd.fields.map(_ => t.WildcardPattern(None).copiedFrom(pat)) t.ADTPattern(ob map transform, constructorID(id), tps map transform, subs).copiedFrom(iop) } else if (root(id) == id) { t.WildcardPattern(ob map transform).copiedFrom(iop) } else { t.UnapplyPattern(None, Seq(), unapplyID(id), tps map transform, Seq(t.WildcardPattern(ob map transform).copiedFrom(iop)) ).copiedFrom(iop) } case _ => super.transform(pat) } override def transform(tpe: s.Type): t.Type = tpe match { case s.ClassType(id, tps) if isCandidate(id) => if (id == root(id)) 
{ t.ADTType(id, tps map transform).copiedFrom(tpe) } else { val vd = t.ValDef(FreshIdentifier("v"), t.ADTType(root(id), tps map transform).copiedFrom(tpe)).copiedFrom(tpe) t.RefinementType(vd, t.orJoin(constructors(id).map { cid => t.IsConstructor(vd.toVariable, constructorID(cid)).copiedFrom(tpe) }).copiedFrom(tpe)).copiedFrom(tpe) } case _ => super.transform(tpe) } } private[this] def descendantKey(id: Identifier)(using symbols: s.Symbols): CacheKey = SetKey( (symbols.dependencies(id) + id) .flatMap(id => Set(id) ++ symbols.lookupClass(id).toSeq.flatMap { cd => val rootCd = symbols.getClass(root(cd.id)) val classes = Set(rootCd.id) ++ rootCd.descendants.map(_.id) classes ++ classes.flatMap(id => symbols.getClass(id).typeMembers) }) ) // The function cache must consider the descendants of all classes on which the // function depends as they will determine which classes will be transformed into // sorts and which ones will not. override protected final val funCache = new ExtractionCache[s.FunDef, FunctionResult]( (fd, context) => FunctionKey(fd) + descendantKey(fd.id)(using context.symbols) ) // If there are any input sorts in this phase, their transformation is simple override protected final val sortCache = new SimpleCache[s.ADTSort, SortResult] // The class cache must also consider all descendants of dependent classes as they // will again determine what will become a sort and what won't. // We must further depend on the synthetic OptionSort for the generated unapply function. 
override protected final val classCache = new ExtractionCache[s.ClassDef, ClassResult]({ // Note that we could use a more precise key here that determines whether the // option sort will be used by the class result, but this shouldn't be necessary (cd, context) => val symbols = context.symbols ClassKey(cd) + descendantKey(cd.id)(using symbols) + OptionSort.key(using symbols) }) override protected final val typeDefCache = new ExtractionCache[s.TypeDef, TypeDefResult]( (td, context) => TypeDefKey(td) + descendantKey(td.id)(using context.symbols) ) override protected final def extractFunction(context: TransformerContext, fd: s.FunDef): t.FunDef = context.transform(fd) override protected final def extractTypeDef(context: TransformerContext, td: s.TypeDef): t.TypeDef = context.transform(td) override protected final def extractSort(context: TransformerContext, sort: s.ADTSort): t.ADTSort = context.transform(sort) override protected final type ClassResult = Either[t.ClassDef, (Option[t.ADTSort], Seq[t.FunDef])] override protected final def registerClasses(symbols: t.Symbols, classes: Seq[ClassResult]): t.Symbols = { classes.foldLeft(symbols) { case (symbols, Left(cd)) => symbols.withClasses(Seq(cd)) case (symbols, Right((sort, fds))) => symbols.withSorts(sort.toSeq).withFunctions(fds) } } private def keepSortFlag(flag: s.Flag): Boolean = flag match { case s.IsAbstract | s.IsSealed | s.IsCaseObject => false case _ => true } override protected final def extractClass(context: TransformerContext, cd: s.ClassDef): ClassResult = { import context.{t => _, s => _, given, _} if (isCandidate(cd.id)) { if (cd.parents.isEmpty) { val sortTparams = cd.tparams map (tpd => context.transform(tpd)) val newSort = new t.ADTSort( cd.id, sortTparams, constructors(cd.id).map { cid => val consCd = context.symbols.getClass(cid) val tpMap = (consCd.tparams.map(tpd => context.transform(tpd).tp) zip sortTparams.map(_.tp)).toMap new t.ADTConstructor( constructorID(cid), cd.id, consCd.fields map { vd 
=> val tvd = context.transform(vd) tvd.copy(tpe = t.typeOps.instantiateType(tvd.tpe, tpMap)) } ).copiedFrom(consCd) }, cd.flags filter keepSortFlag map (f => context.transform(f)) ).copiedFrom(cd) val functions = (cd +: cd.descendants).flatMap { cd => import t.dsl._ val objectFunction = if (isCaseObject(cd.id)) { val vd = t.ValDef.fresh("v", t.ADTType(root(cd.id), cd.typeArgs map (transform(_))).setPos(cd)).setPos(cd) val returnType = t.RefinementType(vd, t.IsConstructor(vd.toVariable, constructorID(cd.id)).setPos(cd)).setPos(cd) Some(mkFunDef(caseObject(cd.id), t.Inline, t.Derived(Some(cd.id)))()(_ => ( Seq(), returnType, (_ => t.ADT(constructorID(cd.id), Seq(), Seq()).setPos(cd)) )).setPos(cd)) } else { None } import OptionSort._ val cons = constructors(cd.id) val unapplyFunction = if (root(cd.id) != cd.id && cons != Seq(cd.id)) { Some(mkFunDef(unapplyID(cd.id), t.DropVCs, t.Synthetic, t.IsUnapply(isEmpty, get)) (cd.typeArgs.map(_.id.name) : _*) { tparams => val base = T(root(cd.id))(tparams : _*) def condition(e: t.Expr): t.Expr = t.orJoin(cons.map(t.IsConstructor(e, _))) val vd = t.ValDef.fresh("v", base) val returnType = t.RefinementType(vd, condition(vd.toVariable)) (Seq("x" :: base), T(option)(returnType), { case Seq(x) => if_ (condition(x)) { C(some)(returnType)(x) } else_ { C(none)(returnType)() } }) }) } else { None } objectFunction.toSeq ++ unapplyFunction } Right((Some(newSort), functions)) } else { Right((None, Seq())) } } else { Left(context.transform(cd)) } } override protected final def extractSymbols(context: TransformerContext, symbols: s.Symbols): t.Symbols = { val newSymbols = super.extractSymbols(context, symbols) .withFunctions(OptionSort.functions(using symbols)) .withSorts(OptionSort.sorts(using symbols)) val dependencies: Set[Identifier] = (symbols.functions.keySet ++ symbols.sorts.keySet ++ symbols.classes.keySet) .flatMap(id => newSymbols.dependencies(id) + id) val independentSymbols = t.NoSymbols 
.withFunctions(newSymbols.functions.values.toSeq.filter { fd => dependencies(fd.id) || // keep the introduced case object construction functions fd.flags.exists { case t.Derived(Some(id)) => dependencies(id) case _ => false } }) .withSorts(newSymbols.sorts.values.toSeq.filter(sort => dependencies(sort.id))) .withClasses(newSymbols.classes.values.toSeq.filter(cd => dependencies(cd.id))) .withTypeDefs(newSymbols.typeDefs.values.toSeq.filter(td => dependencies(td.id))) independentSymbols } } object AdtSpecialization { def apply(ts: Trees, tt: Trees)(using inox.Context): ExtractionPipeline { val s: ts.type val t: tt.type } = { class Impl(override val s: ts.type, override val t: tt.type) extends AdtSpecialization(s, t) new Impl(ts, tt) } }
epfl-lara/stainless
core/src/main/scala/stainless/extraction/oo/AdtSpecialization.scala
Scala
apache-2.0
11,882
/* * Copyright 2013-2015 Websudos, Limited. * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * - Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * - Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * - Explicit consent must be obtained from the copyright owner, Outworkers Limited before any redistribution is made. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ package com.websudos.phantom.reactivestreams.suites import com.websudos.phantom.batch.BatchType import com.websudos.phantom.reactivestreams._ import org.reactivestreams.tck.SubscriberWhiteboxVerification.{SubscriberPuppet, WhiteboxSubscriberProbe} import org.reactivestreams.tck.{SubscriberWhiteboxVerification, TestEnvironment} import org.reactivestreams.{Subscriber, Subscription} class BatchSubscriberWhiteboxTest extends SubscriberWhiteboxVerification[Opera](new TestEnvironment()) with StreamDatabase.connector.Connector with TestImplicits { override def createSubscriber(probe: WhiteboxSubscriberProbe[Opera]): Subscriber[Opera] = { new BatchSubscriber[ConcreteOperaTable, Opera]( StreamDatabase.operaTable, OperaRequestBuilder, 5, 2, batchType = BatchType.Unlogged, flushInterval = None, completionFn = () => (), errorFn = _ => () ) { override def onSubscribe(s: Subscription): Unit = { super.onSubscribe(s) probe.registerOnSubscribe(new SubscriberPuppet { override def triggerRequest(elements: Long): Unit = { s.request(elements) } override def signalCancel(): Unit = { s.cancel() } }) } override def onComplete(): Unit = { super.onComplete() probe.registerOnComplete() } override def onError(t: Throwable): Unit = { probe.registerOnError(t) } override def onNext(t: Opera): Unit = { super.onNext(t) probe.registerOnNext(t) } } } override def createElement(element: Int): Opera = OperaData.operas(element) }
levinson/phantom
phantom-reactivestreams/src/test/scala/com/websudos/phantom/reactivestreams/suites/BatchSubscriberWhiteboxTest.scala
Scala
bsd-2-clause
3,166
/* * Copyright 2017 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.hmrc.ct.accounts.frsse2008.micro import uk.gov.hmrc.ct.accounts.retriever.AccountsBoxRetriever import uk.gov.hmrc.ct.box._ case class AC421(value: Option[Int]) extends CtBoxIdentifier(name = "Previous Depreciation and other amounts written off assets") with CtOptionalInteger with Input with SelfValidatableBox[AccountsBoxRetriever, Option[Int]] { override def validate(boxRetriever: AccountsBoxRetriever): Set[CtValidation] = { validateMoney(value) } }
pncampbell/ct-calculations
src/main/scala/uk/gov/hmrc/ct/accounts/frsse2008/micro/AC421.scala
Scala
apache-2.0
1,158
package contege.seqgen import scala.collection.JavaConversions._ import scala.collection.mutable.Set import scala.collection.mutable.Map import java.util.ArrayList import contege.ClassReader import contege.Random import contege.Atom import contege.ConstructorAtom import contege.MethodAtom import contege.Stats import contege.Config import contege.GlobalState /** * Adds calls that provide parameters required to call a * CUT method. This task is used to add parameter-providing calls * to the prefix before calling the CUT method in the suffix. */ class PrepareCUTCallTask(prefix: Prefix, cutMethod: MethodAtom, global: GlobalState) extends Task[Prefix](global) { // filled by computeSequenceCandidate; // if we don't find a passing sequence, set to None var args: Option[ArrayList[Variable]] = None private var candidateWithoutCall: Option[Prefix] = None override def run = { val ret = super.run if (ret.isDefined) { assert(candidateWithoutCall.isDefined) // found parameters that make the CUT call possible -- return the sequence w/o the call (will be done in suffix) candidateWithoutCall } else { // did not find a sequence args = None None } } override def computeSequenceCandidate: Option[Prefix] = { var candidate = prefix.copy val args = new ArrayList[Variable]() cutMethod.paramTypes.foreach(typ => { val paramTask = new GetParamTask[Prefix](candidate, typ, true, global) paramTask.run match { case Some(extendedSequence) => { candidate = extendedSequence assert(paramTask.param.isDefined) args.add(paramTask.param.get) } case None => { global.stats.callCutFailedReasons.add("couldn't find param of type "+typ) return None } } }) this.args = Some(args) candidateWithoutCall = Some(candidate) val retVal = cutMethod.returnType match { case Some(t) => Some(new ObjectVariable) case None => None } val extendedCandidate = candidate.copy val receiver = prefix.getCutVariable extendedCandidate.appendCall(cutMethod, Some(receiver), args, retVal, None) Some(extendedCandidate) } }
michaelpradel/ConTeGe
src/contege/seqgen/PrepareCUTCallTask.scala
Scala
gpl-2.0
2,170
import org.scalacheck._ import org.scalacheck.Prop._ import java.net.{URLClassLoader, URLDecoder} import scala.collection.mutable import scala.xml.NodeSeq object XMLUtil { import scala.xml._ def stripGroup(seq: Node): Node = { seq match { case group: Group => { <div class="group">{ group.nodes.map(stripGroup _) }</div> } case e: Elem => { val child = e.child.map(stripGroup _) Elem(e.prefix, e.label, e.attributes, e.scope, child : _*) } case _ => seq } } } object Test extends Properties("HtmlFactory") { final val RESOURCES = "test/scaladoc/resources/" import scala.tools.nsc.ScalaDocReporter import scala.tools.nsc.doc.{DocFactory, Settings} import scala.tools.nsc.doc.html.HtmlFactory def getClasspath = { // these things can be tricky // this test previously relied on the assumption that the current thread's classloader is an url classloader and contains all the classpaths // does partest actually guarantee this? to quote Leonard Nimoy: The answer, of course, is no. // this test _will_ fail again some time in the future. 
// Footnote: java.lang.ClassCastException: org.apache.tools.ant.loader.AntClassLoader5 cannot be cast to java.net.URLClassLoader val loader = Thread.currentThread.getContextClassLoader.asInstanceOf[URLClassLoader] val paths = loader.getURLs.map(u => URLDecoder.decode(u.getPath)) paths mkString java.io.File.pathSeparator } def createFactory = { val settings = new Settings({Console.err.println(_)}) settings.scaladocQuietRun = true settings.nowarn.value = true settings.classpath.value = getClasspath settings.docAuthor.value = true val reporter = new scala.tools.nsc.reporters.ConsoleReporter(settings) new DocFactory(reporter, settings) } def createTemplates(basename: String): collection.Map[String, NodeSeq] = { val result = mutable.Map[String, NodeSeq]() createFactory.makeUniverse(Left(List(RESOURCES+basename))) match { case Some(universe) => { new HtmlFactory(universe, new ScalaDocReporter(universe.settings)).writeTemplates((page) => { result += (page.absoluteLinkTo(page.path) -> page.body) }) } case _ => } result } def createTemplate(scala: String) = { val html = scala.stripSuffix(".scala") + ".html" createTemplates(scala)(html) } /** * This tests the text without the markup - ex: * * <h4 class="signature"> * <span class="modifier_kind"> * <span class="modifier">implicit</span> * <span class="kind">def</span> * </span> * <span class="symbol"> * <span class="name">test</span><span class="params">()</span><span class="result">: <span name="scala.Int" class="extype">Int</span></span> * </span> * </h4> * * becomes: * * implicit def test(): Int * * and is required to contain the text in the given checks * * NOTE: Comparison is done ignoring all whitespace */ def checkText(scalaFile: String, debug: Boolean = true)(checks: (Option[String], String, Boolean)*): Boolean = { val htmlFile = scalaFile.stripSuffix(".scala") + ".html" val htmlAllFiles = createTemplates(scalaFile) var result = true for ((fileHint, check, expected) <- checks) { // resolve the file to be checked val 
fileName = fileHint match { case Some(file) => if (file endsWith ".html") file else file + ".html" case None => htmlFile } val fileTextPretty = htmlAllFiles(fileName).text.replace('→',' ').replaceAll("\\\\s+"," ") val fileText = fileTextPretty.replaceAll(" ", "") val checkTextPretty = check.replace('→',' ').replaceAll("\\\\s+"," ") val checkText = checkTextPretty.replaceAll(" ", "") val checkValue = fileText.contains(checkText) == expected if (debug && (!checkValue)) { Console.err.println("") Console.err.println("HTML Check failed for resource file " + scalaFile + ":") Console.err.println("Could not match: \\n" + checkTextPretty) Console.err.println("In the extracted HTML text: \\n" + fileTextPretty) Console.err.println("NOTE: The whitespaces are eliminated before matching!") Console.err.println("") } result &&= checkValue } result } def shortComments(root: scala.xml.Node) = XMLUtil.stripGroup(root).descendant.flatMap { case e: scala.xml.Elem => { if (e.attribute("class").toString.contains("shortcomment")) { Some(e) } else { None } } case _ => None } property("Trac #3790") = { createTemplate("Trac3790.scala") match { case node: scala.xml.Node => { val comments = shortComments(node) comments.exists { _.toString.contains(">A lazy String\\n</p>") } && comments.exists { _.toString.contains(">A non-lazy String\\n</p>") } } case _ => false } } property("Trac #4306") = { val files = createTemplates("Trac4306.scala") files("com/example/trac4306/foo/package$$Bar.html") != None } property("Trac #4366") = { createTemplate("Trac4366.scala") match { case node: scala.xml.Node => { shortComments(node).exists { n => { val str = n.toString str.contains("<code>foo</code>") && str.contains("</strong>") } } } case _ => false } } property("Trac #4358") = { createTemplate("Trac4358.scala") match { case node: scala.xml.Node => ! 
shortComments(node).exists { _.toString.contains("<em>i.</em>") } case _ => false } } property("Trac #4180") = { createTemplate("Trac4180.scala") != None } property("Trac #4372") = { createTemplate("Trac4372.scala") match { case node: scala.xml.Node => { val html = node.toString html.contains("<span title=\\"gt4s: $plus$colon\\" class=\\"name\\">+:</span>") && html.contains("<span title=\\"gt4s: $minus$colon\\" class=\\"name\\">-:</span>") && html.contains("""<span class="params">(<span name="n">n: <span class="extype" name="scala.Int">Int</span></span>)</span><span class="result">: <span class="extype" name="scala.Int">Int</span></span>""") } case _ => false } } property("Trac #4374 - public") = { val files = createTemplates("Trac4374.scala") files("WithPublic.html") match { case node: scala.xml.Node => { val s = node.toString s.contains("""href="WithPublic$.html"""") && files.get("WithPublic$.html") != None } case _ => false } } property("Trac #4374 - private") = { val files = createTemplates("Trac4374.scala") files("WithPrivate.html") match { case node: scala.xml.Node => { val s = node.toString ! s.contains("""href="WithPrivate$.html"""") && files.get("WithPrivate$.html") == None } case _ => false } } property("Trac #4325 - files") = { val files = createTemplates("Trac4325.scala") files.get("WithSynthetic.html") != None && files.get("WithSynthetic$.html") == None && files.get("WithObject.html") != None && files.get("WithObject$.html") != None } property("Trac #4325 - Don't link to syntetic companion") = { val files = createTemplates("Trac4325.scala") files("WithSynthetic.html") match { case node: scala.xml.Node => { val s = node.toString ! 
s.contains("""href="WithSynthetic$.html"""") } case _ => false } } property("Trac #4325 - Link to companion") = { val files = createTemplates("Trac4325.scala") files("WithObject.html") match { case node: scala.xml.Node => { val s = node.toString s.contains("""href="WithObject$.html"""") } case _ => false } } property("Trac #4420 - no whitespace at end of line") = { val files = createTemplates("Trac4420.scala") files("TestA.html") match { case node: scala.xml.Node => { val s = node.toString s.contains("""See YYY for more details""") } case _ => false } } // // property("Trac #484 - refinements and existentials") = { // val files = createTemplates("Trac484.scala") // val lines = """ // |type Bar = AnyRef { type Dingus <: T forSome { type T <: String } } // |type Foo = AnyRef { ... /* 3 definitions in type refinement */ } // |def g(x: T forSome { type T <: String }): String // |def h(x: Float): AnyRef { def quux(x: Int,y: Int): Int } // |def hh(x: Float): AnyRef { def quux(x: Int,y: Int): Int } // |def j(x: Int): Bar // |def k(): AnyRef { type Dingus <: T forSome { type T <: String } } // """.stripMargin.trim.lines map (_.trim) // // files("RefinementAndExistentials.html") match { // case node: scala.xml.Node => { // val s = node.text.replaceAll("\\\\s+", " ") // lines forall (s contains _) // } // case _ => false // } // } property("Trac #4289") = { val files = createTemplates("Trac4289.scala") files("Subclass.html") match { case node: scala.xml.Node => { node.toString.contains { """<dt>returns</dt><dd class="cmt"><p>123</p></dd>""" } } case _ => false } } property("Trac #4409") = { createTemplate("Trac4409.scala") match { case node: scala.xml.Node => { ! node.toString.contains("""<div class="block"><ol>since""") } case _ => false } } property("Trac #4452") = { createTemplate("Trac4452.scala") match { case node: scala.xml.Node => ! 
node.toString.contains(">*") case _ => false } } property("SI-4421") = { createTemplate("SI_4421.scala") match { case node: scala.xml.Node => { val html = node.toString html.contains(">Example:") && html.contains(">Note<") } case _ => false } } property("SI-4589") = { createTemplate("SI_4589.scala") match { case node: scala.xml.Node => { val html = node.toString html.contains(">x0123456789: <") && html.contains(">x012345678901234567890123456789: <") } case _ => false } } property("SI-4714: Should decode symbolic type alias name.") = { createTemplate("SI_4715.scala") match { case node: scala.xml.Node => { val html = node.toString html.contains(">:+:<") } case _ => false } } property("SI-4287: Default arguments of synthesized constructor") = { val files = createTemplates("SI_4287.scala") files("ClassWithSugar.html") match { case node: scala.xml.Node => { node.toString.contains(">123<") } case _ => false } } property("SI-4507: Default arguments of synthesized constructor") = { createTemplate("SI_4507.scala") match { case node: scala.xml.Node => ! 
node.toString.contains("<li>returns silently when evaluating true and true</li>") case _ => false } } property("SI-4898: Use cases and links should not crash scaladoc") = { createTemplate("SI_4898.scala") true } property("SI-5054: Use cases should override their original members") = checkText("SI_5054_q1.scala")( (None,"""def test(): Int""", true) //Disabled because the full signature is now displayed //(None, """def test(implicit lost: Int): Int""", false) ) property("SI-5054: Use cases should keep their flags - final should not be lost") = checkText("SI_5054_q2.scala")((None, """final def test(): Int""", true)) property("SI-5054: Use cases should keep their flags - implicit should not be lost") = checkText("SI_5054_q3.scala")((None, """implicit def test(): Int""", true)) property("SI-5054: Use cases should keep their flags - real abstract should not be lost") = checkText("SI_5054_q4.scala")((None, """abstract def test(): Int""", true)) property("SI-5054: Use cases should keep their flags - traits should not be affected") = checkText("SI_5054_q5.scala")((None, """def test(): Int""", true)) property("SI-5054: Use cases should keep their flags - traits should not be affected") = checkText("SI_5054_q6.scala")((None, """abstract def test(): Int""", true)) property("SI-5054: Use case individual signature test") = checkText("SI_5054_q7.scala")( (None, """abstract def test2(explicit: Int): Int [use case] This takes the explicit value passed.""", true), (None, """abstract def test1(): Int [use case] This takes the implicit value in scope.""", true) ) property("SI-5287: Display correct \\"Definition classes\\"") = checkText("SI_5287.scala")( (None, """def method(): Int [use case] The usecase explanation [use case] The usecase explanation Definition Classes SI_5287 SI_5287_B SI_5287_A""", true) ) // the explanation appears twice, as small comment and full comment property("Comment inheritance: Correct comment inheritance for overriding") = 
checkText("implicit-inheritance-override.scala")( (Some("Base"), """def function[T](arg1: T, arg2: String): Double The base comment. The base comment. And another sentence... T the type of the first argument arg1 The T term comment arg2 The string comment returns The return comment """, true), (Some("DerivedA"), """def function[T](arg1: T, arg2: String): Double Overriding the comment, the params and returns comments should stay the same. Overriding the comment, the params and returns comments should stay the same. T the type of the first argument arg1 The T term comment arg2 The string comment returns The return comment """, true), (Some("DerivedB"), """def function[T](arg1: T, arg2: String): Double T the type of the first argument arg1 The overridden T term comment arg2 The overridden string comment returns The return comment """, true), (Some("DerivedC"), """def function[T](arg1: T, arg2: String): Double T the type of the first argument arg1 The T term comment arg2 The string comment returns The overridden return comment """, true), (Some("DerivedD"), """def function[T](arg1: T, arg2: String): Double T The overridden type parameter comment arg1 The T term comment arg2 The string comment returns The return comment """, true) ) for (useCaseFile <- List("UseCaseInheritance", "UseCaseOverrideInheritance")) { property("Comment inheritance: Correct comment inheritance for usecases") = checkText("implicit-inheritance-usecase.scala")( (Some(useCaseFile), """def missing_arg[T](arg1: T): Double [use case] [use case] T The type parameter arg1 The T term comment returns The return comment """, true), (Some(useCaseFile), """def missing_targ(arg1: Int, arg2: String): Double [use case] [use case] arg1 The T term comment arg2 The string comment returns The return comment """, true), (Some(useCaseFile), """def overridden_arg1[T](implicit arg1: T, arg2: String): Double [use case] [use case] T The type parameter arg1 The overridden T term comment arg2 The string comment returns The 
return comment """, true), (Some(useCaseFile), """def overridden_targ[T](implicit arg1: T, arg2: String): Double [use case] [use case] T The overridden type parameter comment arg1 The T term comment arg2 The string comment returns The return comment """, true), (Some(useCaseFile), """def overridden_return[T](implicit arg1: T, arg2: String): Double [use case] [use case] T The type parameter arg1 The T term comment arg2 The string comment returns The overridden return comment """, true), (Some(useCaseFile), """def added_arg[T](implicit arg1: T, arg2: String, arg3: Float): Double [use case] [use case] T The type parameter arg1 The T term comment arg2 The string comment arg3 The added float comment returns The return comment """, true), (Some(useCaseFile), """def overridden_comment[T](implicit arg1: T, arg2: String): Double [use case] The overridden comment. [use case] The overridden comment. T The type parameter arg1 The T term comment arg2 The string comment returns The return comment """, true) ) } property("Comment inheritance: Correct explicit inheritance for override") = checkText("explicit-inheritance-override.scala")( (Some("InheritDocDerived"), """def function[T](arg1: T, arg2: String): Double Starting line Starting line The base comment. And another sentence... The base comment. And another sentence... Ending line Author: StartAuthor a Scala developer EndAuthor T StartT the type of the first argument EndT arg1 Start1 The T term comment End1 arg2 Start2 The string comment End2 returns StartRet The return comment EndRet""", true), (Some("InheritDocDerived"), """Definition Classes InheritDocDerived → InheritDocBase Example: StartExample function[Int](3, "something") EndExample Version StartVer 0.0.2 EndVer Since StartSince 0.0.1 EndSince Exceptions thrown SomeException StartEx if the function is not called with correct parameters EndEx SomeOtherException StartSOE Should Warn <invalid inheritdoc annotation> EndSOE To do StartTodo Call mom. And dad! 
EndTodo Note StartNote Be careful! EndNote See also StartSee The Manual EndSee """, true)) property("Comment inheritance: Correct explicit inheritance for usecase") = checkText("explicit-inheritance-usecase.scala")( (Some("UseCaseInheritDoc"), """def function[T](arg1: T, arg2: String): Double [use case] Starting line [use case] Starting line The base comment. And another sentence... The base comment. And another sentence... Ending line Author: StartAuthor a Scala developer EndAuthor T StartT the type of the first argument EndT arg1 Start1 The T term comment End1 arg2 Start2 The string comment End2 returns StartRet The return comment EndRet""", true), (Some("UseCaseInheritDoc"), """Example: StartExample function[Int](3,"something") EndExample Version StartVer 0.0.2 EndVer Since StartSince 0.0.1 EndSince Exceptions thrown SomeException StartEx if the function is not called with correct parameters EndEx SomeOtherException StartSOE Should Warn <invalid inheritdoc annotation> EndSOE To do StartTodo Call mom. And dad! EndTodo Note StartNote Be careful! 
EndNote See also StartSee The Manual EndSee """, true)) property("Comment inheritance: Correct explicit inheritance in corner cases") = checkText("inheritdoc-corner-cases.scala")( (Some("D"), """def hello1: Int Inherited: Hello 1 comment Inherited: Hello 1 comment Definition Classes D → A """, true), (Some("D"), """def hello2: Int Inherited: Hello 2 comment Inherited: Hello 2 comment Definition Classes D → B """, true), (Some("G"), """def hello1: Int Inherited: Hello 1 comment Inherited: Hello 1 comment Definition Classes G → D → A """, true), (Some("G"), """def hello2: Int Inherited: Hello 2 comment Inherited: Hello 2 comment Definition Classes G → D → B """, true), (Some("I"), """def hello1(i: Int): Unit [use case] Inherited: Hello 1 comment [use case] Inherited: Hello 1 comment Definition Classes I → G → D → A """, true) // traits E, F and H shouldn't crash scaladoc but we don't need to check the output ) property("Indentation normalization for code blocks") = { val files = createTemplates("code-indent.scala") files("C.html") match { case node: scala.xml.Node => { val s = node.toString s.contains("<pre>a typicial indented\\ncomment on multiple\\ncomment lines</pre>") && s.contains("<pre>one liner</pre>") && s.contains("<pre>two lines, one useful</pre>") && s.contains("<pre>line1\\nline2\\nline3\\nline4</pre>") && s.contains("<pre>a ragged example\\na (condition)\\n the t h e n branch\\nan alternative\\n the e l s e branch</pre>") && s.contains("<pre>Trait example {\\n Val x = a\\n Val y = b\\n}</pre>") && s.contains("<pre>l1\\n\\nl2\\n\\nl3\\n\\nl4\\n\\nl5</pre>") } case _ => false } } property("SI-4014: Scaladoc omits @author: no authors") = { val noAuthors = createTemplates("SI-4014_0.scala")("Foo.html") noAuthors match { case node: scala.xml.Node => { val s = node.toString ! 
s.contains("Author") } case _ => false } } property("SI-4014: Scaladoc omits @author: one author") = { val oneAuthor = createTemplates("SI-4014_1.scala")("Foo.html") oneAuthor match { case node: scala.xml.Node => { val s = node.toString s.contains("<h6>Author:</h6>") && s.contains("<p>The Only Author</p>") } case _ => false } } property("SI-4014: Scaladoc omits @author: two authors") = { val twoAuthors = createTemplates("SI-4014_2.scala")("Foo.html") twoAuthors match { case node: scala.xml.Node => { val s = node.toString s.contains("<h6>Authors:</h6>") && s.contains("<p>The First Author</p>") && s.contains("<p>The Second Author</p>") } case _ => false } } { val files = createTemplates("basic.scala") //println(files) property("class") = files.get("com/example/p1/Clazz.html") match { case Some(node: scala.xml.Node) => { property("implicit conversion") = node.toString contains "<span class=\\"modifier\\">implicit </span>" property("gt4s") = node.toString contains "title=\\"gt4s: $colon$colon\\"" property("gt4s of a deprecated method") = node.toString contains "title=\\"gt4s: $colon$colon$colon$colon. 
Deprecated: " true } case _ => false } property("package") = files.get("com/example/p1/index.html") != None property("package object") = files("com/example/p1/index.html") match { case node: scala.xml.Node => node.toString contains "com.example.p1#packageObjectMethod" case _ => false } property("lower bound") = files("com/example/p1/LowerBound.html") match { case node: scala.xml.Node => true case _ => false } property("upper bound") = files("com/example/p1/UpperBound.html") match { case node: scala.xml.Node => true case _ => false } property("SI-8514: No inconsistencies") = checkText("SI-8514.scala")( (Some("a/index"), """class A extends AnyRef Some doc here Some doc here Annotations @DeveloperApi() """, true), (Some("a/index"), """class B extends AnyRef Annotations @DeveloperApi() """, true) ) } // SI-8144 { implicit class AttributesAwareNode(val node: NodeSeq) { def \\@(attrName: String): String = node \\ ("@" + attrName) text def \\@(attrName: String, attrValue: String): NodeSeq = node filter { _ \\ ("@" + attrName) exists (_.text == attrValue) } } implicit class AssertionAwareNode(node: scala.xml.NodeSeq) { def assertTypeLink(expectedUrl: String): Boolean = { val linkElement: NodeSeq = node \\\\ "div" \\@ ("id", "definition") \\\\ "span" \\@ ("class", "permalink") \\ "a" linkElement \\@ "href" == expectedUrl } def assertMemberLink(group: String)(memberName: String, expectedUrl: String): Boolean = { val linkElement: NodeSeq = node \\\\ "div" \\@ ("id", group) \\\\ "li" \\@ ("name", memberName) \\\\ "span" \\@ ("class", "permalink") \\ "a" linkElement \\@ "href" == expectedUrl } def assertValuesLink(memberName: String, expectedUrl: String): Boolean = { val linkElement: NodeSeq = node \\\\ "div" \\@ ("class", "values members") \\\\ "li" \\@ ("name", memberName) \\\\ "span" \\@ ("class", "permalink") \\ "a" linkElement \\@ "href" == expectedUrl } } val files = createTemplates("SI-8144.scala") def check(pagePath: String)(f: NodeSeq => org.scalacheck.Prop): 
org.scalacheck.Prop = files(pagePath) match { case node: scala.xml.Node => f(XMLUtil.stripGroup(node)) case _ => false } property("SI-8144: Members' permalink - inner package") = check("some/pack/index.html") { node => ("type link" |: node.assertTypeLink("../../some/pack/index.html")) && ("member: SomeType (object)" |: node.assertValuesLink("some.pack.SomeType", "../../some/pack/index.html#SomeType")) && ("member: SomeType (class)" |: node.assertMemberLink("types")("some.pack.SomeType", "../../some/pack/index.html#SomeTypeextendsAnyRef")) } property("SI-8144: Members' permalink - companion object") = check("some/pack/SomeType$.html") { node => ("type link" |: node.assertTypeLink("../../some/pack/SomeType$.html")) && ("member: someVal" |: node.assertMemberLink("allMembers")("some.pack.SomeType#someVal", "../../some/pack/SomeType$.html#someVal:String")) } property("SI-8144: Members' permalink - class") = check("some/pack/SomeType.html") { node => ("type link" |: node.assertTypeLink("../../some/pack/SomeType.html")) && ("constructor " |: node.assertMemberLink("constructors")("some.pack.SomeType#<init>", "../../some/pack/SomeType.html#<init>(arg:String):some.pack.SomeType")) && ( "member: type TypeAlias" |: node.assertMemberLink("types")("some.pack.SomeType.TypeAlias", "../../some/pack/SomeType.html#TypeAlias=String")) && ( "member: def >#<():Int " |: node.assertValuesLink("some.pack.SomeType#>#<", "../../some/pack/SomeType.html#>#<():Int")) && ( "member: def >@<():TypeAlias " |: node.assertValuesLink("some.pack.SomeType#>@<", "../../some/pack/SomeType.html#>@<():SomeType.this.TypeAlias")) } } property("SI-9599 Multiple @todo formatted with comma on separate line") = { createTemplates("SI-9599.scala")("X.html") match { case node: scala.xml.Node => node.text.contains("todo3todo2todo1") case _ => false } } }
felixmulder/scala
test/scaladoc/scalacheck/HtmlFactoryTest.scala
Scala
bsd-3-clause
27,733
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql import org.apache.spark.sql.catalyst.plans.{Inner, LeftOuter, RightOuter} import org.apache.spark.sql.catalyst.plans.logical.Join import org.apache.spark.sql.execution.joins.BroadcastHashJoinExec import org.apache.spark.sql.functions._ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.test.SharedSQLContext class DataFrameJoinSuite extends QueryTest with SharedSQLContext { import testImplicits._ test("join - join using") { val df = Seq(1, 2, 3).map(i => (i, i.toString)).toDF("int", "str") val df2 = Seq(1, 2, 3).map(i => (i, (i + 1).toString)).toDF("int", "str") checkAnswer( df.join(df2, "int"), Row(1, "1", "2") :: Row(2, "2", "3") :: Row(3, "3", "4") :: Nil) } test("join - join using multiple columns") { val df = Seq(1, 2, 3).map(i => (i, i + 1, i.toString)).toDF("int", "int2", "str") val df2 = Seq(1, 2, 3).map(i => (i, i + 1, (i + 1).toString)).toDF("int", "int2", "str") checkAnswer( df.join(df2, Seq("int", "int2")), Row(1, 2, "1", "2") :: Row(2, 3, "2", "3") :: Row(3, 4, "3", "4") :: Nil) } test("join - sorted columns not in join's outputSet") { val df = Seq((1, 2, "1"), (3, 4, "3")).toDF("int", "int2", "str_sort").as('df1) val df2 = Seq((1, 3, 
"1"), (5, 6, "5")).toDF("int", "int2", "str").as('df2) val df3 = Seq((1, 3, "1"), (5, 6, "5")).toDF("int", "int2", "str").as('df3) checkAnswer( df.join(df2, $"df1.int" === $"df2.int", "outer").select($"df1.int", $"df2.int2") .orderBy('str_sort.asc, 'str.asc), Row(null, 6) :: Row(1, 3) :: Row(3, null) :: Nil) checkAnswer( df2.join(df3, $"df2.int" === $"df3.int", "inner") .select($"df2.int", $"df3.int").orderBy($"df2.str".desc), Row(5, 5) :: Row(1, 1) :: Nil) } test("join - join using multiple columns and specifying join type") { val df = Seq((1, 2, "1"), (3, 4, "3")).toDF("int", "int2", "str") val df2 = Seq((1, 3, "1"), (5, 6, "5")).toDF("int", "int2", "str") checkAnswer( df.join(df2, Seq("int", "str"), "inner"), Row(1, "1", 2, 3) :: Nil) checkAnswer( df.join(df2, Seq("int", "str"), "left"), Row(1, "1", 2, 3) :: Row(3, "3", 4, null) :: Nil) checkAnswer( df.join(df2, Seq("int", "str"), "right"), Row(1, "1", 2, 3) :: Row(5, "5", null, 6) :: Nil) checkAnswer( df.join(df2, Seq("int", "str"), "outer"), Row(1, "1", 2, 3) :: Row(3, "3", 4, null) :: Row(5, "5", null, 6) :: Nil) checkAnswer( df.join(df2, Seq("int", "str"), "left_semi"), Row(1, "1", 2) :: Nil) } test("join - join using self join") { val df = Seq(1, 2, 3).map(i => (i, i.toString)).toDF("int", "str") // self join checkAnswer( df.join(df, "int"), Row(1, "1", "1") :: Row(2, "2", "2") :: Row(3, "3", "3") :: Nil) } test("join - self join") { val df1 = testData.select(testData("key")).as('df1) val df2 = testData.select(testData("key")).as('df2) checkAnswer( df1.join(df2, $"df1.key" === $"df2.key"), sql("SELECT a.key, b.key FROM testData a JOIN testData b ON a.key = b.key") .collect().toSeq) } test("join - cross join") { val df1 = Seq((1, "1"), (3, "3")).toDF("int", "str") val df2 = Seq((2, "2"), (4, "4")).toDF("int", "str") checkAnswer( df1.crossJoin(df2), Row(1, "1", 2, "2") :: Row(1, "1", 4, "4") :: Row(3, "3", 2, "2") :: Row(3, "3", 4, "4") :: Nil) checkAnswer( df2.crossJoin(df1), Row(2, "2", 1, "1") :: Row(2, 
"2", 3, "3") :: Row(4, "4", 1, "1") :: Row(4, "4", 3, "3") :: Nil) } test("join - using aliases after self join") { val df = Seq(1, 2, 3).map(i => (i, i.toString)).toDF("int", "str") checkAnswer( df.as('x).join(df.as('y), $"x.str" === $"y.str").groupBy("x.str").count(), Row("1", 1) :: Row("2", 1) :: Row("3", 1) :: Nil) checkAnswer( df.as('x).join(df.as('y), $"x.str" === $"y.str").groupBy("y.str").count(), Row("1", 1) :: Row("2", 1) :: Row("3", 1) :: Nil) } test("[SPARK-6231] join - self join auto resolve ambiguity") { val df = Seq((1, "1"), (2, "2")).toDF("key", "value") checkAnswer( df.join(df, df("key") === df("key")), Row(1, "1", 1, "1") :: Row(2, "2", 2, "2") :: Nil) checkAnswer( df.join(df.filter($"value" === "2"), df("key") === df("key")), Row(2, "2", 2, "2") :: Nil) checkAnswer( df.join(df, df("key") === df("key") && df("value") === 1), Row(1, "1", 1, "1") :: Nil) val left = df.groupBy("key").agg(count("*")) val right = df.groupBy("key").agg(sum("key")) checkAnswer( left.join(right, left("key") === right("key")), Row(1, 1, 1, 1) :: Row(2, 1, 2, 2) :: Nil) } test("broadcast join hint using broadcast function") { val df1 = Seq((1, "1"), (2, "2")).toDF("key", "value") val df2 = Seq((1, "1"), (2, "2")).toDF("key", "value") // equijoin - should be converted into broadcast join val plan1 = df1.join(broadcast(df2), "key").queryExecution.sparkPlan assert(plan1.collect { case p: BroadcastHashJoinExec => p }.size === 1) // no join key -- should not be a broadcast join val plan2 = df1.crossJoin(broadcast(df2)).queryExecution.sparkPlan assert(plan2.collect { case p: BroadcastHashJoinExec => p }.size === 0) // planner should not crash without a join broadcast(df1).queryExecution.sparkPlan // SPARK-12275: no physical plan for BroadcastHint in some condition withTempPath { path => df1.write.parquet(path.getCanonicalPath) val pf1 = spark.read.parquet(path.getCanonicalPath) assert(df1.crossJoin(broadcast(pf1)).count() === 4) } } test("broadcast join hint using Dataset.hint") 
{ // make sure a giant join is not broadcastable val plan1 = spark.range(10e10.toLong) .join(spark.range(10e10.toLong), "id") .queryExecution.executedPlan assert(plan1.collect { case p: BroadcastHashJoinExec => p }.size == 0) // now with a hint it should be broadcasted val plan2 = spark.range(10e10.toLong) .join(spark.range(10e10.toLong).hint("broadcast"), "id") .queryExecution.executedPlan assert(plan2.collect { case p: BroadcastHashJoinExec => p }.size == 1) } test("join - outer join conversion") { val df = Seq((1, 2, "1"), (3, 4, "3")).toDF("int", "int2", "str").as("a") val df2 = Seq((1, 3, "1"), (5, 6, "5")).toDF("int", "int2", "str").as("b") // outer -> left val outerJoin2Left = df.join(df2, $"a.int" === $"b.int", "outer").where($"a.int" >= 3) assert(outerJoin2Left.queryExecution.optimizedPlan.collect { case j @ Join(_, _, LeftOuter, _) => j }.size === 1) checkAnswer( outerJoin2Left, Row(3, 4, "3", null, null, null) :: Nil) // outer -> right val outerJoin2Right = df.join(df2, $"a.int" === $"b.int", "outer").where($"b.int" >= 3) assert(outerJoin2Right.queryExecution.optimizedPlan.collect { case j @ Join(_, _, RightOuter, _) => j }.size === 1) checkAnswer( outerJoin2Right, Row(null, null, null, 5, 6, "5") :: Nil) // outer -> inner val outerJoin2Inner = df.join(df2, $"a.int" === $"b.int", "outer"). 
where($"a.int" === 1 && $"b.int2" === 3) assert(outerJoin2Inner.queryExecution.optimizedPlan.collect { case j @ Join(_, _, Inner, _) => j }.size === 1) checkAnswer( outerJoin2Inner, Row(1, 2, "1", 1, 3, "1") :: Nil) // right -> inner val rightJoin2Inner = df.join(df2, $"a.int" === $"b.int", "right").where($"a.int" > 0) assert(rightJoin2Inner.queryExecution.optimizedPlan.collect { case j @ Join(_, _, Inner, _) => j }.size === 1) checkAnswer( rightJoin2Inner, Row(1, 2, "1", 1, 3, "1") :: Nil) // left -> inner val leftJoin2Inner = df.join(df2, $"a.int" === $"b.int", "left").where($"b.int2" > 0) assert(leftJoin2Inner.queryExecution.optimizedPlan.collect { case j @ Join(_, _, Inner, _) => j }.size === 1) checkAnswer( leftJoin2Inner, Row(1, 2, "1", 1, 3, "1") :: Nil) } test("process outer join results using the non-nullable columns in the join input") { // Filter data using a non-nullable column from a right table val df1 = Seq((0, 0), (1, 0), (2, 0), (3, 0), (4, 0)).toDF("id", "count") val df2 = Seq(Tuple1(0), Tuple1(1)).toDF("id").groupBy("id").count checkAnswer( df1.join(df2, df1("id") === df2("id"), "left_outer").filter(df2("count").isNull), Row(2, 0, null, null) :: Row(3, 0, null, null) :: Row(4, 0, null, null) :: Nil ) // Coalesce data using non-nullable columns in input tables val df3 = Seq((1, 1)).toDF("a", "b") val df4 = Seq((2, 2)).toDF("a", "b") checkAnswer( df3.join(df4, df3("a") === df4("a"), "outer") .select(coalesce(df3("a"), df3("b")), coalesce(df4("a"), df4("b"))), Row(1, null) :: Row(null, 2) :: Nil ) } test("SPARK-16991: Full outer join followed by inner join produces wrong results") { val a = Seq((1, 2), (2, 3)).toDF("a", "b") val b = Seq((2, 5), (3, 4)).toDF("a", "c") val c = Seq((3, 1)).toDF("a", "d") val ab = a.join(b, Seq("a"), "fullouter") checkAnswer(ab.join(c, "a"), Row(3, null, 4, 1) :: Nil) } test("SPARK-17685: WholeStageCodegenExec throws IndexOutOfBoundsException") { val df = Seq((1, 1, "1"), (2, 2, "3")).toDF("int", "int2", "str") val df2 
= Seq((1, 1, "1"), (2, 3, "5")).toDF("int", "int2", "str") val limit = 1310721 val innerJoin = df.limit(limit).join(df2.limit(limit), Seq("int", "int2"), "inner") .agg(count($"int")) checkAnswer(innerJoin, Row(1) :: Nil) } test("SPARK-23087: don't throw Analysis Exception in CheckCartesianProduct when join condition " + "is false or null") { withSQLConf(SQLConf.CROSS_JOINS_ENABLED.key -> "false") { val df = spark.range(10) val dfNull = spark.range(10).select(lit(null).as("b")) df.join(dfNull, $"id" === $"b", "left").queryExecution.optimizedPlan val dfOne = df.select(lit(1).as("a")) val dfTwo = spark.range(10).select(lit(2).as("b")) dfOne.join(dfTwo, $"a" === $"b", "left").queryExecution.optimizedPlan } } test("SPARK-24385: Resolve ambiguity in self-joins with EqualNullSafe") { withSQLConf(SQLConf.CROSS_JOINS_ENABLED.key -> "false") { val df = spark.range(2) // this throws an exception before the fix df.join(df, df("id") <=> df("id")).queryExecution.optimizedPlan } } test("NaN and -0.0 in join keys") { val df1 = Seq(Float.NaN -> Double.NaN, 0.0f -> 0.0, -0.0f -> -0.0).toDF("f", "d") val df2 = Seq(Float.NaN -> Double.NaN, 0.0f -> 0.0, -0.0f -> -0.0).toDF("f", "d") val joined = df1.join(df2, Seq("f", "d")) checkAnswer(joined, Seq( Row(Float.NaN, Double.NaN), Row(0.0f, 0.0), Row(0.0f, 0.0), Row(0.0f, 0.0), Row(0.0f, 0.0))) } }
guoxiaolongzte/spark
sql/core/src/test/scala/org/apache/spark/sql/DataFrameJoinSuite.scala
Scala
apache-2.0
11,796
package scala.slick.driver

import java.sql.Types
import scala.slick.SlickException
import scala.slick.lifted._
import scala.slick.ast._
import scala.slick.util.MacroSupport.macroSupportInterpolation
import scala.slick.profile.{SqlProfile, Capability}

/**
 * Slick driver for <a href="http://www.hsqldb.org/">HyperSQL</a>
 * (starting with version 2.0).
 *
 * This driver implements the [[scala.slick.driver.ExtendedProfile]]
 * ''without'' the following capabilities:
 *
 * <ul>
 *   <li>[[scala.slick.profile.SqlProfile.capabilities.sequenceCurr]]:
 *     <code>Sequence.curr</code> to get the current value of a sequence is
 *     not supported by Hsqldb. Trying to generate SQL code which uses this
 *     feature throws a SlickException.</li>
 * </ul>
 *
 * @author szeiger
 */
trait HsqldbDriver extends ExtendedDriver { driver =>

  // Everything the base profile supports except reading a sequence's current
  // value, for which Hsqldb has no syntax (see the CurrentValue case in expr()).
  override protected def computeCapabilities: Set[Capability] = (super.computeCapabilities
    - SqlProfile.capabilities.sequenceCurr
  )

  override val columnTypes = new JdbcTypes
  override def createQueryBuilder(input: QueryBuilderInput): QueryBuilder = new QueryBuilder(input)
  override def createTableDDLBuilder(table: Table[_]): TableDDLBuilder = new TableDDLBuilder(table)
  override def createSequenceDDLBuilder(seq: Sequence[_]): SequenceDDLBuilder[_] = new SequenceDDLBuilder(seq)

  /** Statement builder with Hsqldb-specific SQL syntax adjustments. */
  class QueryBuilder(input: QueryBuilderInput) extends super.QueryBuilder(input) with OracleStyleRowNum {
    // Dummy single-row relation used as the FROM clause for scalar selects.
    override protected val scalarFrom = Some("(VALUES (0))")
    override protected val concatOperator = Some("||")

    override def expr(c: Node, skipParens: Boolean = false): Unit = c match {
      case l @ LiteralNode(v: String) if (v ne null) && typeInfoFor(l.tpe).sqlType != Types.CHAR =>
        /* Hsqldb treats string literals as type CHARACTER and pads them with
         * spaces in some expressions, so we cast all string literals to
         * VARCHAR. The length is only 16M instead of 2^31-1 in order to leave
         * enough room for concatenating strings (which extends the size even if
         * it is not needed). */
        b"cast("
        super.expr(c)
        b" as varchar(16777216))"
      /* Hsqldb uses the SQL:2008 syntax for NEXTVAL */
      case Library.NextValue(SequenceNode(name)) => b"(next value for `$name)"
      case Library.CurrentValue(_*) => throw new SlickException("Hsqldb does not support CURRVAL")
      case RowNumber(_) => b"rownum()" // Hsqldb uses Oracle ROWNUM semantics but needs parens
      case _ => super.expr(c, skipParens)
    }

    // Hsqldb paginates with LIMIT ... OFFSET ...; either part may appear alone.
    override protected def buildFetchOffsetClause(fetch: Option[Long], offset: Option[Long]) = (fetch, offset) match {
      case (Some(t), Some(d)) => b" limit $t offset $d"
      case (Some(t), None   ) => b" limit $t"
      case (None,    Some(d)) => b" offset $d"
      case _ =>
    }
  }

  /** Column type mappings adjusted for Hsqldb. */
  class JdbcTypes extends super.JdbcTypes {
    override val byteArrayJdbcType = new ByteArrayJdbcType {
      override val sqlTypeName = "LONGVARBINARY"
    }
    // UUIDs are stored in a fixed-size 16-byte BINARY column.
    override val uuidJdbcType = new UUIDJdbcType {
      override def sqlType = java.sql.Types.BINARY
      override def sqlTypeName = "BINARY(16)"
    }
  }

  class TableDDLBuilder(table: Table[_]) extends super.TableDDLBuilder(table) {
    override protected def createIndex(idx: Index) = {
      if(idx.unique) {
        /* Create a UNIQUE CONSTRAINT (with an automatically generated backing
         * index) because Hsqldb does not allow a FOREIGN KEY CONSTRAINT to
         * reference columns which have a UNIQUE INDEX but not a nominal UNIQUE
         * CONSTRAINT. */
        val sb = new StringBuilder append "ALTER TABLE " append quoteIdentifier(table.tableName) append " ADD "
        sb append "CONSTRAINT " append quoteIdentifier(idx.name) append " UNIQUE("
        addIndexColumnList(idx.on, sb, idx.table.tableName)
        sb append ")"
        sb.toString
      } else super.createIndex(idx)
    }
  }

  class SequenceDDLBuilder[T](seq: Sequence[T]) extends super.SequenceDDLBuilder(seq) {
    override def buildDDL: DDL = {
      import seq.integral._
      val increment = seq._increment.getOrElse(one)
      // A negative increment means a descending sequence, whose natural start is -1.
      val desc = increment < zero
      val start = seq._start.getOrElse(if(desc) -1 else 1)
      val b = new StringBuilder append "CREATE SEQUENCE " append quoteIdentifier(seq.name)
      seq._increment.foreach { b append " INCREMENT BY " append _ }
      seq._minValue.foreach { b append " MINVALUE " append _ }
      seq._maxValue.foreach { b append " MAXVALUE " append _ }
      /* The START value in Hsqldb defaults to 0 instead of the more
       * conventional 1/-1 so we rewrite it to make 1/-1 the default. */
      if(start != 0) b append " START WITH " append start
      if(seq._cycle) b append " CYCLE"
      DDL(b.toString, "DROP SEQUENCE " + quoteIdentifier(seq.name))
    }
  }
}

object HsqldbDriver extends HsqldbDriver
boldradius/slick
src/main/scala/scala/slick/driver/HsqldbDriver.scala
Scala
bsd-2-clause
4,849
package com.avsystem.commons.misc

import com.avsystem.commons.IIterable

object OptRef {
  def apply[A >: Null](value: A): OptRef[A] = new OptRef[A](value)

  // Name-based extractor: returning the OptRef itself avoids allocating an Option.
  def unapply[A >: Null](opt: OptRef[A]): OptRef[A] = opt

  /** Wraps a value which must not be `null`; throws `NullPointerException` otherwise. */
  def some[A >: Null](value: A): OptRef[A] = value match {
    case null => throw new NullPointerException
    case v => new OptRef[A](v)
  }

  /** Extractor which unboxes the wrapped reference via an implicit [[Unboxing]]. */
  object Boxed {
    def unapply[A, B >: Null](optRef: OptRef[B])(implicit unboxing: Unboxing[A, B]): Opt[A] =
      if (optRef.isDefined) Opt(unboxing.fun(optRef.get)) else Opt.Empty
  }

  implicit def opt2Iterable[A >: Null](xo: OptRef[A]): IIterable[A] = xo.toList

  final val Empty: OptRef[Null] = new OptRef[Null](null)

  def empty[A >: Null]: OptRef[A] = Empty

  // Constant-null fallback used by `collect` to detect a non-matching PartialFunction.
  private val alwaysNull: Any => Null = _ => null

  /** Lazy filtering proxy returned by [[OptRef.withFilter]], used by `for` comprehensions. */
  final class WithFilter[+A >: Null] private[OptRef](self: OptRef[A], p: A => Boolean) {
    def map[B >: Null](f: A => B): OptRef[B] = self.filter(p).map(f)
    def flatMap[B >: Null](f: A => OptRef[B]): OptRef[B] = self.filter(p).flatMap(f)
    def foreach[U](f: A => U): Unit = self.filter(p).foreach(f)
    def withFilter(q: A => Boolean): WithFilter[A] =
      new WithFilter[A](self, a => p(a) && q(a))
  }
}

/**
 * A light-weight alternative to [[Opt]] whose wrapped value is statically typed as `A`
 * rather than `Any`, which makes it friendlier for Java interop. A Scala method declared as
 * {{{
 * def takeMaybeString(str: OptRef[String]): Unit
 * }}}
 * is seen from Java as
 * {{{
 * public void takeMaybeString(String str);
 * }}}
 * with `null` standing for the absent value.
 * <p/>
 * The price is that `A` must be a nullable (reference) type. Because the empty value is
 * represented internally by `null`, [[OptRef]] is affected by SI-7396: `hashCode` fails on
 * `OptRef.Empty`, so [[OptRef]] values cannot be stored in hash sets or used as hash-map keys.
 */
final class OptRef[+A >: Null] private(private val value: A) extends AnyVal with OptBase[A] with Serializable {
  @inline def isEmpty: Boolean = value == null
  @inline def isDefined: Boolean = !isEmpty
  @inline def nonEmpty: Boolean = isDefined

  /** Returns the wrapped value; throws `NoSuchElementException` when empty. */
  @inline def get: A =
    if (nonEmpty) value else throw new NoSuchElementException("empty OptRef")

  @inline def toOpt: Opt[A] = Opt(value)
  @inline def toOption: Option[A] = Option(value)
  @inline def toNOpt: NOpt[A] = if (nonEmpty) NOpt(value) else NOpt.Empty
  @inline def toOptArg: OptArg[A] = if (nonEmpty) OptArg(value) else OptArg.Empty

  @inline def getOrElse[B >: A](default: => B): B = if (nonEmpty) value else default
  @inline def orNull[B >: A](implicit ev: Null <:< B): B = value.asInstanceOf[B]

  @inline def map[B >: Null](f: A => B): OptRef[B] =
    if (nonEmpty) OptRef(f(value)) else OptRef.Empty

  @inline def fold[B](ifEmpty: => B)(f: A => B): B = if (nonEmpty) f(value) else ifEmpty

  /** Single-parameter-list variant of [[fold]] for better type inference. */
  @inline def mapOr[B](ifEmpty: => B, f: A => B): B = if (nonEmpty) f(value) else ifEmpty

  @inline def flatMap[B >: Null](f: A => OptRef[B]): OptRef[B] =
    if (nonEmpty) f(value) else OptRef.Empty

  @inline def filter(p: A => Boolean): OptRef[A] =
    if (nonEmpty && !p(value)) OptRef.Empty else this

  @inline def withFilter(p: A => Boolean): OptRef.WithFilter[A] = new OptRef.WithFilter[A](this, p)

  @inline def filterNot(p: A => Boolean): OptRef[A] =
    if (nonEmpty && p(value)) OptRef.Empty else this

  @inline def contains[A1 >: A](elem: A1): Boolean = nonEmpty && value == elem
  @inline def exists(p: A => Boolean): Boolean = nonEmpty && p(value)
  @inline def forall(p: A => Boolean): Boolean = isEmpty || p(value)

  @inline def foreach[U](f: A => U): Unit = {
    if (nonEmpty) f(value)
  }

  @inline def collect[B >: Null](pf: PartialFunction[A, B]): OptRef[B] =
    if (isEmpty) OptRef.Empty else new OptRef(pf.applyOrElse(value, OptRef.alwaysNull))

  @inline def orElse[B >: A](alternative: => OptRef[B]): OptRef[B] =
    if (nonEmpty) this else alternative

  @inline def iterator: Iterator[A] =
    if (nonEmpty) Iterator.single(value) else Iterator.empty

  @inline def toList: List[A] = if (nonEmpty) value :: Nil else Nil

  @inline def toRight[X](left: => X): Either[X, A] =
    if (nonEmpty) Right(value) else Left(left)

  @inline def toLeft[X](right: => X): Either[A, X] =
    if (nonEmpty) Left(value) else Right(right)

  @inline def zip[B >: Null](that: OptRef[B]): OptRef[(A, B)] =
    if (nonEmpty && that.nonEmpty) OptRef((this.get, that.get)) else OptRef.Empty

  /**
   * Runs the given side effect only when this [[OptRef]] is empty and returns this
   * [[OptRef]] unchanged, so the call can be chained.
   *
   * @param sideEffect code executed when this OptRef is empty
   * @return this OptRef, unmodified
   * @example {{{captionOptRef.forEmpty(logger.warn("caption is empty")).foreach(setCaption)}}}
   */
  @inline def forEmpty(sideEffect: => Unit): OptRef[A] = {
    if (isEmpty) {
      sideEffect
    }
    this
  }

  override def toString: String = if (nonEmpty) s"OptRef($value)" else "OptRef.Empty"
}
AVSystem/scala-commons
commons-core/src/main/scala/com/avsystem/commons/misc/OptRef.scala
Scala
mit
5,146