code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1
value | license stringclasses 15
values | size int64 5 1M |
|---|---|---|---|---|---|
package org.talkingpuffin.ui
import javax.swing.table.AbstractTableModel
import twitter4j.{User, Status}
/**
 * A table row's user together with its (possibly retweeted) status.
 *
 * @param user          the user attached to the row (the retweeter, if a retweet)
 * @param retweetedUser the original author when this row is a retweet
 * @param status        the status shown on the row, if any
 */
case class UserAndStatus(user: User, retweetedUser: Option[User], status: Option[Status]) {
  /** The original author: the retweeted user when present, otherwise `user`. */
  def origUser = retweetedUser.getOrElse(user)
  /** The user who performed the retweet, or `None` when this is not a retweet. */
  def retweetingUser = retweetedUser.map(_ => user)
}
/** Table models that can expose the [[UserAndStatus]] behind a given row index. */
trait UserAndStatusProvider extends AbstractTableModel {
  /** Returns the user/status pair backing row `rowIndex` (0-based). */
  def getUserAndStatusAt(rowIndex: Int): UserAndStatus
} | dcbriccetti/talking-puffin | desktop/src/main/scala/org/talkingpuffin/ui/UserAndStatusProvider.scala | Scala | mit | 435 |
package com.arcusys.learn.liferay.update.version270
import com.arcusys.learn.liferay.LiferayClasses.LUpgradeProcess
import com.arcusys.learn.liferay.update.SlickDBContext
import com.arcusys.learn.liferay.update.version270.slide.SlideTableComponent
import com.arcusys.valamis.web.configuration.ioc.Configuration
import com.escalatesoft.subcut.inject.BindingModule
/**
 * Valamis DB upgrade step 2729: rewrites the header/subtitle markup of the
 * built-in slide templates ("Text and image", "Text only", "Title and subtitle",
 * "Video only") and of the lesson-summary template slide.
 *
 * Refactored: the five copy-pasted lookup/update sections of the original
 * `doUpgrade` are collapsed into one data-driven loop; behavior and update
 * order are unchanged.
 */
class DBUpdater2729(val bindingModule: BindingModule)
  extends LUpgradeProcess
  with SlideTableComponent
  with SlickDBContext {

  import driver.simple._

  /** Schema version this upgrade step brings the database to. */
  override def getThreshold = 2729

  def this() = this(Configuration)

  // Markup written into the header/subtitle elements of the built-in templates.
  private val pageHeaderH1 = """<h1><span style="font-size:3em">Page header</span></h1>"""
  private val pageHeaderH2 = """<h2><span style="font-size:3em">Page header</span></h2>"""
  private val pageSubtitleH6 = """<h6><span style="font-size:2em">Page subtitle</span></h6>"""
  private val lessonSummaryH1 = """<h1><span style="font-size:3em">Lesson summary</span></h1>"""

  override def doUpgrade(): Unit = {
    db.withTransaction { implicit session =>
      // Id of the template slide with the given title, if it exists.
      def templateId(title: String) = slides
        .filter(x => x.isTemplate === true && x.title === title)
        .map(_.id)
        .firstOption

      val lessonSummaryId = slides
        .filter(x => x.isTemplate === true && x.isLessonSummary === true)
        .map(_.id)
        .firstOption

      // Each template slide paired with the new content per element z-index,
      // in the same order as the original hand-unrolled code.
      val updates = Seq(
        templateId("Text and image") -> Seq("1" -> pageHeaderH1),
        templateId("Text only") -> Seq("1" -> pageHeaderH2),
        templateId("Title and subtitle") -> Seq("1" -> pageHeaderH1, "2" -> pageSubtitleH6),
        templateId("Video only") -> Seq("1" -> pageHeaderH2),
        lessonSummaryId -> Seq("1" -> lessonSummaryH1)
      )

      updates.foreach { case (slideIdOpt, contents) =>
        slideIdOpt.foreach { slideId =>
          // Map each element's z-index to its id so content can be targeted by position.
          val elementIdByZIndex = slideElements
            .filter(_.slideId === slideId)
            .map(el => el.zIndex -> el.id)
            .toMap
          contents.foreach { case (zIndex, content) =>
            elementIdByZIndex.get(zIndex).foreach(updateContent(_, content))
          }
        }
      }
    }
  }

  /** Replaces the content of the slide element with the given id. */
  private def updateContent(id: Long, content: String) = {
    db.withTransaction { implicit session =>
      slideElements.filter(_.id === id).map(_.content).update(content)
    }
  }

  /**
   * Updates the geometry properties (top/left/width/height) of a slide element
   * for the default device (desktop, id 1), when any property row exists.
   * NOTE(review): no callers in this file — kept for compatibility.
   */
  private def updateProperties(slideElementId: Long,
                               top: String,
                               left: String,
                               width: String,
                               height: String) = {
    db.withTransaction { implicit session =>
      val deviceId = 1L //default device(desktop)
      val property = slideElementProperties.filter(_.slideElementId === slideElementId).firstOption
      if (property.nonEmpty) {
        slideElementProperties.filter(x => x.deviceId === deviceId && x.slideElementId === slideElementId && x.key === "width")
          .map(_.value)
          .update(width)
        slideElementProperties.filter(x => x.deviceId === deviceId && x.slideElementId === slideElementId && x.key === "height")
          .map(_.value)
          .update(height)
        slideElementProperties.filter(x => x.deviceId === deviceId && x.slideElementId === slideElementId && x.key === "top")
          .map(_.value)
          .update(top)
        slideElementProperties.filter(x => x.deviceId === deviceId && x.slideElementId === slideElementId && x.key === "left")
          .map(_.value)
          .update(left)
      }
    }
  }
}
| igor-borisov/valamis | learn-portlet/src/main/scala/com/arcusys/learn/liferay/update/version270/DBUpdater2729.scala | Scala | gpl-3.0 | 4,837 |
// NOTE(review): generated SemanticDB expect-test fixture — the /*<=…*/ (definition)
// and /*=>…*/ (reference) annotations encode expected symbol occurrences.
// Regenerate via the test suite rather than editing by hand; even comment
// changes may shift expected offsets — confirm before committing.
package example

object X/*<=example.X.*/ {
  @deprecated/*=>scala.deprecated#*//*=>scala.deprecated#`<init>`().*/("to test -Xmaxwarn", since/*=>scala.deprecated#`<init>`().(since)*/ = "forever")
  def x/*<=example.X.x().*/ = 42
}

trait T/*<=example.T#*/ {
  import X/*=>example.X.*/._
  def `1`/*<=example.T#`1`().*/ = x/*=>example.X.x().*/
  def `2`/*<=example.T#`2`().*/ = x/*=>example.X.x().*/
  def `3`/*<=example.T#`3`().*/ = x/*=>example.X.x().*/
  def `4`/*<=example.T#`4`().*/ = x/*=>example.X.x().*/
  def `5`/*<=example.T#`5`().*/ = x/*=>example.X.x().*/
  def `6`/*<=example.T#`6`().*/ = x/*=>example.X.x().*/
  def `7`/*<=example.T#`7`().*/ = x/*=>example.X.x().*/
  def `8`/*<=example.T#`8`().*/ = x/*=>example.X.x().*/
  def `9`/*<=example.T#`9`().*/ = x/*=>example.X.x().*/
  def `10`/*<=example.T#`10`().*/ = x/*=>example.X.x().*/
  def `11`/*<=example.T#`11`().*/ = x/*=>example.X.x().*/
  def `12`/*<=example.T#`12`().*/ = x/*=>example.X.x().*/
  def `13`/*<=example.T#`13`().*/ = x/*=>example.X.x().*/
  def `14`/*<=example.T#`14`().*/ = x/*=>example.X.x().*/
  def `15`/*<=example.T#`15`().*/ = x/*=>example.X.x().*/
  def `16`/*<=example.T#`16`().*/ = x/*=>example.X.x().*/
  def `17`/*<=example.T#`17`().*/ = x/*=>example.X.x().*/
  def `18`/*<=example.T#`18`().*/ = x/*=>example.X.x().*/
  def `19`/*<=example.T#`19`().*/ = x/*=>example.X.x().*/
  def `20`/*<=example.T#`20`().*/ = x/*=>example.X.x().*/
  def `21`/*<=example.T#`21`().*/ = x/*=>example.X.x().*/
  def `22`/*<=example.T#`22`().*/ = x/*=>example.X.x().*/
  def `23`/*<=example.T#`23`().*/ = x/*=>example.X.x().*/
  def `24`/*<=example.T#`24`().*/ = x/*=>example.X.x().*/
  def `25`/*<=example.T#`25`().*/ = x/*=>example.X.x().*/
  def `26`/*<=example.T#`26`().*/ = x/*=>example.X.x().*/
  def `27`/*<=example.T#`27`().*/ = x/*=>example.X.x().*/
  def `28`/*<=example.T#`28`().*/ = x/*=>example.X.x().*/
  def `29`/*<=example.T#`29`().*/ = x/*=>example.X.x().*/
  def `30`/*<=example.T#`30`().*/ = x/*=>example.X.x().*/
  def `31`/*<=example.T#`31`().*/ = x/*=>example.X.x().*/
  def `32`/*<=example.T#`32`().*/ = x/*=>example.X.x().*/
  def `33`/*<=example.T#`33`().*/ = x/*=>example.X.x().*/
  def `34`/*<=example.T#`34`().*/ = x/*=>example.X.x().*/
  def `35`/*<=example.T#`35`().*/ = x/*=>example.X.x().*/
  def `36`/*<=example.T#`36`().*/ = x/*=>example.X.x().*/
  def `37`/*<=example.T#`37`().*/ = x/*=>example.X.x().*/
  def `38`/*<=example.T#`38`().*/ = x/*=>example.X.x().*/
  def `39`/*<=example.T#`39`().*/ = x/*=>example.X.x().*/
  def `40`/*<=example.T#`40`().*/ = x/*=>example.X.x().*/
  def `41`/*<=example.T#`41`().*/ = x/*=>example.X.x().*/
  def `42`/*<=example.T#`42`().*/ = x/*=>example.X.x().*/
  def `43`/*<=example.T#`43`().*/ = x/*=>example.X.x().*/
  def `44`/*<=example.T#`44`().*/ = x/*=>example.X.x().*/
  def `45`/*<=example.T#`45`().*/ = x/*=>example.X.x().*/
  def `46`/*<=example.T#`46`().*/ = x/*=>example.X.x().*/
  def `47`/*<=example.T#`47`().*/ = x/*=>example.X.x().*/
  def `48`/*<=example.T#`48`().*/ = x/*=>example.X.x().*/
  def `49`/*<=example.T#`49`().*/ = x/*=>example.X.x().*/
  def `50`/*<=example.T#`50`().*/ = x/*=>example.X.x().*/
  def `51`/*<=example.T#`51`().*/ = x/*=>example.X.x().*/
  def `52`/*<=example.T#`52`().*/ = x/*=>example.X.x().*/
  def `53`/*<=example.T#`53`().*/ = x/*=>example.X.x().*/
  def `54`/*<=example.T#`54`().*/ = x/*=>example.X.x().*/
  def `55`/*<=example.T#`55`().*/ = x/*=>example.X.x().*/
  def `56`/*<=example.T#`56`().*/ = x/*=>example.X.x().*/
  def `57`/*<=example.T#`57`().*/ = x/*=>example.X.x().*/
  def `58`/*<=example.T#`58`().*/ = x/*=>example.X.x().*/
  def `59`/*<=example.T#`59`().*/ = x/*=>example.X.x().*/
  def `60`/*<=example.T#`60`().*/ = x/*=>example.X.x().*/
  def `61`/*<=example.T#`61`().*/ = x/*=>example.X.x().*/
  def `62`/*<=example.T#`62`().*/ = x/*=>example.X.x().*/
  def `63`/*<=example.T#`63`().*/ = x/*=>example.X.x().*/
  def `64`/*<=example.T#`64`().*/ = x/*=>example.X.x().*/
  def `65`/*<=example.T#`65`().*/ = x/*=>example.X.x().*/
  def `66`/*<=example.T#`66`().*/ = x/*=>example.X.x().*/
  def `67`/*<=example.T#`67`().*/ = x/*=>example.X.x().*/
  def `68`/*<=example.T#`68`().*/ = x/*=>example.X.x().*/
  def `69`/*<=example.T#`69`().*/ = x/*=>example.X.x().*/
  def `70`/*<=example.T#`70`().*/ = x/*=>example.X.x().*/
  def `71`/*<=example.T#`71`().*/ = x/*=>example.X.x().*/
  def `72`/*<=example.T#`72`().*/ = x/*=>example.X.x().*/
  def `73`/*<=example.T#`73`().*/ = x/*=>example.X.x().*/
  def `74`/*<=example.T#`74`().*/ = x/*=>example.X.x().*/
  def `75`/*<=example.T#`75`().*/ = x/*=>example.X.x().*/
  def `76`/*<=example.T#`76`().*/ = x/*=>example.X.x().*/
  def `77`/*<=example.T#`77`().*/ = x/*=>example.X.x().*/
  def `78`/*<=example.T#`78`().*/ = x/*=>example.X.x().*/
  def `79`/*<=example.T#`79`().*/ = x/*=>example.X.x().*/
  def `80`/*<=example.T#`80`().*/ = x/*=>example.X.x().*/
  def `81`/*<=example.T#`81`().*/ = x/*=>example.X.x().*/
  def `82`/*<=example.T#`82`().*/ = x/*=>example.X.x().*/
  def `83`/*<=example.T#`83`().*/ = x/*=>example.X.x().*/
  def `84`/*<=example.T#`84`().*/ = x/*=>example.X.x().*/
  def `85`/*<=example.T#`85`().*/ = x/*=>example.X.x().*/
  def `86`/*<=example.T#`86`().*/ = x/*=>example.X.x().*/
  def `87`/*<=example.T#`87`().*/ = x/*=>example.X.x().*/
  def `88`/*<=example.T#`88`().*/ = x/*=>example.X.x().*/
  def `89`/*<=example.T#`89`().*/ = x/*=>example.X.x().*/
  def `90`/*<=example.T#`90`().*/ = x/*=>example.X.x().*/
  def `91`/*<=example.T#`91`().*/ = x/*=>example.X.x().*/
  def `92`/*<=example.T#`92`().*/ = x/*=>example.X.x().*/
  def `93`/*<=example.T#`93`().*/ = x/*=>example.X.x().*/
  def `94`/*<=example.T#`94`().*/ = x/*=>example.X.x().*/
  def `95`/*<=example.T#`95`().*/ = x/*=>example.X.x().*/
  def `96`/*<=example.T#`96`().*/ = x/*=>example.X.x().*/
  def `97`/*<=example.T#`97`().*/ = x/*=>example.X.x().*/
  def `98`/*<=example.T#`98`().*/ = x/*=>example.X.x().*/
  def `99`/*<=example.T#`99`().*/ = x/*=>example.X.x().*/
  def `100`/*<=example.T#`100`().*/ = x/*=>example.X.x().*/
  def `101`/*<=example.T#`101`().*/ = x/*=>example.X.x().*/
}
| scalameta/scalameta | tests/jvm/src/test/resources/example/XmaxWarn.scala | Scala | bsd-3-clause | 6,134 |
// Copyright 2017 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generator for emitting Verilog
package bottlerocket
import chisel3._
import firrtl.{ExecutionOptionsManager, HasFirrtlOptions, CommonOptions, FirrtlExecutionOptions, ComposableOptions}
// Generator options: number of programmable interrupt lines and the core's reset vector address.
case class BROptions(nProgInterrupts: Int = 240, resetVec: BigInt = BigInt("100", 16)) extends ComposableOptions
/** Command-line entry point that elaborates a BottleRocket core to Verilog. */
object BottleRocketGenerator extends App {
  // Fixed core configuration used for the elaborated design.
  val config = new DefaultBottleRocketConfig

  // Mixin that registers BottleRocket-specific CLI flags on the options manager's parser.
  // NOTE: parser registration happens in the trait body, i.e. at mix-in construction time.
  trait HasBROptions {
    self: ExecutionOptionsManager =>
    // Mutated by the parser callbacks below as flags are consumed.
    var brOptions = BROptions()
    parser.note("BottleRocket options")
    parser.opt[Int]("nProgInterrupts")
      .abbr("nInts")
      .valueName("<nInts>")
      .foreach { n => brOptions = brOptions.copy(nProgInterrupts = n) }
    parser.opt[String]("reset-vec")
      .abbr("rstVec")
      .valueName("<addr-hex>")
      .foreach { str => brOptions = brOptions.copy(resetVec = BigInt(str, 16)) }
  }

  val optionsManager = new ExecutionOptionsManager("chisel3")
    with HasChiselExecutionOptions
    with HasFirrtlOptions
    with HasBROptions { }

  // Elaborate and emit only when the command line parses successfully.
  if (optionsManager.parse(args)) {
    Driver.execute(optionsManager, () => new BottleRocketCore(optionsManager.brOptions)(config))
  }
}
| google/bottlerocket | src/main/scala/bottlerocket/BottleRocketGenerator.scala | Scala | apache-2.0 | 1,745 |
// Generated by the Scala Plugin for the Protocol Buffer Compiler.
// Do not edit!
//
// Protofile syntax: PROTO3
package scalapb.perf.protos
// Generated file object: carries the serialized FileDescriptorProto for protos.proto
// plus the companions of every message it declares. Do not edit by hand.
object ProtosProto extends _root_.scalapb.GeneratedFileObject {
  // Files whose descriptors this proto file imports.
  lazy val dependencies: Seq[_root_.scalapb.GeneratedFileObject] = Seq(
    scalapb.options.ScalapbProto
  )
  // Companions for the messages defined in this proto file, in declaration order.
  lazy val messagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] =
    Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]](
      scalapb.perf.protos.SimpleMessage,
      scalapb.perf.protos.MessageContainer,
      scalapb.perf.protos.Enum,
      scalapb.perf.protos.EnumVector,
      scalapb.perf.protos.IntVector,
      scalapb.perf.protos.StringMessage
    )
  // The raw FileDescriptorProto bytes, base64-encoded by the generator.
  private lazy val ProtoBytes: _root_.scala.Array[Byte] =
      scalapb.Encoding.fromBase64(scala.collection.immutable.Seq(
  """Cgxwcm90b3MucHJvdG8SDHNjYWxhcGIucGVyZhoVc2NhbGFwYi9zY2FsYXBiLnByb3RvIogBCg1TaW1wbGVNZXNzYWdlEhQKA
  WkYASABKAVCBuI/AxIBaVIBaRIUCgFqGAIgASgFQgbiPwMSAWpSAWoSFAoBaxgDIAEoDEIG4j8DEgFrUgFrEjUKBWNvbG9yGAQgA
  SgOMhMuc2NhbGFwYi5wZXJmLkNvbG9yQgriPwcSBWNvbG9yUgVjb2xvciKEAQoQTWVzc2FnZUNvbnRhaW5lchI3CgNvcHQYASABK
  AsyGy5zY2FsYXBiLnBlcmYuU2ltcGxlTWVzc2FnZUII4j8FEgNvcHRSA29wdBI3CgNyZXAYAiADKAsyGy5zY2FsYXBiLnBlcmYuU
  2ltcGxlTWVzc2FnZUII4j8FEgNyZXBSA3JlcCI9CgRFbnVtEjUKBWNvbG9yGAEgASgOMhMuc2NhbGFwYi5wZXJmLkNvbG9yQgriP
  wcSBWNvbG9yUgVjb2xvciJGCgpFbnVtVmVjdG9yEjgKBmNvbG9ycxgBIAMoDjITLnNjYWxhcGIucGVyZi5Db2xvckIL4j8IEgZjb
  2xvcnNSBmNvbG9ycyIqCglJbnRWZWN0b3ISHQoEaW50cxgBIAMoBUIJ4j8GEgRpbnRzUgRpbnRzIk0KDVN0cmluZ01lc3NhZ2USH
  QoEc3RyMRgBIAEoCUIJ4j8GEgRzdHIxUgRzdHIxEh0KBHN0cjIYAiABKAlCCeI/BhIEc3RyMlIEc3RyMioyCgVDb2xvchILCgdVT
  ktOT1dOEAASBwoDUkVEEAESCQoFR1JFRU4QAhIICgRCTFVFEANCB+I/BEgBaAFKwAgKBhIEAAAtAQoICgEMEgMAABIKCQoCAwASA
  wIAHwoICgECEgMEABUKCQoBCBIEBgAJAgoLCgMI/AcSBAYACQIKCgoCBQASBAsAEAEKCgoDBQABEgMLBQoKCwoEBQACABIDDAIOC
  gwKBQUAAgABEgMMAgkKDAoFBQACAAISAwwMDQoLCgQFAAIBEgMNAgoKDAoFBQACAQESAw0CBQoMCgUFAAIBAhIDDQgJCgsKBAUAA
  gISAw4CDAoMCgUFAAICARIDDgIHCgwKBQUAAgICEgMOCgsKCwoEBQACAxIDDwILCgwKBQUAAgMBEgMPAgYKDAoFBQACAwISAw8JC
  goKCgIEABIEEgAXAQoKCgMEAAESAxIIFQoLCgQEAAIAEgMTAg4KDAoFBAACAAUSAxMCBwoMCgUEAAIAARIDEwgJCgwKBQQAAgADE
  gMTDA0KCwoEBAACARIDFAIOCgwKBQQAAgEFEgMUAgcKDAoFBAACAQESAxQICQoMCgUEAAIBAxIDFAwNCgsKBAQAAgISAxUCDgoMC
  gUEAAICBRIDFQIHCgwKBQQAAgIBEgMVCAkKDAoFBAACAgMSAxUMDQoLCgQEAAIDEgMWAhIKDAoFBAACAwYSAxYCBwoMCgUEAAIDA
  RIDFggNCgwKBQQAAgMDEgMWEBEKCgoCBAESBBkAHAEKCgoDBAEBEgMZCBgKCwoEBAECABIDGgIYCgwKBQQBAgAGEgMaAg8KDAoFB
  AECAAESAxoQEwoMCgUEAQIAAxIDGhYXCjkKBAQBAgESAxsCISIsICBbKHNjYWxhcGIuZmllbGQpLmNvbGxlY3Rpb25fdHlwZT0iT
  GlzdCJdOwoKDAoFBAECAQQSAxsCCgoMCgUEAQIBBhIDGwsYCgwKBQQBAgEBEgMbGRwKDAoFBAECAQMSAxsfIAoKCgIEAhIEHgAgA
  QoKCgMEAgESAx4IDAoLCgQEAgIAEgMfAhIKDAoFBAICAAYSAx8CBwoMCgUEAgIAARIDHwgNCgwKBQQCAgADEgMfEBEKCgoCBAMSB
  CIAJAEKCgoDBAMBEgMiCBIKCwoEBAMCABIDIwIcCgwKBQQDAgAEEgMjAgoKDAoFBAMCAAYSAyMLEAoMCgUEAwIAARIDIxEXCgwKB
  QQDAgADEgMjGhsKCgoCBAQSBCYAKAEKCgoDBAQBEgMmCBEKCwoEBAQCABIDJwIaCgwKBQQEAgAEEgMnAgoKDAoFBAQCAAUSAycLE
  AoMCgUEBAIAARIDJxEVCgwKBQQEAgADEgMnGBkKCgoCBAUSBCoALQEKCgoDBAUBEgMqCBUKCwoEBAUCABIDKwISCgwKBQQFAgAFE
  gMrAggKDAoFBAUCAAESAysJDQoMCgUEBQIAAxIDKxARCgsKBAQFAgESAywCEgoMCgUEBQIBBRIDLAIICgwKBQQFAgEBEgMsCQ0KD
  AoFBAUCAQMSAywQEWIGcHJvdG8z"""
      ).mkString)
  // Scala-side descriptor, built from the raw bytes plus dependency descriptors.
  lazy val scalaDescriptor: _root_.scalapb.descriptors.FileDescriptor = {
    val scalaProto = com.google.protobuf.descriptor.FileDescriptorProto.parseFrom(ProtoBytes)
    _root_.scalapb.descriptors.FileDescriptor.buildFrom(scalaProto, dependencies.map(_.scalaDescriptor))
  }
  // Java protobuf descriptor for interop with the Java protobuf runtime.
  lazy val javaDescriptor: com.google.protobuf.Descriptors.FileDescriptor = {
    val javaProto = com.google.protobuf.DescriptorProtos.FileDescriptorProto.parseFrom(ProtoBytes)
    com.google.protobuf.Descriptors.FileDescriptor.buildFrom(javaProto, _root_.scala.Array(
      scalapb.options.ScalapbProto.javaDescriptor
    ))
  }
  @deprecated("Use javaDescriptor instead. In a future version this will refer to scalaDescriptor.", "ScalaPB 0.5.47")
  def descriptor: com.google.protobuf.Descriptors.FileDescriptor = javaDescriptor
} | trueaccord/ScalaPB | docs/src/main/scala/scalapb/perf/protos/ProtosProto.scala | Scala | apache-2.0 | 4,114 |
/**
* Copyright © 2015, BoldRadius Solutions
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.boldradius.akka_exchange.domain
/** A tradeable security, identified by a symbol whose concrete type is fixed per subtype. */
sealed trait Security {
  // Subtypes pin the identifier representation (e.g. StockSymbol for stocks, CUSIP for bonds).
  type SymbolType <: SecuritySymbol
  val symbol: SymbolType
}
/** An exchange-listed stock, identified by its ticker symbol. */
case class Stock(symbol: StockSymbol) extends Security {
  type SymbolType = StockSymbol
}
/**
 * A bond, identified by its CUSIP.
 *
 * Fix: the constructor parameter is now declared as a `val` so it actually
 * implements the abstract `symbol` member inherited from [[Security]]; the
 * previous plain parameter merely shadowed the name and left `symbol`
 * unimplemented for any subclass.
 */
sealed abstract class Bond(val symbol: CUSIP) extends Security {
  type SymbolType = CUSIP
}
/** Marker trait for security identifiers (stock tickers, CUSIPs, ...). */
trait SecuritySymbol
/**
 * CUSIP check-digit arithmetic (modulus-10 "double-add-double" algorithm).
 *
 * Fix: the previous implementation summed only the first 7 characters
 * (`0 to 6`); a standard CUSIP is 9 characters — 8 data characters covered
 * by the check digit plus the trailing check digit itself.
 */
object CUSIP {

  // Number of data characters covered by the check digit.
  private val DataLength = 8

  /**
   * True when `cusip` is a full 9-character CUSIP whose trailing check digit
   * matches the digit computed from its first 8 characters.
   */
  def validate(cusip: String): Boolean =
    cusip.length == DataLength + 1 &&
      letterOrdinal(cusip.last) == calculateCheckDigit(cusip)

  /**
   * Computes the check digit from the first 8 characters of `cusip`.
   * Accepts input with (9 chars) or without (8 chars) the trailing check digit.
   */
  def calculateCheckDigit(cusip: String): Int = {
    val sum = (0 until DataLength).map { n =>
      val ordinal = letterOrdinal(cusip.charAt(n))
      // Characters in even (1-based) positions are doubled before digit-summing.
      val weighted = if (n % 2 == 1) ordinal * 2 else ordinal
      weighted / 10 + weighted % 10
    }.sum
    (10 - (sum % 10)) % 10
  }

  /**
   * Maps '0'-'9' to 0-9 and 'A'-'Z' to 10-35 per the CUSIP character table.
   * NOTE(review): the special characters '*', '@' and '#' (values 36-38) are
   * not handled, matching the original behavior — confirm whether they occur.
   */
  def letterOrdinal(c: Char): Int =
    if (c >= '0' && c <= '9') // digits 0-9
      c - '0'
    else
      c - 'A' + 10
}
/** A CUSIP identifier; construction fails fast when the check digit is invalid. */
case class CUSIP(cusip: String) extends SecuritySymbol {
  require(CUSIP.validate(cusip), "Invalid CUSIP: Check Digit Doesn't Match.")
}
/** A stock ticker symbol, tied to the exchange it is listed on. */
sealed abstract class StockSymbol(val symbol: String) extends SecuritySymbol {
  // Each concrete symbol type pins its exchange as a singleton type.
  type Exchange <: StockExchange
  def exchange: Exchange
}
/** A ticker listed on the New York Stock Exchange. */
case class NYSESymbol(sym: String) extends StockSymbol(sym) {
  type Exchange = NYSE.type
  def exchange = NYSE
}
/** A ticker listed on NASDAQ. */
case class NASDAQSymbol(sym: String) extends StockSymbol(sym) {
  type Exchange = NASDAQ.type
  def exchange = NASDAQ
}
/** A ticker listed on the London Stock Exchange. */
case class LSESymbol(sym: String) extends StockSymbol(sym) {
  type Exchange = LSE.type
  def exchange = LSE
}
| rahulkavale/akka-exchange | util/src/main/scala/com/boldradius/akka_exchange/domain/securities.scala | Scala | apache-2.0 | 2,301 |
package ohnosequences.tabula.impl
import ohnosequences.cosas._, types._, properties._
import ohnosequences.tabula._, attributes._, conditions._, predicates._
import com.amazonaws.services.dynamodbv2.model.{AttributeValue, ScalarAttributeType, AttributeDefinition, ConditionalOperator}
import com.amazonaws.services.dynamodbv2.model.{Condition => SDKCondition}
import scala.reflect._
import shapeless._, poly._
import spire.algebra.Monoid
/**
 * Implicit machinery converting tabula items, properties, conditions and
 * predicates to/from the AWS DynamoDB SDK representation.
 */
object ImplicitConversions {

  /* Conversions between item and the SDK representation */
  type SDKRep = Map[String, AttributeValue]
  type SDKElem = (String, AttributeValue)

  import ohnosequences.cosas.ops.typeSets._

  // Monoid on SDK maps: empty map as identity, `++` as merge
  // (the right operand wins on duplicate attribute names).
  implicit val SDKRepMonoid: Monoid[SDKRep] = new Monoid[SDKRep] {
    def id: SDKRep = Map[String, AttributeValue]()
    def op(x: SDKRep, y: SDKRep): SDKRep = x ++ y
  }

  // Per-property parsers reading a typed value out of an SDK map,
  // threading the remaining map through for sequential parsing.
  object SDKRepParsers {
    // Numeric property: read from the attribute's N field.
    implicit def caseNum[P <: AnyProperty.ofType[Num]](p: P, m: SDKRep):
      (ValueOf[P], SDKRep) = (p(m(p.label).getN.toInt), m)
    // String property: read from the attribute's S field.
    implicit def caseString[P <: AnyProperty.ofType[String]](p: P, m: SDKRep):
      (ValueOf[P], SDKRep) = (p(m(p.label).getS.toString), m)
    // TODO: a case for Bytes
  }

  // Serializes a single property value to a one-entry SDK map keyed by its label.
  object SDKRepSerializers {
    implicit def default[P <: AnyProperty](t: ValueOf[P])
      (implicit getP: ValueOf[P] => P): SDKRep = Map(getP(t).label -> getAttrVal[P#Raw](t.value))
  }

  // trait SDKRepParser extends AnyItemAction {
  //   val parseSDKRep: SDKRep => ValueOf[Item]
  // }
  // trait SDKRepGetter extends AnyItemAction {
  //   val getSDKRep: Item#Raw => SDKRep
  // }
  // trait SDKRepGetter[A <: AnyItemAction] {
  //   def getSDKRep(rep: ValueOf[A#Item]): SDKRep
  // }
  // implicit def autoSDKGetter[A <: AnyItemAction](a: A)(implicit transf: FromProperties.Item[a.Item, SDKRep]):
  //   SDKRepGetter[A] = new SDKRepGetter[A] {
  //     val action = a
  //     def getSDKRep(rep: a.item.Rep): SDKRep = transf(rep)
  //   }

  /* Properties-related conversions */
  // Builds an SDK AttributeDefinition, choosing the scalar type (N/S/B)
  // from the attribute's runtime Raw class.
  implicit def getAttrDef[A <: AnyAttribute](attr: A): AttributeDefinition = {
    val attrDef = new AttributeDefinition().withAttributeName(attr.label)
    attr.rawTag.runtimeClass.asInstanceOf[Class[A#Raw]] match {
      case c if c == classOf[Num] => attrDef.withAttributeType(ScalarAttributeType.N)
      case c if c == classOf[String] => attrDef.withAttributeType(ScalarAttributeType.S)
      case c if c == classOf[Bytes] => attrDef.withAttributeType(ScalarAttributeType.B)
    }
  }

  // Wraps a raw value in the SDK AttributeValue (N for numbers, S for strings,
  // B for byte sequences).
  // FIXME: restrict T somehow, maybe Typeable instance is needed
  implicit def getAttrVal[T] // : Typeable] // : oneOf[NotSetValues]#is]
    (attr: T): AttributeValue = {
    // val B = TypeCase[Bytes]
    attr match {
      case _: Num => new AttributeValue().withN(attr.toString)
      case _: String => new AttributeValue().withS(attr.toString)
      // TODO: test the Bytes case
      case a: Bytes => {
        import java.nio._
        val byteBuffer: ByteBuffer = ByteBuffer.allocate(a.length)
        byteBuffer.put(Array[Byte](a: _*))
        new AttributeValue().withB(byteBuffer)
      }
    }
  }

  /* Conditions-related conversions */
  // Maps a tabula condition to an SDK Condition. The comparison operator name is
  // taken from the condition's class name (e.g. EQ, BETWEEN, IN); operand values
  // are attached for every non-nullary condition.
  implicit def toSDKCondition[C <: AnyCondition](cond: C): SDKCondition = {
    import scala.collection.JavaConversions._
    val sdkCond = new SDKCondition().withComparisonOperator(cond.getClass.getSimpleName)
    cond match {
      case c: NullaryCondition[_] => sdkCond
      case _ => {
        val attrValList: java.util.Collection[AttributeValue] = (cond match {
          case c: SimpleCondition[_] => List(c.value)
          case c: CONTAINS[_, _] => List(c.value)
          case c: NOT_CONTAINS[_, _] => List(c.value)
          case c: BETWEEN[_] => List(c.start, c.end)
          case c: IN[_] => c.values
        }) map getAttrVal
        sdkCond.withAttributeValueList(attrValList)
      }
    }
  }

  // Flattens a predicate tree to (AND/OR operator, condition per attribute label).
  // NOTE(review): the map keeps one condition per attribute — a later condition on
  // the same attribute overwrites an earlier one; confirm that this is intended.
  implicit def toSDKPredicate[P <: AnyPredicate](pred: P): (ConditionalOperator, Map[String, SDKCondition]) = {
    pred match {
      case p: AnySimplePredicate => (ConditionalOperator.AND, Map(p.head.attribute.label -> toSDKCondition(p.head)))
      case p: AnyAndPredicate => (ConditionalOperator.AND,
        toSDKPredicate(p.body)._2 + (p.head.attribute.label -> toSDKCondition(p.head)))
      case p: AnyOrPredicate => (ConditionalOperator.OR,
        toSDKPredicate(p.body)._2 + (p.head.attribute.label -> toSDKCondition(p.head)))
    }
  }
}
| ohnosequences/tabula | src/main/scala/tabula/impl/ImplicitConversions.scala | Scala | agpl-3.0 | 4,469 |
package com.github.jarlakxen.scalatra.rest.queryable
import org.junit.runner.RunWith
import org.specs2.runner.JUnitRunner
import org.specs2.mutable._
import javax.servlet.http.HttpServletRequest
import org.scalatra.{ ScalatraServlet, ScalatraParams }
import org.scalatra.test.specs2.MutableScalatraSpec
import org.scalatra.json.JacksonJsonSupport
import org.json4s.{ DefaultFormats, Formats }
/** Verifies that QueryableViewSupport filters JSON responses by the `fields` query parameter. */
@RunWith( classOf[JUnitRunner] )
class QueryableViewSupportSpec extends MutableScalatraSpec {

  // Fixture payload returned by the servlet under test.
  case class User( id : String, name : String, password : String )

  // Minimal servlet mixing in QueryableViewSupport so responses become field-filterable.
  class QueryableViewServlet extends ScalatraServlet with JacksonJsonSupport with jackson.QueryableViewSupport {
    protected implicit val jsonFormats : Formats = DefaultFormats
    get( "/" ){
      contentType = formats( "json" )
      User( "1", "test", "1233456" )
    }
  }

  addServlet( new QueryableViewServlet, "/*" )

  "GET / on QueryableViewServlet" should {
    // Without a `fields` parameter, every field is serialized.
    "return status 200 and the full object" in {
      get( "/" ) {
        status must_== 200
        body must_== "{\\"id\\":\\"1\\",\\"name\\":\\"test\\",\\"password\\":\\"1233456\\"}"
      }
    }
    // With `fields=id, name`, only the requested fields are serialized.
    "return status 200 and the partial object" in {
      get( "/", Map( "fields" -> "id, name" ) ) {
        status must_== 200
        body must_== "{\\"id\\":\\"1\\",\\"name\\":\\"test\\"}"
      }
    }
  }
} | Jarlakxen/scalatra-rest | src/test/scala/com/github/jarlakxen/scalatra/rest/queryable/QueryableViewSupportSpec.scala | Scala | gpl-2.0 | 1,325 |
package com.cloudray.scalapress.plugin.gallery.vegas
import javax.persistence._
import com.cloudray.scalapress.section.Section
import scala.collection.JavaConverters._
import com.cloudray.scalapress.framework.ScalapressRequest
import com.cloudray.scalapress.plugin.gallery.base.{Gallery, Image}
import scala.Some
import org.hibernate.annotations.{NotFoundAction, NotFound}
import scala.beans.BeanProperty
import org.apache.commons.io.IOUtils
/** @author Stephen Samuel */
/**
 * A page section rendering a gallery via the Vegas jQuery plugin.
 *
 * @author Stephen Samuel
 */
@Entity
@Table(name = "plugin_gallery_vegas")
class VegasSection extends Section {

  // The gallery row may be missing; NotFoundAction.IGNORE resolves that to null,
  // so every use of this field must tolerate null.
  @ManyToOne(fetch = FetchType.LAZY)
  @JoinColumn(name = "gallery")
  @NotFound(action = NotFoundAction.IGNORE)
  @BeanProperty
  var gallery: Gallery = _

  /** Backoffice description, tolerating a missing gallery. */
  override def desc: String = "Showing a gallery using the 'Vegas Plugin' (" + Option(gallery)
    .map(_.name)
    .getOrElse("-No Gallery Set-") + ")"

  override def backoffice: String = "/backoffice/plugin/gallery/vegas/section/" + id

  /** Renders the section as an inline script tag containing the Vegas bootstrap JS. */
  override def render(request: ScalapressRequest): Option[String] = {
    Some("<script>" + VegasSection.js + "</script>")
  }

  /** Returns the images that this section should render. Will use images set on the section
    * or fetch from the container if applicable.
    *
    * Fix: `gallery` was dereferenced unconditionally although the field is nullable
    * (as both the `@NotFound(IGNORE)` mapping and `desc` already assume), which
    * could throw a NullPointerException; a null gallery now falls back to the
    * container item's images, like an empty gallery does.
    */
  def imagesToRender: Iterable[Image] = {
    val galleryImages = Option(gallery).map(_.images.asScala).getOrElse(Nil)
    if (galleryImages.isEmpty)
      Option(item).map(_.images.asScala.map(Image(_, null, 0))).getOrElse(Nil)
    else
      galleryImages
  }
}
object VegasSection {
  // Classpath location of the Vegas plugin bootstrap script.
  val RESOURCE_JS = "/com/cloudray/scalapress/plugin/gallery/vegas/vegas.js"
  // Loaded once at class-initialization time.
  // NOTE(review): if the resource is missing, getResourceAsStream returns null and
  // IOUtils.toString will throw during class init — confirm the packaging includes it.
  val js = IOUtils.toString(getClass.getResourceAsStream(RESOURCE_JS))
} | vidyacraghav/scalapress | src/main/scala/com/cloudray/scalapress/plugin/gallery/vegas/VegasSection.scala | Scala | apache-2.0 | 1,601 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.keras.autograd
import com.intel.analytics.bigdl.dllib.nn.Graph.ModuleNode
import com.intel.analytics.bigdl.dllib.nn.abstractnn.{AbstractModule, Activity, InferShape}
import com.intel.analytics.bigdl.dllib.nn.internal.KerasLayer
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.utils._
import com.intel.analytics.bigdl.dllib.{nn => bnn}
import com.intel.analytics.bigdl.dllib.keras.layers._
import com.intel.analytics.bigdl.dllib.keras.layers.internal._
import com.intel.analytics.bigdl.dllib.keras.models._
import com.intel.analytics.bigdl.dllib.keras.Model
import com.intel.analytics.bigdl.dllib.keras.layers.TimeDistributed
import scala.reflect.ClassTag
object AutoGrad {
val EPSILON = 10e-8
// TODO: Get the nDim from Variable
private def normalizeAxis(axis: Int, nDim: Int = -1) = {
if (axis < 0) {
throw new IllegalArgumentException("We don't support axis < 0 for now") // axis + nDim
} else {
axis
}
}
/**
* Element-wise absolute value.
* @param x A variable.
* @return A variable.
*/
def abs[T: ClassTag](x: Variable[T])(
implicit ev: TensorNumeric[T]): Variable[T] = {
val o: KerasLayer[Activity, Activity, T] =
new KerasLayerWrapper[T](bnn.Abs[T]().asInstanceOf[AbstractModule[Activity, Activity, T]])
Variable(o.inputs(x.node))
}
  /**
   * Sum of the values in a variable, alongside the specified axis.
   * @param x A variable.
   * @param axis axis to compute the sum. 0-based indexed.
   * @param keepDims A boolean, whether to keep the dimensions or not.
   *                 If `keepDims` is `False`, the rank of the variable is reduced
   *                 by 1. If `keepDims` is `True`,
   *                 the reduced dimensions are retained with length 1.
   * @return A variable with the sum of elements of `x`.
   */
  def sum[T: ClassTag](x: Variable[T], axis: Int = 0, keepDims: Boolean = false)(
    implicit ev: TensorNumeric[T]): Variable[T] = {
    // bnn.Sum uses 1-based dimensions, hence the + 1.
    val o: KerasLayer[Activity, Activity, T] =
      new KerasLayerWrapper[T](bnn.Sum[T](dimension = normalizeAxis(axis) + 1,
        squeeze = !keepDims).asInstanceOf[AbstractModule[Activity, Activity, T]])
    Variable(o.inputs(x.node))
  }
/**
* Element-wise value clipping.
* @param x A variable.
* @param min Double.
* @param max Double.
* @return A variable.
*/
def clip[T: ClassTag](x: Variable[T], min: Double, max: Double)(
implicit ev: TensorNumeric[T]): Variable[T] = {
val o: KerasLayer[Activity, Activity, T] =
new KerasLayerWrapper[T](
bnn.HardTanh[T](minValue = min,
maxValue = max).asInstanceOf[AbstractModule[Activity, Activity, T]])
Variable(o.inputs(x.node))
}
/**
* Element-wise square.
* @param x A variable.
* @return A variable.
*/
def square[T: ClassTag](x: Variable[T])(
implicit ev: TensorNumeric[T]): Variable[T] = {
Variable(Square[T]().inputs(x.node))
}
/**
* Element-wise square root.
* @param x A variable.
* @return A variable.
*/
def sqrt[T: ClassTag](x: Variable[T])(
implicit ev: TensorNumeric[T]): Variable[T] = {
Variable(Sqrt[T]().inputs(x.node))
}
/**
* Element-wise maximum of two variables
* @param x A variable.
* @param y A variable.
* @return A variable.
*/
def maximum[T: ClassTag](x: Variable[T], y: Variable[T])(
implicit ev: TensorNumeric[T]): Variable[T] = {
val o: KerasLayer[Activity, Activity, T] =
new KerasLayerWrapper[T](
bnn.CMaxTable[T]().asInstanceOf[AbstractModule[Activity, Activity, T]])
Variable(o.inputs(x.node, y.node))
}
/**
* Element-wise maximum of two variables
* @param x A variable.
* @param y Double
* @return A variable.
*/
def maximum[T: ClassTag](x: Variable[T], y: Double)(
implicit ev: TensorNumeric[T]): Variable[T] = {
clip(x, min = y, max = Double.MaxValue)
}
  /**
   * Mean of a variable, alongside the specified axis.
   * @param x A variable.
   * @param axis axis to compute the mean. 0-based indexed.
   * @param keepDims A boolean, whether to keep the dimensions or not.
   *                 If `keepDims` is `False`, the rank of the variable is reduced
   *                 by 1. If `keepDims` is `True`,
   *                 the reduced dimensions are retained with length 1.
   * @return A variable with the mean of elements of `x`.
   */
  def mean[T: ClassTag](x: Variable[T], axis: Int = 0, keepDims: Boolean = false)(
    implicit ev: TensorNumeric[T]): Variable[T] = {
    // bnn.Mean uses 1-based dimensions, hence the + 1.
    val o: KerasLayer[Activity, Activity, T] =
      new KerasLayerWrapper[T](bnn.Mean[T](dimension = normalizeAxis(axis) + 1,
        squeeze = !keepDims).asInstanceOf[AbstractModule[Activity, Activity, T]])
    Variable(o.inputs(x.node))
  }
/**
* Element-wise log.
* @param x A variable.
* @return A variable.
*/
def log[T: ClassTag](x: Variable[T])(
implicit ev: TensorNumeric[T]): Variable[T] = {
Variable(Log[T]().inputs(x.node))
}
  /**
   * Define the value of epsilon (the EPSILON constant, 10e-8 i.e. 1e-7).
   * @return A value of type Double.
   */
  def epsilon[T: ClassTag]()(
    implicit ev: TensorNumeric[T]): Double = {
    EPSILON
  }
/**
* Element-wise exponential.
* @param x A variable.
* @return A variable.
*/
def exp[T: ClassTag](x: Variable[T])(
implicit ev: TensorNumeric[T]): Variable[T] = {
Variable(Exp[T]().inputs(x.node))
}
/**
* Element-wise exponentiation.
* @param x A variable.
* @param a Double.
* @return A variable.
*/
def pow[T: ClassTag](x: Variable[T], a: Double)(
implicit ev: TensorNumeric[T]): Variable[T] = {
Variable(Power[T](a).inputs(x.node))
}
/**
* Softsign of a variable.
* @param x A variable.
* @return A variable.
*/
def softsign[T: ClassTag](x: Variable[T])(
implicit ev: TensorNumeric[T]): Variable[T] = {
val o: KerasLayer[Activity, Activity, T] =
new KerasLayerWrapper(bnn.SoftSign[T]().asInstanceOf[AbstractModule[Activity, Activity, T]])
Variable(o.inputs(x.node))
}
/**
* Softplus of a variable.
* @param x A variable.
* @return A variable.
*/
def softplus[T: ClassTag](x: Variable[T])(
implicit ev: TensorNumeric[T]): Variable[T] = {
val o: KerasLayer[Activity, Activity, T] =
new KerasLayerWrapper(bnn.SoftPlus[T]().asInstanceOf[AbstractModule[Activity, Activity, T]])
Variable(o.inputs(x.node))
}
/**
* Stacks a list of rank `R` tensors into a rank `R+1` tensor.
* @param inputs: List of variables (tensors).
* @param axis axis along which to perform stacking.
*/
def stack[T: ClassTag](inputs: List[Variable[T]], axis: Int = 1)(
implicit ev: TensorNumeric[T]): Variable[T] = {
val stacked = Variable(Merge.merge[T](inputs.map(expandDims(_, axis).node), mode = "concat",
concatAxis = axis))
contiguous(stacked)
}
/**
* Adds a 1-sized dimension at index "axis".
* The axis is 0 based and if you set the axis to 0, you would change the batch dim.
* @param axis Position where to add a new axis.
*/
def expandDims[T: ClassTag](x: Variable[T], axis: Int)(
implicit ev: TensorNumeric[T]): Variable[T] = {
val layer = ExpandDim(axis).asInstanceOf[AbstractModule[Activity, Activity, T]]
val expanded = Variable(layer.inputs(x.node))
contiguous(expanded)
}
/**
* Turn the output and grad to be contiguous for the input Variable
*/
def contiguous[T: ClassTag](input: Variable[T])(implicit ev: TensorNumeric[T]): Variable[T] = {
val contiguousNode = new KerasLayerWrapper(
bnn.Contiguous[T]().asInstanceOf[AbstractModule[Activity, Activity, T]]).inputs(input.node)
Variable(contiguousNode)
}
/**
* Module to perform matrix multiplication on two mini-batch inputs,
* producing a mini-batch.
*
* @param x A variable.
* @param y A variable.
* @param axes Axes along which to perform multiplication.
*/
def mm[T: ClassTag](
x: Variable[T],
y: Variable[T],
axes: List[Int] = null)(implicit ev: TensorNumeric[T]): Variable[T] = {
require(x.getOutputShape().isInstanceOf[SingleShape], "Only accept single shape")
require(y.getOutputShape().isInstanceOf[SingleShape], "Only accept single shape")
var xx = x
var yy = y
var yShape = yy.getOutputShape().toSingle()
var xShape = xx.getOutputShape().toSingle()
if (yShape.size > xShape.size) {
xx = AutoGrad.expandDims(x, 0)
} else if (yShape.size < xShape.size) {
yy = AutoGrad.expandDims(y, 0)
}
xShape = xx.getOutputShape().toSingle()
yShape = yy.getOutputShape().toSingle()
var transposeX = false
var transposeY = false
var left = 0
var right = 0
if (xShape.length == 2 && yShape.length == 2) {
left = 0
right = 1
} else if (xShape.length == 3 && yShape.length == 3) {
left = 1
right = 2
} else if (xShape.length == 4 && yShape.length == 4) {
left = 2
right = 3
} else if (xShape.length > 4 && yShape.length > 4) {
throw new IllegalArgumentException(s"Only support 2D/3D/4D input for now," +
s"but got [${xShape.mkString(",")}] and [${xShape.mkString(",")}]")
}
if (axes != null) {
require(axes.length == 2, s"axes.length should be 2, but got: ${axes.length}")
require(axes(0) >= left && axes(0) <= right,
s"axes should between [$left, $right], not ${axes(0)}")
require(axes(1) >= left && axes(1) <= right,
s"axes should between [$left, $right], not ${axes(1)}")
transposeX = if (axes(0) != xShape.length - 1) {true} else {false}
transposeY = if (axes(1) == yShape.length - 1) {true} else {false}
}
val mm = InternalMM[T](transA = transposeX,
transB = transposeY)
val kmm = new KerasLayerWrapper[T](mm.asInstanceOf[AbstractModule[Activity, Activity, T]])
if (xShape.length > 3 || yShape.length > 3) {
TimeDistributed(kmm.asInstanceOf[KerasLayer[Activity, Tensor[T], T]]).from(xx, yy)
} else kmm.from(xx, yy)
}
/**
* Normalizes a tensor wrt the L2 norm alongside the specified axis.
*
* @param x A variable.
* @param axis Axis along which to perform multiplication.
*/
def l2Normalize[T: ClassTag](x: Variable[T], axis: Int)
(implicit ev: TensorNumeric[T]): Variable[T] = {
val l2Normalize = x / sqrt(maximum(sum(x * x, axis, keepDims = true), epsilon()))
l2Normalize
}
/**
* Operator that computes a dot product between samples in two tensors.
*
* @param x A variable.
* @param y A variable.
* @param axes Axes along which to perform multiplication.
* @param normalize Whether to L2-normalize samples along the
* dot product axis before taking the dot product.
* If set to True, then the output of the dot product
* is the cosine proximity between the two samples.
*/
def batchDot[T: ClassTag](x: Variable[T], y: Variable[T],
axes: List[Int], normalize: Boolean = false)
(implicit ev: TensorNumeric[T]): Variable[T] = {
val xShape = x.getOutputShape().toSingle().toArray
if (!normalize) {
require(xShape.length == 2 || xShape.length == 3,
s"Only support 2D and 3D for now, but got: ${xShape.length}")
if (xShape.length == 2) {
sum(x*y, axis = 1, keepDims = true)
} else {
mm(x, y, axes)
}
} else {
val l2_x = l2Normalize(x, axes(0))
val l2_y = l2Normalize(y, axes(1))
batchDot(l2_x, l2_y, axes = axes)
}
}
/**
* Computes the error function(Gauss error function) of each element.
* @param x A variable.
* @return A variable.
*/
def erf[T: ClassTag](x: Variable[T])(
implicit ev: TensorNumeric[T]): Variable[T] = {
Variable(new KerasLayerWrapper[T](new InternalERF[T]()
.asInstanceOf[AbstractModule[Activity, Activity, T]]).inputs(x.node))
}
}
/**
 * Factory methods for [[Variable]].
 */
// Fix: dropped the parent-less `extends { ... }` early-initializer syntax,
// which is deprecated in Scala 2.13 and removed in Scala 3; with no parent
// class it is exactly equivalent to a plain object body.
object Variable {
  /** Wraps an existing graph node. Internal use only. */
  private[bigdl] def apply[T: ClassTag](node: ModuleNode[T])(
    implicit ev: TensorNumeric[T]) = {
    new Variable[T](node)
  }

  /** Creates an input variable with the given shape (batch dimension excluded). */
  def apply[T: ClassTag](inputShape: Shape)(
    implicit ev: TensorNumeric[T]): Variable[T] = {
    new Variable[T](Input(inputShape))
  }
}
/**
 * A node in an autograd computation graph. Wraps a [[ModuleNode]] whose
 * element must be a [[KerasLayer]] with a known output shape, and exposes
 * arithmetic operators and tensor ops that extend the graph.
 */
class Variable[T: ClassTag] private[bigdl] (private[bigdl] var node: ModuleNode[T],
    var name: String = null)(
    implicit ev: TensorNumeric[T]) extends Serializable {
  if (node != null) {
    // Keep the variable name and the underlying layer name in sync.
    if (name == null) {
      name = node.element.getName()
    } else {
      node.element.setName(name)
    }
    require(node.element.isInstanceOf[KerasLayer[Activity, Activity, T]])
    require(node.element.asInstanceOf[InferShape].getOutputShape() != null)
  }

  /** Returns the source nodes (nodes with no predecessors) of this variable's graph. */
  private[bigdl] def getRoots(): Array[ModuleNode[T]] = {
    val dfs = this.node.graph(reverse = true).DFS.toList.reverse
    val roots = dfs.filter(_.prevNodes.size == 0).toArray[ModuleNode[T]]
    roots
  }

  /** Builds a [[Model]] from the given inputs to this variable. */
  private[bigdl] def toGraph(inputs: Array[Variable[T]]): Model[T] = {
    Model(input = inputs.map(_.node), output = this.node)
  }

  // "tensorboard --logdir path" to visualize this Variable
  private[bigdl] def toTensorBoard(path: String) = {
    def toGraph(): Model[T] = {
      val dfs = this.node.graph(reverse = true).DFS.toList.reverse
      val roots = dfs.filter(_.prevNodes.size == 0).toArray
      Model(input = roots, output = this.node)
    }
    val props = System.getProperties()
    // Fix: java.util.Properties.contains is Hashtable.contains, which tests
    // *values*; containsKey is required to test for the property's presence.
    val tmp: Option[String] = if (props.containsKey("bigdl.localMode")) {
      Some(props.getProperty("bigdl.localMode"))
    } else {
      None
    }
    // Temporarily force local mode so Engine.init works outside a cluster,
    // then restore the previous setting below.
    props.setProperty("bigdl.localMode", "true")
    Engine.init
    toGraph().saveGraphTopology(path) // TODO: add saveGraphTopology
    if (!tmp.isEmpty) {
      props.setProperty("bigdl.localMode", tmp.get)
    } else {
      props.remove("bigdl.localMode")
    }
  }

  // scalastyle:off
  /** Element-wise addition with broadcasting. */
  def +(a: Variable[T]): Variable[T] = {
    val o =
      new KerasLayerWrapper[T](new InternalCAddTable[T, T]().asInstanceOf[AbstractModule[Activity, Activity, T]])
    val (x, y) = broadcast(this, a)
    Variable(o.inputs(Array(x.node, y.node)))
  }

  /** Adds a scalar to every element. */
  def +(a: Double): Variable[T] = {
    Variable(AddConstant[T](a).inputs(Array(this.node)))
  }

  /** Element-wise subtraction with broadcasting (implemented as this + (-a)). */
  def -(a: Variable[T]): Variable[T] = {
    val o =
      new KerasLayerWrapper[T](bnn.Negative[T]().asInstanceOf[AbstractModule[Activity, Activity, T]])
    val neg = new Variable(o.inputs(a.node))
    val (x, y) = broadcast(this, neg)
    x + y
  }

  /** Subtracts a scalar from every element. */
  def -(a: Double): Variable[T] = {
    Variable(AddConstant[T](-a).inputs(Array(this.node)))
  }

  /** Element-wise negation. */
  def unary_-(): Variable[T] = {
    val o =
      new KerasLayerWrapper[T](bnn.Negative[T]().asInstanceOf[AbstractModule[Activity, Activity, T]])
    Variable(o.inputs(this.node))
  }

  /** Element-wise multiplication with broadcasting. */
  def *(a: Variable[T]): Variable[T] = {
    val o =
      new KerasLayerWrapper[T](InternalCMulTable[T]().asInstanceOf[AbstractModule[Activity, Activity, T]])
    val (x, y) = broadcast(this, a)
    Variable(o.inputs(Array(x.node, y.node)))
  }

  /** Multiplies every element by a scalar. */
  def *(a: Double): Variable[T] = {
    Variable(MulConstant[T](a).inputs(Array(this.node)))
  }

  /** Element-wise division with broadcasting. */
  def /(other: Variable[T]): Variable[T] = {
    val o =
      new KerasLayerWrapper[T](bnn.CDivTable[T]().asInstanceOf[AbstractModule[Activity, Activity, T]])
    val (x, y) = broadcast(this, other)
    Variable(o.inputs(Array(x.node, y.node)))
  }

  /** Divides every element by a scalar (via multiplication with 1/a). */
  def /(a: Double): Variable[T] = {
    this * (1/a)
  }

  /**
   * Delete the singleton dimension(s).
   * The batch dimension needs to be unchanged.
   * For example, if input has size (2, 1, 3, 4, 1):
   * Squeeze(dim = 1) will give output size (2, 3, 4, 1)
   * Squeeze(dims = null) will give output size (2, 3, 4)
   */
  def squeeze(dim: Int): Variable[T] = {
    val dims = new Array[Int](1)
    dims(0) = dim
    squeeze(dims)
  }

  /** Squeezes the given 0-based dims, or all singleton dims when dims is null. */
  def squeeze(dims: Array[Int]): Variable[T] = {
    val blayer = if (dims == null){
      com.intel.analytics.bigdl.dllib.nn.Squeeze[T](null, batchMode = false)
    } else {
      // +1 translates 0-based user dims to bigdl's 1-based dims.
      com.intel.analytics.bigdl.dllib.nn.Squeeze[T](dims.map(x => x + 1), batchMode = false)
    }
    val klayer = new KerasLayerWrapper[T](blayer)
    Variable(klayer.inputs(this.node))
  }

  /**
   * Same as Narrow in torch.
   * Slice the input with the number of dimensions not being reduced.
   * The batch dimension needs to be unchanged.
   * For example, if input is:
   * 1 2 3
   * 4 5 6
   * slice(1, 1, 2) will give output
   * 2 3
   * 5 6
   * slice(1, 2, -1) will give output
   * 3
   * 6
   * @param dim The dimension to narrow. 0-based index. Cannot narrow the batch dimension.
   *            -1 means the last dimension of the input.
   * @param startIndex Non-negative integer. The start index on the given dimension. 0-based index.
   * @param length The length to be sliced. Default is 1.
   */
  def slice(dim: Int, startIndex: Int, length: Int): Variable[T] = {
    val layer = Narrow[T](dim = dim,
      offset = startIndex,
      length = length)
    Variable(layer.inputs(this.node))
  }

  /**
   * Select an index of the input in the given dim and return the subset part.
   * The batch dimension needs to be unchanged.
   * The selected dim would be removed after this operation.
   * For example, if input is:
   * 1 2 3
   * 4 5 6
   * Select(1, 1) will give output [2 5]
   * Select(1, -1) will give output [3 6]
   *
   * @param dim The dimension to select. 0-based index. Cannot select the batch dimension.
   *            -1 means the last dimension of the input.
   * @param index The index of the dimension to be selected. 0-based index.
   *              -1 means the last dimension of the input.
   */
  def indexSelect(dim: Int, index: Int): Variable[T] = {
    val layer = Select[T](dim = dim,
      index = index)
    Variable(layer.inputs(this.node))
  }

  /**
   * Rank-aligns and expands two variables so they share one shape.
   * NOTE(review): the "smaller operand" test sums the non-batch dims
   * (reduceLeft(_+_)) rather than multiplying them; this agrees with the
   * element count only for typical singleton-dim broadcasts — confirm
   * intent before relying on it for exotic shapes.
   */
  private[bigdl] def broadcast(x: Variable[T], y: Variable[T]): (Variable[T], Variable[T]) = {
    var xx = x
    var yy = y
    var yShape = yy.getOutputShape().toSingle()
    var xShape = xx.getOutputShape().toSingle()
    if (yShape.size > xShape.size) {
      xx = AutoGrad.expandDims(xx, 0)
      xShape = xx.getOutputShape().toSingle()
    } else if (yShape.size < xShape.size) {
      yy = AutoGrad.expandDims(yy, 0)
      yShape = yy.getOutputShape().toSingle()
    }
    require(xShape.size == yShape.size,
      s"The two variables should have the same dims," +
      s"but got: ${x.getOutputShape().toSingle().mkString(",")}" +
      s"and ${y.getOutputShape().toSingle().mkString(",")}")
    // Ignore the batch dim
    val xElements = xShape.drop(1).reduceLeft(_+_)
    val yElements = yShape.drop(1).reduceLeft(_+_)
    // should not expand batch dim here as it's -1
    if (xElements < yElements) {
      xx = xx.expand(yShape)
    } else if (xElements > yElements) {
      yy = yy.expand(xShape)
    }
    (xx, yy)
  }
  // scalastyle:on

  /** Repeats this variable `times` times along `axis` (0-based). */
  def replicate(axis: Int, times: Int): Variable[T] = {
    val o =
      new KerasLayerWrapper[T](
        bnn.Replicate[T](dim = axis + 1,
          nFeatures = times).asInstanceOf[AbstractModule[Activity, Activity, T]])
    Variable(o.inputs(this.node))
  }

  /**
   * Expand variable to configured size
   * @param sizes target variable sizes, dim whose size is -1 will be ignored
   */
  def expand(sizes: List[Int]): Variable[T] = {
    val o =
      new KerasLayerWrapper[T](
        InternalExpand[T](sizes.toArray).asInstanceOf[AbstractModule[Activity, Activity, T]])
    Variable(o.inputs(this.node))
  }

  /** Output shape of the underlying layer (batch dim included as -1). */
  def getOutputShape(): Shape = {
    this.node.element.getOutputShape()
  }

  /** Input shape of the underlying layer. */
  def getInputShape(): Shape = {
    this.node.element.getInputShape()
  }

  /** Builds a constant tensor matching this variable's input shape; test/debug helper. */
  private[bigdl] def getDummyTensor(fillValue: T, batchSize: Int): Tensor[T] = {
    Tensor[T](getInputShape().copyAndUpdate(0, batchSize).toSingle().toArray).fill(fillValue)
  }
} | intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/keras/autograd/math.scala | Scala | apache-2.0 | 20,405 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util
import java.{lang => jl}
import java.io.ObjectInputStream
import java.util.{ArrayList, Collections}
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.atomic.AtomicLong
import scala.collection.JavaConverters._
import org.apache.spark.{InternalAccumulator, SparkContext, TaskContext}
import org.apache.spark.scheduler.AccumulableInfo
/**
 * Bookkeeping attached to a registered accumulator.
 *
 * @param id globally unique id assigned by `AccumulatorContext.newId`
 * @param name optional human-readable name (shown in the UI / AccumulableInfo)
 * @param countFailedValues whether values from failed tasks should be counted
 */
private[spark] case class AccumulatorMetadata(
    id: Long,
    name: Option[String],
    countFailedValues: Boolean) extends Serializable
/**
 * The base class for accumulators, that can accumulate inputs of type `IN`, and produce output of
 * type `OUT`.
 *
 * `OUT` should be a type that can be read atomically (e.g., Int, Long), or thread-safely
 * (e.g., synchronized collections) because it will be read from other threads.
 */
abstract class AccumulatorV2[IN, OUT] extends Serializable {
  // Null until register() is called; most public accessors require it to be set.
  private[spark] var metadata: AccumulatorMetadata = _
  // True on the driver; flipped during serialization/deserialization so each
  // side knows where it is (see writeReplace/readObject below).
  private[this] var atDriverSide = true

  /**
   * Assigns metadata (fresh id, name, countFailedValues) to this accumulator,
   * registers it in the global AccumulatorContext, and hooks it into the
   * context cleaner so it is removed when garbage-collected.
   */
  private[spark] def register(
      sc: SparkContext,
      name: Option[String] = None,
      countFailedValues: Boolean = false): Unit = {
    if (this.metadata != null) {
      throw new IllegalStateException("Cannot register an Accumulator twice.")
    }
    this.metadata = AccumulatorMetadata(AccumulatorContext.newId(), name, countFailedValues)
    AccumulatorContext.register(this)
    sc.cleaner.foreach(_.registerAccumulatorForCleanup(this))
  }

  /**
   * Returns true if this accumulator has been registered.
   *
   * @note All accumulators must be registered before use, or it will throw exception.
   */
  final def isRegistered: Boolean =
    metadata != null && AccumulatorContext.get(metadata.id).isDefined

  // Guards accessors that are only meaningful after register().
  private def assertMetadataNotNull(): Unit = {
    if (metadata == null) {
      throw new IllegalAccessError("The metadata of this accumulator has not been assigned yet.")
    }
  }

  /**
   * Returns the id of this accumulator, can only be called after registration.
   */
  final def id: Long = {
    assertMetadataNotNull()
    metadata.id
  }

  /**
   * Returns the name of this accumulator, can only be called after registration.
   */
  final def name: Option[String] = {
    assertMetadataNotNull()
    metadata.name
  }

  /**
   * Whether to accumulate values from failed tasks. This is set to true for system and time
   * metrics like serialization time or bytes spilled, and false for things with absolute values
   * like number of input rows. This should be used for internal metrics only.
   */
  private[spark] final def countFailedValues: Boolean = {
    assertMetadataNotNull()
    metadata.countFailedValues
  }

  /**
   * Creates an [[AccumulableInfo]] representation of this [[AccumulatorV2]] with the provided
   * values.
   */
  private[spark] def toInfo(update: Option[Any], value: Option[Any]): AccumulableInfo = {
    // Internal accumulators are identified by their metrics-prefixed name.
    val isInternal = name.exists(_.startsWith(InternalAccumulator.METRICS_PREFIX))
    new AccumulableInfo(id, name, update, value, isInternal, countFailedValues)
  }

  final private[spark] def isAtDriverSide: Boolean = atDriverSide

  /**
   * Returns if this accumulator is zero value or not. e.g. for a counter accumulator, 0 is zero
   * value; for a list accumulator, Nil is zero value.
   */
  def isZero: Boolean

  /**
   * Creates a new copy of this accumulator, which is zero value. i.e. call `isZero` on the copy
   * must return true.
   */
  def copyAndReset(): AccumulatorV2[IN, OUT] = {
    val copyAcc = copy()
    copyAcc.reset()
    copyAcc
  }

  /**
   * Creates a new copy of this accumulator.
   */
  def copy(): AccumulatorV2[IN, OUT]

  /**
   * Resets this accumulator, which is zero value. i.e. call `isZero` must
   * return true.
   */
  def reset(): Unit

  /**
   * Takes the inputs and accumulates.
   */
  def add(v: IN): Unit

  /**
   * Merges another same-type accumulator into this one and update its state, i.e. this should be
   * merge-in-place.
   */
  def merge(other: AccumulatorV2[IN, OUT]): Unit

  /**
   * Defines the current value of this accumulator
   */
  def value: OUT

  // Called by Java when serializing an object.
  // On the driver we ship a zero-valued copy (sharing the same metadata) so
  // executors accumulate deltas from zero; off-driver we serialize as-is.
  final protected def writeReplace(): Any = {
    if (atDriverSide) {
      if (!isRegistered) {
        throw new UnsupportedOperationException(
          "Accumulator must be registered before send to executor")
      }
      val copyAcc = copyAndReset()
      assert(copyAcc.isZero, "copyAndReset must return a zero value copy")
      copyAcc.metadata = metadata
      copyAcc
    } else {
      this
    }
  }

  // Called by Java when deserializing an object.
  // Toggles atDriverSide: deserializing on an executor marks this instance as
  // executor-side and registers it with the running task; deserializing back
  // on the driver (e.g. from a task result) marks it driver-side again.
  private def readObject(in: ObjectInputStream): Unit = Utils.tryOrIOException {
    in.defaultReadObject()
    if (atDriverSide) {
      atDriverSide = false

      // Automatically register the accumulator when it is deserialized with the task closure.
      // This is for external accumulators and internal ones that do not represent task level
      // metrics, e.g. internal SQL metrics, which are per-operator.
      val taskContext = TaskContext.get()
      if (taskContext != null) {
        taskContext.registerAccumulator(this)
      }
    } else {
      atDriverSide = true
    }
  }

  override def toString: String = {
    if (metadata == null) {
      "Un-registered Accumulator: " + getClass.getSimpleName
    } else {
      getClass.getSimpleName + s"(id: $id, name: $name, value: $value)"
    }
  }
}
/**
 * An internal class used to track accumulators by Spark itself.
 */
private[spark] object AccumulatorContext {

  /**
   * This global map holds the original accumulator objects that are created on the driver.
   * It keeps weak references to these objects so that accumulators can be garbage-collected
   * once the RDDs and user-code that reference them are cleaned up.
   * TODO: Don't use a global map; these should be tied to a SparkContext (SPARK-13051).
   */
  private val originals = new ConcurrentHashMap[Long, jl.ref.WeakReference[AccumulatorV2[_, _]]]

  // Monotonically increasing id source; never reused within a JVM.
  private[this] val nextId = new AtomicLong(0L)

  /**
   * Returns a globally unique ID for a new [[AccumulatorV2]].
   * Note: Once you copy the [[AccumulatorV2]] the ID is no longer unique.
   */
  def newId(): Long = nextId.getAndIncrement

  /** Returns the number of accumulators registered. Used in testing. */
  // Note: may include entries whose weak referent has been collected but not yet removed.
  def numAccums: Int = originals.size

  /**
   * Registers an [[AccumulatorV2]] created on the driver such that it can be used on the executors.
   *
   * All accumulators registered here can later be used as a container for accumulating partial
   * values across multiple tasks. This is what `org.apache.spark.scheduler.DAGScheduler` does.
   * Note: if an accumulator is registered here, it should also be registered with the active
   * context cleaner for cleanup so as to avoid memory leaks.
   *
   * If an [[AccumulatorV2]] with the same ID was already registered, this does nothing instead
   * of overwriting it. We will never register same accumulator twice, this is just a sanity check.
   */
  def register(a: AccumulatorV2[_, _]): Unit = {
    // putIfAbsent keeps the first registration and ignores duplicates atomically.
    originals.putIfAbsent(a.id, new jl.ref.WeakReference[AccumulatorV2[_, _]](a))
  }

  /**
   * Unregisters the [[AccumulatorV2]] with the given ID, if any.
   */
  def remove(id: Long): Unit = {
    originals.remove(id)
  }

  /**
   * Returns the [[AccumulatorV2]] registered with the given ID, if any.
   */
  def get(id: Long): Option[AccumulatorV2[_, _]] = {
    Option(originals.get(id)).map { ref =>
      // Since we are storing weak references, we must check whether the underlying data is valid.
      val acc = ref.get
      if (acc eq null) {
        throw new IllegalAccessError(s"Attempted to access garbage collected accumulator $id")
      }
      acc
    }
  }

  /**
   * Clears all registered [[AccumulatorV2]]s. For testing only.
   */
  def clear(): Unit = {
    originals.clear()
  }

  /**
   * Looks for a registered accumulator by accumulator name.
   */
  // NOTE(review): between find and map the weak referent could be collected,
  // making the returned Option wrap null — confirm callers tolerate this.
  private[spark] def lookForAccumulatorByName(name: String): Option[AccumulatorV2[_, _]] = {
    originals.values().asScala.find { ref =>
      val acc = ref.get
      acc != null && acc.name.isDefined && acc.name.get == name
    }.map(_.get)
  }

  // Identifier for distinguishing SQL metrics from other accumulators
  private[spark] val SQL_ACCUM_IDENTIFIER = "sql"
}
/**
 * An [[AccumulatorV2 accumulator]] for computing sum, count, and average of 64-bit integers.
 *
 * @since 2.0.0
 */
class LongAccumulator extends AccumulatorV2[jl.Long, jl.Long] {
  private var _sum = 0L
  private var _count = 0L

  /**
   * Returns false if this accumulator has had any values added to it or the sum is non-zero.
   * (The previous scaladoc here was a copy-paste of `add`'s documentation.)
   * @since 2.0.0
   */
  override def isZero: Boolean = _sum == 0L && _count == 0

  override def copy(): LongAccumulator = {
    val newAcc = new LongAccumulator
    newAcc._count = this._count
    newAcc._sum = this._sum
    newAcc
  }

  override def reset(): Unit = {
    _sum = 0L
    _count = 0L
  }

  /**
   * Adds v to the accumulator, i.e. increment sum by v and count by 1.
   * @since 2.0.0
   */
  override def add(v: jl.Long): Unit = {
    _sum += v
    _count += 1
  }

  /**
   * Adds v to the accumulator, i.e. increment sum by v and count by 1.
   * @since 2.0.0
   */
  def add(v: Long): Unit = {
    _sum += v
    _count += 1
  }

  /**
   * Returns the number of elements added to the accumulator.
   * @since 2.0.0
   */
  def count: Long = _count

  /**
   * Returns the sum of elements added to the accumulator.
   * @since 2.0.0
   */
  def sum: Long = _sum

  /**
   * Returns the average of elements added to the accumulator.
   * @since 2.0.0
   */
  def avg: Double = _sum.toDouble / _count

  override def merge(other: AccumulatorV2[jl.Long, jl.Long]): Unit = other match {
    case o: LongAccumulator =>
      _sum += o.sum
      _count += o.count
    case _ =>
      throw new UnsupportedOperationException(
        s"Cannot merge ${this.getClass.getName} with ${other.getClass.getName}")
  }

  // Overwrites the sum only; the count is left untouched.
  private[spark] def setValue(newValue: Long): Unit = _sum = newValue

  override def value: jl.Long = _sum
}
/**
 * An [[AccumulatorV2 accumulator]] for computing sum, count, and averages for double precision
 * floating numbers.
 *
 * @since 2.0.0
 */
class DoubleAccumulator extends AccumulatorV2[jl.Double, jl.Double] {
  private var _sum = 0.0
  private var _count = 0L

  /** True iff nothing has been added and the running sum is exactly zero. */
  override def isZero: Boolean = _count == 0 && _sum == 0.0

  override def copy(): DoubleAccumulator = {
    val duplicate = new DoubleAccumulator
    duplicate._sum = this._sum
    duplicate._count = this._count
    duplicate
  }

  override def reset(): Unit = {
    _count = 0L
    _sum = 0.0
  }

  /**
   * Adds v to the accumulator, i.e. increment sum by v and count by 1.
   * @since 2.0.0
   */
  override def add(v: jl.Double): Unit = add(v.doubleValue())

  /**
   * Adds v to the accumulator, i.e. increment sum by v and count by 1.
   * @since 2.0.0
   */
  def add(v: Double): Unit = {
    _sum += v
    _count += 1
  }

  /**
   * Returns the number of elements added to the accumulator.
   * @since 2.0.0
   */
  def count: Long = _count

  /**
   * Returns the sum of elements added to the accumulator.
   * @since 2.0.0
   */
  def sum: Double = _sum

  /**
   * Returns the average of elements added to the accumulator.
   * @since 2.0.0
   */
  def avg: Double = _sum / _count

  override def merge(other: AccumulatorV2[jl.Double, jl.Double]): Unit = other match {
    case that: DoubleAccumulator =>
      _sum += that.sum
      _count += that.count
    case _ =>
      throw new UnsupportedOperationException(
        s"Cannot merge ${this.getClass.getName} with ${other.getClass.getName}")
  }

  // Overwrites the sum only; the count is left untouched.
  private[spark] def setValue(newValue: Double): Unit = _sum = newValue

  override def value: jl.Double = _sum
}
/**
 * An [[AccumulatorV2 accumulator]] for collecting a list of elements.
 *
 * @since 2.0.0
 */
class CollectionAccumulator[T] extends AccumulatorV2[T, java.util.List[T]] {
  // synchronizedList makes individual operations thread-safe; bulk operations
  // (copy, value) still take the list's monitor explicitly.
  private val _list: java.util.List[T] = Collections.synchronizedList(new ArrayList[T]())

  override def isZero: Boolean = _list.isEmpty

  // Cheaper than copy()+reset(): a fresh accumulator is already zero.
  override def copyAndReset(): CollectionAccumulator[T] = new CollectionAccumulator

  override def copy(): CollectionAccumulator[T] = {
    val newAcc = new CollectionAccumulator[T]
    // Lock while iterating so addAll sees a consistent snapshot.
    _list.synchronized {
      newAcc._list.addAll(_list)
    }
    newAcc
  }

  override def reset(): Unit = _list.clear()

  override def add(v: T): Unit = _list.add(v)

  override def merge(other: AccumulatorV2[T, java.util.List[T]]): Unit = other match {
    // Note: the type parameter is erased, so this match only checks the class.
    case o: CollectionAccumulator[T] => _list.addAll(o.value)
    case _ => throw new UnsupportedOperationException(
      s"Cannot merge ${this.getClass.getName} with ${other.getClass.getName}")
  }

  // Returns an immutable snapshot so callers cannot mutate (or race on) _list.
  override def value: java.util.List[T] = _list.synchronized {
    java.util.Collections.unmodifiableList(new ArrayList[T](_list))
  }

  private[spark] def setValue(newValue: java.util.List[T]): Unit = {
    _list.clear()
    _list.addAll(newValue)
  }
}
/**
 * Adapts a legacy `AccumulableParam` to the [[AccumulatorV2]] API.
 *
 * @param initialValue starting value; also used to derive the zero value
 * @param param legacy param object supplying zero/addAccumulator/addInPlace
 */
class LegacyAccumulatorWrapper[R, T](
    initialValue: R,
    param: org.apache.spark.AccumulableParam[R, T]) extends AccumulatorV2[T, R] {
  private[spark] var _value = initialValue  // Current value on driver

  // NOTE(review): relies on R having a meaningful equals; for types using
  // reference equality this may never report zero — confirm acceptable.
  override def isZero: Boolean = _value == param.zero(initialValue)

  override def copy(): LegacyAccumulatorWrapper[R, T] = {
    val acc = new LegacyAccumulatorWrapper(initialValue, param)
    acc._value = _value
    acc
  }

  override def reset(): Unit = {
    _value = param.zero(initialValue)
  }

  override def add(v: T): Unit = _value = param.addAccumulator(_value, v)

  override def merge(other: AccumulatorV2[T, R]): Unit = other match {
    case o: LegacyAccumulatorWrapper[R, T] => _value = param.addInPlace(_value, o.value)
    case _ => throw new UnsupportedOperationException(
      s"Cannot merge ${this.getClass.getName} with ${other.getClass.getName}")
  }

  override def value: R = _value
}
| JerryLead/spark | core/src/main/scala/org/apache/spark/util/AccumulatorV2.scala | Scala | apache-2.0 | 14,704 |
package org.littlewings.infinispan.query
import org.infinispan.query.Search
import org.infinispan.query.dsl.{ Query, SortOrder }
import org.scalatest.FunSpec
import org.scalatest.Matchers._
/**
 * Exercises the Infinispan query DSL against a cache that has no Lucene index
 * configured ("index-less" querying).
 */
class IndexLessQueryDslSpec extends FunSpec with InfinispanClusteredSpecSupport {
  // Fixture data: book records (mixed Japanese/English titles and descriptions).
  private def sourceBooks: Seq[Book] =
    Array(
      Book("978-4798042169",
        "わかりやすいJavaEEウェブシステム入門",
        3456,
        "JavaEE7準拠。ショッピングサイトや業務システムで使われるJavaEE学習書の決定版!"),
      Book("978-4798124605",
        "Beginning Java EE 6 GlassFish 3で始めるエンタープライズJava",
        4410,
        "エンタープライズJava入門書の決定版!Java EE 6は、大規模な情報システム構築に用いられるエンタープライズ環境向けのプログラミング言語です。"),
      Book("978-4774127804",
        "Apache Lucene 入門 ~Java・オープンソース・全文検索システムの構築",
        3200,
        "Luceneは全文検索システムを構築するためのJavaのライブラリです。Luceneを使えば,一味違う高機能なWebアプリケーションを作ることができます。"),
      Book("978-4774161631",
        "[改訂新版] Apache Solr入門 オープンソース全文検索エンジン",
        3780,
        "最新版Apaceh Solr Ver.4.5.1に対応するため大幅な書き直しと原稿の追加を行い、現在の開発環境に合わせて完全にアップデートしました。Apache Solrは多様なプログラミング言語に対応した全文検索エンジンです。"),
      Book("978-4048662024",
        "高速スケーラブル検索エンジン ElasticSearch Server",
        3024,
        "Apache Solrを超える全文検索エンジンとして注目を集めるElasticSearch Serverの日本初の解説書です。多くのサンプルを用いた実践的入門書になっています。"),
      Book("978-1933988177",
        "Lucene in Action",
        6301,
        "New edition of top-selling book on the new version of Lucene. the coreopen-source technology behind most full-text search and Intelligent Web applications.")
    )

  describe("Infinispan IndexLess Query DSL") {
    it("Search") {
      // 3-node cluster, cache configured without indexing.
      withCache[String, Book](3, "infinispan-query-dsl.xml", "indexLessCache") { cache =>
        // Populate the cache, keyed by ISBN.
        sourceBooks.foreach(b => cache.put(b.isbn, b))

        val queryFactory = Search.getQueryFactory(cache)

        // title LIKE '%Java%' AND title LIKE '%全文検索%', ordered by price ascending.
        val query: Query =
          queryFactory
            .from(classOf[Book])
            .having("title")
            .like("%Java%")
            .and
            .having("title")
            .like("%全文検索%")
            .toBuilder
            .orderBy("price", SortOrder.ASC)
            .build

        // Exactly one fixture book matches both LIKE patterns.
        query.getResultSize should be (1)

        val books = query.list.asInstanceOf[java.util.List[Book]]

        books should have size 1
        books.get(0).title should be ("Apache Lucene 入門 ~Java・オープンソース・全文検索システムの構築")
      }
    }
  }
}
| kazuhira-r/infinispan-getting-started | embedded-query/src/test/scala/org/littlewings/infinispan/query/IndexLessQueryDslSpec.scala | Scala | mit | 3,108 |
/*
* Copyright (c) 2011, The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.queue.util
/**
* An importable object that provides automatic primitive to option conversion.
*/
/**
 * An importable object that provides automatic primitive-to-Option (and back)
 * conversions, including the numeric widenings/narrowings Scala applies to
 * plain primitives.
 */
object PrimitiveOptionConversions {
  // Conversion from Option. Note: these call Option.get and therefore throw
  // NoSuchElementException when the Option is None.
  implicit def byteOption2byte(x: Option[Byte]): Byte = x.get
  implicit def shortOption2short(x: Option[Short]): Short = x.get
  implicit def charOption2char(x: Option[Char]): Char = x.get
  implicit def intOption2int(x: Option[Int]): Int = x.get
  implicit def longOption2long(x: Option[Long]): Long = x.get
  implicit def floatOption2float(x: Option[Float]): Float = x.get
  implicit def doubleOption2double(x: Option[Double]): Double = x.get
  implicit def booleanOption2boolean(x: Option[Boolean]): Boolean = x.get

  // Conversion to Option
  implicit def byte2byteOption(x: Byte): Option[Byte] = Some(x)
  implicit def short2shortOption(x: Short): Option[Short] = Some(x)
  implicit def char2charOption(x: Char): Option[Char] = Some(x)
  implicit def int2intOption(x: Int): Option[Int] = Some(x)
  implicit def long2longOption(x: Long): Option[Long] = Some(x)
  implicit def float2floatOption(x: Float): Option[Float] = Some(x)
  implicit def double2doubleOption(x: Double): Option[Double] = Some(x)
  implicit def boolean2booleanOption(x: Boolean): Option[Boolean] = Some(x)

  // Narrowing for constants to byte, short, and float
  implicit def int2byteOption(x: Int): Option[Byte] = Some(x.toByte)
  implicit def int2shortOption(x: Int): Option[Short] = Some(x.toShort)
  // Fix: the parameter was declared as Float, contradicting the method name,
  // duplicating float2floatOption's signature, and leaving no Double=>Option[Float]
  // narrowing at all. Float arguments still work via weak conformance.
  implicit def double2floatOption(x: Double): Option[Float] = Some(x.toFloat)

  // Widening
  implicit def byte2shortOption(x: Byte): Option[Short] = Some(x.toShort)
  implicit def byte2intOption(x: Byte): Option[Int] = Some(x.toInt)
  implicit def byte2longOption(x: Byte): Option[Long] = Some(x.toLong)
  implicit def byte2floatOption(x: Byte): Option[Float] = Some(x.toFloat)
  implicit def byte2doubleOption(x: Byte): Option[Double] = Some(x.toDouble)
  implicit def short2intOption(x: Short): Option[Int] = Some(x.toInt)
  implicit def short2longOption(x: Short): Option[Long] = Some(x.toLong)
  implicit def short2floatOption(x: Short): Option[Float] = Some(x.toFloat)
  implicit def short2doubleOption(x: Short): Option[Double] = Some(x.toDouble)
  implicit def char2intOption(x: Char): Option[Int] = Some(x.toInt)
  implicit def char2longOption(x: Char): Option[Long] = Some(x.toLong)
  implicit def char2floatOption(x: Char): Option[Float] = Some(x.toFloat)
  implicit def char2doubleOption(x: Char): Option[Double] = Some(x.toDouble)
  implicit def int2longOption(x: Int): Option[Long] = Some(x.toLong)
  implicit def int2floatOption(x: Int): Option[Float] = Some(x.toFloat)
  implicit def int2doubleOption(x: Int): Option[Double] = Some(x.toDouble)
  implicit def long2floatOption(x: Long): Option[Float] = Some(x.toFloat)
  implicit def long2doubleOption(x: Long): Option[Double] = Some(x.toDouble)
  implicit def float2doubleOption(x: Float): Option[Double] = Some(x.toDouble)
}
/**
* A trait that exposes the above functions to all sub classes as well.
*/
trait PrimitiveOptionConversions {
  // How to we import these implicit definitions into the trait so that they are seen by objects extending a trait?
  // import PrimitiveOptionConversion._ inside of a trait does not seem to work?
  // Declaring them in a trait like this does work but does not seem scala-ish.
  // Every method below forwards to the identically named implicit on the
  // PrimitiveOptionConversions object, so mixing in this trait brings the whole
  // conversion set into implicit scope for the extending class/object.
  // Unwrapping conversions: Option[primitive] -> primitive.
  implicit def byteOption2byte(x: Option[Byte]): Byte = PrimitiveOptionConversions.byteOption2byte(x)
  implicit def shortOption2short(x: Option[Short]): Short = PrimitiveOptionConversions.shortOption2short(x)
  implicit def charOption2char(x: Option[Char]): Char = PrimitiveOptionConversions.charOption2char(x)
  implicit def intOption2int(x: Option[Int]): Int = PrimitiveOptionConversions.intOption2int(x)
  implicit def longOption2long(x: Option[Long]): Long = PrimitiveOptionConversions.longOption2long(x)
  implicit def floatOption2float(x: Option[Float]): Float = PrimitiveOptionConversions.floatOption2float(x)
  implicit def doubleOption2double(x: Option[Double]): Double = PrimitiveOptionConversions.doubleOption2double(x)
  implicit def booleanOption2boolean(x: Option[Boolean]): Boolean = PrimitiveOptionConversions.booleanOption2boolean(x)
  // Wrapping conversions: primitive -> Option[primitive].
  implicit def byte2byteOption(x: Byte): Option[Byte] = PrimitiveOptionConversions.byte2byteOption(x)
  implicit def short2shortOption(x: Short): Option[Short] = PrimitiveOptionConversions.short2shortOption(x)
  implicit def char2charOption(x: Char): Option[Char] = PrimitiveOptionConversions.char2charOption(x)
  implicit def int2intOption(x: Int): Option[Int] = PrimitiveOptionConversions.int2intOption(x)
  implicit def long2longOption(x: Long): Option[Long] = PrimitiveOptionConversions.long2longOption(x)
  implicit def float2floatOption(x: Float): Option[Float] = PrimitiveOptionConversions.float2floatOption(x)
  implicit def double2doubleOption(x: Double): Option[Double] = PrimitiveOptionConversions.double2doubleOption(x)
  implicit def boolean2booleanOption(x: Boolean): Option[Boolean] = PrimitiveOptionConversions.boolean2booleanOption(x)
  // Narrowing conversions for constants (to byte, short, float).
  implicit def int2byteOption(x: Int): Option[Byte] = PrimitiveOptionConversions.int2byteOption(x)
  implicit def int2shortOption(x: Int): Option[Short] = PrimitiveOptionConversions.int2shortOption(x)
  // NOTE(review): despite the name this takes a Float, not a Double — it mirrors the
  // identical signature on the PrimitiveOptionConversions object above, making it
  // effectively a duplicate of float2floatOption. Confirm whether the parameter was
  // meant to be Double before relying on (or renaming) it.
  implicit def double2floatOption(x: Float): Option[Float] = PrimitiveOptionConversions.double2floatOption(x)
  // Widening conversions.
  implicit def byte2shortOption(x: Byte): Option[Short] = PrimitiveOptionConversions.byte2shortOption(x)
  implicit def byte2intOption(x: Byte): Option[Int] = PrimitiveOptionConversions.byte2intOption(x)
  implicit def byte2longOption(x: Byte): Option[Long] = PrimitiveOptionConversions.byte2longOption(x)
  implicit def byte2floatOption(x: Byte): Option[Float] = PrimitiveOptionConversions.byte2floatOption(x)
  implicit def byte2doubleOption(x: Byte): Option[Double] = PrimitiveOptionConversions.byte2doubleOption(x)
  implicit def short2intOption(x: Short): Option[Int] = PrimitiveOptionConversions.short2intOption(x)
  implicit def short2longOption(x: Short): Option[Long] = PrimitiveOptionConversions.short2longOption(x)
  implicit def short2floatOption(x: Short): Option[Float] = PrimitiveOptionConversions.short2floatOption(x)
  implicit def short2doubleOption(x: Short): Option[Double] = PrimitiveOptionConversions.short2doubleOption(x)
  implicit def char2intOption(x: Char): Option[Int] = PrimitiveOptionConversions.char2intOption(x)
  implicit def char2longOption(x: Char): Option[Long] = PrimitiveOptionConversions.char2longOption(x)
  implicit def char2floatOption(x: Char): Option[Float] = PrimitiveOptionConversions.char2floatOption(x)
  implicit def char2doubleOption(x: Char): Option[Double] = PrimitiveOptionConversions.char2doubleOption(x)
  implicit def int2longOption(x: Int): Option[Long] = PrimitiveOptionConversions.int2longOption(x)
  implicit def int2floatOption(x: Int): Option[Float] = PrimitiveOptionConversions.int2floatOption(x)
  implicit def int2doubleOption(x: Int): Option[Double] = PrimitiveOptionConversions.int2doubleOption(x)
  implicit def long2floatOption(x: Long): Option[Float] = PrimitiveOptionConversions.long2floatOption(x)
  implicit def long2doubleOption(x: Long): Option[Double] = PrimitiveOptionConversions.long2doubleOption(x)
  implicit def float2doubleOption(x: Float): Option[Double] = PrimitiveOptionConversions.float2doubleOption(x)
}
| iontorrent/Torrent-Variant-Caller-stable | public/scala/src/org/broadinstitute/sting/queue/util/PrimitiveOptionConversions.scala | Scala | mit | 8,516 |
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.bigquery.client
import com.google.api.services.bigquery.model._
/** Describes a BigQuery job (extract, load or query) tracked by this client. */
sealed trait BigQueryJob {
  /** Reference to the submitted BigQuery job, when one is available. */
  def jobReference: Option[JobReference]
  /** The BigQuery table this job operates on. */
  def table: TableReference
  /** Short human-readable description: concrete class name plus the job id (or "<unknown>"). */
  def show: String = {
    val jobId = jobReference.fold("<unknown>")(_.getJobId)
    s"${getClass.getName} id:$jobId"
  }
}
/** Extract Job Container: exports `table` to the given `destinationUris`. */
final private[scio] case class ExtractJob(
  destinationUris: List[String],
  jobReference: Option[JobReference],
  table: TableReference
) extends BigQueryJob
/** Load Job Container: loads data from the given `sources` into `table`. */
final private[scio] case class LoadJob(
  sources: List[String],
  jobReference: Option[JobReference],
  table: TableReference
) extends BigQueryJob
/** A query job that may delay execution. */
// NOTE(review): `table` appears to hold the destination of the query results,
// mirroring ExtractJob/LoadJob — confirm against the callers that build QueryJob.
final private[scio] case class QueryJob(
  query: String,
  jobReference: Option[JobReference],
  table: TableReference
) extends BigQueryJob
| spotify/scio | scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/client/Jobs.scala | Scala | apache-2.0 | 1,476 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources
import java.io.FileNotFoundException
import scala.collection.mutable
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs._
import org.apache.hadoop.mapred.{FileInputFormat, JobConf}
import org.apache.spark.SparkContext
import org.apache.spark.internal.Logging
import org.apache.spark.metrics.source.HiveCatalogMetrics
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.execution.streaming.FileStreamSink
import org.apache.spark.sql.types.StructType
import org.apache.spark.util.SerializableConfiguration
/**
* A [[FileIndex]] that generates the list of files to process by recursively listing all the
* files present in `paths`.
*
* @param rootPathsSpecified the list of root table paths to scan (some of which might be
* filtered out later)
* @param parameters as set of options to control discovery
* @param userSpecifiedSchema an optional user specified schema that will be use to provide
* types for the discovered partitions
*/
class InMemoryFileIndex(
    sparkSession: SparkSession,
    rootPathsSpecified: Seq[Path],
    parameters: Map[String, String],
    userSpecifiedSchema: Option[StructType],
    fileStatusCache: FileStatusCache = NoopCache)
  extends PartitioningAwareFileIndex(
    sparkSession, parameters, userSpecifiedSchema, fileStatusCache) {
  // Filter out streaming metadata dirs or files such as "/.../_spark_metadata" (the metadata dir)
  // or "/.../_spark_metadata/0" (a file in the metadata dir). `rootPathsSpecified` might contain
  // such streaming metadata dir or files, e.g. when after globbing "basePath/*" where "basePath"
  // is the output of a streaming query.
  override val rootPaths =
    rootPathsSpecified.filterNot(FileStreamSink.ancestorIsMetadataDirectory(_, hadoopConf))
  // Listing caches populated by refresh0(); @volatile because the index may be read
  // from multiple threads after construction.
  @volatile private var cachedLeafFiles: mutable.LinkedHashMap[Path, FileStatus] = _
  @volatile private var cachedLeafDirToChildrenFiles: Map[Path, Array[FileStatus]] = _
  @volatile private var cachedPartitionSpec: PartitionSpec = _
  // Eagerly list the root paths once at construction time.
  refresh0()
  // Infers the partition spec lazily on first access; cached until refresh() resets it.
  override def partitionSpec(): PartitionSpec = {
    if (cachedPartitionSpec == null) {
      cachedPartitionSpec = inferPartitioning()
    }
    logTrace(s"Partition spec: $cachedPartitionSpec")
    cachedPartitionSpec
  }
  override protected def leafFiles: mutable.LinkedHashMap[Path, FileStatus] = {
    cachedLeafFiles
  }
  override protected def leafDirToChildrenFiles: Map[Path, Array[FileStatus]] = {
    cachedLeafDirToChildrenFiles
  }
  // Invalidates the shared FileStatusCache as well as the local caches, then re-lists.
  override def refresh(): Unit = {
    fileStatusCache.invalidateAll()
    refresh0()
  }
  // (Re)populates the leaf-file caches and clears the cached partition spec so it is
  // re-inferred on the next partitionSpec() call.
  private def refresh0(): Unit = {
    val files = listLeafFiles(rootPaths)
    cachedLeafFiles =
      new mutable.LinkedHashMap[Path, FileStatus]() ++= files.map(f => f.getPath -> f)
    cachedLeafDirToChildrenFiles = files.toArray.groupBy(_.getPath.getParent)
    cachedPartitionSpec = null
  }
  // Two indexes are considered equal iff they cover the same set of root paths.
  override def equals(other: Any): Boolean = other match {
    case hdfs: InMemoryFileIndex => rootPaths.toSet == hdfs.rootPaths.toSet
    case _ => false
  }
  override def hashCode(): Int = rootPaths.toSet.hashCode()
  /**
   * List leaf files of given paths. This method will submit a Spark job to do parallel
   * listing whenever there is a path having more files than the parallel partition discovery
   * discovery threshold.
   *
   * This is publicly visible for testing.
   */
  def listLeafFiles(paths: Seq[Path]): mutable.LinkedHashSet[FileStatus] = {
    val output = mutable.LinkedHashSet[FileStatus]()
    val pathsToFetch = mutable.ArrayBuffer[Path]()
    // Serve what we can from the shared cache; collect the misses for listing below.
    for (path <- paths) {
      fileStatusCache.getLeafFiles(path) match {
        case Some(files) =>
          HiveCatalogMetrics.incrementFileCacheHits(files.length)
          output ++= files
        case None =>
          pathsToFetch += path
      }
      Unit // for some reasons scalac 2.12 needs this; return type doesn't matter
    }
    val filter = FileInputFormat.getInputPathFilter(new JobConf(hadoopConf, this.getClass))
    // List the cache misses (possibly via a parallel Spark job) and backfill the cache.
    val discovered = InMemoryFileIndex.bulkListLeafFiles(
      pathsToFetch, hadoopConf, filter, sparkSession)
    discovered.foreach { case (path, leafFiles) =>
      HiveCatalogMetrics.incrementFilesDiscovered(leafFiles.size)
      fileStatusCache.putLeafFiles(path, leafFiles.toArray)
      output ++= leafFiles
    }
    output
  }
}
object InMemoryFileIndex extends Logging {
  /** A serializable variant of HDFS's BlockLocation. */
  private case class SerializableBlockLocation(
      names: Array[String],
      hosts: Array[String],
      offset: Long,
      length: Long)
  /** A serializable variant of HDFS's FileStatus. */
  private case class SerializableFileStatus(
      path: String,
      length: Long,
      isDir: Boolean,
      blockReplication: Short,
      blockSize: Long,
      modificationTime: Long,
      accessTime: Long,
      blockLocations: Array[SerializableBlockLocation])
  /**
   * Lists a collection of paths recursively. Picks the listing strategy adaptively depending
   * on the number of paths to list.
   *
   * This may only be called on the driver.
   *
   * @return for each input path, the set of discovered files for the path
   */
  private def bulkListLeafFiles(
      paths: Seq[Path],
      hadoopConf: Configuration,
      filter: PathFilter,
      sparkSession: SparkSession): Seq[(Path, Seq[FileStatus])] = {
    // Short-circuits parallel listing when serial listing is likely to be faster.
    if (paths.size <= sparkSession.sessionState.conf.parallelPartitionDiscoveryThreshold) {
      return paths.map { path =>
        (path, listLeafFiles(path, hadoopConf, filter, Some(sparkSession)))
      }
    }
    logInfo(s"Listing leaf files and directories in parallel under: ${paths.mkString(", ")}")
    HiveCatalogMetrics.incrementParallelListingJobCount(1)
    val sparkContext = sparkSession.sparkContext
    val serializableConfiguration = new SerializableConfiguration(hadoopConf)
    val serializedPaths = paths.map(_.toString)
    val parallelPartitionDiscoveryParallelism =
      sparkSession.sessionState.conf.parallelPartitionDiscoveryParallelism
    // Set the number of parallelism to prevent following file listing from generating many tasks
    // in case of large #defaultParallelism.
    val numParallelism = Math.min(paths.size, parallelPartitionDiscoveryParallelism)
    val previousJobDescription = sparkContext.getLocalProperty(SparkContext.SPARK_JOB_DESCRIPTION)
    val statusMap = try {
      val description = paths.size match {
        case 0 =>
          s"Listing leaf files and directories 0 paths"
        case 1 =>
          s"Listing leaf files and directories for 1 path:<br/>${paths(0)}"
        case s =>
          s"Listing leaf files and directories for $s paths:<br/>${paths(0)}, ..."
      }
      sparkContext.setJobDescription(description)
      sparkContext
        .parallelize(serializedPaths, numParallelism)
        .mapPartitions { pathStrings =>
          val hadoopConf = serializableConfiguration.value
          pathStrings.map(new Path(_)).toSeq.map { path =>
            (path, listLeafFiles(path, hadoopConf, filter, None))
          }.iterator
        }.map { case (path, statuses) =>
          val serializableStatuses = statuses.map { status =>
            // Turn FileStatus into SerializableFileStatus so we can send it back to the driver
            val blockLocations = status match {
              case f: LocatedFileStatus =>
                f.getBlockLocations.map { loc =>
                  SerializableBlockLocation(
                    loc.getNames,
                    loc.getHosts,
                    loc.getOffset,
                    loc.getLength)
                }
              case _ =>
                Array.empty[SerializableBlockLocation]
            }
            SerializableFileStatus(
              status.getPath.toString,
              status.getLen,
              status.isDirectory,
              status.getReplication,
              status.getBlockSize,
              status.getModificationTime,
              status.getAccessTime,
              blockLocations)
          }
          (path.toString, serializableStatuses)
        }.collect()
    } finally {
      // Always restore the caller's job description, even when the listing job fails.
      sparkContext.setJobDescription(previousJobDescription)
    }
    // turn SerializableFileStatus back to Status
    statusMap.map { case (path, serializableStatuses) =>
      val statuses = serializableStatuses.map { f =>
        val blockLocations = f.blockLocations.map { loc =>
          new BlockLocation(loc.names, loc.hosts, loc.offset, loc.length)
        }
        new LocatedFileStatus(
          new FileStatus(
            f.length, f.isDir, f.blockReplication, f.blockSize, f.modificationTime,
            new Path(f.path)),
          blockLocations)
      }
      (new Path(path), statuses)
    }
  }
  /**
   * Lists a single filesystem path recursively. If a SparkSession object is specified, this
   * function may launch Spark jobs to parallelize listing.
   *
   * If sessionOpt is None, this may be called on executors.
   *
   * @return all children of path that match the specified filter.
   */
  private def listLeafFiles(
      path: Path,
      hadoopConf: Configuration,
      filter: PathFilter,
      sessionOpt: Option[SparkSession]): Seq[FileStatus] = {
    logTrace(s"Listing $path")
    val fs = path.getFileSystem(hadoopConf)
    // [SPARK-17599] Prevent InMemoryFileIndex from failing if path doesn't exist
    // Note that statuses only include FileStatus for the files and dirs directly under path,
    // and does not include anything else recursively.
    val statuses = try fs.listStatus(path) catch {
      case _: FileNotFoundException =>
        logWarning(s"The directory $path was not found. Was it deleted very recently?")
        Array.empty[FileStatus]
    }
    val filteredStatuses = statuses.filterNot(status => shouldFilterOut(status.getPath.getName))
    val allLeafStatuses = {
      val (dirs, topLevelFiles) = filteredStatuses.partition(_.isDirectory)
      // Recurse into sub-directories, in parallel when a session is available.
      val nestedFiles: Seq[FileStatus] = sessionOpt match {
        case Some(session) =>
          bulkListLeafFiles(dirs.map(_.getPath), hadoopConf, filter, session).flatMap(_._2)
        case _ =>
          dirs.flatMap(dir => listLeafFiles(dir.getPath, hadoopConf, filter, sessionOpt))
      }
      val allFiles = topLevelFiles ++ nestedFiles
      if (filter != null) allFiles.filter(f => filter.accept(f.getPath)) else allFiles
    }
    allLeafStatuses.filterNot(status => shouldFilterOut(status.getPath.getName)).map {
      case f: LocatedFileStatus =>
        f
      // NOTE:
      //
      // - Although S3/S3A/S3N file system can be quite slow for remote file metadata
      //   operations, calling `getFileBlockLocations` does no harm here since these file system
      //   implementations don't actually issue RPC for this method.
      //
      // - Here we are calling `getFileBlockLocations` in a sequential manner, but it should not
      //   be a big deal since we always use to `listLeafFilesInParallel` when the number of
      //   paths exceeds threshold.
      case f =>
        // The other constructor of LocatedFileStatus will call FileStatus.getPermission(),
        // which is very slow on some file system (RawLocalFileSystem, which is launch a
        // subprocess and parse the stdout).
        val locations = fs.getFileBlockLocations(f, 0, f.getLen)
        val lfs = new LocatedFileStatus(f.getLen, f.isDirectory, f.getReplication, f.getBlockSize,
          f.getModificationTime, 0, null, null, null, null, f.getPath, locations)
        if (f.isSymlink) {
          lfs.setSymlink(f.getSymlink)
        }
        lfs
    }
  }
  /** Checks if we should filter out this path name. */
  def shouldFilterOut(pathName: String): Boolean = {
    // We filter follow paths:
    // 1. everything that starts with _ and ., except _common_metadata and _metadata
    // because Parquet needs to find those metadata files from leaf files returned by this method.
    // We should refactor this logic to not mix metadata files with data files.
    // 2. everything that ends with `._COPYING_`, because this is a intermediate state of file. we
    // should skip this file in case of double reading.
    // Names containing "=" are kept even when they start with "_" (partition dirs like "_c=1").
    val exclude = (pathName.startsWith("_") && !pathName.contains("=")) ||
      pathName.startsWith(".") || pathName.endsWith("._COPYING_")
    val include = pathName.startsWith("_common_metadata") || pathName.startsWith("_metadata")
    exclude && !include
  }
}
| szhem/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/InMemoryFileIndex.scala | Scala | apache-2.0 | 13,352 |
/*
* Copyright 2001-2014 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import collection.mutable.ListBuffer
import scala.io.Source
import java.io.{File, FileWriter, BufferedWriter}
trait GenMustMatchersTestsBase {
  /**
   * Translates one line of a "Should"-style test source into its "Must" equivalent.
   *
   * The placeholder dance protects text that must keep its original spelling
   * (<code>must</code> snippets, "<!-- PRESERVE -->should" markers and
   * MustMatchers documentation links) from the blanket should->must /
   * Should->Must rewrites, then restores it afterwards.
   */
  def translateShouldToMustInTests(shouldLine: String): String = {
    // Protect occurrences that must survive the rewrites unchanged.
    val temp1 = shouldLine.replaceAll("<code>must</code>", "<code>I_WAS_must_ORIGINALLY</code>")
    val temp2 = temp1.replaceAll("<!-- PRESERVE -->should", " I_MUST_STAY_SHOULD")
    val temp3 = temp2.replaceAll(
      "<a href=\"MustMatchers.html\"><code>MustMatchers</code></a>",
      "<a href=\"I_WAS_Must_ORIGINALLYMatchers.html\"><code>I_WAS_Must_ORIGINALLYMatchers</code></a>"
    )
    // Apply the actual translation.
    val temp4 = temp3.replaceAll("should", "must")
    val temp5 = temp4.replaceAll("Should", "Must")
    val temp6 = temp5.replaceAll("trait Matchers", "trait MustMatchers")
    val temp7 = temp6.replaceAll("object Matchers extends Matchers", "object MustMatchers extends MustMatchers")
    // Restore the protected occurrences.
    val temp8 = temp7.replaceAll("I_WAS_must_ORIGINALLY", "should")
    val temp9 = temp8.replaceAll(" I_MUST_STAY_SHOULD", "should")
    //val temp10 = temp9.replaceAll("import Matchers._", "import MustMatchers._")
    //val temp11 = temp10.replaceAll("with Matchers", "with MustMatchers")
    //val temp12 = temp11.replaceAll("Matchers.scala", "MustMatchers.scala")
    temp9.replaceAll("I_WAS_Must_ORIGINALLY", "Should")
  }
  /**
   * Generates Must* test sources from the Should* sources under
   * jvm/scalatest-test, writing the translated files into `targetBaseDir`.
   * When `scalaJS` is true, the SKIP-SCALATESTJS,NATIVE markers are honored
   * and specs on the JS skip list are omitted.
   *
   * @return the generated files
   */
  def genTestImpl(targetBaseDir: File, version: String, scalaVersion: String, scalaJS: Boolean): Seq[File] = {
    val scalaJSSkipList =
      List(
        "ShouldBeAnSymbolSpec.scala", // skipped because depends on java reflections
        "ShouldBeASymbolSpec.scala", // skipped because depends on java reflections.
        "ShouldBeSymbolSpec.scala", // skipped because depends on java reflections.
        "ShouldFileBePropertyMatcherSpec.scala", // skipped because depends on java.io.File
        "ShouldLogicalMatcherExprSpec.scala", // skipped because depends on mockito
        "ShouldSameInstanceAsSpec.scala" // skipped because identical string in js env is always the same instance.
      )
    val sourceBaseDir = new File("jvm/scalatest-test/src/test/scala/org/scalatest")
    val matchersDir = new File(targetBaseDir, "matchers")
    matchersDir.mkdirs()
    // Rewrites one Should* source into the corresponding Must* file; a no-op when
    // the target already exists and is newer than the source.
    def transformFile(shouldFile: File, mustFile: File): Unit = {
      if (!mustFile.exists || shouldFile.lastModified > mustFile.lastModified) {
        val writer = new BufferedWriter(new FileWriter(mustFile))
        try {
          // Read eagerly and close the source so the file handle is not leaked
          // (the original code never closed the BufferedSource).
          val source = Source.fromFile(shouldFile)
          val shouldLines = try source.getLines().toList finally source.close()
          var skipMode = false
          for (shouldLine <- shouldLines) {
            val mustLine: String =
              if (scalaJS) {
                if (shouldLine.trim == "// SKIP-SCALATESTJS,NATIVE-START") {
                  skipMode = true
                  ""
                }
                else if (shouldLine.trim == "// SKIP-SCALATESTJS,NATIVE-END") {
                  skipMode = false
                  ""
                }
                else if (!skipMode) {
                  // Un-comment JS/Native-only lines before translating them.
                  if (shouldLine.trim.startsWith("//SCALATESTJS,NATIVE-ONLY "))
                    translateShouldToMustInTests(shouldLine.substring(shouldLine.indexOf("//SCALATESTJS,NATIVE-ONLY ") + 26))
                  else
                    translateShouldToMustInTests(shouldLine)
                }
                else
                  ""
              }
              else
                translateShouldToMustInTests(shouldLine)
            writer.write(mustLine.toString)
            writer.newLine()
          }
        }
        finally {
          writer.flush()
          writer.close()
          println("Generated " + mustFile.getAbsolutePath)
        }
      }
    }
    // For those under org.scalatest
    // NOTE(review): File.listFiles returns null when the directory is missing; this
    // is run from the project root where it exists — confirm before reusing elsewhere.
    sourceBaseDir.listFiles flatMap { shouldFile =>
      if (includeFile(shouldFile)) {
        val shouldFileName = shouldFile.getName
        if (!scalaJS || !scalaJSSkipList.contains(shouldFileName)) {
          val mustFileName = shouldFileName.replace("Should", "Must")
          val mustFile = new File(targetBaseDir, mustFileName)
          transformFile(new File(sourceBaseDir, shouldFileName), mustFile)
          Seq(mustFile)
        }
        else
          Seq.empty[File]
      }
      else
        Seq.empty[File]
    }
  }
  /** JVM variant: no Scala.js marker handling. */
  def genTest(targetBaseDir: File, version: String, scalaVersion: String): Seq[File] = {
    genTestImpl(targetBaseDir, version, scalaVersion, false)
  }
  /** Scala.js variant: honors the SKIP markers and the JS skip list. */
  def genTestForScalaJS(targetBaseDir: File, version: String, scalaVersion: String): Seq[File] = {
    genTestImpl(targetBaseDir, version, scalaVersion, true)
  }
  /** Scala Native variant: shares the Scala.js markers and skip list. */
  def genTestForScalaNative(targetBaseDir: File, version: String, scalaVersion: String): Seq[File] = {
    genTestImpl(targetBaseDir, version, scalaVersion, true)
  }
  /** Decides whether a given Should* source file participates in generation. */
  def includeFile(file: File): Boolean
}
object GenMustMatchersTests extends GenMustMatchersTestsBase {
  private val includedPrefixes = Seq("Should", "ListShould", "EveryShould", "OptionShould")
  /** Includes every regular file whose name starts with one of the Should* prefixes. */
  def includeFile(file: File): Boolean =
    file.isFile && includedPrefixes.exists(prefix => file.getName.startsWith(prefix))
}
object GenMustMatchersTests1 extends GenMustMatchersTestsBase {
  private val includedPrefixes = Seq("Should", "ListShould", "EveryShould", "OptionShould")
  /** Includes Should* files whose name hashes into bucket 0 of 4 (splits the suite four ways). */
  def includeFile(file: File): Boolean =
    file.isFile &&
      includedPrefixes.exists(prefix => file.getName.startsWith(prefix)) &&
      file.getName.hashCode.abs % 4 == 0
}
object GenMustMatchersTests2 extends GenMustMatchersTestsBase {
  private val includedPrefixes = Seq("Should", "ListShould", "EveryShould", "OptionShould")
  /** Includes Should* files whose name hashes into bucket 1 of 4 (splits the suite four ways). */
  def includeFile(file: File): Boolean =
    file.isFile &&
      includedPrefixes.exists(prefix => file.getName.startsWith(prefix)) &&
      file.getName.hashCode.abs % 4 == 1
}
object GenMustMatchersTests3 extends GenMustMatchersTestsBase {
  private val includedPrefixes = Seq("Should", "ListShould", "EveryShould", "OptionShould")
  /** Includes Should* files whose name hashes into bucket 2 of 4 (splits the suite four ways). */
  def includeFile(file: File): Boolean =
    file.isFile &&
      includedPrefixes.exists(prefix => file.getName.startsWith(prefix)) &&
      file.getName.hashCode.abs % 4 == 2
}
object GenMustMatchersTests4 extends GenMustMatchersTestsBase {
  private val includedPrefixes = Seq("Should", "ListShould", "EveryShould", "OptionShould")
  /** Includes Should* files whose name hashes into bucket 3 of 4 (splits the suite four ways). */
  def includeFile(file: File): Boolean =
    file.isFile &&
      includedPrefixes.exists(prefix => file.getName.startsWith(prefix)) &&
      file.getName.hashCode.abs % 4 == 3
}
/**
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt.protocol.testing.codec
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
trait EndTestGroupErrorEventFormats { self: sjsonnew.BasicJsonProtocol =>
  // JsonFormat for EndTestGroupErrorEvent: round-trips the `name` and `error` fields.
  // This trait is generated by sbt-contraband — regenerate instead of editing by hand.
  implicit lazy val EndTestGroupErrorEventFormat: JsonFormat[sbt.protocol.testing.EndTestGroupErrorEvent] = new JsonFormat[sbt.protocol.testing.EndTestGroupErrorEvent] {
    override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.protocol.testing.EndTestGroupErrorEvent = {
      jsOpt match {
        case Some(js) =>
          unbuilder.beginObject(js)
          val name = unbuilder.readField[String]("name")
          val error = unbuilder.readField[String]("error")
          unbuilder.endObject()
          sbt.protocol.testing.EndTestGroupErrorEvent(name, error)
        case None =>
          // A missing JSON value cannot be decoded into an event.
          deserializationError("Expected JsObject but found None")
      }
    }
    override def write[J](obj: sbt.protocol.testing.EndTestGroupErrorEvent, builder: Builder[J]): Unit = {
      builder.beginObject()
      builder.addField("name", obj.name)
      builder.addField("error", obj.error)
      builder.endObject()
    }
  }
}
| Duhemm/sbt | testing/src/main/contraband-scala/sbt/protocol/testing/codec/EndTestGroupErrorEventFormats.scala | Scala | bsd-3-clause | 1,201 |
package com.imaginea.activegrid.core.models
import org.neo4j.graphdb.Node
import org.slf4j.LoggerFactory
/*
* Created by nagulmeeras on 25/10/16.
*/
/**
 * A persisted filter entity.
 *
 * @param id         Neo4j node id, when the entity has been saved
 * @param filterType the kind of filter
 * @param values     the values this filter carries
 */
case class Filter(override val id: Option[Long],
                  filterType: FilterType,
                  values: List[String]) extends BaseEntity
object Filter {
  val filterLabelName = "Filter"
  val logger = LoggerFactory.getLogger(getClass)
  /**
   * Loads a [[Filter]] from the node with the given id, provided the node exists
   * and carries the Filter label; returns None otherwise.
   */
  def fromNeo4jGraph(nodeId: Long): Option[Filter] =
    Neo4jRepository.findNodeById(nodeId)
      .filter(node => Neo4jRepository.hasLabel(node, filterLabelName))
      .map { node =>
        val props = Neo4jRepository.getProperties(node, "filterType", "values")
        Filter(
          Some(node.getId),
          FilterType.toFilteType(props("filterType").asInstanceOf[String]),
          props("values").asInstanceOf[Array[String]].toList)
      }
  /** Neo4j persistence adapter for [[Filter]]. */
  implicit class FilterImpl(filter: Filter) extends Neo4jRep[Filter] {
    override def toNeo4jGraph(entity: Filter): Node = {
      val properties = Map(
        "filterType" -> entity.filterType.toString,
        "values" -> entity.values.toArray)
      Neo4jRepository.saveEntity[Filter](filterLabelName, entity.id, properties)
    }
    override def fromNeo4jGraph(nodeId: Long): Option[Filter] =
      Filter.fromNeo4jGraph(nodeId)
  }
}
| eklavya/activeGrid | src/main/scala/com/imaginea/activegrid/core/models/Filter.scala | Scala | apache-2.0 | 1,383 |
package im.actor.server.encryption
import akka.actor._
import akka.event.Logging
import akka.http.scaladsl.util.FastFuture
import cats.instances.all._
import cats.syntax.all._
import cats.data.{ Xor, XorT }
import im.actor.api.rpc.encryption._
import im.actor.cats.dbio._
import im.actor.server.db.DbExtension
import im.actor.server.model.encryption.{ EphermalPublicKey, EncryptionKeyGroup, EncryptionKeySignature, EncryptionKey }
import im.actor.server.persist.encryption.{ EphermalPublicKeyRepo, EncryptionKeyGroupRepo }
import im.actor.server.sequence.SeqUpdatesExtension
import im.actor.server.social.SocialExtension
import im.actor.util.misc.IdUtils
import im.actor.server.db.ActorPostgresDriver.api._
import scala.collection.immutable.Iterable
import scala.concurrent.Future
final class EncryptionExtension(system: ActorSystem) extends Extension {
import system.dispatcher
private val db = DbExtension(system).db
private val seqUpdExt = SeqUpdatesExtension(system)
private val socialExt = SocialExtension(system)
private val log = Logging(system, getClass)
  /** Fetches all stored encryption key groups for the given user. */
  def fetchKeyGroups(userId: Int): Future[Seq[EncryptionKeyGroup]] =
    db.run(EncryptionKeyGroupRepo.fetch(userId))
  /**
   * Fetches the user's key groups converted to API representation.
   * The returned future fails if any key group cannot be converted.
   */
  def fetchApiKeyGroups(userId: Int): Future[Vector[ApiEncryptionKeyGroup]] = {
    (for {
      kgs ← XorT.right[Future, Exception, Seq[EncryptionKeyGroup]](fetchKeyGroups(userId))
      apiKgs ← XorT.fromXor[Future](kgs.toVector.traverseU(toApi): Xor[Exception, Vector[ApiEncryptionKeyGroup]])
    } yield apiKgs).value map (_.valueOr(throw _))
  }
type KeysWithSignatures = (Seq[EncryptionKey], Seq[EncryptionKeySignature])
def fetchKeys(
userId: Int,
keyGroupId: Int,
keyIds: Set[Long]
): Future[KeysWithSignatures] = {
for {
kgOpt ← db.run(EncryptionKeyGroupRepo.find(userId, keyGroupId))
} yield {
kgOpt.map { kg ⇒
(kg.keys.filter(k ⇒ keyIds.contains(k.id)), kg.signatures.filter(ks ⇒ keyIds.contains(ks.keyId)))
}.getOrElse((Seq.empty, Seq.empty))
}
}
  /**
   * Fetches the requested keys and signatures in API representation.
   * The returned future fails if any model cannot be converted.
   */
  def fetchApiKeys(
    userId: Int,
    keyGroupId: Int,
    keyIds: Set[Long]
  ): Future[(Vector[ApiEncryptionKey], Vector[ApiEncryptionKeySignature])] = {
    val futureT =
      for {
        kss ← XorT.right[Future, Exception, KeysWithSignatures](fetchKeys(userId, keyGroupId, keyIds))
        (keys, signs) = kss
        apiKeys ← XorT.fromXor[Future](keys.toVector.traverseU(toApi): Xor[Exception, Vector[ApiEncryptionKey]])
        apiSigns ← XorT.fromXor[Future](signs.toVector.traverseU(toApi): Xor[Exception, Vector[ApiEncryptionKeySignature]])
      } yield (apiKeys, apiSigns)
    futureT.value map (_.valueOr(throw _))
  }
  /**
   * Persists a new encryption key group for the user and broadcasts an
   * UpdatePublicKeyGroupAdded update to the user's relations, all in a single
   * transaction.
   *
   * @return the id of the newly created key group
   */
  def createKeyGroup(
    userId: Int,
    authId: Long,
    supportedEncryptions: Seq[String],
    identityKey: EncryptionKey,
    keys: Seq[EncryptionKey],
    signatures: Seq[EncryptionKeySignature]
  ): Future[Int] = {
    val id = IdUtils.nextIntId()
    val keyGroup = EncryptionKeyGroup(
      userId = userId,
      id = id,
      authIds = Vector(authId),
      supportedEncryptions = supportedEncryptions,
      identityKey = Some(identityKey),
      keys = keys,
      signatures = signatures
    )
    // Convert to the API form first so a conversion failure aborts before any write.
    val actionT = for {
      apiKeyGroup ← XorT.fromXor[DBIO](toApi(keyGroup))
      relatedUserIds ← XorT.right[DBIO, Exception, Set[Int]](DBIO.from(socialExt.getRelations(userId)))
      _ ← XorT.right[DBIO, Exception, Int](EncryptionKeyGroupRepo.create(keyGroup))
      _ ← XorT.right[DBIO, Exception, Any](DBIO.from(seqUpdExt.broadcastPeopleUpdate(
        userIds = relatedUserIds,
        update = UpdatePublicKeyGroupAdded(
          userId,
          keyGroup = apiKeyGroup
        )
      )))
    } yield id
    db.run(actionT.value.transactionally) map (_.valueOr(throw _))
  }
  /**
   * API variant of createKeyGroup: converts the API models to domain models
   * first; the returned future fails if any conversion is invalid.
   *
   * @return the id of the newly created key group
   */
  def createKeyGroup(
    userId: Int,
    authId: Long,
    supportedEncryptions: Seq[String],
    apiIdentityKey: ApiEncryptionKey,
    apiKeys: Seq[ApiEncryptionKey],
    apiSignatures: Seq[ApiEncryptionKeySignature]
  ): Future[Int] = {
    val futureT = for {
      identityKey ← XorT.fromXor[Future](toModel(apiIdentityKey))
      keys ← XorT.fromXor[Future](apiKeys.toVector.traverseU(toModel))
      signs ← XorT(FastFuture.successful(apiSignatures.toVector.traverseU(toModel)))
      id ← XorT.right[Future, Exception, Int](createKeyGroup(userId, authId, supportedEncryptions, identityKey, keys, signs))
    } yield id
    futureT.value map (_.valueOr(throw _))
  }
def deleteKeyGroup(userId: Int, id: Int) = {
val update = UpdatePublicKeyGroupRemoved(userId, id)
val action =
for {
_ ← EncryptionKeyGroupRepo.delete(userId, id)
relatedUserIds ← DBIO.from(socialExt.getRelations(userId))
_ ← DBIO.from(seqUpdExt.broadcastPeopleUpdate(relatedUserIds, update))
} yield ()
db.run(action.transactionally)
}
  /**
   * Stores ephermal public keys under an existing key group, pairing each key
   * with the signatures that reference its id. Fails if the key group does not
   * exist or any API model cannot be converted.
   */
  def createEphermalKeys(
    userId: Int,
    keyGroupId: Int,
    apiKeys: Vector[ApiEncryptionKey],
    apiSignatures: Vector[ApiEncryptionKeySignature]
  ) = {
    // Group signatures by the id of the key they sign.
    val apiSignsMap = apiSignatures.groupBy(_.keyId)
    val keysXor =
      (apiKeys map { apiKey ⇒
        for {
          key ← toModel(apiKey)
          signs ← apiSignsMap.getOrElse(key.id, Vector.empty).traverseU(toModel)
        } yield EphermalPublicKey(userId, keyGroupId, Some(key), signs)
      }).sequenceU
    val actionT =
      for {
        // Guard: the target key group must already exist.
        _ ← XorT[DBIO, Exception, Unit](for {
          exists ← EncryptionKeyGroupRepo.exists(userId, keyGroupId)
        } yield if (exists) Xor.Right(()) else Xor.Left(new RuntimeException("KeyGroup does not exists")))
        keys ← XorT.fromXor[DBIO](keysXor)
        _ ← XorT.right[DBIO, Exception, Any](EphermalPublicKeyRepo.create(keys))
      } yield ()
    db.run(actionT.value.transactionally) map (_.valueOr(throw _))
  }
def fetchEphermalKeys(
userId: Int,
keyGroupId: Int
): Future[Seq[EphermalPublicKey]] = {
db.run(for {
ekeys ← EphermalPublicKeyRepo.fetch(userId, keyGroupId)
} yield ekeys)
}
def fetchEphermalKeys(
userId: Int,
keyGroupId: Int,
keyIds: Set[Long]
): Future[Seq[EphermalPublicKey]] = {
db.run(for {
ekeys ← EphermalPublicKeyRepo.fetch(userId, keyGroupId, keyIds)
} yield ekeys)
}
  /** Fetches all ephemeral keys of a key group and converts them to the API
    * representation (keys plus their signatures as parallel vectors).
    * A conversion failure is rethrown as a failed Future.
    */
  def fetchApiEphermalKeys(
    userId: Int,
    keyGroupId: Int
  ): Future[(Vector[ApiEncryptionKey], Vector[ApiEncryptionKeySignature])] = {
    val actionT =
      for {
        ekeys ← XorT.right[Future, Exception, Seq[EphermalPublicKey]](fetchEphermalKeys(userId, keyGroupId))
        apiEKeys ← XorT.fromXor[Future](toApi(ekeys.toVector): Xor[Exception, ApiEphermalPublicKeys])
      } yield apiEKeys
    actionT.value map (_.valueOr(throw _))
  }
  /** Same as the two-argument overload but restricted to the given key ids.
    * Fetches ephemeral keys, converts them to API form, and rethrows any
    * conversion failure as a failed Future.
    */
  def fetchApiEphermalKeys(
    userId: Int,
    keyGroupId: Int,
    keyIds: Set[Long]
  ): Future[(Vector[ApiEncryptionKey], Vector[ApiEncryptionKeySignature])] = {
    val actionT =
      for {
        ekeys ← XorT.right[Future, Exception, Seq[EphermalPublicKey]](fetchEphermalKeys(userId, keyGroupId, keyIds))
        apiEKeys ← XorT.fromXor[Future](toApi(ekeys.toVector): Xor[Exception, ApiEphermalPublicKeys])
      } yield apiEKeys
    actionT.value map (_.valueOr(throw _))
  }
  /** Validates that an encrypted box addresses exactly the current key groups
    * of every recipient.
    *
    * For each user referenced in `box.keys`, the user's key groups are fetched
    * and compared with the box:
    *   - missing: groups present server-side but absent from the box and not
    *     explicitly ignored by `ignoredKeyGroups`;
    *   - obsolete: groups referenced by the box that the receiver has deleted.
    *
    * @return Left((missing, obsolete)) when the box is stale, otherwise
    *         Right(userId -> Vector(authId -> box restricted to that key group))
    */
  def checkBox(
    box: ApiEncryptedBox,
    ignoredKeyGroups: Map[Int, Set[Int]]
  ): Future[Either[(Vector[ApiKeyGroupHolder], Vector[ApiKeyGroupId]), Map[Int, Vector[(Long, ApiEncryptedBox)]]]] = {
    val userChecksFu: Iterable[Future[(Seq[ApiKeyGroupHolder], Seq[ApiKeyGroupId], Seq[EncryptionKeyGroup])]] =
      box.keys.groupBy(_.usersId) map {
        case (userId, keys) ⇒
          db.run(EncryptionKeyGroupRepo.fetch(userId)) map { kgs ⇒
            val ignored = ignoredKeyGroups.getOrElse(userId, Set.empty)
            // kgs not presented in box
            val missingKgs = kgs.view
              .filterNot(kg ⇒ keys.exists(_.keyGroupId == kg.id))
              .filterNot(kg ⇒ ignored.contains(kg.id))
              .flatMap(kg ⇒ toApi(kg).toOption map (ApiKeyGroupHolder(userId, _)))
              .force
            // kgs presented in box but deleted by receiver
            val obsKgs = keys.view
              .filterNot(kg ⇒ kgs.exists(_.id == kg.keyGroupId))
              .map(k ⇒ ApiKeyGroupId(userId, k.keyGroupId)).force
            (missingKgs, obsKgs, kgs)
          }
      }
    Future.sequence(userChecksFu) map { checks ⇒
      // Fold the per-user triples into flat collections.
      val (missing, obs, kgs) =
        checks.foldLeft((Vector.empty[ApiKeyGroupHolder], Vector.empty[ApiKeyGroupId], Vector.empty[EncryptionKeyGroup])) {
          case ((macc, oacc, kgacc), (m, o, kg)) ⇒
            (macc ++ m, oacc ++ o, kgacc ++ kg)
        }
      if (missing.nonEmpty || obs.nonEmpty) Left(missing → obs)
      else Right(
        kgs
          .groupBy(_.userId)
          .map {
            case (userId, ukgs) ⇒
              (userId,
                ukgs flatMap { kg ⇒
                  // Restrict the box to the keys addressed to this key group
                  // and fan it out to every auth session of the group.
                  val keys = box.keys.filter(_.keyGroupId == kg.id)
                  val mappedBox = box.copy(keys = keys)
                  kg.authIds map (_ → mappedBox)
                })
          }
      )
    }
  }
}
/** Akka extension id: provides one [[EncryptionExtension]] instance per actor system. */
object EncryptionExtension extends ExtensionId[EncryptionExtension] with ExtensionIdProvider {
  override def createExtension(system: ExtendedActorSystem): EncryptionExtension = new EncryptionExtension(system)
  override def lookup(): ExtensionId[_ <: Extension] = EncryptionExtension
}
| EaglesoftZJ/actor-platform | actor-server/actor-core/src/main/scala/im/actor/server/encryption/EncryptionExtension.scala | Scala | agpl-3.0 | 9,617 |
package com.julianpeeters.avro.annotations
package provider
package matchers
import scala.reflect.macros.blackbox.Context
import collection.JavaConversions._
import org.apache.avro.Schema
import org.codehaus.jackson.JsonNode
import org.codehaus.jackson.node._
object FromJsonMatcher {
  /** Builds the AST for a field's default value from the Avro schema's JSON default.
    *
    * Recursively converts the Jackson `JsonNode` carried by `field.defaultValue`
    * into a quasiquoted Scala expression matching the field's schema type.
    * Yields `EmptyTree` when the field declares no default at all.
    */
  def getDefaultValue(field: Schema.Field, c: Context) = {
    import c.universe._
    import Flag._

    def fromJsonNode(node: JsonNode, schema: Schema): Tree = {
      schema.getType match {
        case _ if node == null => EmptyTree //not `default=null`, but no default
        case Schema.Type.INT => q"${node.getIntValue}"
        case Schema.Type.FLOAT => q"${node.getDoubleValue.asInstanceOf[Float]}"
        case Schema.Type.LONG => q"${node.getLongValue}"
        case Schema.Type.DOUBLE => q"${node.getDoubleValue}"
        case Schema.Type.BOOLEAN => q"${node.getBooleanValue}"
        case Schema.Type.STRING => q"${node.getTextValue}"
        case Schema.Type.NULL => q"null"
        case Schema.Type.UNION => {
          // Only `[null, T]`-style two-member unions are supported; they map to Option[T].
          val unionSchemas = schema.getTypes.toList
          if (unionSchemas.length == 2 &&
            unionSchemas.exists(schema => schema.getType == Schema.Type.NULL) &&
            unionSchemas.exists(schema => schema.getType != Schema.Type.NULL)) {
            val maybeSchema = unionSchemas.find(schema => schema.getType != Schema.Type.NULL)
            maybeSchema match {
              case Some(unionSchema) => {
                node match {
                  case nn: NullNode => q"None"
                  case nonNullNode => q"Some(${fromJsonNode(nonNullNode, unionSchema)})"
                }
              }
              case None => sys.error("no avro type found in this union")
            }
          }
          else sys.error("not a union field")
        }
        case Schema.Type.ARRAY => {
          q"List(..${node.getElements.toList.map(e => fromJsonNode(e, schema.getElementType))})"
        }
        case Schema.Type.MAP => {
          val kvps = node.getFields.toList.map(e => q"${e.getKey} -> ${fromJsonNode(e.getValue, schema.getValueType)}")
          q"Map(..$kvps)"
        }
        case Schema.Type.RECORD => {
          // Recursively build a constructor call via the record's companion apply.
          val fields = schema.getFields
          val fieldValues = fields.map(f => fromJsonNode(node.get(f.name), f.schema))
          q"${TermName(schema.getName)}(..${fieldValues})"
        }
        case x => sys.error("Can't extract a default field, type not yet supported: " + x)
      }
    }
    q"${fromJsonNode(field.defaultValue, field.schema)}"
  }
} | iulianu/avro-scala-macro-annotations | macros/src/main/scala/avro/scala/macro/annotations/provider/matchers/FromJsonMatcher.scala | Scala | apache-2.0 | 2,565 |
/**
*
* Crypto
* Ledger wallet
*
* Created by Pierre Pollastri on 04/02/15.
*
* The MIT License (MIT)
*
* Copyright (c) 2015 Ledger
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package co.ledger.wallet.core.crypto
import java.security.Security
object Crypto {

  /** JCE name of the Spongy Castle (Android Bouncy Castle) provider. */
  val SpongyCastleProviderName = org.spongycastle.jce.provider.BouncyCastleProvider.PROVIDER_NAME

  /** Registers Spongy Castle as the highest-priority JCE provider if absent.
    * Idempotent: does nothing when the provider is already installed.
    */
  def ensureSpongyIsInserted(): Unit = {
    // Reuse the object-level constant instead of re-reading PROVIDER_NAME into
    // a shadowing local, as the original did.
    if (Security.getProvider(SpongyCastleProviderName) == null) {
      Security.insertProviderAt(new org.spongycastle.jce.provider.BouncyCastleProvider, 1)
    }
  }

  /** Unregisters the Spongy Castle provider if it is installed. Idempotent. */
  def ensureSpongyIsRemoved(): Unit = {
    if (Security.getProvider(SpongyCastleProviderName) != null) {
      Security.removeProvider(SpongyCastleProviderName)
    }
  }

  /** Splits `bytes` into two equal halves and XORs them element-wise.
    *
    * @param bytes input assumed to have even length; with an odd length the
    *              trailing byte is ignored (`length / 2` truncates)
    * @return array of length `bytes.length / 2` where
    *         result(i) == (bytes(i) ^ bytes(i + half)).toByte
    */
  def splitAndXor(bytes: Array[Byte]): Array[Byte] = {
    val resultLength = bytes.length / 2
    val result = new Array[Byte](resultLength)
    for (i <- 0 until resultLength)
      result(i) = (bytes(i) ^ bytes(i + resultLength)).toByte
    result
  }
}
| LedgerHQ/ledger-wallet-android | app/src/main/scala/co/ledger/wallet/core/crypto/Crypto.scala | Scala | mit | 2,187 |
/*
Copyright The MITRE Corporation 2009-2010. All rights reserved.
*/
package org.mitre.jcarafe.posttagger
/** Minimal token wrapper used by the post tagger; renders as its raw string. */
class PostTok(val str: String) {
  override def toString = str
}
| wellner/jcarafe | jcarafe-ext/src/main/scala/org/mitre/jcarafe/posttagger/PostTok.scala | Scala | bsd-3-clause | 179 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import kafka.cluster.BrokerEndPoint
import kafka.metrics.KafkaMetricsGroup
import kafka.utils.Implicits._
import kafka.utils.Logging
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.utils.Utils
import scala.collection.{Map, Set, mutable}
/**
 * Manages the pool of fetcher threads (of concrete type `T`) that pull
 * partition data from source brokers. Each partition is mapped to one of
 * `numFetchers` fetcher slots per broker by hashing the topic-partition.
 * All access to the thread map is guarded by `lock`.
 */
abstract class AbstractFetcherManager[T <: AbstractFetcherThread](val name: String, clientId: String, numFetchers: Int)
  extends Logging with KafkaMetricsGroup {
  // map of (source broker_id, fetcher_id per source broker) => fetcher.
  // package private for test
  private[server] val fetcherThreadMap = new mutable.HashMap[BrokerIdAndFetcherId, T]
  private val lock = new Object
  // Current number of fetcher slots per broker; adjustable via resizeThreadPool.
  private var numFetchersPerBroker = numFetchers
  val failedPartitions = new FailedPartitions
  this.logIdent = "[" + name + "] "

  private val tags = Map("clientId" -> clientId)

  // Largest replication lag across every fetcher/topic/partition.
  newGauge("MaxLag", () => {
    // current max lag across all fetchers/topics/partitions
    fetcherThreadMap.values.foldLeft(0L) { (curMaxLagAll, fetcherThread) =>
      val maxLagThread = fetcherThread.fetcherLagStats.stats.values.foldLeft(0L)((curMaxLagThread, lagMetrics) =>
        math.max(curMaxLagThread, lagMetrics.lag))
      math.max(curMaxLagAll, maxLagThread)
    }
  }, tags)

  // Slowest one-minute fetch-request rate across all fetcher threads.
  newGauge("MinFetchRate", () => {
    // current min fetch rate across all fetchers/topics/partitions
    val headRate = fetcherThreadMap.values.headOption.map(_.fetcherStats.requestRate.oneMinuteRate).getOrElse(0.0)
    fetcherThreadMap.values.foldLeft(headRate)((curMinAll, fetcherThread) =>
      math.min(curMinAll, fetcherThread.fetcherStats.requestRate.oneMinuteRate))
  }, tags)

  newGauge("FailedPartitionsCount", () => failedPartitions.size, tags)

  newGauge("DeadThreadCount", () => deadThreadCount, tags)

  // Number of fetcher threads that have terminated abnormally.
  private[server] def deadThreadCount: Int = lock synchronized { fetcherThreadMap.values.count(_.isThreadFailed) }

  /**
   * Changes the number of fetcher slots per broker. Every partition is removed
   * and re-added so the hash-based slot assignment matches the new pool size;
   * threads whose slot id falls outside the new range are shut down, and any
   * threads left without partitions are reaped afterwards.
   */
  def resizeThreadPool(newSize: Int): Unit = {
    def migratePartitions(newSize: Int): Unit = {
      fetcherThreadMap.forKeyValue { (id, thread) =>
        val partitionStates = removeFetcherForPartitions(thread.partitions)
        if (id.fetcherId >= newSize)
          thread.shutdown()
        val fetchStates = partitionStates.map { case (topicPartition, currentFetchState) =>
          val initialFetchState = InitialFetchState(thread.sourceBroker,
            currentLeaderEpoch = currentFetchState.currentLeaderEpoch,
            initOffset = currentFetchState.fetchOffset)
          topicPartition -> initialFetchState
        }
        addFetcherForPartitions(fetchStates)
      }
    }
    lock synchronized {
      val currentSize = numFetchersPerBroker
      info(s"Resizing fetcher thread pool size from $currentSize to $newSize")
      numFetchersPerBroker = newSize
      if (newSize != currentSize) {
        // We could just migrate some partitions explicitly to new threads. But this is currently
        // reassigning all partitions using the new thread size so that hash-based allocation
        // works with partition add/delete as it did before.
        migratePartitions(newSize)
      }
      shutdownIdleFetcherThreads()
    }
  }

  // Visible for testing
  private[server] def getFetcher(topicPartition: TopicPartition): Option[T] = {
    lock synchronized {
      fetcherThreadMap.values.find { fetcherThread =>
        fetcherThread.fetchState(topicPartition).isDefined
      }
    }
  }

  // Visibility for testing
  // Maps a topic-partition to its fetcher slot; stable for a fixed pool size.
  private[server] def getFetcherId(topicPartition: TopicPartition): Int = {
    lock synchronized {
      Utils.abs(31 * topicPartition.topic.hashCode() + topicPartition.partition) % numFetchersPerBroker
    }
  }

  // This method is only needed by ReplicaAlterDirManager
  def markPartitionsForTruncation(brokerId: Int, topicPartition: TopicPartition, truncationOffset: Long): Unit = {
    lock synchronized {
      val fetcherId = getFetcherId(topicPartition)
      val brokerIdAndFetcherId = BrokerIdAndFetcherId(brokerId, fetcherId)
      fetcherThreadMap.get(brokerIdAndFetcherId).foreach { thread =>
        thread.markPartitionsForTruncation(topicPartition, truncationOffset)
      }
    }
  }

  // to be defined in subclass to create a specific fetcher
  def createFetcherThread(fetcherId: Int, sourceBroker: BrokerEndPoint): T

  /**
   * Assigns each partition to its fetcher slot, creating and starting fetcher
   * threads on demand. An existing thread is reused only if it points at the
   * same source broker; otherwise it is shut down and replaced.
   */
  def addFetcherForPartitions(partitionAndOffsets: Map[TopicPartition, InitialFetchState]): Unit = {
    lock synchronized {
      val partitionsPerFetcher = partitionAndOffsets.groupBy { case (topicPartition, brokerAndInitialFetchOffset) =>
        BrokerAndFetcherId(brokerAndInitialFetchOffset.leader, getFetcherId(topicPartition))
      }

      def addAndStartFetcherThread(brokerAndFetcherId: BrokerAndFetcherId,
                                   brokerIdAndFetcherId: BrokerIdAndFetcherId): T = {
        val fetcherThread = createFetcherThread(brokerAndFetcherId.fetcherId, brokerAndFetcherId.broker)
        fetcherThreadMap.put(brokerIdAndFetcherId, fetcherThread)
        fetcherThread.start()
        fetcherThread
      }

      for ((brokerAndFetcherId, initialFetchOffsets) <- partitionsPerFetcher) {
        val brokerIdAndFetcherId = BrokerIdAndFetcherId(brokerAndFetcherId.broker.id, brokerAndFetcherId.fetcherId)
        val fetcherThread = fetcherThreadMap.get(brokerIdAndFetcherId) match {
          case Some(currentFetcherThread) if currentFetcherThread.sourceBroker == brokerAndFetcherId.broker =>
            // reuse the fetcher thread
            currentFetcherThread
          case Some(f) =>
            f.shutdown()
            addAndStartFetcherThread(brokerAndFetcherId, brokerIdAndFetcherId)
          case None =>
            addAndStartFetcherThread(brokerAndFetcherId, brokerIdAndFetcherId)
        }

        addPartitionsToFetcherThread(fetcherThread, initialFetchOffsets)
      }
    }
  }

  // Marks a partition as failed so it is excluded from fetching until recovered.
  def addFailedPartition(topicPartition: TopicPartition): Unit = {
    lock synchronized {
      failedPartitions.add(topicPartition)
    }
  }

  protected def addPartitionsToFetcherThread(fetcherThread: T,
                                             initialOffsetAndEpochs: collection.Map[TopicPartition, InitialFetchState]): Unit = {
    fetcherThread.addPartitions(initialOffsetAndEpochs)
    info(s"Added fetcher to broker ${fetcherThread.sourceBroker.id} for partitions $initialOffsetAndEpochs")
  }

  /**
   * Removes the given partitions from every fetcher thread and clears their
   * failed-partition marks.
   * @return the last known fetch state of each removed partition
   */
  def removeFetcherForPartitions(partitions: Set[TopicPartition]): Map[TopicPartition, PartitionFetchState] = {
    val fetchStates = mutable.Map.empty[TopicPartition, PartitionFetchState]
    lock synchronized {
      for (fetcher <- fetcherThreadMap.values)
        fetchStates ++= fetcher.removePartitions(partitions)
      failedPartitions.removeAll(partitions)
    }
    if (partitions.nonEmpty)
      info(s"Removed fetcher for partitions $partitions")
    fetchStates
  }

  // Shuts down and drops fetcher threads that no longer own any partition.
  def shutdownIdleFetcherThreads(): Unit = {
    lock synchronized {
      val keysToBeRemoved = new mutable.HashSet[BrokerIdAndFetcherId]
      for ((key, fetcher) <- fetcherThreadMap) {
        if (fetcher.partitionCount <= 0) {
          fetcher.shutdown()
          keysToBeRemoved += key
        }
      }
      fetcherThreadMap --= keysToBeRemoved
    }
  }

  // Stops all fetchers: initiate shutdown on every thread first so they stop
  // concurrently, then await each one, then clear the map.
  def closeAllFetchers(): Unit = {
    lock synchronized {
      for ((_, fetcher) <- fetcherThreadMap) {
        fetcher.initiateShutdown()
      }

      for ((_, fetcher) <- fetcherThreadMap) {
        fetcher.shutdown()
      }

      fetcherThreadMap.clear()
    }
  }
}
/**
* The class FailedPartitions would keep a track of partitions marked as failed either during truncation or appending
* resulting from one of the following errors -
* <ol>
* <li> Storage exception
* <li> Fenced epoch
* <li> Unexpected errors
* </ol>
* The partitions which fail due to storage error are eventually removed from this set after the log directory is
* taken offline.
*/
class FailedPartitions {
  // Partitions currently marked as failed; every access is serialized on `this`.
  private val partitionSet = new mutable.HashSet[TopicPartition]

  def size: Int = synchronized {
    partitionSet.size
  }

  def add(topicPartition: TopicPartition): Unit = synchronized {
    partitionSet += topicPartition
  }

  def removeAll(topicPartitions: Set[TopicPartition]): Unit = synchronized {
    partitionSet --= topicPartitions
  }

  def contains(topicPartition: TopicPartition): Boolean = synchronized {
    partitionSet(topicPartition)
  }
}
// Key for grouping partitions by (leader broker endpoint, fetcher slot).
case class BrokerAndFetcherId(broker: BrokerEndPoint, fetcherId: Int)
// Initial position handed to a fetcher when a partition is (re)added.
case class InitialFetchState(leader: BrokerEndPoint, currentLeaderEpoch: Int, initOffset: Long)
// Key of `fetcherThreadMap`: broker id plus fetcher slot on that broker.
case class BrokerIdAndFetcherId(brokerId: Int, fetcherId: Int)
| lindong28/kafka | core/src/main/scala/kafka/server/AbstractFetcherManager.scala | Scala | apache-2.0 | 9,411 |
package ua.edu.odeku.ceem.mapRadar.tools.radar.surface
import com.google.common.primitives.Doubles
import gov.nasa.worldwind.WorldWind
import gov.nasa.worldwind.geom.{Sector, LatLon, Angle}
import gov.nasa.worldwind.globes.{Earth, ElevationModel}
import gov.nasa.worldwind.layers.RenderableLayer
import gov.nasa.worldwind.util.BufferFactory.DoubleBufferFactory
import gov.nasa.worldwind.util.{BufferFactory, BufferWrapper}
import ua.edu.odeku.ceem.mapRadar.AppCeemRadarFrame
import ua.edu.odeku.ceem.mapRadar.tools.radar.models.Radar
import ua.edu.odeku.ceem.mapRadar.utils.gui.VisibleUtils
import scala.collection.mutable.ArrayBuffer
/**
* Created by ABakalov on 18.12.2014.
*/
/**
 * Builds and displays an analytic surface showing the distribution of radar
 * power density on the WorldWind globe, either draped on the terrain (2D) or
 * at a fixed altitude (3D).
 *
 * Fix: `mergeElevationAndPower` previously tested `value == Double.NaN`, which
 * is always false under IEEE-754, so NaN cells were never replaced and leaked
 * into the surface and the min/max colour-gradient computation. It now uses
 * `isNaN`.
 */
object SurfaceManager {

  val SHOW_TYPE_ISOLINE: String = "ISOLINE"
  val SHOW_TYPE_2D: String = "2D"
  val SHOW_TYPE_3D: String = "3D"

  // Hue endpoints (fractions of the colour wheel) for the power gradient.
  protected val HUE_BLUE: Double = 240d / 360d
  protected val HUE_RED: Double = 0d / 360d

  protected val POLAR_RADIUS: Double = AppCeemRadarFrame.wwd.getModel.getGlobe.getPolarRadius
  protected val EQUATORIAL_RADIUS: Double = AppCeemRadarFrame.wwd.getModel.getGlobe.getEquatorialRadius
  // Mean globe radius used by the spherical destination-point formula below.
  protected val GLOBE_RADIUS: Double = (POLAR_RADIUS + EQUATORIAL_RADIUS) / 2.0

  private val NORTH: Angle = Angle.fromDegrees(0)
  private val EAST: Angle = Angle.fromDegrees(90)
  private val SOUTH: Angle = Angle.fromDegrees(180)
  private val WEST: Angle = Angle.fromDegrees(270)

  // Grid step in metres between sampled coordinates.
  private val step: Int = 100

  private val renderableLayer: RenderableLayer = new RenderableLayer {
    setPickEnabled(false)
    setName("DistributionPowerDensityManager layer")
  }

  /** Removes the power-density layer from the globe and redraws.
    * (Method name is a historical misspelling of "hidden", kept for callers.) */
  def hiden() : Unit = {
    renderableLayer.removeAllRenderables()
    AppCeemRadarFrame.wwd.getModel.getLayers.remove(renderableLayer)
    AppCeemRadarFrame.wwd.redraw()
  }

  /** Builds and displays the power-density surface for the given radars.
    *
    * NOTE(review): only SHOW_TYPE_3D and SHOW_TYPE_2D are handled in the match
    * below; passing SHOW_TYPE_ISOLINE throws a MatchError — confirm intended.
    */
  def show(typeShow: String, elevation: Int, radars: Iterable[Radar]): Unit = {
    VisibleUtils.insertBeforeCompass(AppCeemRadarFrame.wwd, renderableLayer)
    renderableLayer.clearList()
    val elevationModel = AppCeemRadarFrame.wwd.getModel.getGlobe.getElevationModel
    val distributionPowerDensity: AnalyticSurface = typeShow match {
      case SHOW_TYPE_3D => createDistribution3DPowerDensity(elevationModel, step, elevation, radars);
      case SHOW_TYPE_2D => createDistribution2DPowerDensity(elevationModel, step, elevation, radars);
    }
    distributionPowerDensity.setClientLayer(renderableLayer)
    renderableLayer.addRenderable(distributionPowerDensity)
    AppCeemRadarFrame.wwd.redraw()
  }

  /** Builds the 3D (fixed-altitude, shadowed, opaque) power-density surface. */
  private def createDistribution3DPowerDensity(em: ElevationModel, step: Int, roof: Double, radars: Iterable[Radar]): AnalyticSurface = {
    val sector = createSectorForAllRadar(radars, roof)
    val coordinates: Array[Array[LatLon]] = createSectorCoordinates(sector, step)
    val elevation: Array[Array[Double]] = createElevationSector(coordinates, em, roof)
    val gridPower: Array[Array[Double]] = createGridPower(coordinates, radars, roof)
    val value: Array[Array[Double]] = mergeElevationAndPower(elevation, gridPower)
    val flatValue: Array[Double] = value.flatten
    new AnalyticSurface(sector, roof, coordinates.length, coordinates(0).length) {
      setSurfaceAttributes(
        new AnalyticSurfaceAttributes {
          setDrawShadow(true)
          setInteriorOpacity(1.0)
          setOutlineWidth(3)
        }
      )
      setValues(
        AnalyticSurface.createColorGradientValues(
          flatValue,
          0.0, //Double.MAX_VALUE,
          flatValue.min / 25000,
          flatValue.max,
          HUE_BLUE,
          HUE_RED
        )
      )
    }
  }

  /** Builds the 2D (terrain-clamped, semi-transparent) power-density surface. */
  private def createDistribution2DPowerDensity(em: ElevationModel, step: Int, roof: Double, radars: Iterable[Radar]): AnalyticSurface = {
    val sector = createSectorForAllRadar(radars, roof)
    val coordinates: Array[Array[LatLon]] = createSectorCoordinates(sector, step)
    val elevation: Array[Array[Double]] = createElevationSector(coordinates, em, roof)
    val gridPower: Array[Array[Double]] = createGridPower(coordinates, radars, roof)
    val value: Array[Array[Double]] = mergeElevationAndPower(elevation, gridPower)
    val flatValue: Array[Double] = value.flatten
    new AnalyticSurface(sector, roof, coordinates.length, coordinates(0).length) {
      setAltitude(WorldWind.CLAMP_TO_GROUND)
      setSurfaceAttributes(
        new AnalyticSurfaceAttributes {
          setDrawShadow(false)
          setInteriorOpacity(0.6)
          setOutlineWidth(3)
        }
      )
      setValues(
        AnalyticSurface.createColorGradientValues(
          flatValue,
          0.0, //Double.MAX_VALUE,
          flatValue.min / 10000,
          flatValue.max,
          HUE_BLUE,
          HUE_RED
        )
      )
    }
  }

  // Implicitly adapts a raw Double array to the BufferWrapper expected by
  // AnalyticSurface.createColorGradientValues.
  implicit def arrayDoublesToBufferWrapper(value: Array[Double]): BufferWrapper = {
    val buffer: BufferWrapper = new DoubleBufferFactory().newBuffer(value.length)
    buffer.putDouble(0, value, 0, value.length)
    buffer
  }

  /** Computes the bounding sector covering every radar's coverage circle at
    * the research altitude (projecting each radius N/E/S/W from the radar). */
  private def createSectorForAllRadar(radars: Iterable[Radar], researchHeight: Double): Sector = {
    var minLat = 90.0
    var minLon = 180.0
    var maxLat = 0.0
    var maxLon = -180.0

    for (radar <- radars) {
      val dist: Double = radar.radiusOnElevation(researchHeight)
      val positions = Array(
        destinationPoint(radar.latLon, NORTH, dist),
        destinationPoint(radar.latLon, EAST, dist),
        destinationPoint(radar.latLon, SOUTH, dist),
        destinationPoint(radar.latLon, WEST, dist)
      )
      for (pos <- positions) {
        minLat = Math.min(minLat, pos.latitude.degrees)
        minLon = Math.min(minLon, pos.longitude.degrees)
        maxLat = Math.max(maxLat, pos.latitude.degrees)
        maxLon = Math.max(maxLon, pos.longitude.degrees)
      }
    }

    new Sector(
      Angle.fromDegreesLatitude(minLat),
      Angle.fromDegreesLatitude(maxLat),
      Angle.fromDegreesLongitude(minLon),
      Angle.fromDegreesLongitude(maxLon)
    )
  }

  /** Spherical great-circle destination point: the location `dist` metres from
    * `pos` along the given azimuth, on a sphere of radius GLOBE_RADIUS. */
  private def destinationPoint(pos: LatLon, azimuth: Angle, dist: Double): LatLon = {
    if (dist == 0) {
      new LatLon(pos)
    } else {
      val θ: Double = azimuth.radians
      val δ: Double = dist / GLOBE_RADIUS

      val φ1: Double = pos.latitude.radians
      val λ1: Double = pos.longitude.radians

      val φ2: Double = Math.asin(Math.sin(φ1) * Math.cos(δ) + Math.cos(φ1) * Math.sin(δ) * Math.cos(θ))
      var λ2: Double = λ1 + Math.atan2(Math.sin(θ) * Math.sin(δ) * Math.cos(φ1), Math.cos(δ) - Math.sin(φ1) * Math.sin(φ2))
      // Normalize longitude to (-180°, +180°].
      λ2 = (λ2 + 3 * Math.PI) % (2 * Math.PI) - Math.PI

      LatLon.fromRadians(φ2, λ2)
    }
  }

  /**
   * Returns the grid of coordinates covering the sector, sampled with the
   * given step.
   *
   * @param sector sector whose coordinate grid is produced
   * @param step   step between samples, in metres
   * @return grid of coordinates (rows north to south, columns west to east);
   *         cells are null where a shorter row was centred in a wider one
   */
  private def createSectorCoordinates(sector: Sector, step: Int): Array[Array[LatLon]] = {
    val array: ArrayBuffer[ArrayBuffer[LatLon]] = new ArrayBuffer[ArrayBuffer[LatLon]]
    val cornersSector: Array[LatLon] = sector.getCorners
    val northWestCorner: LatLon = cornersSector(3)
    var i = 0
    var exitLine = false
    // Walk south row by row until we leave the sector.
    while (!exitLine) {
      val pos = destinationPoint(northWestCorner, SOUTH, step * i)
      if (sector.getMinLatitude.degrees > pos.latitude.degrees) {
        exitLine = true
      } else {
        val innerArray = new ArrayBuffer[LatLon]
        var exitColumns = false
        var j = 0
        // Walk east along the row until we leave the sector.
        while (!exitColumns) {
          val next = destinationPoint(pos, EAST, step * j)
          if (sector.getMaxLongitude.degrees < next.longitude.degrees) {
            exitColumns = true
          } else {
            innerArray += next
          }
          j += 1
        }
        array += innerArray
      }
      i += 1
    }

    // Pads rows of differing length into a rectangular matrix, centring each
    // row; unfilled cells stay null.
    def listOfListToMatrix(sourse: ArrayBuffer[ArrayBuffer[LatLon]]): Array[Array[LatLon]] = {
      var maxHeight = sourse.length
      var maxWidth = sourse.maxBy(list => list.length).length
      val matrix = new Array[Array[LatLon]](maxHeight)
      var i = 0
      for (line <- sourse) {
        var initIndex = (maxWidth - line.length) / 2
        matrix(i) = new Array[LatLon](maxWidth)
        for (column <- line) {
          matrix(i)(initIndex) = column
          initIndex += 1
        }
        i += 1
      }
      matrix
    }
    listOfListToMatrix(array)
  }

  /** Samples the terrain elevation at each grid coordinate; elevations below
    * the roof altitude are reported as 0 (no obstruction). Null cells keep 0. */
  private def createElevationSector(sectorCoordinates: Array[Array[LatLon]], em: ElevationModel, roof: Double): Array[Array[Double]] = {
    val sectorElevation = new Array[Array[Double]](sectorCoordinates.length)
    for (i <- 0 until sectorCoordinates.length) {
      val line = sectorCoordinates(i)
      sectorElevation(i) = new Array[Double](line.length)
      for (j <- 0 until line.length) {
        val latLon = line(j)
        if (latLon != null) {
          val elevation = em.getElevation(latLon.latitude, latLon.longitude)
          sectorElevation(i)(j) = if (elevation < roof) 0 else elevation
        }
      }
    }
    sectorElevation
  }

  /** Computes, for every grid cell, the maximum power density contributed by
    * any radar at the research elevation (floored at Radar.OpacityValuePower). */
  private def createGridPower(coordinates: Array[Array[LatLon]], radars: Iterable[Radar], elevation: Double): Array[Array[Double]] = {

    def findMax(doubles: Iterable[Double]): Double = (List(Radar.OpacityValuePower) ++ doubles).max

    def powerFromRadar(pos: LatLon, radars: Iterable[Radar], elevation: Double) = {
      for (radar <- radars) yield {
        val length = LatLon.ellipsoidalDistance(pos, radar.latLon, Earth.WGS84_EQUATORIAL_RADIUS, Earth.WGS84_POLAR_RADIUS)
        if (pos != null) {
          radar.power(length, elevation)
        } else {
          Radar.OpacityValuePower
        }
      }
    }

    (for (i <- 0 until coordinates.length) yield {
      (for (j <- 0 until coordinates(i).length) yield {
        if (coordinates(i)(j) != null) {
          findMax(powerFromRadar(coordinates(i)(j), radars, elevation))
        } else {
          0
        }
      }).toArray
    }).toArray
  }

  /**
   * Overlays the terrain mask on the power grid: every cell where terrain
   * rises above the research altitude is forced to the sentinel value 1000 so
   * it renders as an obstacle boundary.
   *
   * @param elevation terrain elevation grid (0 where below the roof altitude)
   * @param power     radar power grid; mutated in place and returned
   */
  private def mergeElevationAndPower(elevation: Array[Array[Double]], power: Array[Array[Double]]): Array[Array[Double]] = {
    // Mark boundaries: flag every cell covered by terrain with NaN.
    for {i <- 0 until power.length
         j <- 0 until power(i).length
         if elevation(i)(j) > 0} {
      power(i)(j) = Double.NaN
    }
    // Normalize: replace flagged cells with the sentinel value.
    // FIX: was `power(i)(j) == Double.NaN`, which is always false (NaN never
    // compares equal to itself), so the NaN flags were never replaced.
    for {i <- 0 until power.length
         j <- 0 until power(i).length
         if power(i)(j).isNaN} {
      power(i)(j) = 1000
    }
    power
  }
}
| aleo72/ww-ceem-radar | src/main/scala/ua/edu/odeku/ceem/mapRadar/tools/radar/surface/SurfaceManager.scala | Scala | apache-2.0 | 10,766 |
package com.cloudray.scalapress.plugin.form.controller
import org.springframework.stereotype.Controller
import org.springframework.web.bind.annotation._
import org.springframework.beans.factory.annotation.Autowired
import javax.servlet.http.{HttpServletResponse, HttpServletRequest}
import org.springframework.web.multipart.MultipartFile
import com.cloudray.scalapress.plugin.form.{RecaptchaClient, Form, FormSubmissionService, FormDao, SubmissionDao}
import com.cloudray.scalapress.theme.{ThemeService, ThemeDao}
import com.cloudray.scalapress.folder.controller.FolderController
import com.cloudray.scalapress.util.mvc.ScalapressPage
import scala.collection.JavaConverters._
import com.cloudray.scalapress.plugin.form.controller.renderer.FormSubmissionTextRenderer
import org.apache.http.impl.client.DefaultHttpClient
import com.cloudray.scalapress.framework.{Logging, ScalapressRequest, ScalapressContext}
/** @author Stephen Samuel */
@Controller
@RequestMapping(Array("form/{id}"))
class SubmissionController extends Logging {

  @Autowired var submissionDao: SubmissionDao = _
  @Autowired var context: ScalapressContext = _
  @Autowired var formDao: FormDao = _
  @Autowired var formService: FormSubmissionService = _
  @Autowired var themeDao: ThemeDao = _
  @Autowired var themeService: ThemeService = _
  @Autowired var folderController: FolderController = _

  // Resolves the {id} path variable to its Form for every request.
  @ModelAttribute("form") def form(@PathVariable("id") id: Long) = formDao.find(id)

  /** Handles a form POST: validates (including optional reCAPTCHA), and either
    * re-renders the originating folder with errors or persists the submission
    * and renders the form's confirmation page.
    */
  @ResponseBody
  @RequestMapping(produces = Array("text/html"), method = Array(RequestMethod.POST))
  def submit(@ModelAttribute("form") form: Form,
             req: HttpServletRequest,
             resp: HttpServletResponse,
             @RequestParam(value = "file") files: java.util.List[MultipartFile],
             @RequestParam(value = "folderId", required = false, defaultValue = "0") folderId: Long,
             @RequestParam(value = "objId", required = false, defaultValue = "0") objId: Long): ScalapressPage = {

    val sreq = ScalapressRequest(req, context).withTitle("Form Submitted")
    formService.checkErrors(form, sreq)

    // Verify the reCAPTCHA answer against Google when the form requires it.
    if (form.captcha) {
      val client = new RecaptchaClient(new DefaultHttpClient())
      if (!client.post(req.getParameter("recaptcha_challenge_field"),
        req.getParameter("recaptcha_response_field"),
        req.getRemoteAddr))
        sreq.error("captcha.error", "Please complete captcha")
    }

    sreq.hasErrors match {
      case true => {
        // Validation failed: redisplay the folder the form was posted from.
        logger.debug("Form has errors {}, redirecting to folder {}", sreq.errors, folderId)
        if (folderId > 0) folderController.view(folderId, req, resp)
        else folderController.view(req, resp)
      }
      case false => {
        // Persist the submission and render the confirmation page, including
        // any custom script configured on the form.
        val submission = createSubmission(form, sreq, files.asScala, folderId, objId)
        val theme = themeService.default
        val page = ScalapressPage(theme, sreq)
        if (form.submissionScript != null)
          page.body(form.submissionScript)
        page.body(FormSubmissionTextRenderer.render(form.submissionText, submission))
        page
      }
    }
  }

  /** Builds and saves a FormSubmission, attaching the originating folder
    * and/or object when their ids are supplied. */
  def createSubmission(form: Form, sreq: ScalapressRequest, files: Seq[MultipartFile], folderId: Long, objId: Long) = {
    val submission = formService.doSubmission(form, sreq, files)
    if (folderId > 0)
      submission.folder = context.folderDao.find(folderId)
    if (objId > 0)
      submission.obj = context.itemDao.find(objId)
    submissionDao.save(submission)
    submission
  }
}
| vidyacraghav/scalapress | src/main/scala/com/cloudray/scalapress/plugin/form/controller/SubmissionController.scala | Scala | apache-2.0 | 3,466 |
package com.hm.khols.reccom.functional
import com.twitter.scalding.TupleConversions
import com.pragmasoft.scaldingunit.TestInfrastructure
import scala.collection.mutable
import com.hm.khols.reccom.ProdRecByPriceJob
import com.hm.khols.reccom.schemas._
import com.hm.khols.reccom.test.data.TestData._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.FlatSpec
import org.scalatest.matchers.ShouldMatchers
import com.twitter.scalding.JobTest
import com.twitter.scalding.Csv
import com.twitter.scalding.Tsv
import com.hm.khols.reccom.ProdRecByPriceJob
/**
* @author wcbdd
*/
@RunWith(classOf[JUnitRunner])
class ProdRecbyAvgPriceJobTest extends FlatSpec with ShouldMatchers with TupleConversions with TestInfrastructure {
"Prod Recccom job" should "do the full transformation" in {
JobTest(classOf[ProdRecByPriceJob].getName)
.arg("prodRecommInput", "prodRecommInput")
.arg("prodPriceInput", "prodPriceInput")
.arg("output", "output")
.arg("errorReccomRecords", "errorReccomRecords")
.arg("errorPriceRecords", "errorPriceRecords")
.source(Csv("prodRecommInput", ",", PROD_RECCOM_SCHEMA), prodReccomData)
.source(Csv("prodPriceInput", ",", PROD_PRICE_SCHEMA), prodPriceData)
.sink(Tsv("output")) {
buffer: mutable.Buffer[(String, String)] =>
buffer.toList shouldEqual prodReccomResult
}
.sink(Tsv("errorReccomRecords")) {
buffer: mutable.Buffer[(String, String)] =>
buffer.toList shouldEqual List()
}
.sink(Tsv("errorPriceRecords")) {
buffer: mutable.Buffer[(String, String)] =>
buffer.toList.size == 1
}
.run
}
} | linu2891/reccom.io | examples/src/test/scala/com/hm/khols/reccom/functional/ProdRecbyAvgPriceJobTest.scala | Scala | apache-2.0 | 1,726 |
package com.tribbloids.spookystuff.uav.actions
import com.tribbloids.spookystuff.actions._
import com.tribbloids.spookystuff.row.SpookySchema
/**
* Do many things:
* Globally
* 1. add takeoff to the beginning of the trace if it is missing
*
* Locally
* 2. replace Anchors.Home with UAVConf.home
* 3. replace Anchors.HomeLevelProjection with previous action._end minus its relative altitude to UAVConf.home
* 4. replace Anchors.MSLProjection with previous action._end minus its absolute altitude to Anchors.Geodetic
* 5. (pending) replace Anchors.GroundProjection with previous action._end
* minus its relative altitude to ground elevation directly under it (query from various terrian API or DB)
*/
object AutoTakeoffRule extends RewriteRule[Trace] {

  /** Prepends a default [[Takeoff]] when the trace contains navigation actions
    * but does not already begin (navigation-wise) with one; otherwise returns
    * the trace unchanged. */
  override def rewrite(v1: Trace, schema: SpookySchema): Trace =
    // Only the first UAVNavigation in the trace matters.
    v1.collectFirst { case nav: UAVNavigation => nav } match {
      case None => v1 // no navigation at all: nothing to do
      case Some(_: Takeoff) => v1 // already starts with a takeoff
      case Some(_) => List(Takeoff()) ++ v1
    }
}
| tribbloid/spookystuff | uav/src/main/scala/com/tribbloids/spookystuff/uav/actions/AutoTakeoffRule.scala | Scala | apache-2.0 | 1,146 |
/*
* Copyright 2016 MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mongodb.scala.model
import java.lang.reflect.Modifier._
import scala.util.{Success, Try}
import org.scalatest.prop.TableDrivenPropertyChecks._
import org.scalatest.{FlatSpec, Matchers}
// Verifies that the Scala CollationAlternate wrapper stays in lock-step with the
// wrapped Java driver enum com.mongodb.client.model.CollationAlternate.
class CollationAlternateSpec extends FlatSpec with Matchers {

  "CollationAlternate" should "have the same static fields as the wrapped CollationAlternate" in {
    val collationAlternateClass: Class[CollationAlternate] = classOf[com.mongodb.client.model.CollationAlternate]
    // Collect every static field and static method of the Java enum via reflection.
    val wrappedFields = collationAlternateClass.getDeclaredFields.filter(f => isStatic(f.getModifiers)).map(_.getName).toSet
    val wrappedMethods = collationAlternateClass.getDeclaredMethods.filter(f => isStatic(f.getModifiers)).map(_.getName).toSet
    // Compiler/enum-generated members that the Scala side deliberately does not mirror.
    val exclusions = Set("$VALUES", "valueOf", "values")

    val wrapped = (wrappedFields ++ wrappedMethods) -- exclusions
    // Scala-side synthetic members (apply, lambda artifacts) are likewise excluded.
    val local = CollationAlternate.getClass.getDeclaredMethods.map(_.getName).toSet -- Set("apply", "$deserializeLambda$", "$anonfun$fromString$1")

    local should equal(wrapped)
  }

  it should "return the expected CollationAlternate" in {
    forAll(collationAlternates) { (value: String, expectedValue: Try[CollationAlternate]) =>
      CollationAlternate.fromString(value) should equal(expectedValue)
    }
  }

  it should "handle invalid values" in {
    forAll(invalidCollationAlternates) { (value: String) =>
      CollationAlternate.fromString(value) should be a 'failure
    }
  }

  // Mapping of wire-protocol string values to their expected enum constants.
  val collationAlternates =
    Table(
      ("stringValue", "JavaValue"),
      ("non-ignorable", Success(CollationAlternate.NON_IGNORABLE)),
      ("shifted", Success(CollationAlternate.SHIFTED))
    )

  // Uppercase constant names are NOT valid wire values; fromString must reject them.
  val invalidCollationAlternates = Table("invalid values", "NON_IGNORABLE", "SHIFTED")
}
| jCalamari/mongo-scala-driver | driver/src/test/scala/org/mongodb/scala/model/CollationAlternateSpec.scala | Scala | apache-2.0 | 2,339 |
package com.sksamuel.elastic4s.admin
import com.sksamuel.elastic4s._
import com.sksamuel.elastic4s.analyzers.WhitespaceAnalyzer
import org.scalatest.WordSpec
import com.sksamuel.elastic4s.testkit.ElasticSugar
// Exercises the elastic4s get-settings / update-settings API against a live test node
// (provided by ElasticSugar).
class SettingsTest extends WordSpec with ElasticSugar with ElasticDsl {

  // Create the target index up-front so the settings queries below have something to read.
  client.execute {
    createIndex("settings_test").mappings(
      mapping("r").as(
        stringField("a") stored true analyzer WhitespaceAnalyzer,
        stringField("b")
      )
    )
  }.await

  "get settings" should {
    "return settings" in {

      val resp = client.execute {
        getSettings("settings_test")
      }.await

      val settings = resp.getIndexToSettings.get("settings_test")
      // default values
      assert(settings.getAsSettings("index").get("number_of_shards") === "5")
      assert(settings.getAsSettings("index").get("number_of_replicas") === "1")
    }
  }

  "put settings" should {
    "update settings" in {

      // Dynamic setting: refresh_interval can be changed on a live index.
      client.execute {
        updateSettings("settings_test").set(Map("index.refresh_interval" -> "10s"))
      }.await

      val resp = client.execute {
        getSettings("settings_test")
      }.await

      val refresh_interval = resp.getSetting("settings_test", "index.refresh_interval")

      assert(refresh_interval === "10s")
    }
  }
}
| aroundus-inc/elastic4s | elastic4s-tests/src/test/scala/com/sksamuel/elastic4s/admin/SettingsTest.scala | Scala | apache-2.0 | 1,286 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.carbondata.spark.testsuite.filterexpr
import org.apache.spark.sql.Row
import org.apache.spark.sql.common.util.CarbonHiveContext._
import org.apache.spark.sql.common.util.QueryTest
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
import org.scalatest.BeforeAndAfterAll
// Verifies IS NULL / IS NOT NULL filter behaviour on a measure column (salary)
// loaded from a CSV containing null measure values.
class NullMeasureValueTestCaseFilter extends QueryTest with BeforeAndAfterAll {

  override def beforeAll {
    sql("drop table if exists t3")
    sql(
      "CREATE TABLE t3 (ID bigInt, date Timestamp, country String, name String, " +
        "phonetype String, serialname String, salary Int) STORED BY 'org.apache.carbondata.format'"
    )
    // The fixture CSV uses yyyy/MM/dd dates, so override the global timestamp format
    // for the duration of this suite.
    CarbonProperties.getInstance()
      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
    sql("LOAD DATA LOCAL INPATH './src/test/resources/datawithnullmeasure.csv' into table t3");
  }

  test("select ID from t3 where salary is not null") {
    // Rows 1 and 4 of the fixture carry non-null salaries.
    checkAnswer(
      sql("select ID from t3 where salary is not null"),
      Seq(Row(1),Row(4)))
  }

  test("select ID from t3 where salary is null") {
    checkAnswer(
      sql("select ID from t3 where salary is null"),
      Seq(Row(2),Row(3)))
  }

  override def afterAll {
    sql("drop table t3")
    // NOTE(review): this restores the format to a hard-coded "dd-MM-yyyy" rather than
    // the value that was in effect before beforeAll ran -- confirm this matches the
    // project-wide default, otherwise it can leak into subsequent suites.
    CarbonProperties.getInstance()
      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
  }
}
| ashokblend/incubator-carbondata | integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/NullMeasureValueTestCaseFilter.scala | Scala | apache-2.0 | 2,213 |
package io.reactors
import scala.collection._
/** Service that tracks different transports for remote communication.
 *
 *  The most important method is `resolve`, which creates a channel from a
 *  channel URL. This allows communication with reactors in non-local
 *  reactor systems, e.g. in another process, or on another machine.
 */
class Remote(val system: ReactorSystem) extends Protocol.Service {
  // Maps a URL schema string to its lazily instantiated transport.
  // NOTE(review): plain mutable.Map with no synchronization -- assumes
  // single-threaded access to this service; confirm with callers.
  private val transports = mutable.Map[String, Remote.Transport]()

  /** Reflectively instantiates and registers the transport configured for `schema`.
   *
   *  Fails with an illegal-argument error when the instantiated transport reports
   *  a schema different from the one it is configured under.
   */
  private def initializeTransport(schema: String): Unit = {
    val t = system.bundle.urlMap(schema)
    val transport = Platform.Reflect.instantiate(t.transportName, Seq(system))
      .asInstanceOf[Remote.Transport]
    if (transport.schema != t.url.schema) exception.illegalArg(
      s"Transport with schema '${transport.schema}' must have the same schema in " +
      s"the reactor system configuration."
    )
    transports(t.url.schema) = transport
  }

  /** Returns the transport registered for `schema`, instantiating it on first use. */
  def transport(schema: String) = {
    if (!transports.contains(schema)) {
      initializeTransport(schema)
    }
    transports(schema)
  }

  /** Creates a channel from a parsed channel URL.
   *
   *  Delegates lazy transport initialization to `transport`, so the
   *  initialize-on-demand logic lives in exactly one place.
   */
  def resolve[@spec(Int, Long, Double) T: Arrayable](url: ChannelUrl): Channel[T] = {
    transport(url.reactorUrl.systemUrl.schema).newChannel[T](url)
  }

  /** Creates a channel from a channel URL string (parsed, then resolved). */
  def resolve[@spec(Int, Long, Double) T: Arrayable](url: String): Channel[T] = {
    resolve(ChannelUrl.parse(url))
  }

  /** Shuts down every transport that was instantiated by this service. */
  def shutdown() {
    for ((schema, transport) <- transports) transport.shutdown()
  }
}
/** Types and methods related to the `Remote` service.
 */
object Remote {
  /** Interface for the transport API.
   *
   *  Concrete implementations of this interface represent different transports.
   *  Implementations are instantiated reflectively (see `Platform.Reflectable`)
   *  with the owning reactor system as the sole constructor argument.
   */
  trait Transport extends Platform.Reflectable {
    /** Creates a new channel for this transport.
     *
     *  @tparam T       type of the events for the new channel
     *  @param url      url of the newly created channel
     *  @return         a new channel associated with this transport
     */
    def newChannel[@spec(Int, Long, Double) T: Arrayable](url: ChannelUrl): Channel[T]

    /** The schema string that this transport must be registered with.
     */
    def schema: String

    /** Port associated with the transport if applicable, or `-1` otherwise.
     */
    def port: Int

    /** Shuts down the transport, and releases the associated resources.
     */
    def shutdown(): Unit
  }
}
| storm-enroute/reactors | reactors-core/shared/src/main/scala/io/reactors/Remote.scala | Scala | bsd-3-clause | 2,509 |
package rsgr
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import scala.concurrent.duration._
// Gatling load test: 100 concurrent users, each repeatedly hitting the
// /api/rsgr/user endpoint with a pre-set auth cookie.
class RSGRSimulationAPI100 extends Simulation {

  object Browse {
    val headers_10 = Map("Content-Type" -> "application/json") // Note the headers specific to a given request

    // repeat is a loop resolved at RUNTIME
    val browse = repeat(5, "i") { // Note how we force the counter name so we can reuse it
      exec(http("api_rsgr_user ${i}")
        .get("/api/rsgr/user"))
        .pause(1)  // 1-second think time between iterations
    }
  }

  val httpConf = http
    .baseURL("http://runclub.akqatest.cn") // Here is the root for all relative URLs
    .acceptHeader("application/json") // Here are the common headers
    .doNotTrackHeader("1")
    .acceptLanguageHeader("en-US,en;q=0.5")
    .acceptEncodingHeader("gzip, deflate")
    .userAgentHeader("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:16.0) Gecko/20100101 Firefox/16.0")

  val users = scenario("RSGR API")
    .exec(
      // Pre-seed the session cookie so requests are authenticated as a test user.
      addCookie(
        Cookie("openid", "rsgrtest05001").withDomain("runclub.akqatest.cn").withPath("/")
      ))
    .exec(Browse.browse)

  // Ramp profile: all 100 virtual users injected at once.
  setUp(users.inject(atOnceUsers(100)).protocols(httpConf))
}
| JoshuaChi/RSGR-gatling-stress-testing | src/test/scala/rsgr/RSGRSimulationAPI100.scala | Scala | mit | 1,172 |
package geek.lawsof.physics.lib.block.te.render
import cpw.mods.fml.relauncher.{Side, SideOnly}
import geek.lawsof.physics.Reference
import geek.lawsof.physics.lib.block.te.render.model.CustomModelBase
import net.minecraft.block.Block
import net.minecraft.client.renderer.tileentity.TileEntitySpecialRenderer
import net.minecraft.client.renderer.{OpenGlHelper, Tessellator}
import net.minecraft.tileentity.TileEntity
import net.minecraft.util.ResourceLocation
import net.minecraft.world.World
import org.lwjgl.opengl.GL11
/**
 * Base class for tile-entity special renderers: binds a mod texture, translates
 * to the block position and delegates actual model drawing to the subclass via
 * `renderModel()` / `tex`.
 */
@SideOnly(Side.CLIENT)
abstract class CustomRendererBase extends TileEntitySpecialRenderer {
  /** Builds a ResourceLocation of the form "<modid>:<texPath>/<texName>". */
  def getResource(texPath: String, texName: String): ResourceLocation = {
    new ResourceLocation(Reference.MOD_ID, s"$texPath/$texName")
  }

  /** Renders the given model with the standard 1/16 block scale. */
  def modelRender(model: CustomModelBase) {
    model.render(null, 0.0F, 0.0F, 0.0F, 0.0F, 0.0F, 0.0625F)
  }

  def addTexture(texPath: String, texName: String) = this.bindTexture(getResource(texPath, texName))

  // BUG FIX: previously passed tex._1 for both path and name, producing
  // "<path>/<path>" instead of "<path>/<name>" and binding a nonexistent texture.
  def addTexture(tex: (String, String)) = this.bindTexture(getResource(tex._1, tex._2))

  def renderTileEntityAt(tileEntity: TileEntity, x: Double, y: Double, z: Double, f: Float) {
    GL11.glPushMatrix()
    // Move to the block's corner (with a +1 offset on x/y to compensate for the
    // 180-degree roll applied below).
    GL11.glTranslatef(x.asInstanceOf[Float] + 1.0F, y.asInstanceOf[Float] + 1.0F, z.asInstanceOf[Float])
    this.addTexture(tex)
    GL11.glPushMatrix()
    GL11.glRotatef(180F, 0.0F, 0.0F, 1.0F)
    this.renderModel()
    GL11.glPopMatrix()
    GL11.glPopMatrix()
  }

  // NOTE(review): this rotation is pushed and popped with nothing rendered in
  // between, so it currently has no visible effect; kept as-is pending confirmation.
  private def rotateModelViaMeta(world: World, x: Int, y: Int, z: Int) {
    val meta: Int = world getBlockMetadata(x, y, z)
    GL11 glPushMatrix()
    GL11 glRotatef(meta * (-90), 0.0F, 0.0F, 1.0F)
    GL11 glPopMatrix()
  }

  /** Applies the block's mixed light/sky brightness to the tessellator state. */
  private def adjustLightFixture(world: World, i: Int, j: Int, k: Int, block: Block) {
    val tess: Tessellator = Tessellator.instance
    val brightness: Float = block.getMixedBrightnessForBlock(world, i, j, k)
    val skyLight: Int = world.getLightBrightnessForSkyBlocks(i, j, k, 0)
    val modulousModifier: Int = skyLight % 65536
    val divModifier: Int = skyLight / 65536
    tess.setColorOpaque_F(brightness, brightness, brightness)
    OpenGlHelper.setLightmapTextureCoords(OpenGlHelper.lightmapTexUnit, modulousModifier.asInstanceOf[Float], divModifier)
  }

  /** Subclass hook: draw the actual model (texture is already bound). */
  protected def renderModel()

  /** Subclass hook: (texture path, texture name) pair for this renderer. */
  protected def tex: (String, String)
}
| GeckoTheGeek42/TheLawsOfPhysics | src/main/scala/geek/lawsof/physics/lib/block/te/render/CustomRendererBase.scala | Scala | mit | 2,367 |
/*
* Copyright (c) 2013-14 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless
import org.junit.Test
import org.junit.Assert._
import shapeless.test._
import testutil._
class TupleTests {
import nat._
import poly._
import syntax.std.traversable._
import syntax.std.tuple._
import syntax.typeable._
  // Type aliases naming common tuple shapes used throughout the tests
  // (S=Set, O=Option, I=Int, S=String in positional abbreviations).
  type SI = Tuple1[Set[Int]]
  type OI = Tuple1[Option[Int]]

  type SISS = (Set[Int], Set[String])
  type OIOS = (Option[Int], Option[String])

  type ISII = (Int, String, Int, Int)
  type IIII = (Int, Int, Int, Int)
  type IYII = (Int, Any, Int, Int)

  type OIOSOIOI = (Option[Int], Option[String], Option[Int], Option[Int])
  type SISSSISI = (Set[Int], Set[String], Set[Int], Set[Int])
  type BBBB = (Boolean, Boolean, Boolean, Boolean)

  // Small sealed-ish hierarchy used for unification / subtype tests.
  trait Fruit
  case class Apple() extends Fruit
  case class Pear() extends Fruit
  case class Banana() extends Fruit

  // PWS is the LUB the compiler infers for case-class siblings of Fruit.
  type PWS = Product with Serializable with Fruit

  type YYYY = (Any, Any, Any, Any)
  type FF = (Fruit, Fruit)
  type AP = (Apple, Pear)
  type BP = (Banana, Pear)
  type AF = (Apple, Fruit)
  type FFFF = (Fruit, Fruit, Fruit, Fruit)
  type APAP = (Apple, Pear, Apple, Pear)
  type APBP = (Apple, Pear, Banana, Pear)
  type APB = (Apple, Pear, Banana)
  type PBPA = (Pear, Banana, Pear, Apple)
  type PABP = (Pear, Apple, Banana, Pear)

  val a : Apple = Apple()
  val p : Pear = Pear()
  val b : Banana = Banana()
  val f : Fruit = new Fruit {}

  val ap : AP = (a, p)
  val bp : BP = (b, p)
  val apap : APAP = (a, p, a, p)
  val apbp : APBP = (a, p, b, p)
  // Plain-collection mirrors of the tuples above, for to[List]/to[Array] comparisons.
  val apapList = List(a, p, a, p)
  val apbpList = List(a, p, b, p)
  val apapArray = Array(a, p, a, p)
  val apbpArray = Array(a, p, b, p)

  // Contravariant constructor, to exercise variance in conversions.
  trait Ctv[-T]
  val ci: Ctv[Int] = new Ctv[Int] {}
  val cs: Ctv[String] = new Ctv[String] {}
  val cd: Ctv[Double] = new Ctv[Double] {}
  val cicscicicdList = List(ci, cs, ci, ci, cd)
  val cicscicicdArray = Array(ci, cs, ci, ci, cd)
  val cicscicicd = (ci, cs, ci, ci, cd)

  // Invariant constructor, including an existentially-typed element.
  trait M[T]
  val mi: M[Int] = new M[Int] {}
  val ms: M[String] = new M[String] {}
  val md: M[Double] = new M[Double] {}
  val mimsmimimdList = List(mi, ms, mi, mi, md)
  val mimsmimimdArray = Array(mi, ms, mi, mi, md)
  val mimsmimimd = (mi, ms, mi, mi, md)

  import language.existentials
  val mExist: M[_] = new M[Double] {}
  val mimsmimemdList = List(mi, ms, mi, mExist, md)
  val mimsmimemdArray = Array[M[_]](mi, ms, mi, mExist, md)
  val mimsmimemd = (mi, ms, mi, mExist, md)

  // Two-parameter constructor, with and without existential second parameter.
  trait M2[A,B]
  val m2i: M2[Int, Unit] = new M2[Int, Unit] {}
  val m2s: M2[String, Unit] = new M2[String, Unit] {}
  val m2d: M2[Double, Unit] = new M2[Double, Unit] {}
  val m2im2sm2im2im2dList = List(m2i, m2s, m2i, m2i, m2d)
  val m2im2sm2im2im2dArray = Array(m2i, m2s, m2i, m2i, m2d)
  val m2im2sm2im2im2d = (m2i, m2s, m2i, m2i, m2d)

  val m2iExist: M2[Int, _] = new M2[Int, Unit] {}
  val m2sExist: M2[String, _] = new M2[String, Unit] {}
  val m2dExist: M2[Double, _] = new M2[Double, Unit] {}
  val m2eim2esm2eim2eem2edList = List(m2iExist, m2sExist, m2iExist, m2iExist, m2dExist)
  val m2eim2esm2eim2eem2edArray = Array(m2iExist, m2sExist, m2iExist, m2iExist, m2dExist)
  val m2eim2esm2eim2eem2ed = (m2iExist, m2sExist, m2iExist, m2iExist, m2dExist)

  // Simple polymorphic functions used as map/flatMap arguments.
  object mkString extends (Any -> String)(_.toString)
  object fruit extends (Fruit -> Fruit)(f => f)

  // Poly1 with a catch-all Unit case and an Int-specific case (Unit cases vanish under flatMap).
  trait incInt0 extends Poly1 {
    implicit def default[T] = at[T](t => ())
  }
  object incInt extends incInt0 {
    implicit val caseInt = at[Int](i => Tuple1(i+1))
  }

  trait extendedChoose0 extends Poly1 {
    implicit def default[T] = at[T](t => ())
  }
  object extendedChoose extends extendedChoose0 {
    implicit def caseSet[T] = at[Set[T]](s => Tuple1(s.headOption))
  }
  // head/tail on tuples behave like on HLists; empty-tuple and out-of-range
  // access must fail to compile (checked with illTyped).
  @Test
  def testBasics {
    val t = (1, "foo", 2.0)

    typed[Int](t.head)
    assertEquals(1, t.head)

    typed[String](t.tail.head)
    assertEquals("foo", t.tail.head)

    typed[Double](t.tail.tail.head)
    assertEquals(2.0, t.tail.tail.head, Double.MinPositiveValue)

    illTyped("""
      ().head
    """)

    illTyped("""
      ().tail
    """)

    illTyped("""
      t.tail.tail.tail.head
    """)
  }
  // map over tuples with various Poly1 functions; result element types are
  // tracked precisely (checked with typed[...]).
  @Test
  def testMap {
    val s1 = Tuple1(Set(1))
    val o1 = s1 map choose
    typed[OI](o1)
    assertEquals(Tuple1(Option(1)), o1)

    val s2 = (Set(1), Set("foo"))
    val o2 = s2 map choose
    typed[OIOS](o2)
    assertEquals((Option(1), Option("foo")), o2)

    val l1 = (1, "foo", 2, 3)

    val l2 = l1 map singleton
    typed[SISSSISI](l2)
    assertEquals((Set(1), Set("foo"), Set(2), Set(3)), l2)

    val l3 = l1 map option
    typed[OIOSOIOI](l3)
    assertEquals((Option(1), Option("foo"), Option(2), Option(3)), l3)

    val l4 = (Option(1), Option("foo"), Option(2), Option(3))

    val l5 = l4 map get
    typed[ISII](l5)
    assertEquals((1, "foo", 2, 3), l5)

    typed[Int](l5.head)
    typed[String](l5.tail.head)
    typed[Int](l5.tail.tail.head)
    typed[Int](l5.tail.tail.tail.head)

    val l6 = l1 map identity
    typed[ISII](l6)
    assertEquals((1, "foo", 2, 3), l6)

    val l7 = l4 map isDefined
    typed[BBBB](l7)
    assertEquals((true, true, true, true), l7)

    val l8 = (23, "foo", true)
    val l9 = l8 map mkString
    typed[(String, String, String)](l9)
    assertEquals(("23", "foo", "true"), l9)

    val l10 = apbp map fruit
    typed[(Fruit, Fruit, Fruit, Fruit)](l10)
    assertEquals(apbp, l10)

    val l11 = apbp map mkString
    typed[(String, String, String, String)](l11)
    assertEquals(("Apple()", "Pear()", "Banana()", "Pear()"), l11)
  }
  // Poly1 that duplicates every element into a pair; used by testFlatMap.
  object dup extends Poly1 {
    implicit def default[T] = at[T](t => (t, t))
  }
  // flatMap flattens nested tuples; Unit results (from the Poly1 default cases)
  // disappear from the flattened result.
  @Test
  def testFlatMap {
    val l1 = (1, "foo", true)

    val l2 = l1 flatMap dup
    typed[(Int, Int, String, String, Boolean, Boolean)](l2)
    assertEquals((1, 1, "foo", "foo", true, true), l2)

    val l3 = ((1, "foo"), (), (2.0, true), Tuple1("bar"))

    val l4 = l3 flatMap identity
    typed[(Int, String, Double, Boolean, String)](l4)
    assertEquals((1, "foo", 2.0, true, "bar"), l4)

    val l5 = (23, "foo", 7, true, 0)
    val l6 = l5 flatMap incInt
    typed[(Int, Int, Int)](l6)
    assertEquals((24, 8, 1), l6)

    val l7 = (Set(23), "foo", Set(true), 23)
    val l8 = l7 flatMap extendedChoose
    typed[(Option[Int], Option[Boolean])](l8)
    assertEquals((Option(23), Option(true)), l8)
  }
  // last yields the final element; init yields the tuple without it.
  @Test
  def testInitLast {

    val lp = apbp.last
    typed[Pear](lp)
    assertEquals(p, lp)

    val iapb = apbp.init
    typed[APB](iapb)
    assertEquals((a, p, b), iapb)
  }
  // reverse is tracked at the type level; a 1-tuple reverses to itself.
  @Test
  def testReverse {
    val pbpa = apbp.reverse
    typed[PBPA](pbpa)
    assertEquals((p, b, p, a), pbpa)

    val al = Tuple1(a)
    val ral = al.reverse
    typed[Tuple1[Apple]](ral)
    assertEquals(Tuple1(a), ral)
  }
  // ::: concatenates tuples; reverse_::: prepends the first tuple reversed.
  @Test
  def testPrepend {
    val apbp2 = ap ::: bp
    typed[APBP](apbp2)
    assertEquals((a, p, b, p), apbp2)

    typed[Apple](apbp2.head)
    typed[Pear](apbp2.tail.head)
    typed[Banana](apbp2.tail.tail.head)
    typed[Pear](apbp2.tail.tail.tail.head)

    val pabp = ap reverse_::: bp
    typed[PABP](pabp)
    assertEquals((p, a, b, p), pabp)
  }
  // toSized[List] produces a Sized list whose static length matches the tuple's
  // arity and whose element type is the inferred LUB of the tuple's elements.
  @Test
  def testToSizedList {
    def equalInferredTypes[A,B](a: A, b: B)(implicit eq: A =:= B) {}

    val unit = ()
    val sunit = unit.toSized[List]
    assertEquals(0, sunit.length)
    val expectedUnsized = List.empty[Nothing]
    equalInferredTypes(expectedUnsized, sunit.unsized)
    assertEquals(expectedUnsized, sunit.unsized)

    val sizedApap = apap.toSized[List]
    assertEquals(Nat toInt apap.length, sizedApap.length)
    equalInferredTypes(apapList, sizedApap.unsized)
    assertEquals(apapList, sizedApap.unsized)

    val sizedApbp = apbp.toSized[List]
    assertEquals(Nat toInt apbp.length, sizedApbp.length)
    equalInferredTypes(apbpList, sizedApbp.unsized)
    assertEquals(apbpList, sizedApbp.unsized)

    val sizedCicscicicd = cicscicicd.toSized[List]
    assertEquals(Nat toInt cicscicicd.length, sizedCicscicicd.length)
    equalInferredTypes(cicscicicdList, sizedCicscicicd.unsized)
    assertEquals(cicscicicdList, sizedCicscicicd.unsized)

    val sizedMimsmimimd = mimsmimimd.toSized[List]
    assertEquals(Nat toInt mimsmimimd.length, sizedMimsmimimd.length)
    equalInferredTypes(mimsmimimdList, sizedMimsmimimd.unsized)
    assertEquals(mimsmimimdList, sizedMimsmimimd.unsized)

    val sizedMimsmimemd = mimsmimemd.toSized[List]
    assertEquals(Nat toInt mimsmimemd.length, sizedMimsmimemd.length)
    // With an existential element the inferred types diverge, so only the weaker
    // typed[...] check is possible here.
    // equalInferredTypes(mimsmimemdList, sizedMimsmimemd.unsized)
    typed[List[M[_]]](sizedMimsmimemd.unsized)
    assertEquals(mimsmimemdList, sizedMimsmimemd.unsized)

    val sizedM2im2sm2im2im2d = m2im2sm2im2im2d.toSized[List]
    assertEquals(Nat toInt m2im2sm2im2im2d.length, sizedM2im2sm2im2im2d.length)
    equalInferredTypes(m2im2sm2im2im2dList, sizedM2im2sm2im2im2d.unsized)
    assertEquals(m2im2sm2im2im2dList, sizedM2im2sm2im2im2d.unsized)

    val sizedM2eim2esm2eim2eem2ed = m2eim2esm2eim2eem2ed.toSized[List]
    assertEquals(Nat toInt m2eim2esm2eim2eem2ed.length, sizedM2eim2esm2eim2eem2ed.length)
    // equalInferredTypes(m2eim2esm2eim2eem2edList, sizedM2eim2esm2eim2eem2ed.unsized)
    typed[List[M2[_ >: Double with Int with String, _]]](sizedM2eim2esm2eim2eem2ed.unsized)
    assertEquals(m2eim2esm2eim2eem2edList, sizedM2eim2esm2eim2eem2ed.unsized)
  }
  // Array variant of testToSizedList; arrays need an element-wise equality
  // helper since Array.equals is reference equality.
  @Test
  def testToSizedArray {
    def assertArrayEquals2[T](arr1 : Array[T], arr2 : Array[T]) =
      assertArrayEquals(arr1.asInstanceOf[Array[Object]], arr2.asInstanceOf[Array[Object]])
    def equalInferredTypes[A,B](a: A, b: B)(implicit eq: A =:= B) {}

    val unit = ()
    val snil = unit.toSized[Array]
    assertEquals(Nat toInt unit.length, snil.length)
    val expectedUnsized = Array.empty[Nothing]
    equalInferredTypes(expectedUnsized, snil.unsized)
    assertArrayEquals2(expectedUnsized, snil.unsized)

    val sizedApap = apap.toSized[Array]
    assertEquals(Nat toInt apap.length, sizedApap.length)
    equalInferredTypes(apapArray, sizedApap.unsized)
    assertArrayEquals2(apapArray, sizedApap.unsized)

    val sizedApbp = apbp.toSized[Array]
    assertEquals(Nat toInt apbp.length, sizedApbp.length)
    equalInferredTypes(apbpArray, sizedApbp.unsized)
    assertArrayEquals2(apbpArray, sizedApbp.unsized)

    val sizedCicscicicd = cicscicicd.toSized[Array]
    assertEquals(Nat toInt cicscicicd.length, sizedCicscicicd.length)
    equalInferredTypes(cicscicicdArray, sizedCicscicicd.unsized)
    assertArrayEquals2(cicscicicdArray, sizedCicscicicd.unsized)

    val sizedMimsmimimd = mimsmimimd.toSized[Array]
    assertEquals(Nat toInt mimsmimimd.length, sizedMimsmimimd.length)
    equalInferredTypes(mimsmimimdArray, sizedMimsmimimd.unsized)
    assertArrayEquals2(mimsmimimdArray, sizedMimsmimimd.unsized)

    val sizedMimsmimemd = mimsmimemd.toSized[Array]
    assertEquals(Nat toInt mimsmimemd.length, sizedMimsmimemd.length)
    // equalInferredTypes(mimsmimemdArray, sizedMimsmimemd.unsized)
    // typed[Array[M[_]]](sizedMimsmimemd.unsized)
    // The line above compiles when mimsmimemd is an HList, not when it it a tuple...
    assertArrayEquals2(mimsmimemdArray.map(x => x: Any), sizedMimsmimemd.unsized.map(x => x: Any))

    val sizedM2im2sm2im2im2d = m2im2sm2im2im2d.toSized[Array]
    assertEquals(Nat toInt m2im2sm2im2im2d.length, sizedM2im2sm2im2im2d.length)
    equalInferredTypes(m2im2sm2im2im2dArray, sizedM2im2sm2im2im2d.unsized)
    assertArrayEquals2(m2im2sm2im2im2dArray, sizedM2im2sm2im2im2d.unsized)

    val sizedM2eim2esm2eim2eem2ed = m2eim2esm2eim2eem2ed.toSized[Array]
    assertEquals(Nat toInt m2eim2esm2eim2eem2ed.length, sizedM2eim2esm2eim2eem2ed.length)
    // equalInferredTypes(m2eim2esm2eim2eem2edArray, sizedM2eim2esm2eim2eem2ed.unsized)
    // typed[Array[M2[_ >: Double with Int with String, _]]](sizedM2eim2esm2eim2eem2ed.unsized) // Same remark as above
    assertArrayEquals2(m2eim2esm2eim2eem2edArray.map(x => x: Any), sizedM2eim2esm2eim2eem2ed.unsized.map(x => x: Any))
  }
  // Unifier computes the LUB-typed tuple; cast recovers the precise type at
  // runtime (returning None on shape mismatch).
  @Test
  def testUnifier {
    import ops.tuple._

    implicitly[Unifier.Aux[Tuple1[Apple], Tuple1[Apple]]]
    implicitly[Unifier.Aux[(Fruit, Pear), (Fruit, Fruit)]]
    //implicitly[Unifier.Aux[(Apple, Pear), (Fruit, Fruit)]]
    implicitly[Unifier.Aux[(Int, String, Int, Int), YYYY]]

    val uapap = implicitly[Unifier.Aux[(Apple, Pear, Apple, Pear), (PWS, PWS, PWS, PWS)]]
    val unified1 = uapap(apap)
    typed[FFFF](unified1)
    val unified2 = apap.unify
    typed[FFFF](unified2)

    val ununified1 = unified2.cast[APAP]
    assertTrue(ununified1.isDefined)
    typed[APAP](ununified1.get)
    val ununified2 = unified2.cast[APBP]
    assertFalse(ununified2.isDefined)
    typed[Option[APBP]](ununified2)

    def getUnifier[T, Out](t : T)(implicit u: Unifier.Aux[T, Out]) = u

    val u2 = getUnifier(Tuple1(a))
    typed[Unifier.Aux[Tuple1[Apple], Tuple1[Apple]]](u2)
    val u3 = getUnifier((a, a))
    typed[Unifier.Aux[(Apple, Apple), (Apple, Apple)]](u3)
    val u4 = getUnifier((a, a, a))
    typed[Unifier.Aux[(Apple, Apple, Apple), (Apple, Apple, Apple)]](u4)
    val u5 = getUnifier((a, a, a, a))
    typed[Unifier.Aux[(Apple, Apple, Apple, Apple), (Apple, Apple, Apple, Apple)]](u5)
    //val u6 = getUnifier((a, p))
    //typed[Unifier.Aux[(Apple, Pear), (Fruit, Fruit)]](u6)
    val u7 = getUnifier((a, f))
    typed[Unifier.Aux[(Apple, Fruit), (Fruit, Fruit)]](u7)
    val u8 = getUnifier((f, a))
    typed[Unifier.Aux[(Fruit, Apple), (Fruit, Fruit)]](u8)
    val u9a = getUnifier((a, f))
    typed[Unifier.Aux[(Apple, Fruit), FF]](u9a)
    val u9b = getUnifier((a, p))
    typed[Unifier.Aux[(Apple, Pear), (PWS, PWS)]](u9b)
    val u10 = getUnifier(apap)
    typed[Unifier.Aux[APAP, (PWS, PWS, PWS, PWS)]](u10)
    val u11 = getUnifier(apbp)
    typed[Unifier.Aux[APBP, (PWS, PWS, PWS, PWS)]](u11)

    val invar1 = (Set(23), Set("foo"))
    val uinvar1 = invar1.unify
    typed[(Set[_ >: Int with String], Set[_ >: Int with String])](uinvar1)

    // Unifying three or more elements which have an invariant outer type constructor and differing type
    // arguments fails, presumably due to a failure to compute a sensible LUB.
    //val invar2 = (Set(23), Set("foo"), Set(true))
    //val uinvar2 = invar2.unify
  }
  // unifySubtypes[B] widens only elements that are subtypes of B, leaving the
  // rest untouched; combined with filter it selects those elements.
  @Test
  def testSubtypeUnifier {
    val fruits : (Apple, Pear, Fruit) = (a, p, f)
    typed[(Fruit, Fruit, Fruit)](fruits.unifySubtypes[Fruit])
    typed[(Apple, Pear, Fruit)](fruits.unifySubtypes[Apple])
    assertEquals((a, p, f), fruits.unifySubtypes[Fruit].filter[Fruit])

    val stuff : (Apple, String, Pear) = (a, "foo", p)
    typed[(Fruit, String, Fruit)](stuff.unifySubtypes[Fruit])
    assertEquals((), stuff.filter[Fruit])
    assertEquals((a, p), stuff.unifySubtypes[Fruit].filter[Fruit])
  }
  // to[List] converts a tuple to a List whose element type is the inferred LUB;
  // variance of the element constructors shapes the result type.
  @Test
  def testToTraversableList {
    val empty = ().to[List]
    assertTypedEquals[List[Nothing]](Nil, empty)

    val fruits1 = apap.to[List]
    typed[List[Fruit]](fruits1)
    assertEquals(List(a, p, a, p), fruits1)

    val fruits2 = apbp.to[List]
    typed[List[Fruit]](fruits2)
    assertEquals(List(a, p, b, p), fruits2)

    val l1 = (1, "foo", 2, 3)

    val stuff = l1.to[List]
    typed[List[Any]](stuff)
    assertEquals(List(1, "foo", 2, 3), stuff)

    val l4 = (Option(1), Option("foo"), Option(2), Option(3))
    val l7 = l4 map isDefined
    typed[BBBB](l7)
    assertEquals((true, true, true, true), l7)

    val ll2 = l7.to[List]
    typed[Boolean](ll2.head)

    val moreStuff = (a, "foo", p).to[List]
    typed[List[Any]](moreStuff)

    def equalInferredTypes[A,B](a: A, b: B)(implicit eq: A =:= B) {}

    val ctv = cicscicicd.to[List]
    equalInferredTypes(cicscicicdList, ctv)
    typed[List[Ctv[Int with String with Double]]](ctv)
    assertEquals(cicscicicdList, ctv)

    val m = mimsmimimd.to[List]
    equalInferredTypes(mimsmimimdList, m)
    typed[List[M[_ >: Int with String with Double]]](m)
    assertEquals(mimsmimimdList, m)

    val mWithEx = mimsmimemd.to[List]
    // equalType(mimsmimemdList, mWithEx)
    typed[List[M[_]]](mWithEx)
    assertEquals(mimsmimemdList, mWithEx)

    val m2 = m2im2sm2im2im2d.to[List]
    equalInferredTypes(m2im2sm2im2im2dList, m2)
    typed[List[M2[_ >: Int with String with Double, Unit]]](m2)
    assertEquals(m2im2sm2im2im2dList, m2)

    val m2e = m2eim2esm2eim2eem2ed.to[List]
    // equalType(m2eim2esm2eim2eem2edList, m2e)
    typed[List[M2[_ >: Int with String with Double, _]]](m2e)
    assertEquals(m2eim2esm2eim2eem2edList, m2e)
  }
  // Same coverage as testToTraversableList but via the dedicated toList op
  // (ops.tuple.ToList), including explicit instance resolution checks.
  @Test
  def testToList {
    import ops.tuple.ToList

    // Instance resolution must succeed for these type pairs.
    ToList[Unit, Nothing]
    ToList[Unit, Int]
    ToList[APAP, Fruit]

    val empty = ().toList
    assertTypedEquals[List[Nothing]](Nil, empty)

    val fruits1 = apap.toList
    typed[List[Fruit]](fruits1)
    assertEquals(List(a, p, a, p), fruits1)

    val fruits2 = apbp.toList
    typed[List[Fruit]](fruits2)
    assertEquals(List(a, p, b, p), fruits2)

    val l1 = (1, "foo", 2, 3)

    val stuff = l1.toList
    typed[List[Any]](stuff)
    assertEquals(List(1, "foo", 2, 3), stuff)

    val l4 = (Option(1), Option("foo"), Option(2), Option(3))
    val l7 = l4 map isDefined
    typed[BBBB](l7)
    assertEquals((true, true, true, true), l7)

    val ll2 = l7.toList
    typed[Boolean](ll2.head)

    val moreStuff = (a, "foo", p).toList
    typed[List[Any]](moreStuff)

    def equalInferredTypes[A,B](a: A, b: B)(implicit eq: A =:= B) {}

    val ctv = cicscicicd.toList
    equalInferredTypes(cicscicicdList, ctv)
    typed[List[Ctv[Int with String with Double]]](ctv)
    assertEquals(cicscicicdList, ctv)

    val m = mimsmimimd.toList
    equalInferredTypes(mimsmimimdList, m)
    typed[List[M[_ >: Int with String with Double]]](m)
    assertEquals(mimsmimimdList, m)

    val mWithEx = mimsmimemd.toList
    // equalType(mimsmimemdList, mWithEx)
    typed[List[M[_]]](mWithEx)
    assertEquals(mimsmimemdList, mWithEx)

    val m2 = m2im2sm2im2im2d.toList
    equalInferredTypes(m2im2sm2im2im2dList, m2)
    typed[List[M2[_ >: Int with String with Double, Unit]]](m2)
    assertEquals(m2im2sm2im2im2dList, m2)

    val m2e = m2eim2esm2eim2eem2ed.toList
    // equalType(m2eim2esm2eim2eem2edList, m2e)
    typed[List[M2[_ >: Int with String with Double, _]]](m2e)
    assertEquals(m2eim2esm2eim2eem2edList, m2e)
  }
  // to[Array] conversion; element-wise equality helper needed since Array
  // equality is by reference.
  @Test
  def testToTraversableArray {
    def assertArrayEquals2[T](arr1 : Array[T], arr2 : Array[T]) =
      assertArrayEquals(arr1.asInstanceOf[Array[Object]], arr2.asInstanceOf[Array[Object]])

    val empty = ().to[Array]
    typed[Array[Nothing]](empty)
    assertArrayEquals2(Array.empty, empty)

    val fruits1 = apap.to[Array].map(x => x : Fruit)
    typed[Array[Fruit]](fruits1)
    assertArrayEquals2(Array[Fruit](a, p, a, p), fruits1)

    val fruits2 = apbp.to[Array].map(x => x : Fruit)
    typed[Array[Fruit]](fruits2)
    assertArrayEquals2(Array[Fruit](a, p, b, p), fruits2)

    val l1 = (1, "foo", 2, 3)

    val stuff = l1.to[Array]
    typed[Array[Any]](stuff)
    assertArrayEquals2(Array(1, "foo", 2, 3), stuff)

    val l4 = (Option(1), Option("foo"), Option(2), Option(3))
    val l7 = l4 map isDefined
    typed[BBBB](l7)
    assertEquals((true, true, true, true), l7)

    val ll2 = l7.to[Array]
    typed[Boolean](ll2(0))

    val moreStuff = (a, "foo", p).to[Array].map(x => x : AnyRef)
    typed[Array[AnyRef]](moreStuff)
    assertArrayEquals2(Array[AnyRef](a, "foo", p), moreStuff)

    def equalInferredTypes[A,B](a: A, b: B)(implicit eq: A =:= B) {}

    val ctv = cicscicicd.to[Array]
    equalInferredTypes(cicscicicdArray, ctv)
    typed[Array[Ctv[Int with String with Double]]](ctv)
    assertArrayEquals2(cicscicicdArray, ctv)

    val m = mimsmimimd.to[Array]
    equalInferredTypes(mimsmimimdArray, m)
    typed[Array[M[_ >: Int with String with Double]]](m)
    assertArrayEquals2(mimsmimimdArray, m)

    val mWithEx = mimsmimemd.to[Array]
    // equalInferredTypes(mimsmimemdArray, mWithEx)
    // typed[Array[M[_]]](mWithEx)
    // The line above compiles when mimsmimemd is an HList, not when it it a tuple, as in testToSizedArray
    assertArrayEquals2(mimsmimemdArray.map(x => x : Any), mWithEx.map(x => x : Any))

    val m2 = m2im2sm2im2im2d.to[Array]
    equalInferredTypes(m2im2sm2im2im2dArray, m2)
    typed[Array[M2[_ >: Int with String with Double, Unit]]](m2)
    assertArrayEquals2(m2im2sm2im2im2dArray, m2)

    val m2e = m2eim2esm2eim2eem2ed.to[Array]
    // equalInferredTypes(m2eim2esm2eim2eem2edArray, m2e)
    // typed[Array[M2[_ >: Int with String with Double, _]]](m2e)
    // Same remark as above
    assertArrayEquals2(m2eim2esm2eim2eem2edArray.map(x => x : Any), m2e.map(x => x : Any))
  }
  // Same coverage as testToTraversableArray but via the dedicated toArray op
  // (ops.tuple.ToArray), including explicit instance resolution checks.
  @Test
  def testToArray {
    import ops.tuple.ToArray

    // Instance resolution must succeed for these type pairs.
    ToArray[Unit, Nothing]
    ToArray[Unit, Int]
    ToArray[APAP, Fruit]

    def assertArrayEquals2[T](arr1 : Array[T], arr2 : Array[T]) =
      assertArrayEquals(arr1.asInstanceOf[Array[Object]], arr2.asInstanceOf[Array[Object]])

    val empty = ().toArray
    typed[Array[Nothing]](empty)
    assertArrayEquals2(Array.empty, empty)

    val fruits1 = apap.toArray[Fruit]
    typed[Array[Fruit]](fruits1)
    assertArrayEquals2(Array[Fruit](a, p, a, p), fruits1)

    val fruits2 = apbp.toArray[Fruit]
    typed[Array[Fruit]](fruits2)
    assertArrayEquals2(Array[Fruit](a, p, b, p), fruits2)

    val l1 = (1, "foo", 2, 3)

    val stuff = l1.toArray
    typed[Array[Any]](stuff)
    assertArrayEquals2(Array(1, "foo", 2, 3), stuff)

    val l4 = (Option(1), Option("foo"), Option(2), Option(3))
    val l7 = l4 map isDefined
    typed[BBBB](l7)
    assertEquals((true, true, true, true), l7)

    val ll2 = l7.toArray
    typed[Boolean](ll2(0))

    val moreStuff = (a, "foo", p).toArray[AnyRef]
    typed[Array[AnyRef]](moreStuff)
    assertArrayEquals2(Array[AnyRef](a, "foo", p), moreStuff)

    def equalInferredTypes[A,B](a: A, b: B)(implicit eq: A =:= B) {}

    val ctv = cicscicicd.toArray
    equalInferredTypes(cicscicicdArray, ctv)
    typed[Array[Ctv[Int with String with Double]]](ctv)
    assertArrayEquals2(cicscicicdArray, ctv)

    val m = mimsmimimd.toArray
    equalInferredTypes(mimsmimimdArray, m)
    typed[Array[M[_ >: Int with String with Double]]](m)
    assertArrayEquals2(mimsmimimdArray, m)

    val mWithEx = mimsmimemd.toArray[M[_]]
    // equalInferredTypes(mimsmimemdArray, mWithEx)
    typed[Array[M[_]]](mWithEx)
    assertArrayEquals2(mimsmimemdArray, mWithEx)

    val m2 = m2im2sm2im2im2d.toArray
    equalInferredTypes(m2im2sm2im2im2dArray, m2)
    typed[Array[M2[_ >: Int with String with Double, Unit]]](m2)
    assertArrayEquals2(m2im2sm2im2im2dArray, m2)

    val m2e = m2eim2esm2eim2eem2ed.toArray
    // equalInferredTypes(m2eim2esm2eim2eem2edArray, m2e)
    // typed[Array[M2[_ >: Int with String with Double, _]]](m2e)
    // The line above compiles when mimsmimemd is an HList, not when it is a tuple...
    assertArrayEquals2(m2eim2esm2eim2eem2edArray.map(x => x : Any), m2e.map(x => x : Any))
  }
@Test
def testFoldMap {
  val tl1 = (Option(1), Option("foo"), Option(2), Option(3))
  val tl2 = (Option(1), Option("foo"), (None : Option[Int]), Option(3))

  // Reference results computed via explicit map-then-fold.
  val mlfl1 = (tl1 map isDefined).toList.foldLeft(true)(_ && _)
  assertTrue(mlfl1)
  val mlfl2 = (tl2 map isDefined).toList.foldLeft(true)(_ && _)
  assertFalse(mlfl2)

  // foldMap fuses the map and the fold; results must agree with the above.
  val fl1 = tl1.foldMap(true)(isDefined)(_ && _)
  assertTrue(fl1)
  val fl2 = tl2.foldMap(true)(isDefined)(_ && _)
  assertFalse(fl2)
}
@Test
def testAt {
  // Positional access with type-level Nat indices (_0 .. _6): each lookup
  // must recover the precise static element type.
  val sn1 = (23, 3.0, "foo", (), "bar", true, 5L)

  val at0 = sn1(_0)
  typed[Int](at0)
  assertEquals(23, at0)

  val at1 = sn1(_1)
  typed[Double](at1)
  assertEquals(3.0, at1, Double.MinPositiveValue)

  val at2 = sn1(_2)
  typed[String](at2)
  assertEquals("foo", at2)

  val at3 = sn1(_3)
  typed[Unit](at3)
  assertEquals((), at3)

  val at4 = sn1(_4)
  typed[String](at4)
  assertEquals("bar", at4)

  val at5 = sn1(_5)
  typed[Boolean](at5)
  assertEquals(true, at5)

  val at6 = sn1(_6)
  typed[Long](at6)
  assertEquals(5L, at6)

  // Index beyond Tuple22's midpoint exercises the large-arity encoding.
  val sn2 = (
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
    10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
    20, 21
  )

  val at21 = sn2(_21)
  typed[Int](at21)
  assertEquals(21, at21)
}
@Test
def testAtLiteral {
  // Same positional-access checks as testAt, but using Int literal indices
  // (lifted to Nat by macro) instead of explicit _N singletons.
  val sn1 = (23, 3.0, "foo", (), "bar", true, 5L)

  val at0 = sn1(0)
  typed[Int](at0)
  assertEquals(23, at0)

  val at1 = sn1(1)
  typed[Double](at1)
  assertEquals(3.0, at1, Double.MinPositiveValue)

  val at2 = sn1(2)
  typed[String](at2)
  assertEquals("foo", at2)

  val at3 = sn1(3)
  typed[Unit](at3)
  assertEquals((), at3)

  val at4 = sn1(4)
  typed[String](at4)
  assertEquals("bar", at4)

  val at5 = sn1(5)
  typed[Boolean](at5)
  assertEquals(true, at5)

  val at6 = sn1(6)
  typed[Long](at6)
  assertEquals(5L, at6)

  val sn2 = (
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
    10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
    20, 21
  )

  val at21 = sn2(21)
  typed[Int](at21)
  assertEquals(21, at21)
}
@Test
def testTakeDrop {
  // take/drop with Nat indices: prefix/suffix types are computed statically;
  // taking 0 / dropping all yields Unit (the empty tuple).
  val sn1 = (23, 3.0, "foo", (), "bar", true, 5L)

  val t0 = sn1.take(_0)
  typed[Unit](t0)
  assertEquals((), t0)

  val d0 = sn1.drop(_0)
  typed[(Int, Double, String, Unit, String, Boolean, Long)](d0)
  assertEquals((23, 3.0, "foo", (), "bar", true, 5L), d0)

  val t2 = sn1.take(_2)
  typed[(Int, Double)](t2)
  assertEquals((23, 3.0), t2)

  val d2 = sn1.drop(_2)
  typed[(String, Unit, String, Boolean, Long)](d2)
  assertEquals(("foo", (), "bar", true, 5L), d2)

  val t7 = sn1.take(_7)
  typed[(Int, Double, String, Unit, String, Boolean, Long)](t7)
  assertEquals((23, 3.0, "foo", (), "bar", true, 5L), t7)

  val d7 = sn1.drop(_7)
  typed[Unit](d7)
  assertEquals((), d7)
}
@Test
def testTakeDropLiteral {
  // Same as testTakeDrop, using Int literal indices instead of Nat singletons.
  val sn1 = (23, 3.0, "foo", (), "bar", true, 5L)

  val t0 = sn1.take(0)
  typed[Unit](t0)
  assertEquals((), t0)

  val d0 = sn1.drop(0)
  typed[(Int, Double, String, Unit, String, Boolean, Long)](d0)
  assertEquals((23, 3.0, "foo", (), "bar", true, 5L), d0)

  val t2 = sn1.take(2)
  typed[(Int, Double)](t2)
  assertEquals((23, 3.0), t2)

  val d2 = sn1.drop(2)
  typed[(String, Unit, String, Boolean, Long)](d2)
  assertEquals(("foo", (), "bar", true, 5L), d2)

  val t7 = sn1.take(7)
  typed[(Int, Double, String, Unit, String, Boolean, Long)](t7)
  assertEquals((23, 3.0, "foo", (), "bar", true, 5L), t7)

  val d7 = sn1.drop(7)
  typed[Unit](d7)
  assertEquals((), d7)
}
@Test
def testSplit {
  // split(_N) returns (prefix, suffix) with statically computed arities;
  // reverse_split reverses the prefix. Only types are asserted here —
  // the static shape is the behavior under test.
  val sn1 = (23, 3.0, "foo", (), "bar", true, 5L)

  val sni0 = sn1.split(_0)
  typed[(Unit, (Int, Double, String, Unit, String, Boolean, Long))](sni0)
  val sni1 = sn1.split(_1)
  typed[(Tuple1[Int], (Double, String, Unit, String, Boolean, Long))](sni1)
  val sni2 = sn1.split(_2)
  typed[((Int, Double), (String, Unit, String, Boolean, Long))](sni2)
  val sni3 = sn1.split(_3)
  typed[((Int, Double, String), (Unit, String, Boolean, Long))](sni3)
  val sni4 = sn1.split(_4)
  typed[((Int, Double, String, Unit), (String, Boolean, Long))](sni4)
  val sni5 = sn1.split(_5)
  typed[((Int, Double, String, Unit, String), (Boolean, Long))](sni5)
  val sni6 = sn1.split(_6)
  typed[((Int, Double, String, Unit, String, Boolean), Tuple1[Long])](sni6)
  val sni7 = sn1.split(_7)
  typed[((Int, Double, String, Unit, String, Boolean, Long), Unit)](sni7)

  // reverse_split: prefix element order is inverted in the result type.
  val snri0 = sn1.reverse_split(_0)
  typed[(Unit, (Int, Double, String, Unit, String, Boolean, Long))](snri0)
  val snri1 = sn1.reverse_split(_1)
  typed[(Tuple1[Int], (Double, String, Unit, String, Boolean, Long))](snri1)
  val snri2 = sn1.reverse_split(_2)
  typed[((Double, Int), (String, Unit, String, Boolean, Long))](snri2)
  val snri3 = sn1.reverse_split(_3)
  typed[((String, Double, Int), (Unit, String, Boolean, Long))](snri3)
  val snri4 = sn1.reverse_split(_4)
  typed[((Unit, String, Double, Int), (String, Boolean, Long))](snri4)
  val snri5 = sn1.reverse_split(_5)
  typed[((String, Unit, String, Double, Int), (Boolean, Long))](snri5)
  val snri6 = sn1.reverse_split(_6)
  typed[((Boolean, String, Unit, String, Double, Int), Tuple1[Long])](snri6)
  val snri7 = sn1.reverse_split(_7)
  typed[((Long, Boolean, String, Unit, String, Double, Int), Unit)](snri7)
}
@Test
def testSplitLiteral {
  // Same static-shape checks as testSplit, with Int literal split points.
  val sn1 = (23, 3.0, "foo", (), "bar", true, 5L)

  val sni0 = sn1.split(0)
  typed[(Unit, (Int, Double, String, Unit, String, Boolean, Long))](sni0)
  val sni1 = sn1.split(1)
  typed[(Tuple1[Int], (Double, String, Unit, String, Boolean, Long))](sni1)
  val sni2 = sn1.split(2)
  typed[((Int, Double), (String, Unit, String, Boolean, Long))](sni2)
  val sni3 = sn1.split(3)
  typed[((Int, Double, String), (Unit, String, Boolean, Long))](sni3)
  val sni4 = sn1.split(4)
  typed[((Int, Double, String, Unit), (String, Boolean, Long))](sni4)
  val sni5 = sn1.split(5)
  typed[((Int, Double, String, Unit, String), (Boolean, Long))](sni5)
  val sni6 = sn1.split(6)
  typed[((Int, Double, String, Unit, String, Boolean), Tuple1[Long])](sni6)
  val sni7 = sn1.split(7)
  typed[((Int, Double, String, Unit, String, Boolean, Long), Unit)](sni7)

  val snri0 = sn1.reverse_split(0)
  typed[(Unit, (Int, Double, String, Unit, String, Boolean, Long))](snri0)
  val snri1 = sn1.reverse_split(1)
  typed[(Tuple1[Int], (Double, String, Unit, String, Boolean, Long))](snri1)
  val snri2 = sn1.reverse_split(2)
  typed[((Double, Int), (String, Unit, String, Boolean, Long))](snri2)
  val snri3 = sn1.reverse_split(3)
  typed[((String, Double, Int), (Unit, String, Boolean, Long))](snri3)
  val snri4 = sn1.reverse_split(4)
  typed[((Unit, String, Double, Int), (String, Boolean, Long))](snri4)
  val snri5 = sn1.reverse_split(5)
  typed[((String, Unit, String, Double, Int), (Boolean, Long))](snri5)
  val snri6 = sn1.reverse_split(6)
  typed[((Boolean, String, Unit, String, Double, Int), Tuple1[Long])](snri6)
  val snri7 = sn1.reverse_split(7)
  typed[((Long, Boolean, String, Unit, String, Double, Int), Unit)](snri7)
}
@Test
def testSelect {
  // select[T] extracts the (unique) element of static type T from the tuple.
  val sl = (1, true, "foo", 2.0)

  val si = sl.select[Int]
  typed[Int](si)
  assertEquals(1, si)

  val sb = sl.select[Boolean]
  typed[Boolean](sb)
  assertEquals(true, sb)

  val ss = sl.select[String]
  typed[String](ss)
  assertEquals("foo", ss)

  val sd = sl.select[Double]
  typed[Double](sd)
  assertEquals(2.0, sd, Double.MinPositiveValue)
}
@Test
def testFilter {
  // filter[T] keeps only elements of static type T; filtering out every
  // element yields Unit (the empty tuple).
  val l1 = (1, 2)
  val f1 = l1.filter[Int]
  typed[(Int, Int)](f1)
  assertEquals((1, 2), f1)

  val l2 = (1, true, "foo", 2)
  val f2 = l2.filter[Int]
  typed[(Int, Int)](f2)
  assertEquals((1, 2), f2)

  typed[Unit](l2.filter[Double])
}
@Test
def testFilterNot {
  // filterNot[T] removes all elements of static type T; removing every
  // element yields Unit (the empty tuple).
  val l1 = (1, 2)
  val f1 = l1.filterNot[String]
  typed[(Int, Int)](f1)
  assertEquals((1, 2), f1)

  val l2 = (1, true, "foo", 2)
  val f2 = l2.filterNot[String]
  typed[(Int, Boolean, Int)](f2)
  assertEquals((1, true, 2), f2)

  typed[Unit](l1.filterNot[Int])
}
@Test
def testReplace {
  val sl = (1, true, "foo", 2.0)

  // replace(v): substitute the element whose type matches v's type,
  // returning (old element, updated tuple).
  val (i, r1) = sl.replace(23)
  typed[Int](i)
  assertEquals(1, i)
  assertEquals((23, true, "foo", 2.0), r1)

  val (b, r2) = sl.replace(false)
  typed[Boolean](b)
  assertEquals(true, b)
  assertEquals((1, false, "foo", 2.0), r2)

  val (s, r3) = sl.replace("bar")
  typed[String](s)
  assertEquals("foo", s)
  assertEquals((1, true, "bar", 2.0), r3)

  val (d, r4) = sl.replace(3.0)
  typed[Double](d)
  assertEquals(2.0, d, Double.MinPositiveValue)
  assertEquals((1, true, "foo", 3.0), r4)

  // replaceType[T](v): substitute the element of type T with a value of a
  // *different* type — the tuple's element type changes accordingly.
  val (i2, r5) = sl.replaceType[Int]('*')
  typed[Int](i2)
  typed[Char](r5(_0))
  assertEquals(1, i2)
  assertEquals(('*', true, "foo", 2.0), r5)

  val (b2, r6) = sl.replaceType[Boolean]('*')
  typed[Boolean](b2)
  typed[Char](r6(_1))
  assertEquals(true, b2)
  assertEquals((1, '*', "foo", 2.0), r6)

  val (s2, r7) = sl.replaceType[String]('*')
  typed[String](s2)
  typed[Char](r7(_2))
  assertEquals("foo", s2)
  assertEquals((1, true, '*', 2.0), r7)

  val (d2, r8) = sl.replaceType[Double]('*')
  typed[Double](d2)
  typed[Char](r8(_3))
  assertEquals(2.0, d2, Double.MinPositiveValue)
  assertEquals((1, true, "foo", '*'), r8)

  // Subtype interplay: replacing a Pear slot with an Apple/Fruit widens or
  // narrows the slot's static type as shown below.
  val fruits = (a, p, a, f)

  val (x1, rr1) = fruits.replaceType[Pear](a)
  typed[Pear](x1)
  typed[(Apple, Apple, Apple, Fruit)](rr1)

  val (x2, rr2) = fruits.replaceType[Pear](f)
  typed[Pear](x2)
  typed[(Apple, Fruit, Apple, Fruit)](rr2)

  val (x3, rr3) = fruits.replaceType[Fruit](p)
  typed[Fruit](x3)
  typed[(Apple, Pear, Apple, Pear)](rr3)

  val (x4, rr4) = fruits.replace(p)
  typed[Pear](x4)
  typed[(Apple, Pear, Apple, Fruit)](rr4)

  val (x5, rr5) = fruits.replace(f)
  typed[Fruit](x5)
  typed[(Apple, Pear, Apple, Fruit)](rr5)
}
@Test
def testUpdate {
  val sl = (1, true, "foo", 2.0)

  // updatedElem(v): replace the element whose type matches v's type.
  val r1 = sl.updatedElem(23)
  assertEquals((23, true, "foo", 2.0), r1)

  val r2 = sl.updatedElem(false)
  assertEquals((1, false, "foo", 2.0), r2)

  val r3 = sl.updatedElem("bar")
  assertEquals((1, true, "bar", 2.0), r3)

  val r4 = sl.updatedElem(3.0)
  assertEquals((1, true, "foo", 3.0), r4)

  // updatedType[T](v): replace the element of type T with a differently-typed value.
  val r5 = sl.updatedType[Int]('*')
  assertEquals(('*', true, "foo", 2.0), r5)

  val r6 = sl.updatedType[Boolean]('*')
  assertEquals((1, '*', "foo", 2.0), r6)

  val r7 = sl.updatedType[String]('*')
  assertEquals((1, true, '*', 2.0), r7)

  val r8 = sl.updatedType[Double]('*')
  assertEquals((1, true, "foo", '*'), r8)

  // updateWith[T](f): transform the element of type T in place, possibly
  // changing its static type to f's result type.
  val r9 = sl.updateWith[Int](i => (i + 1).toString)
  assertEquals(("2", true, "foo", 2.0), r9)

  val r10 = sl.updateWith[Boolean](b => if(b) 3.0 else 2.0)
  assertEquals((1, 3.0, "foo", 2.0), r10)

  val r11 = sl.updateWith[String](s => s.length)
  assertEquals((1, true, 3, 2.0), r11)

  val r12 = sl.updateWith[Double](d => '*')
  assertEquals((1, true, "foo", '*'), r12)

  // Subtype interplay mirrors testReplace.
  val fruits = (a, p, a, f)

  val rr1 = fruits.updatedType[Pear](a)
  typed[(Apple, Apple, Apple, Fruit)](rr1)

  val rr2 = fruits.updatedType[Pear](f)
  typed[(Apple, Fruit, Apple, Fruit)](rr2)

  val rr3 = fruits.updatedType[Fruit](p)
  typed[(Apple, Pear, Apple, Pear)](rr3)

  val rr4 = fruits.updatedElem(p)
  typed[(Apple, Pear, Apple, Fruit)](rr4)

  val rr5 = fruits.updatedElem(f)
  typed[(Apple, Pear, Apple, Fruit)](rr5)

  val rr6 = fruits.updateWith[Pear](p => a)
  typed[(Apple, Apple, Apple, Fruit)](rr6)

  val rr7 = fruits.updateWith[Fruit](f => p)
  typed[(Apple, Pear, Apple, Pear)](rr7)

  val rr8 = fruits.updateWith[Pear](p => f)
  typed[(Apple, Fruit, Apple, Fruit)](rr8)
}
@Test
def testSplitLeft {
  // splitLeft[T]: split before the first element of type T; reverse_splitLeft
  // additionally reverses the prefix. Reassembling the parts must recover
  // the original tuple.
  val sl = (1, true, "foo", 2.0)
  val sl2 = (23, 3.0, "foo", (), "bar", true, 5L)

  val (sp1, sp2) = sl.splitLeft[String]
  typed[(Int, Boolean)](sp1)
  typed[(String, Double)](sp2)
  assertEquals((sp1 ::: sp2), sl)

  val (sli1, sli2) = sl2.splitLeft[String]
  typed[(Int, Double)](sli1)
  typed[(String, Unit, String, Boolean, Long)](sli2)
  assertEquals((sli1 ::: sli2), sl2)

  val (rsp1, rsp2) = sl.reverse_splitLeft[String]
  typed[(Boolean, Int)](rsp1)
  typed[(String, Double)](rsp2)
  assertEquals((rsp1 reverse_::: rsp2), sl)

  val (rsli1, rsli2) = sl2.reverse_splitLeft[String]
  typed[(Double, Int)](rsli1)
  typed[(String, Unit, String, Boolean, Long)](rsli2)
  assertEquals((rsli1 reverse_::: rsli2), sl2)
}
@Test
def testSplitRight {
  // splitRight[T]: split after the last element of type T; reverse_splitRight
  // additionally reverses the prefix. Reassembling must recover the original.
  val sl = (1, true, "foo", 2.0)
  val sl2 = (23, 3.0, "foo", (), "bar", true, 5L)

  val (srp1, srp2) = sl.splitRight[String]
  typed[(Int, Boolean, String)](srp1)
  typed[Tuple1[Double]](srp2)
  assertEquals((srp1 ::: srp2), sl)

  val (srli1, srli2) = sl2.splitRight[String]
  typed[(Int, Double, String, Unit, String)](srli1)
  typed[(Boolean, Long)](srli2)
  assertEquals(sl2, srli1 ::: srli2)

  val (rsrp1, rsrp2) = sl.reverse_splitRight[String]
  typed[(String, Boolean, Int)](rsrp1)
  typed[Tuple1[Double]](rsrp2)
  assertEquals((rsrp1 reverse_::: rsrp2), sl)

  val (rsrli1, rsrli2) = sl2.reverse_splitRight[String]
  typed[(String, Unit, String, Double, Int)](rsrli1)
  typed[(Boolean, Long)](rsrli2)
  assertEquals((rsrli1 reverse_::: rsrli2), sl2)
}
@Test
def testTranspose {
  // zipOne pairs one element onto each inner tuple; mapConst replaces every
  // element with a constant; transpose flips a tuple of tuples. transpose is
  // checked to be its own inverse at the end.
  val l1 = Tuple1(1)
  val l2 = Tuple1(Tuple1("a"))

  val z1 = l1.zipOne(l2)
  typed[Tuple1[(Int, String)]](z1)
  assertEquals(Tuple1((1, "a")), z1)

  val mc1 = l1.mapConst(())
  typed[Tuple1[Unit]](mc1)
  assertEquals(Tuple1(()), mc1)

  val t1 = Tuple1(l1).transpose
  typed[Tuple1[Tuple1[Int]]](t1)
  assertEquals(Tuple1(Tuple1(1)), t1)

  val l3 = (1, 2, 3)
  val l4 = (("a", 1.0), ("b", 2.0), ("c", 3.0))

  val z2 = l3.zipOne(l4)
  typed[((Int, String, Double), (Int, String, Double), (Int, String, Double))](z2)
  assertEquals(((1, "a", 1.0), (2, "b", 2.0), (3, "c", 3.0)), z2)

  val mc2 = l3.mapConst(())
  typed[(Unit, Unit, Unit)](mc2)
  assertEquals(((), (), ()), mc2)

  val t2 = l4.transpose
  typed[((String, String, String), (Double, Double, Double))](t2)
  assertEquals((("a", "b", "c"), (1.0, 2.0, 3.0)), t2)

  val t3 = z2.transpose
  typed[((Int, Int, Int), (String, String, String), (Double, Double, Double))](t3)
  assertEquals(((1, 2, 3), ("a", "b", "c"), (1.0, 2.0, 3.0)), t3)

  // transpose . transpose == identity
  val t4 = t3.transpose
  typed[((Int, String, Double), (Int, String, Double), (Int, String, Double))](t4)
  assertEquals(z2, t4)
}
@Test
def testZipUnzip {
  // zip/unzip in three equivalent spellings (pair transpose, binary zip,
  // tuple-of-tuples zip), plus zipApply which applies a tuple of functions
  // pointwise to a tuple of arguments.
  val l1 = (1, "a", 1.0)
  val l2 = (2, "b", 2.0)

  val z1 = (l1, l2).transpose
  typed[((Int, Int), (String, String), (Double, Double))](z1)
  assertEquals(((1, 2), ("a", "b"), (1.0, 2.0)), z1)

  val z2 = l1 zip l2
  typed[((Int, Int), (String, String), (Double, Double))](z2)
  assertEquals(((1, 2), ("a", "b"), (1.0, 2.0)), z2)

  val z3 = (l1, l2).zip
  typed[((Int, Int), (String, String), (Double, Double))](z3)
  assertEquals(((1, 2), ("a", "b"), (1.0, 2.0)), z3)

  val u1 = z1.transpose
  typed[((Int, String, Double), (Int, String, Double))](u1)
  assertEquals(((1, "a", 1.0), (2, "b", 2.0)), u1)

  val u2 = z1.unzip
  typed[((Int, String, Double), (Int, String, Double))](u2)
  assertEquals(((1, "a", 1.0), (2, "b", 2.0)), u2)

  val intInc : Int => Int = _+1
  val stringInc : String => String = _+"*"
  val doubleInc : Double => Int = _.toInt+1

  val l3 = (intInc, stringInc, doubleInc)

  val z5 = l3 zipApply l1
  typed[(Int, String, Int)](z5)
  assertEquals((2, "a*", 2), z5)
}
@Test
def testRemove {
  // removeElem[T]: extract the element of type T, returning (element, rest).
  val l = (1, true, "foo")

  val li = l.removeElem[Int]
  typed[(Int, (Boolean, String))](li)
  assertEquals((1, (true, "foo")), li)

  val lb = l.removeElem[Boolean]
  typed[(Boolean, (Int, String))](lb)
  assertEquals((true, (1, "foo")), lb)

  val ls = l.removeElem[String]
  typed[(String, (Int, Boolean))](ls)
  assertEquals(("foo", (1, true)), ls)
}
@Test
def testRemoveAll {
  // removeAll[S]: extract a sub-tuple of shape S (in S's element order),
  // returning (extracted, rest). Unit extracts nothing.
  val l = (1, true, "foo")

  val lnil = l.removeAll[Unit]
  typed[(Unit, (Int, Boolean, String))](lnil)
  assertEquals(((), (1, true, "foo")), lnil)

  val li = l.removeAll[Tuple1[Int]]
  typed[(Tuple1[Int], (Boolean, String))](li)
  assertEquals((Tuple1(1), (true, "foo")), li)

  val lb = l.removeAll[Tuple1[Boolean]]
  typed[(Tuple1[Boolean], (Int, String))](lb)
  assertEquals((Tuple1(true), (1, "foo")), lb)

  // Extraction order follows the requested shape, not the source order.
  val lbi = l.removeAll[(Boolean, Int)]
  typed[((Boolean, Int), Tuple1[String])](lbi)
  assertEquals(((true, 1), Tuple1("foo")), lbi)
}
// Polymorphic binary operator for the foldLeft test below: the accumulator
// type changes at each step (Char+String -> Int, then Int+Boolean -> String).
object combine extends Poly {
  implicit def caseCharString = use((c : Char, s : String) => s.indexOf(c))
  implicit def caseIntBoolean = use((i : Int, b : Boolean) => if ((i >= 0) == b) "pass" else "fail")
}
@Test
def testFoldLeft {
  // foldLeft with the heterogeneous `combine` Poly: first verify each step
  // by direct application, then check the fold produces the same result.
  val c1a = combine('o', "foo")
  val c1b = combine(c1a, true)
  assertEquals("pass", c1b)

  val l1 = ("foo", true)
  val f1 = l1.foldLeft('o')(combine)
  typed[String](f1)
  assertEquals("pass", f1)

  // 'o' is absent from "bar" => indexOf yields -1, and (-1 >= 0) == false
  // still passes.
  val c2a = combine('o', "bar")
  val c2b = combine(c2a, false)
  assertEquals("pass", c2b)

  val l2 = ("bar", false)
  val f2 = l2.foldLeft('o')(combine)
  typed[String](f2)
  assertEquals("pass", f2)
}
@Test
def testUpdatedAt {
  // updatedAt[N]: positional update with a type-level Nat index; the tuple
  // type is unchanged because each replacement has the same element type.
  type IBS = (Int, Boolean, String)
  val l = (1, true, "foo")

  val li = l.updatedAt[_0](2)
  typed[IBS](li)
  assertEquals((2, true, "foo"), li)

  val lb = l.updatedAt[_1](false)
  typed[IBS](lb)
  assertEquals((1, false, "foo"), lb)

  val ls = l.updatedAt[_2]("bar")
  typed[IBS](ls)
  assertEquals((1, true, "bar"), ls)
}
@Test
def testUpdatedAtLiteral {
  // Same as testUpdatedAt, using Int literal indices.
  type IBS = (Int, Boolean, String)
  val l = (1, true, "foo")

  val li = l.updatedAt(0, 2)
  typed[IBS](li)
  assertEquals((2, true, "foo"), li)

  val lb = l.updatedAt(1, false)
  typed[IBS](lb)
  assertEquals((1, false, "foo"), lb)

  val ls = l.updatedAt(2, "bar")
  typed[IBS](ls)
  assertEquals((1, true, "bar"), ls)
}
@Test
def testZipConst {
  // zipConst pairs every element with the same constant value.
  val l1 = (1, true, "a")
  val c1 = 5

  val zl1 = l1 zipConst c1
  typed[((Int, Int), (Boolean, Int), (String, Int))](zl1)
  assertEquals(((1, c1), (true, c1), ("a", c1)), zl1)

  // The constant may itself be a tuple.
  val l2 = (Option("a"), 2, Set(true))
  val c2 = ("b", 5)
  type C2 = (String, Int)

  val zl2 = l2 zipConst c2
  typed[(
    (Option[String], C2),
    (Int, C2),
    (Set[Boolean], C2))](zl2)
  val expected = (
    (Option("a"), c2),
    (2, c2),
    (Set(true), c2))
  assertEquals(expected, zl2)
}
@Test
def testPropagation {
  // IsComposite must propagate the precise head type through a generic
  // method boundary: head of (Int, String, Boolean) is statically Int.
  def useHead[P <: Product](p: P)(implicit ic: ops.tuple.IsComposite[P]) = p.head
  val h = useHead((23, "foo", true))
  typed[Int](h)
}
@Test
def testCollect {
  import poly._

  // `empty` is defined at no type, so collect with it drops every element;
  // `complex` is defined at Int and String only (and changes their types).
  object empty extends Poly1

  object complex extends Poly1 {
    implicit val caseInt = at[Int](_.toDouble)
    implicit val caseString = at[String](_ => 1)
  }

  { // () collect p: collecting over the empty tuple yields () for any Poly.
    val in: Unit = ()

    val emptyResult = in.collect(empty)
    typed[Unit](emptyResult)
    assertEquals((), emptyResult)

    val identityResult = in.collect(poly.identity)
    typed[Unit](identityResult)
    assertEquals((), identityResult)

    val complexResult = in.collect(complex)
    typed[Unit](complexResult)
    assertEquals((), complexResult)
  }

  { // non-() collect empty: no case applies, so everything is dropped.
    val in: (Int, String, Double) = (1, "foo", 2.2)

    val result = in.collect(empty)
    typed[Unit](result)
    assertEquals((), result)
  }

  { // t collect identity: every case applies, tuple is unchanged.
    val in: (Int, String, Double) = (1, "foo", 2.2)

    val result = in.collect(identity)
    typed[(Int, String, Double)](result)
    assertEquals(in, result)
  }

  { // t collect complex: Int and String are kept (transformed), Double dropped.
    val in: (Int, String, Double) = (1, "foo", 2.2)

    val result = in.collect(complex)
    typed[(Double, Int)](result)
    assertEquals((1.0, 1), result)
  }
}
@Test
def testPermutations {
  // All orderings of a pair, then of a triple (6 = 3! permutations).
  val pair = (1, "foo")
  assertEquals(((1, "foo"), ("foo", 1)), pair.permutations)

  val triple = (1, "foo", 2.0)
  val allSix = (
    (1, "foo", 2.0), ("foo", 1, 2.0), ("foo", 2.0, 1),
    (1, 2.0, "foo"), (2.0, 1, "foo"), (2.0, "foo", 1)
  )
  assertEquals(allSix, triple.permutations)
}
@Test
def testMkString {
  // Render a heterogeneous tuple with start/separator/end delimiters.
  val rendered = (1, "foo", 2.0).mkString("<", ";", ">")
  assertEquals("<1;foo;2.0>", rendered)
}
@Test
def testRotateLeft {
  val in2 = (1, "foo")
  val in3 = (1, "foo", 2.0)
  val in4 = (1, "foo", 2.0, 'a')
  type S = String; type I = Int; type D = Double; type C = Char

  // rotateLeft(0): identity.
  val r1 = in2.rotateLeft(0)
  assertTypedEquals[(I, S)](in2, r1)
  val r2 = in3.rotateLeft(0)
  assertTypedEquals[(I, S, D)](in3, r2)
  val r3 = in4.rotateLeft(0)
  assertTypedEquals[(I, S, D, C)](in4, r3)

  // rotateLeft(N % Size == 0): full rotations are also the identity.
  val r4 = in2.rotateLeft(2)
  assertTypedEquals[(I, S)](in2, r4)
  val r5 = in2.rotateLeft(4)
  assertTypedEquals[(I, S)](in2, r5)
  val r6 = in3.rotateLeft(3)
  assertTypedEquals[(I, S, D)](in3, r6)
  val r7 = in3.rotateLeft(6)
  assertTypedEquals[(I, S, D)](in3, r7)
  val r8 = in4.rotateLeft(4)
  assertTypedEquals[(I, S, D, C)](in4, r8)
  val r9 = in4.rotateLeft(8)
  assertTypedEquals[(I, S, D, C)](in4, r9)

  // Other rotations: result equals rotateLeft(N % Size).
  val r10 = in2.rotateLeft(1)
  assertTypedEquals[(S, I)](("foo", 1), r10)
  val r11 = in3.rotateLeft(1)
  assertTypedEquals[(S, D, I)](("foo", 2.0, 1), r11)
  val r12 = in4.rotateLeft(1)
  assertTypedEquals[(S, D, C, I)](("foo", 2.0, 'a', 1), r12)
  val r13 = in4.rotateLeft(2)
  assertTypedEquals[(D, C, I, S)]((2.0, 'a', 1, "foo"), r13)
  val r14 = in4.rotateLeft(3)
  assertTypedEquals[(C, I, S, D)](('a', 1, "foo", 2.0), r14)
  val r15 = in4.rotateLeft(5)
  assertTypedEquals[(S, D, C, I)](("foo", 2.0, 'a', 1), r15)
  val r16 = in4.rotateLeft(6)
  assertTypedEquals[(D, C, I, S)]((2.0, 'a', 1, "foo"), r16)
}
@Test
def testRotateRight {
  val in2 = (1, "foo")
  val in3 = (1, "foo", 2.0)
  val in4 = (1, "foo", 2.0, 'a')
  type S = String; type I = Int; type D = Double; type C = Char

  // rotateRight(0): identity.
  val r1 = in2.rotateRight(0)
  assertTypedEquals[(I, S)](in2, r1)
  val r2 = in3.rotateRight(0)
  assertTypedEquals[(I, S, D)](in3, r2)
  val r3 = in4.rotateRight(0)
  assertTypedEquals[(I, S, D, C)](in4, r3)

  // rotateRight(N % Size == 0): full rotations are also the identity.
  val r4 = in2.rotateRight(2)
  assertTypedEquals[(I, S)](in2, r4)
  val r5 = in2.rotateRight(4)
  assertTypedEquals[(I, S)](in2, r5)
  val r6 = in3.rotateRight(3)
  assertTypedEquals[(I, S, D)](in3, r6)
  val r7 = in3.rotateRight(6)
  assertTypedEquals[(I, S, D)](in3, r7)
  val r8 = in4.rotateRight(4)
  assertTypedEquals[(I, S, D, C)](in4, r8)
  val r9 = in4.rotateRight(8)
  assertTypedEquals[(I, S, D, C)](in4, r9)

  // Other rotations: result equals rotateRight(N % Size).
  val r10 = in2.rotateRight(1)
  assertTypedEquals[(S, I)](("foo", 1), r10)
  val r11 = in3.rotateRight(1)
  assertTypedEquals[(D, I, S)]((2.0, 1, "foo"), r11)
  val r12 = in4.rotateRight(1)
  assertTypedEquals[(C, I, S, D)](('a', 1, "foo", 2.0), r12)
  val r13 = in4.rotateRight(2)
  assertTypedEquals[(D, C, I, S)]((2.0, 'a', 1, "foo"), r13)
  val r14 = in4.rotateRight(3)
  assertTypedEquals[(S, D, C, I)](("foo", 2.0, 'a', 1), r14)
  val r15 = in4.rotateRight(5)
  assertTypedEquals[(C, I, S, D)](('a', 1, "foo", 2.0), r15)
  val r16 = in4.rotateRight(6)
  assertTypedEquals[(D, C, I, S)]((2.0, 'a', 1, "foo"), r16)
}
// Polymorphic addition used by the scan tests: accepts Int/Int, String/Int
// and Int/String operand pairs, coercing the String side via toInt.
object smear extends Poly {
  implicit val caseIntInt = use((x: Int, y: Int) => x + y)
  implicit val caseStringInt = use((x: String, y: Int) => x.toInt + y)
  implicit val caseIntString = use((x: Int, y: String) => x + y.toInt)
}
@Test
def testScanLeft {
  // scanLeft over a heterogeneous tuple with the `smear` Poly: the result
  // carries the seed plus every intermediate accumulator.
  val input = (1, "2", 3)
  val scanned = input.scanLeft(1)(smear)
  typed[(Int, Int, Int, Int)](scanned)
  assertEquals((1, 2, 4, 7), scanned)
}
@Test
def testScanRight {
  // scanRight mirror of testScanLeft: intermediates accumulate from the right.
  val input = (1, "2", 3)
  val scanned = input.scanRight(1)(smear)
  typed[(Int, Int, Int, Int)](scanned)
  assertEquals((7, 6, 4, 1), scanned)
}
@Test
def testFill {
  // Tuple.fill(n)(v) builds an n-tuple of copies of v; the two-argument form
  // fill(n, m)(v) builds an n-tuple of m-tuples. Each case is exercised both
  // with an inferred and an explicitly supplied element type.
  {
    val empty = Tuple.fill(0)(true)
    typed[Unit](empty)
  }

  {
    val empty = Tuple.fill[Boolean](0)(true)
    typed[Unit](empty)
  }

  {
    val single = Tuple.fill(1)(None)
    typed[Tuple1[None.type]](single)
    assertEquals(Tuple1(None), single)
  }

  {
    val single = Tuple.fill[None.type](1)(None)
    typed[Tuple1[None.type]](single)
    assertEquals(Tuple1(None), single)
  }

  {
    val three = Tuple.fill(3)(m2i)
    typed[(M2[Int, Unit], M2[Int, Unit], M2[Int, Unit])](three)
    assertEquals((m2i, m2i, m2i), three)
  }

  {
    val three = Tuple.fill[M2[Int, Unit]](3)(m2i)
    typed[(M2[Int, Unit], M2[Int, Unit], M2[Int, Unit])](three)
    assertEquals((m2i, m2i, m2i), three)
  }

  // Two-dimensional fill: note the asymmetry between (2, 0) -> (Unit, Unit)
  // and (0, 2) -> Unit.
  {
    val empty = Tuple.fill(0, 0)(true)
    typed[Unit](empty)
  }

  {
    val empty = Tuple.fill[Boolean](0, 0)(true)
    typed[Unit](empty)
  }

  {
    val empty = Tuple.fill(2, 0)(true)
    typed[(Unit, Unit)](empty)
  }

  {
    val empty = Tuple.fill[Boolean](2, 0)(true)
    typed[(Unit, Unit)](empty)
  }

  {
    val empty = Tuple.fill(0, 2)(true)
    typed[Unit](empty)
  }

  {
    val empty = Tuple.fill[Boolean](0, 2)(true)
    typed[Unit](empty)
  }

  {
    val oneByTwo = Tuple.fill(1, 2)(None)
    typed[Tuple1[(None.type, None.type)]](oneByTwo)
    assertEquals(Tuple1((None, None)), oneByTwo)
  }

  {
    val oneByTwo = Tuple.fill[None.type](1, 2)(None)
    typed[Tuple1[(None.type, None.type)]](oneByTwo)
    assertEquals(Tuple1((None, None)), oneByTwo)
  }

  {
    val twoByThree = Tuple.fill(2, 3)(None)
    typed[((None.type, None.type, None.type), (None.type, None.type, None.type))](twoByThree)
    assertEquals(((None, None, None), (None, None, None)), twoByThree)
  }

  {
    val twoByThree = Tuple.fill[None.type](2, 3)(None)
    typed[((None.type, None.type, None.type), (None.type, None.type, None.type))](twoByThree)
    assertEquals(((None, None, None), (None, None, None)), twoByThree)
  }
}
@Test
def testPatch{
  // patch(i, sub, removed): splice `sub` in at position i, dropping `removed`
  // elements. Each case also checks the equivalent type-parameter form
  // patch[I, Removed](sub).
  val in = (1, "two", 3)

  { //single patch w/ nothing removed
    val out = in.patch(1, (4,5), 0)
    val out2 = in.patch[_1, _0]((4,5))

    typed[(Int, Int, Int, String, Int)](out)
    assertEquals((1, 4, 5, "two", 3), out)
    assertTypedEquals[(Int, Int, Int, String, Int)](out, out2)
  }

  { //single patch w/ 2 elements removed
    val out = in.patch(1, (3, 4), 2)
    val out2 = in.patch[_1,_2]((3,4))

    typed[(Int, Int, Int)](out)
    assertEquals((1, 3, 4), out)
    assertTypedEquals[(Int, Int, Int)](out, out2)
  }

  { //essentially append
    val out = in.patch(3, (4, 5, "six"), 0)
    val out2 = in.patch[_3,_0]((4, 5, "six"))

    typed[(Int, String, Int, Int, Int, String)](out)
    assertEquals((1, "two", 3, 4, 5, "six"), out)
    assertTypedEquals[(Int, String, Int, Int, Int, String)](out, out2)
  }

  { //several patched w/ everything from original removed
    val sub = (4, "five", "six")
    val out = in.patch(0, sub, 3)
    val out2 = in.patch[_0,_3]((4, "five", "six"))

    typed[(Int, String, String)](out)
    assertEquals(sub, out)
    assertTypedEquals[(Int, String, String)](out, out2)
  }
}
}
| roberth/shapeless | core/src/test/scala/shapeless/tuples.scala | Scala | apache-2.0 | 51,244 |
/*
* Copyright 2017 by Simba Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.spark.sql.simba
import java.util.concurrent.atomic.AtomicReference
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.internal.Logging
import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationEnd}
import org.apache.spark.sql.{Encoder, Row, SparkSession, DataFrame => SQLDataFrame, Dataset => SQLDataset}
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.execution.ui.SQLListener
import org.apache.spark.sql.internal.SessionState
import org.apache.spark.sql.internal.StaticSQLConf._
import org.apache.spark.sql.simba.index.IndexType
import org.apache.spark.util.Utils
import scala.language.implicitConversions
import scala.reflect.ClassTag
import scala.util.control.NonFatal
/**
* Created by dongx on 3/7/17.
*/
/**
 * A [[SparkSession]] specialisation that carries Simba's spatial session state
 * (index manager, Simba planner/optimizer wiring) and exposes index management
 * operations on registered tables.
 */
class SimbaSession private[simba] (@transient override val sparkContext: SparkContext)
  extends SparkSession(sparkContext) { self =>

  // Simba-specific session state replaces the stock SessionState; lazy so it
  // is built on first use, transient so it is never shipped with closures.
  @transient
  private[sql] override lazy val sessionState: SimbaSessionState = {
    new SimbaSessionState(this)
  }

  /** Returns true iff an index named `indexName` exists for `tableName`. */
  def hasIndex(tableName: String, indexName: String): Boolean = {
    sessionState.indexManager.lookupIndexedData(table(tableName), indexName).nonEmpty
  }

  /**
   * Builds an index of `indexType` called `indexName` over the given `column`s
   * of `tableName`.
   *
   * @throws AssertionError if the table cannot be resolved or a column name
   *                        does not appear in the table's analyzed output
   */
  def indexTable(tableName: String, indexType: IndexType,
                 indexName: String, column: Array[String]): Unit = {
    val tbl = table(tableName)
    assert(tbl != null, "Table not found")
    val attrs = tbl.queryExecution.analyzed.output
    // Resolve each requested column name against the analyzed output.
    // filter(...).lastOption preserves the original loop's last-match-wins
    // semantics in the (unlikely) case of duplicate attribute names.
    val columnKeys = column.map { attrName =>
      val resolved = attrs.filter(_.name == attrName).lastOption
      assert(resolved.isDefined, "Attribute not found")
      resolved.get
    }.toList
    sessionState.indexManager.createIndexQuery(table(tableName), indexType,
      indexName, columnKeys, Some(tableName))
  }

  /** Prints the indexes registered for `tableName`. */
  def showIndex(tableName: String): Unit = sessionState.indexManager.showQuery(tableName)

  /** Writes the index named `indexName` to `fileName` on disk. */
  def persistIndex(indexName: String, fileName: String): Unit =
    sessionState.indexManager.persistIndex(this, indexName, fileName)

  /** Loads a previously persisted index from `fileName` under `indexName`. */
  def loadIndex(indexName: String, fileName: String): Unit =
    sessionState.indexManager.loadIndex(this, indexName, fileName)

  /** Drops the index named `indexName` from `tableName`. */
  def dropIndexTableByName(tableName: String, indexName: String): Unit = {
    sessionState.indexManager.dropIndexByNameQuery(table(tableName), indexName)
  }

  /** Drops every index managed by this session. */
  def clearIndex(): Unit = sessionState.indexManager.clearIndex()

  /**
   * Implicit conversions from plain Spark Datasets/DataFrames to their Simba
   * counterparts; import `session.simbaImplicits._` to enable them.
   */
  object simbaImplicits extends Serializable {
    protected[simba] def _simbaContext: SparkSession = self

    implicit def datasetToSimbaDataSet[T : Encoder](ds: SQLDataset[T]): Dataset[T] =
      Dataset(self, ds.queryExecution.logical)

    implicit def dataframeToSimbaDataFrame(df: SQLDataFrame): DataFrame =
      Dataset.ofRows(self, df.queryExecution.logical)
  }
}
/**
 * Companion object: builder and default/active-session bookkeeping, closely
 * mirroring the upstream SparkSession companion.
 */
object SimbaSession {

  /**
   * Fluent builder for [[SimbaSession]]. Options set here are applied to the
   * session's conf; getOrCreate reuses an existing session when possible.
   */
  class Builder extends Logging {

    private[this] val options = new scala.collection.mutable.HashMap[String, String]

    // A caller-provided SparkContext takes precedence over creating one.
    private[this] var userSuppliedContext: Option[SparkContext] = None

    private[spark] def sparkContext(sparkContext: SparkContext): Builder = synchronized {
      userSuppliedContext = Option(sparkContext)
      this
    }

    def appName(name: String): Builder = config("spark.app.name", name)

    def config(key: String, value: String): Builder = synchronized {
      options += key -> value
      this
    }

    def config(key: String, value: Long): Builder = synchronized {
      options += key -> value.toString
      this
    }

    def config(key: String, value: Double): Builder = synchronized {
      options += key -> value.toString
      this
    }

    def config(key: String, value: Boolean): Builder = synchronized {
      options += key -> value.toString
      this
    }

    def config(conf: SparkConf): Builder = synchronized {
      conf.getAll.foreach { case (k, v) => options += k -> v }
      this
    }

    def master(master: String): Builder = config("spark.master", master)

    /**
     * Switches the catalog implementation to Hive; fails fast when the Hive
     * classes are not on the classpath.
     */
    def enableHiveSupport(): Builder = synchronized {
      if (hiveClassesArePresent) {
        config(CATALOG_IMPLEMENTATION.key, "hive")
      } else {
        throw new IllegalArgumentException(
          "Unable to instantiate SimbaSession with Hive support because " +
            "Hive classes are not found.")
      }
    }

    /**
     * Returns the thread-active session if alive, else the global default
     * session, else creates a new session (and SparkContext if needed).
     * Options are re-applied to a reused session's conf.
     */
    def getOrCreate(): SimbaSession = synchronized {
      // Get the session from current thread's active session.
      var session = activeThreadSession.get()
      if ((session ne null) && !session.sparkContext.isStopped) {
        options.foreach { case (k, v) => session.sessionState.setConf(k, v) }
        if (options.nonEmpty) {
          logWarning("Using an existing SimbaSession; some configuration may not take effect.")
        }
        return session
      }

      // Global synchronization so we will only set the default session once.
      SimbaSession.synchronized {
        // If the current thread does not have an active session, get it from the global session.
        session = defaultSession.get()
        if ((session ne null) && !session.sparkContext.isStopped) {
          options.foreach { case (k, v) => session.sessionState.setConf(k, v) }
          if (options.nonEmpty) {
            logWarning("Using an existing SimbaSession; some configuration may not take effect.")
          }
          return session
        }

        // No active nor global default session. Create a new one.
        val sparkContext = userSuppliedContext.getOrElse {
          // set app name if not given
          val randomAppName = java.util.UUID.randomUUID().toString
          val sparkConf = new SparkConf()
          options.foreach { case (k, v) => sparkConf.set(k, v) }
          if (!sparkConf.contains("spark.app.name")) {
            sparkConf.setAppName(randomAppName)
          }
          val sc = SparkContext.getOrCreate(sparkConf)
          // maybe this is an existing SparkContext, update its SparkConf which maybe used
          // by SimbaSession
          options.foreach { case (k, v) => sc.conf.set(k, v) }
          if (!sc.conf.contains("spark.app.name")) {
            sc.conf.setAppName(randomAppName)
          }
          sc
        }
        session = new SimbaSession(sparkContext)
        options.foreach { case (k, v) => session.sessionState.setConf(k, v) }
        defaultSession.set(session)

        // Register a successfully instantiated context to the singleton. This should be at the
        // end of the class definition so that the singleton is updated only if there is no
        // exception in the construction of the instance.
        sparkContext.addSparkListener(new SparkListener {
          override def onApplicationEnd(applicationEnd: SparkListenerApplicationEnd): Unit = {
            defaultSession.set(null)
            sqlListener.set(null)
          }
        })
      }

      return session
    }
  }

  def builder(): Builder = new Builder

  /** Binds `session` to the current thread (inherited by child threads). */
  def setActiveSession(session: SimbaSession): Unit = {
    activeThreadSession.set(session)
  }

  def clearActiveSession(): Unit = {
    activeThreadSession.remove()
  }

  /** Sets the process-wide default session used when no active one exists. */
  def setDefaultSession(session: SimbaSession): Unit = {
    defaultSession.set(session)
  }

  def clearDefaultSession(): Unit = {
    defaultSession.set(null)
  }

  private[sql] def getActiveSession: Option[SimbaSession] = Option(activeThreadSession.get)

  private[sql] def getDefaultSession: Option[SimbaSession] = Option(defaultSession.get)

  // Shared SQL UI listener; cleared when the application ends.
  private[sql] val sqlListener = new AtomicReference[SQLListener]()

  private val activeThreadSession = new InheritableThreadLocal[SimbaSession]

  private val defaultSession = new AtomicReference[SimbaSession]

  private val HIVE_SESSION_STATE_CLASS_NAME = "org.apache.spark.sql.hive.HiveSessionState"

  // NOTE(review): appears unused within this file; presumably retained for
  // Hive session-state wiring via `reflect` — confirm before removing.
  private def sessionStateClassName(conf: SparkConf): String = {
    conf.get(CATALOG_IMPLEMENTATION) match {
      case "hive" => HIVE_SESSION_STATE_CLASS_NAME
      case "in-memory" => classOf[SessionState].getCanonicalName
    }
  }

  // Reflectively instantiates `className` via its single-argument constructor.
  // NOTE(review): also appears unused here — likely paired with
  // sessionStateClassName above; confirm before removing.
  private def reflect[T, Arg <: AnyRef](
      className: String,
      ctorArg: Arg)(implicit ctorArgTag: ClassTag[Arg]): T = {
    try {
      val clazz = Utils.classForName(className)
      val ctor = clazz.getDeclaredConstructor(ctorArgTag.runtimeClass)
      ctor.newInstance(ctorArg).asInstanceOf[T]
    } catch {
      case NonFatal(e) =>
        throw new IllegalArgumentException(s"Error while instantiating '$className':", e)
    }
  }

  /** True iff Hive's session-state and conf classes are loadable. */
  private[spark] def hiveClassesArePresent: Boolean = {
    try {
      Utils.classForName(HIVE_SESSION_STATE_CLASS_NAME)
      Utils.classForName("org.apache.hadoop.hive.conf.HiveConf")
      true
    } catch {
      case _: ClassNotFoundException | _: NoClassDefFoundError => false
    }
  }
}
| InitialDLab/Simba | src/main/scala/org/apache/spark/sql/simba/SimbaSession.scala | Scala | apache-2.0 | 9,378 |
/*
* Copyright (c) 2013-2016 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0, and
* you may not use this file except in compliance with the Apache License
* Version 2.0. You may obtain a copy of the Apache License Version 2.0 at
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the Apache License Version 2.0 is distributed on an "AS
* IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the Apache License Version 2.0 for the specific language
* governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow
package collectors
package scalastream
// Scala
import com.snowplowanalytics.snowplow.CollectorPayload
import com.snowplowanalytics.snowplow.collectors.scalastream.sinks.TestSink
import spray.http.{HttpCookie, HttpHeader}
import scala.collection.mutable.MutableList
// Akka
import akka.actor.{ActorSystem, Props}
// Specs2 and Spray testing
import org.specs2.matcher.AnyMatchers
import org.specs2.mutable.Specification
import org.specs2.specification.{Scope,Fragments}
import spray.testkit.Specs2RouteTest
// Spray
import spray.http.{DateTime,HttpHeader,HttpRequest,HttpCookie,RemoteAddress}
import spray.http.HttpHeaders.{
Cookie,
`Set-Cookie`,
`Remote-Address`,
`Raw-Request-URI`
}
// Config
import com.typesafe.config.{ConfigFactory,Config,ConfigException}
// Thrift
import org.apache.thrift.TDeserializer
// Snowplow
import sinks._
import CollectorPayload.thrift.model1.CollectorPayload
// Route-level tests for the Scala Stream Collector. Each example drives
// `collectorService.collectorRoute` through spray-testkit and inspects either
// the HTTP response (pixel body, cookies, P3P header) or the serialized
// Thrift payload written to the in-memory test sink.
class CollectorServiceSpec extends Specification with Specs2RouteTest with
AnyMatchers {

  // Minimal collector configuration wired to the in-memory "test" sink.
  val testConf: Config = ConfigFactory.parseString("""
collector {
  interface = "0.0.0.0"
  port = 8080

  production = true

  p3p {
    policyref = "/w3c/p3p.xml"
    CP = "NOI DSP COR NID PSA OUR IND COM NAV STA"
  }

  cookie {
    enabled = true
    expiration = 365 days
    name = sp
    domain = "test-domain.com"
    Kinesis-Part-Key = "666"
  }

  sink {
    enabled = "test"

    kinesis {
      aws {
        access-key: "cpf"
        secret-key: "cpf"
      }
      stream {
        region: "us-east-1"
        good: "snowplow_collector_example"
        bad: "snowplow_collector_example"
      }
      backoffPolicy {
        minBackoff: 3000 # 3 seconds
        maxBackoff: 600000 # 5 minutes
      }
    }

    kafka {
      brokers: "localhost:9092"
      topic {
        good: "good-topic"
        bad: "bad-topic"
      }
    }

    buffer {
      byte-limit: 4000000 # 4MB
      record-limit: 500 # 500 records
      time-limit: 60000 # 1 minute
    }
  }
}
""")
  val collectorConfig = new CollectorConfig(testConf)
  val sink = new TestSink
  // The same test sink serves as both the "good" and "bad" stream.
  val sinks = CollectorSinks(sink, sink)
  val responseHandler = new ResponseHandler(collectorConfig, sinks)
  val collectorService = new CollectorService(collectorConfig, responseHandler, system)
  val thriftDeserializer = new TDeserializer

  // By default, spray will always add Remote-Address to every request
  // when running with the `spray.can.server.remote-address-header`
  // option. However, the testing does not read this option and a
  // remote address always needs to be set.
  def CollectorGet(uri: String, cookie: Option[`HttpCookie`] = None,
      remoteAddr: String = "127.0.0.1") = {
    val headers: MutableList[HttpHeader] =
      MutableList(`Remote-Address`(remoteAddr),`Raw-Request-URI`(uri))
    cookie.foreach(headers += `Cookie`(_))
    Get(uri).withHeaders(headers.toList)
  }

  "Snowplow's Scala collector" should {
    "return an invisible pixel" in {
      CollectorGet("/i") ~> collectorService.collectorRoute ~> check {
        responseAs[Array[Byte]] === ResponseHandler.pixel
      }
    }

    "return a cookie expiring at the correct time" in {
      CollectorGet("/i") ~> collectorService.collectorRoute ~> check {
        headers must not be empty

        val httpCookies: List[HttpCookie] = headers.collect {
          case `Set-Cookie`(hc) => hc
        }
        httpCookies must not be empty

        // Assume we only return a single cookie.
        // If the collector is modified to return multiple cookies,
        // this will need to be changed.
        val httpCookie = httpCookies(0)

        httpCookie.name must beEqualTo(collectorConfig.cookieName.get)
        httpCookie.name must beEqualTo("sp")
        httpCookie.path must beSome("/")

        httpCookie.domain must beSome
        httpCookie.domain.get must be(collectorConfig.cookieDomain.get)
        httpCookie.expires must beSome
        // Freshly generated network user id must be a UUID.
        httpCookie.content.matches("""[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}""")

        val expiration = httpCookie.expires.get
        val offset = expiration.clicks - collectorConfig.cookieExpiration.get - DateTime.now.clicks
        offset.asInstanceOf[Int] must beCloseTo(0, 2000) // 2000 ms window.
      }
    }

    "return a cookie containing nuid query parameter" in {
      CollectorGet("/i?nuid=UUID_Test_New") ~> collectorService.collectorRoute ~> check {
        headers must not be empty

        val httpCookies: List[HttpCookie] = headers.collect {
          case `Set-Cookie`(hc) => hc
        }
        httpCookies must not be empty

        // Assume we only return a single cookie.
        // If the collector is modified to return multiple cookies,
        // this will need to be changed.
        val httpCookie = httpCookies(0)

        httpCookie.name must beEqualTo(collectorConfig.cookieName.get)
        httpCookie.name must beEqualTo("sp")
        httpCookie.path must beSome("/")

        httpCookie.domain must beSome
        httpCookie.domain.get must be(collectorConfig.cookieDomain.get)
        httpCookie.expires must beSome
        // The nuid query parameter overrides the generated network user id.
        httpCookie.content must beEqualTo("UUID_Test_New")

        val expiration = httpCookie.expires.get
        val offset = expiration.clicks - collectorConfig.cookieExpiration.get - DateTime.now.clicks
        offset.asInstanceOf[Int] must beCloseTo(0, 3600000) // 1 hour window.
      }
    }

    "return the same cookie as passed in" in {
      CollectorGet("/i", Some(HttpCookie(collectorConfig.cookieName.get, "UUID_Test"))) ~>
          collectorService.collectorRoute ~> check {
        val httpCookies: List[HttpCookie] = headers.collect {
          case `Set-Cookie`(hc) => hc
        }

        // Assume we only return a single cookie.
        // If the collector is modified to return multiple cookies,
        // this will need to be changed.
        val httpCookie = httpCookies(0)

        httpCookie.content must beEqualTo("UUID_Test")
      }
    }

    "override cookie with nuid parameter" in {
      CollectorGet("/i?nuid=UUID_Test_New", Some(HttpCookie("sp", "UUID_Test"))) ~>
          collectorService.collectorRoute ~> check {
        val httpCookies: List[HttpCookie] = headers.collect {
          case `Set-Cookie`(hc) => hc
        }

        // Assume we only return a single cookie.
        // If the collector is modified to return multiple cookies,
        // this will need to be changed.
        val httpCookie = httpCookies(0)

        httpCookie.content must beEqualTo("UUID_Test_New")
      }
    }

    "return a P3P header" in {
      CollectorGet("/i") ~> collectorService.collectorRoute ~> check {
        val p3pHeaders = headers.filter {
          h => h.name.equals("P3P")
        }
        p3pHeaders.size must beEqualTo(1)

        val p3pHeader = p3pHeaders(0)

        val policyRef = collectorConfig.p3pPolicyRef
        val CP = collectorConfig.p3pCP
        p3pHeader.value must beEqualTo(
          "policyref=\"%s\", CP=\"%s\"".format(policyRef, CP))
      }
    }

    "store the expected event as a serialized Thrift object in the enabled sink" in {
      val payloadData = "param1=val1&param2=val2"
      val storedRecordBytes = responseHandler.cookie(payloadData, null, None,
        None, "localhost", RemoteAddress("127.0.0.1"), new HttpRequest(), None, "/i", true, null)._2

      val storedEvent = new CollectorPayload
      // TDeserializer is not thread-safe, hence the synchronization.
      this.synchronized {
        thriftDeserializer.deserialize(storedEvent, storedRecordBytes.head)
      }

      storedEvent.timestamp must beCloseTo(DateTime.now.clicks, 60000)
      storedEvent.encoding must beEqualTo("UTF-8")
      storedEvent.ipAddress must beEqualTo("127.0.0.1")
      storedEvent.collector must beEqualTo("ssc-0.9.0-test")
      storedEvent.path must beEqualTo("/i")
      storedEvent.querystring must beEqualTo(payloadData)
    }

    "report itself as healthy" in {
      CollectorGet("/health") ~> collectorService.collectorRoute ~> check {
        response.status must beEqualTo(spray.http.StatusCodes.OK)
      }
    }
  }
}
| atotech/snowplow | 2-collectors/scala-stream-collector/src/test/scala/com.snowplowanalytics.snowplow.collectors.scalastream/CollectorServiceSpec.scala | Scala | apache-2.0 | 8,776 |
package object slickless extends HListShapeImplicits
| underscoreio/slickless | src/main/scala/slickless/package.scala | Scala | apache-2.0 | 53 |
package org.jetbrains.plugins.hocon.highlight
import com.intellij.icons.AllIcons
import com.intellij.openapi.fileTypes.SyntaxHighlighterFactory
import com.intellij.openapi.options.colors.{AttributesDescriptor, ColorSettingsPage}
import org.jetbrains.plugins.hocon.highlight.{HoconHighlighterColors => HHC}
import org.jetbrains.plugins.hocon.lang.HoconLanguage
import scala.collection.JavaConverters._
/**
 * "Colors &amp; Fonts" settings page for HOCON. Supplies the demo snippet shown
 * in the preview pane; `<tag>...</tag>` markers in the demo text are mapped to
 * highlighting attributes via [[getAdditionalHighlightingTagToDescriptorMap]].
 *
 * Fix: the "double slash comment" demo line previously ended with an opening
 * `<doubleslashcomment>` tag instead of the closing `</doubleslashcomment>`,
 * leaving the tag unclosed in the preview.
 */
class HoconColorSettingsPage extends ColorSettingsPage {
  def getIcon =
    AllIcons.FileTypes.Config

  // Sample HOCON rendered in the preview; each tag corresponds to an entry in
  // getAdditionalHighlightingTagToDescriptorMap.
  def getDemoText =
    s"""<hashcomment># hash comment</hashcomment>
       |<doubleslashcomment>// double slash comment</doubleslashcomment>
       |
       |<include>include</include> <inclmod>classpath</inclmod><imparens>(</imparens><quotedstring>"included.conf"</quotedstring><imparens>)</imparens>
       |
       |<key>object</key><dot>.</dot><key>subobject</key> <braces>{</braces>
       |  <key>someList</key> <pathvalueseparator>=</pathvalueseparator> <brackets>[</brackets>
       |    <null>null</null><comma>,</comma>
       |    <boolean>true</boolean><comma>,</comma>
       |    <number>123.4e5</number><comma>,</comma>
       |    <unquotedstring>unquoted string </unquotedstring><badchar>*</badchar><comma>,</comma>
       |    <quotedstring>"quo</quotedstring><validstringescape>\\n</validstringescape><quotedstring>ted</quotedstring><invalidstringescape>\\d</invalidstringescape><quotedstring> string"</quotedstring><comma>,</comma>
       |    <substsign>$$</substsign><substbraces>{</substbraces><optsubstsign>?</optsubstsign><substkey>substitution</substkey><dot>.</dot><substkey>inner</substkey><substbraces>}</substbraces><comma>,</comma>
       |    <multilinestring>${"\"\"\""}multiline\\n multiline${"\"\"\""}</multilinestring>
       |  <brackets>]</brackets>
       |<braces>}</braces>
       |""".stripMargin.trim

  // Maps demo-text tag names to their highlighting attribute keys (Java map for
  // the platform API).
  def getAdditionalHighlightingTagToDescriptorMap = Map(
    "badchar" -> HHC.BadCharacter,
    "hashcomment" -> HHC.HashComment,
    "doubleslashcomment" -> HHC.DoubleSlashComment,
    "null" -> HHC.Null,
    "boolean" -> HHC.Boolean,
    "number" -> HHC.Number,
    "quotedstring" -> HHC.QuotedString,
    "multilinestring" -> HHC.MultilineString,
    "validstringescape" -> HHC.ValidStringEscape,
    "invalidstringescape" -> HHC.InvalidStringEscape,
    "brackets" -> HHC.Brackets,
    "braces" -> HHC.Braces,
    "imparens" -> HHC.IncludeModifierParens,
    "substbraces" -> HHC.SubBraces,
    "pathvalueseparator" -> HHC.PathValueSeparator,
    "comma" -> HHC.Comma,
    "include" -> HHC.Include,
    "inclmod" -> HHC.IncludeModifier,
    "substsign" -> HHC.SubstitutionSign,
    "optsubstsign" -> HHC.OptionalSubstitutionSign,
    "unquotedstring" -> HHC.UnquotedString,
    "dot" -> HHC.PathSeparator,
    "key" -> HHC.FieldKey,
    "substkey" -> HHC.SubstitutionKey
  ).asJava

  def getHighlighter =
    SyntaxHighlighterFactory.getSyntaxHighlighter(HoconLanguage, null, null)

  def getDisplayName =
    "HOCON"

  def getColorDescriptors =
    Array.empty

  def getAttributeDescriptors =
    HoconColorSettingsPage.Attrs
}
object HoconColorSettingsPage {
  // Display-name -> attribute-key pairs shown in the settings UI, wrapped into
  // AttributesDescriptor instances for the platform API.
  final val Attrs = Array(
    "Bad character" -> HHC.BadCharacter,
    "Hash comment" -> HHC.HashComment,
    "Double slash comment" -> HHC.DoubleSlashComment,
    "Null" -> HHC.Null,
    "Boolean" -> HHC.Boolean,
    "Number" -> HHC.Number,
    "Quoted string" -> HHC.QuotedString,
    "Multiline string" -> HHC.MultilineString,
    "Valid string escape" -> HHC.ValidStringEscape,
    "Invalid string escape" -> HHC.InvalidStringEscape,
    "Brackets" -> HHC.Brackets,
    "Braces" -> HHC.Braces,
    "Include modifier parens" -> HHC.IncludeModifierParens,
    "Substitution braces" -> HHC.SubBraces,
    "Path-value separator ('=', ':', '+=')" -> HHC.PathValueSeparator,
    "Comma" -> HHC.Comma,
    "Include keyword" -> HHC.Include,
    "Include modifier" -> HHC.IncludeModifier,
    "Substitution sign" -> HHC.SubstitutionSign,
    "Optional substitution sign" -> HHC.OptionalSubstitutionSign,
    "Unquoted string" -> HHC.UnquotedString,
    "Path separator" -> HHC.PathSeparator,
    "Key" -> HHC.FieldKey,
    "Key in substitution" -> HHC.SubstitutionKey
  ).map({
    case (displayName, key) => new AttributesDescriptor(displayName, key)
  })
}
| triggerNZ/intellij-scala | src/org/jetbrains/plugins/hocon/highlight/HoconColorSettingsPage.scala | Scala | apache-2.0 | 4,280 |
// Databricks notebook source exported at Sun, 19 Jun 2016 03:03:29 UTC
// MAGIC %md
// MAGIC
// MAGIC # [Scalable Data Science](http://www.math.canterbury.ac.nz/~r.sainudiin/courses/ScalableDataScience/)
// MAGIC
// MAGIC
// MAGIC ### prepared by [Raazesh Sainudiin](https://nz.linkedin.com/in/raazesh-sainudiin-45955845) and [Sivanand Sivaram](https://www.linkedin.com/in/sivanand)
// MAGIC
// MAGIC *supported by* [](https://databricks.com/)
// MAGIC and
// MAGIC [](https://www.awseducate.com/microsite/CommunitiesEngageHome)
// COMMAND ----------
// MAGIC %md
// MAGIC This is an elaboration of the [Apache Spark 1.6 mllib-progamming-guide on mllib-data-types](http://spark.apache.org/docs/latest/mllib-data-types.html).
// MAGIC
// MAGIC # [Overview](/#workspace/scalable-data-science/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/000_MLlibProgGuide)
// MAGIC
// MAGIC ## [Data Types - MLlib Programming Guide](/#workspace/scalable-data-science/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/dataTypes/000_dataTypesProgGuide)
// MAGIC
// MAGIC - [Local vector](/#workspace/scalable-data-science/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/dataTypes/001_LocalVector) and [URL](http://spark.apache.org/docs/latest/mllib-data-types.html#local-vector)
// MAGIC - [Labeled point](/#workspace/scalable-data-science/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/dataTypes/002_LabeledPoint) and [URL](http://spark.apache.org/docs/latest/mllib-data-types.html#labeled-point)
// MAGIC - [Local matrix](/#workspace/scalable-data-science/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/dataTypes/003_LocalMatrix) and [URL](http://spark.apache.org/docs/latest/mllib-data-types.html#local-matrix)
// MAGIC - [Distributed matrix](/#workspace/scalable-data-science/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/dataTypes/004_DistributedMatrix) and [URL](http://spark.apache.org/docs/latest/mllib-data-types.html#distributed-matrix)
// MAGIC - [RowMatrix](/#workspace/scalable-data-science/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/dataTypes/005_RowMatrix) and [URL](http://spark.apache.org/docs/latest/mllib-data-types.html#rowmatrix)
// MAGIC - [IndexedRowMatrix](/#workspace/scalable-data-science/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/dataTypes/006_IndexedRowMatrix) and [URL](http://spark.apache.org/docs/latest/mllib-data-types.html#indexedrowmatrix)
// MAGIC - [CoordinateMatrix](/#workspace/scalable-data-science/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/dataTypes/007_CoordinateMatrix) and [URL](http://spark.apache.org/docs/latest/mllib-data-types.html#coordinatematrix)
// MAGIC - [BlockMatrix](/#workspace/scalable-data-science/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/dataTypes/008_BlockMatrix) and [URL](http://spark.apache.org/docs/latest/mllib-data-types.html#blockmatrix)
// MAGIC
// MAGIC MLlib supports local vectors and matrices stored on a single machine, as
// MAGIC well as distributed matrices backed by one or more RDDs. Local vectors
// MAGIC and local matrices are simple data models that serve as public
// MAGIC interfaces. The underlying linear algebra operations are provided by
// MAGIC [Breeze](http://www.scalanlp.org/) and [jblas](http://jblas.org/). A
// MAGIC training example used in supervised learning is called a “labeled point”
// MAGIC in MLlib.
// COMMAND ----------
// MAGIC %md
// MAGIC Distributed matrix in Scala
// MAGIC ------------------
// MAGIC
// MAGIC A distributed matrix has **long-typed row and column indices** and
// MAGIC **double-typed values**, stored distributively in one or more RDDs.
// MAGIC
// MAGIC It is **very important to choose the right format to store large and distributed
// MAGIC matrices**. Converting a distributed matrix to a different format may
// MAGIC require a global shuffle, which is quite expensive.
// MAGIC
// MAGIC Three types of distributed matrices have been implemented so far.
// MAGIC
// MAGIC 1. The basic type is called `RowMatrix`.
// MAGIC * A `RowMatrix` is a row-oriented distributed matrix without meaningful row indices, e.g., a collection of feature vectors.
// MAGIC It is backed by an RDD of its rows, where each row is a local vector.
// MAGIC * We assume that the number of columns is not huge for a `RowMatrix` so that a single local vector can be reasonably communicated to the driver and can also be stored / operated on using a single node.
// MAGIC * An `IndexedRowMatrix` is similar to a `RowMatrix` but with row indices, which can be used for identifying rows and executing joins.
// MAGIC * A `CoordinateMatrix` is a distributed matrix stored in [coordinate list (COO)](https://en.wikipedia.org/wiki/Sparse_matrix#Coordinate_list_.28COO.29) format, backed by an RDD of its entries.
// MAGIC
// MAGIC ***Note***
// MAGIC
// MAGIC The underlying RDDs of a distributed matrix must be deterministic,
// MAGIC because we cache the matrix size. In general the use of
// MAGIC non-deterministic RDDs can lead to errors.
// COMMAND ----------
// MAGIC %md
// MAGIC ***Remark:*** there is a huge difference in the orders of magnitude between the maximum size of local versus distributed matrices!
// COMMAND ----------
print(Long.MaxValue.toDouble, Int.MaxValue.toDouble, Long.MaxValue.toDouble / Int.MaxValue.toDouble) // index ranges and ratio for local and distributed matrices
// COMMAND ----------
// MAGIC %md
// MAGIC
// MAGIC # [Scalable Data Science](http://www.math.canterbury.ac.nz/~r.sainudiin/courses/ScalableDataScience/)
// MAGIC
// MAGIC
// MAGIC ### prepared by [Raazesh Sainudiin](https://nz.linkedin.com/in/raazesh-sainudiin-45955845) and [Sivanand Sivaram](https://www.linkedin.com/in/sivanand)
// MAGIC
// MAGIC *supported by* [](https://databricks.com/)
// MAGIC and
// MAGIC [](https://www.awseducate.com/microsite/CommunitiesEngageHome) | raazesh-sainudiin/scalable-data-science | db/xtraResources/ProgGuides1_6/MLlibProgrammingGuide/dataTypes/004_DistributedMatrix.scala | Scala | unlicense | 6,364 |
import sbt._
import Keys._
import play.Project._
// sbt build definition (pre-0.13.13 `Build` trait style) for the Play app.
object ApplicationBuild extends Build {

  val appName = "serversents"
  val appVersion = "1.0-SNAPSHOT"

  // Play framework module dependencies (core Java API, JDBC, Ebean ORM).
  val appDependencies = Seq(
    // Add your project dependencies here,
    javaCore,
    javaJdbc,
    javaEbean
  )

  // Root Play project; additional sbt settings go into .settings(...).
  val main = play.Project(appName, appVersion, appDependencies).settings(
    // Add your own project settings here
  )
}
| spamoc/SSETest | server/project/Build.scala | Scala | mit | 417 |
package views.html.user.show
import controllers.routes
import lila.api.Context
import lila.app.templating.Environment._
import lila.app.ui.ScalatagsTemplate._
import lila.common.paginator.Paginator
import lila.game.{ Game, Pov }
import lila.user.User
// Renders the games tab of a user profile page: the filter menu, an optional
// head-to-head crosstable, and either search results or the (possibly
// "now playing") game list with infinite-scroll pagination.
object gamesContent {

  /**
   * @param u          profile owner
   * @param nbs        per-filter game counts used in the menu labels
   * @param pager      current page of games for the selected filter
   * @param filters    available game filters (menu entries)
   * @param filterName name of the active filter ("search", "playing", "me", ...)
   * @param notes      personal notes keyed by game id, passed to game widgets
   */
  def apply(
    u: User,
    nbs: lila.app.mashup.UserInfo.NbGames,
    pager: Paginator[Game],
    filters: lila.app.mashup.GameFilterMenu,
    filterName: String,
    notes: Map[Game.ID, String]
  )(implicit ctx: Context) =
    frag(
      // Filter tab menu; the active filter gets the "active" CSS class.
      div(cls := "number-menu number-menu--tabs menu-box-pop", id := "games")(
        filters.list.map { f =>
          a(
            cls := s"nm-item to-${f.name}${(filters.current == f) ?? " active"}",
            href := routes.User.games(u.username, f.name)
          )(userGameFilterTitle(u, nbs, f))
        }
      ),
      // Head-to-head crosstable, only on the "me" (games versus viewer) filter.
      nbs.crosstable.ifTrue(filters.current.name == "me").map {
        views.html.game.crosstable(_, none)
      },
      div(cls := "search__result")(
        if (filterName == "search") {
          if (pager.nbResults > 0)
            frag(
              div(cls := "search__status")(
                strong(pager.nbResults.localize, " games found")
              ),
              div(cls := "search__rows infinite-scroll")(
                views.html.game.widgets(pager.currentPageResults, notes, user = u.some, ownerLink = ctx is u),
                pagerNext(pager, np => routes.User.games(u.username, filterName, np).url)
              )
            )
          else
            div(cls := "search__status")(strong("No game found"))
        } else
          div(
            cls := List(
              "games infinite-scroll" -> true,
              // Compact mini-board layout when showing 3+ ongoing games.
              "now-playing center" -> (filterName == "playing" && pager.nbResults > 2)
            )
          )(
            if (filterName == "playing" && pager.nbResults > 2)
              // Ongoing games render as live mini boards from the owner's side.
              pager.currentPageResults.flatMap { Pov(_, u) }.map { pov =>
                views.html.game.mini(pov)(ctx)(cls := "paginated")
              }
            else
              views.html.game.widgets(pager.currentPageResults, notes, user = u.some, ownerLink = ctx is u),
            pagerNext(pager, np => routes.User.games(u.username, filterName, np).url)
          )
      )
    )
}
| luanlv/lila | app/views/user/show/gamesContent.scala | Scala | mit | 2,273 |
package com.arcusys.learn.liferay.update.version260
import com.arcusys.learn.liferay.LiferayClasses._
import com.arcusys.learn.liferay.services.{CompanyLocalServiceHelper, UserLocalServiceHelper}
import com.arcusys.learn.liferay.util.PortalUtilHelper
import com.arcusys.valamis.lrs.LrsType
import com.arcusys.learn.liferay.update.version260.lrs.ActorsSchema
import com.arcusys.learn.liferay.update.version260.lrs.AccountsSchema
import slick.jdbc.JdbcBackend
import slick.jdbc.meta.MTable
import com.arcusys.valamis.persistence.common.{DatabaseLayer, SlickDBInfo}
import com.arcusys.valamis.web.configuration.ioc.Configuration
import scala.collection.JavaConverters._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Await
import scala.concurrent.duration.Duration
/**
 * Liferay upgrade step 2507: for every LRS actor identified by an mbox email,
 * resolves the matching portal user and rewrites the actor to reference an
 * account (homePage = company host name, name = user UUID) instead of the
 * raw email address.
 */
class DBUpdater2507(dbInfo: SlickDBInfo) extends LUpgradeProcess
  with ActorsSchema
  with AccountsSchema {

  override def getThreshold = 2507

  // No-arg constructor used by the Liferay upgrade framework.
  def this() = this(Configuration.inject[SlickDBInfo](None))

  lazy val db = dbInfo.databaseDef
  val driver = dbInfo.slickProfile

  import driver.simple._

  override def doUpgrade(): Unit = {
    val lrsType = LrsType.Simple // NOTE(review): unused — presumably leftover; confirm before removing.
    val tableActorsName = "lrs_actors"
    val tableAccountsName = "lrs_accounts"

    // Only migrate when both LRS tables actually exist in this schema.
    val hasTables = Await.result(db.run {
      for {
        actorsTable <- MTable.getTables(tableActorsName).headOption
        accountsTable <- MTable.getTables(tableAccountsName).headOption
      } yield actorsTable.isDefined && accountsTable.isDefined
    }, Duration.Inf)

    db.withTransaction { implicit s =>
      if (hasTables) {
        val companies = CompanyLocalServiceHelper.getCompanies
        // Actors with a non-empty mbox; a._3 is the mbox column value.
        // NOTE(review): a._3.get assumes mBox is defined for every row passing
        // the filter — verify the filter excludes NULL as well as "".
        val actorsWithEmail = actors.filterNot(a => a.mBox === "").list
        actorsWithEmail.foreach { a =>
          updateActor(companies.asScala.toList, a._3.get)
        }
      }
    }
  }

  /**
   * Re-points all actors whose mbox equals `email` at an account record,
   * clearing the mbox column. Prefers the default portal company when the
   * email resolves to users in several companies; creates the account row
   * if it does not exist yet.
   */
  def updateActor(companies: List[LCompany], email: String)(implicit session: JdbcBackend#SessionDef) = {
    val companyForUser = getCompaniesForUser(companies, email)
    // (homePage, name) pair for the account: company host name + user UUID.
    val account = companyForUser
      .find { case (company, user) => company.getCompanyId == PortalUtilHelper.getDefaultCompanyId }
      .orElse(companyForUser.headOption)
      .map { case (company, user) => (PortalUtilHelper.getHostName(company.getCompanyId), user.getUuid) }

    for (a <- account) {
      val idAccount = accounts
        .filter { r => r.name === a._2 && r.homePage === a._1 }
        .map { r => r.key }
        .firstOption

      if (idAccount.isDefined) {
        // Link the actor to the existing account and null out the mbox.
        // NOTE(review): `null` is written into an (Option-typed?) column via
        // Slick — confirm the column is nullable and this maps to SQL NULL.
        actors.filter(a => a.mBox === email).map(a => (a.accountKey, a.mBox)).update((idAccount, null))
      } else {
        // Create the account first, then link the actor to it.
        val newAccount = (Some(a._2), Some(a._1))
        val accountId = (accounts returning accounts.map(_.key)) += newAccount

        actors
          .filter(a => a.mBox === email).map(a => (a.accountKey, a.mBox))
          .update((Some(accountId), null))
      }
    }
  }

  /** All (company, user) pairs in which `email` (with any "mailto:" prefix stripped) is a registered user. */
  def getCompaniesForUser(companies: List[LCompany], email: String): Seq[(LCompany, LUser)] = {
    val mailPrefix = "mailto:"
    companies.flatMap { company =>
      val emailValamis = email.replace(mailPrefix, "")
      Option(UserLocalServiceHelper().fetchUserByEmailAddress(company.getCompanyId, emailValamis))
        .map(user => (company, user))
    }
  }
}
| igor-borisov/valamis | learn-portlet/src/main/scala/com/arcusys/learn/liferay/update/version260/DBUpdater2507.scala | Scala | gpl-3.0 | 3,285 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.database
import java.security.MessageDigest
import akka.event.Logging.ErrorLevel
import akka.stream.SinkShape
import akka.stream.scaladsl.{Broadcast, Flow, GraphDSL, Keep, Sink}
import akka.util.ByteString
import spray.json.DefaultJsonProtocol._
import spray.json.{JsObject, JsValue, RootJsonFormat}
import org.apache.openwhisk.common.{Logging, StartMarker, TransactionId}
import org.apache.openwhisk.core.entity.{DocInfo, DocRevision, DocumentReader, WhiskDocument}
import scala.concurrent.{ExecutionContext, Future}
// Shared helpers for ArtifactStore implementations: failure logging, document
// (de)serialization, and akka-streams sinks for attachment upload bookkeeping
// (SHA-256 digest + byte length computed alongside the actual upload).
private[database] object StoreUtils {
  private val digestAlgo = "SHA-256"
  // Algorithm name as embedded in encoded digest strings, e.g. "sha256".
  private val encodedAlgoName = digestAlgo.toLowerCase.replaceAllLiterally("-", "")

  /**
   * Logs any failure of `f` against the transaction marker — except
   * intentional ArtifactStoreExceptions — and returns `f` unchanged.
   */
  def reportFailure[T](f: Future[T], start: StartMarker, failureMessage: Throwable => String)(
    implicit transid: TransactionId,
    logging: Logging,
    ec: ExecutionContext): Future[T] = {
    f.failed.foreach {
      case _: ArtifactStoreException => // These failures are intentional and shouldn't trigger the catcher.
      case x =>
        transid.failed(this, start, s"${failureMessage(x)} [${x.getClass.getSimpleName}]", ErrorLevel)
    }
    f
  }

  /** Precondition check: the DocInfo must carry a concrete revision. */
  def checkDocHasRevision(doc: DocInfo): Unit = {
    require(doc != null, "doc undefined")
    require(doc.rev.rev != null, "doc revision must be specified")
  }

  /**
   * Deserializes the JSON `js` into the expected document type `A`, first via
   * the custom DocumentReader and falling back to the plain json format, then
   * verifies the runtime type and stores the response's `_rev` on the document.
   */
  def deserialize[A <: DocumentAbstraction, DocumentAbstraction](doc: DocInfo, js: JsObject)(
    implicit docReader: DocumentReader,
    ma: Manifest[A],
    jsonFormat: RootJsonFormat[DocumentAbstraction]): A = {
    val asFormat = try {
      docReader.read(ma, js)
    } catch {
      case _: Exception => jsonFormat.read(js)
    }

    if (asFormat.getClass != ma.runtimeClass) {
      throw DocumentTypeMismatchException(
        s"document type ${asFormat.getClass} did not match expected type ${ma.runtimeClass}.")
    }

    val deserialized = asFormat.asInstanceOf[A]

    val responseRev = js.fields("_rev").convertTo[String]
    assert(doc.rev.rev == null || doc.rev.rev == responseRev, "Returned revision should match original argument")
    // FIXME remove mutability from appropriate classes now that it is no longer required by GSON.
    deserialized.asInstanceOf[WhiskDocument].revision(DocRevision(responseRev))
  }

  /**
   * Wraps `dest` so that every ByteString flowing into it is also fed to a
   * SHA-256 digest sink and a byte-length sink; the materialized value
   * combines digest, length, and `dest`'s own result.
   */
  def combinedSink[T](dest: Sink[ByteString, Future[T]])(
    implicit ec: ExecutionContext): Sink[ByteString, Future[AttachmentUploadResult[T]]] = {
    Sink.fromGraph(GraphDSL.create(digestSink(), lengthSink(), dest)(combineResult) {
      implicit builder => (dgs, ls, dests) =>
        import GraphDSL.Implicits._
        // Fan the incoming bytes out to digest, length, and destination sinks.
        val bcast = builder.add(Broadcast[ByteString](3))

        bcast ~> dgs.in
        bcast ~> ls.in
        bcast ~> dests.in
        SinkShape(bcast.in)
    })
  }

  /** Fresh, empty SHA-256 digest instance. */
  def emptyDigest(): MessageDigest = MessageDigest.getInstance(digestAlgo)

  /** Encodes raw digest bytes as "sha256-<hex>". */
  def encodeDigest(bytes: Array[Byte]): String = {
    val digest = bytes.map("%02x".format(_)).mkString
    s"$encodedAlgoName-$digest"
  }

  /**
   * Transforms a json object by adding and removing fields
   *
   * @param json base json object to transform
   * @param fieldsToAdd list of fields to add. If the value provided is `None` then it would be ignored
   * @param fieldsToRemove list of field names to remove
   * @return transformed json
   */
  def transform(json: JsObject,
                fieldsToAdd: Seq[(String, Option[JsValue])],
                fieldsToRemove: Seq[String] = Seq.empty): JsObject = {
    val fields = json.fields ++ fieldsToAdd.flatMap(f => f._2.map((f._1, _))) -- fieldsToRemove
    JsObject(fields)
  }

  // Joins the three materialized futures of combinedSink into one result.
  private def combineResult[T](digest: Future[String], length: Future[Long], upload: Future[T])(
    implicit ec: ExecutionContext) = {
    for {
      d <- digest
      l <- length
      u <- upload
    } yield AttachmentUploadResult(d, l, u)
  }

  // Digest string, byte count, and destination-sink result for one upload.
  case class AttachmentUploadResult[T](digest: String, length: Long, uploadResult: T)

  // Folds the byte stream into a SHA-256 digest, materializing the encoded string.
  private def digestSink(): Sink[ByteString, Future[String]] = {
    Flow[ByteString]
      .fold(emptyDigest())((digest, bytes) => { digest.update(bytes.toArray); digest })
      .map(md => encodeDigest(md.digest()))
      .toMat(Sink.head)(Keep.right)
  }

  // Sums the sizes of all ByteStrings, materializing the total byte count.
  private def lengthSink(): Sink[ByteString, Future[Long]] = {
    Sink.fold[Long, ByteString](0)((length, bytes) => length + bytes.size)
  }
}
| houshengbo/openwhisk | common/scala/src/main/scala/org/apache/openwhisk/core/database/StoreUtils.scala | Scala | apache-2.0 | 5,131 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.tools.admin
import org.apache.predictionio.data.storage._
import scala.concurrent.{ExecutionContext, Future}
// Marker base type for all admin API responses.
abstract class BaseResponse()

// Generic result; by convention in CommandClient, status 1 = success, 0 = failure.
case class GeneralResponse(
  status: Int = 0,
  message: String = ""
) extends BaseResponse()

// Request payload for creating an app (id 0 means "assign one").
case class AppRequest(
  id: Int = 0,
  name: String = "",
  description: String = ""
)

// Request payload for triggering a training run on an engine directory.
case class TrainRequest(
  enginePath: String = ""
)

// An app together with all of its access keys.
case class AppResponse(
  id: Int = 0,
  name: String = "",
  keys: Seq[AccessKey]
) extends BaseResponse()

// Result of app creation, carrying the generated access key.
case class AppNewResponse(
  status: Int = 0,
  message: String = "",
  id: Int = 0,
  name: String = "",
  key: String
) extends BaseResponse()

// Result of listing all apps.
case class AppListResponse(
  status: Int = 0,
  message: String = "",
  apps: Seq[AppResponse]
) extends BaseResponse()
/**
 * Async facade over the storage clients used by the admin API: app creation,
 * listing, data wipe, and deletion. All methods return Futures evaluated on
 * the caller-supplied ExecutionContext; status 1 means success, 0 failure.
 */
class CommandClient(
  val appClient: Apps,
  val accessKeyClient: AccessKeys,
  val eventClient: LEvents
) {

  /**
   * Creates a new app: rejects duplicate names/ids, inserts the app record,
   * initializes its Event Store, and generates an access key.
   */
  def futureAppNew(req: AppRequest)(implicit ec: ExecutionContext): Future[BaseResponse] = Future {
    val response = appClient.getByName(req.name) map { app =>
      GeneralResponse(0, s"App ${req.name} already exists. Aborting.")
    } getOrElse {
      appClient.get(req.id) map {
        app2 =>
          GeneralResponse(0,
            s"App ID ${app2.id} already exists and maps to the app '${app2.name}'. " +
            "Aborting.")
      } getOrElse {
        // NOTE(review): req.id is an Int, so Option(req.id) is never None and
        // getOrElse(0) is dead — presumably intended to map "unset" to 0; verify.
        val appid = appClient.insert(App(
          id = Option(req.id).getOrElse(0),
          name = req.name,
          description = Option(req.description)))
        appid map { id =>
          val dbInit = eventClient.init(id)
          val r = if (dbInit) {
            // NOTE(review): this local is unused; the inserted key below is
            // built from an identical literal.
            val accessKey = AccessKey(
              key = "",
              appid = id,
              events = Seq())
            // An empty key string lets the backend generate the actual key.
            val accessKey2 = accessKeyClient.insert(AccessKey(
              key = "",
              appid = id,
              events = Seq()))
            accessKey2 map { k =>
              new AppNewResponse(1,"App created successfully.",id, req.name, k)
            } getOrElse {
              GeneralResponse(0, s"Unable to create new access key.")
            }
          } else {
            GeneralResponse(0, s"Unable to initialize Event Store for this app ID: ${id}.")
          }
          r
        } getOrElse {
          GeneralResponse(0, s"Unable to create new app.")
        }
      }
    }
    response
  }

  /** Lists all apps (sorted by name) together with their access keys. */
  def futureAppList()(implicit ec: ExecutionContext): Future[AppListResponse] = Future {
    val apps = appClient.getAll().sortBy(_.name)
    val appsRes = apps.map {
      app => {
        new AppResponse(app.id, app.name, accessKeyClient.getByAppid(app.id))
      }
    }
    new AppListResponse(1, "Successful retrieved app list.", appsRes)
  }

  /**
   * Wipes an app's event data by removing and re-initializing its Event Store.
   * The combined status multiplies the two step statuses, so it is 1 only when
   * both steps succeed; messages are concatenated.
   */
  def futureAppDataDelete(appName: String)
    (implicit ec: ExecutionContext): Future[GeneralResponse] = Future {
    val response = appClient.getByName(appName) map { app =>
      val data = if (eventClient.remove(app.id)) {
        GeneralResponse(1, s"Removed Event Store for this app ID: ${app.id}")
      } else {
        GeneralResponse(0, s"Error removing Event Store for this app.")
      }

      val dbInit = eventClient.init(app.id)
      val data2 = if (dbInit) {
        GeneralResponse(1, s"Initialized Event Store for this app ID: ${app.id}.")
      } else {
        GeneralResponse(0, s"Unable to initialize Event Store for this appId:" +
          s" ${app.id}.")
      }
      GeneralResponse(data.status * data2.status, data.message + data2.message)
    } getOrElse {
      GeneralResponse(0, s"App ${appName} does not exist.")
    }
    response
  }

  /** Deletes an app: removes its Event Store, then its metadata record. */
  def futureAppDelete(appName: String)
    (implicit ec: ExecutionContext): Future[GeneralResponse] = Future {
    val response = appClient.getByName(appName) map { app =>
      val data = if (eventClient.remove(app.id)) {
        Storage.getMetaDataApps.delete(app.id)
        GeneralResponse(1, s"App successfully deleted")
      } else {
        GeneralResponse(0, s"Error removing Event Store for app ${app.name}.");
      }
      data
    } getOrElse {
      GeneralResponse(0, s"App ${appName} does not exist.")
    }
    response
  }

  // TODO(review): unimplemented stub — completes with null, which callers must
  // currently tolerate; should return a GeneralResponse or a failed Future.
  def futureTrain(req: TrainRequest)
    (implicit ec: ExecutionContext): Future[GeneralResponse] = Future {
    null
  }
}
| pferrel/PredictionIO | tools/src/main/scala/org/apache/predictionio/tools/admin/CommandClient.scala | Scala | apache-2.0 | 5,074 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.jdbc.connection
import java.sql.{Driver, DriverManager}
import javax.security.auth.login.Configuration
import scala.collection.JavaConverters._
import org.scalatest.BeforeAndAfterEach
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.execution.datasources.jdbc.{DriverRegistry, JDBCOptions}
abstract class ConnectionProviderSuiteBase extends SparkFunSuite with BeforeAndAfterEach {
  /** Registers `driverClass` with Spark's registry and returns the matching
    * instance found in java.sql.DriverManager. */
  protected def registerDriver(driverClass: String): Driver = {
    DriverRegistry.register(driverClass)
    val registered = DriverManager.getDrivers.asScala
      .find(_.getClass.getCanonicalName == driverClass)
    registered.get
  }
  /** Builds JDBC options for `url` with placeholder keytab/principal values. */
  protected def options(url: String) = {
    val settings = Map[String, String](
      JDBCOptions.JDBC_URL -> url,
      JDBCOptions.JDBC_TABLE_NAME -> "table",
      JDBCOptions.JDBC_KEYTAB -> "/path/to/keytab",
      JDBCOptions.JDBC_PRINCIPAL -> "principal"
    )
    new JDBCOptions(settings)
  }
  /** Clears any JAAS configuration installed by a test before deferring to
    * the superclass cleanup. */
  protected override def afterEach(): Unit = {
    try Configuration.setConfiguration(null)
    finally super.afterEach()
  }
  /** Asserts that the provider installs its JAAS app entry on the first call
    * and leaves it untouched on subsequent calls. */
  protected def testSecureConnectionProvider(
      provider: SecureConnectionProvider,
      driver: Driver,
      options: JDBCOptions): Unit = {
    val entryName = provider.appEntry(driver, options)
    // No authentication for the database may be configured up-front.
    assert(Configuration.getConfiguration.getAppConfigurationEntry(entryName) == null)
    // The first call must install a fresh configuration...
    val before = Configuration.getConfiguration
    provider.setAuthenticationConfigIfNeeded(driver, options)
    val after = Configuration.getConfiguration
    assert(before != after)
    val installed = after.getAppConfigurationEntry(entryName)
    assert(installed != null)
    // ...and a second call must not modify the existing authentication.
    provider.setAuthenticationConfigIfNeeded(driver, options)
    assert(after.getAppConfigurationEntry(entryName) === installed)
  }
}
| witgo/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuiteBase.scala | Scala | apache-2.0 | 2,839 |
package net.fehmicansaglam.bson.reader
import java.nio.ByteBuffer
import net.fehmicansaglam.bson.element.BsonLong
/** Decodes a single BSON int64 element from the underlying buffer. */
case class BsonLongReader(buffer: ByteBuffer) extends Reader[BsonLong] {
  /** Reads the element name (cstring) followed by the 8-byte long payload. */
  def read: Option[BsonLong] = {
    val elementName = readCString()
    Some(BsonLong(elementName, buffer.getLong()))
  }
}
| danielwegener/tepkin | bson/src/main/scala/net/fehmicansaglam/bson/reader/BsonLongReader.scala | Scala | apache-2.0 | 324 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.scala.extensions.impl.acceptPartialFunctions
import org.apache.flink.api.java.operators.CrossOperator
import org.apache.flink.api.scala._
import org.apache.flink.api.scala.extensions.acceptPartialFunctions
import org.apache.flink.api.scala.extensions.base.AcceptPFTestBase
import org.apache.flink.api.scala.extensions.data.KeyValuePair
import org.junit.Test
/** Checks that `projecting` after a cross product yields a CrossOperator,
  * both for tuple elements and for case-class elements. */
class OnCrossDataSetTest extends AcceptPFTestBase {
  @Test
  def testCrossProjectingOnTuple(): Unit = {
    val projected =
      tuples.cross(tuples).projecting {
        case ((_, v1), (_, v2)) => s"$v1 $v2"
      }
    assert(projected.javaSet.isInstanceOf[CrossOperator[_, _, _]],
      "projecting for cross on tuples should produce a CrossOperator")
  }
  @Test
  def testCrossProjectingOnCaseClass(): Unit = {
    val projected =
      caseObjects.cross(caseObjects).projecting {
        case (KeyValuePair(_, v1), KeyValuePair(_, v2)) => s"$v1 $v2"
      }
    assert(projected.javaSet.isInstanceOf[CrossOperator[_, _, _]],
      "projecting for cross on case objects should produce a CrossOperator")
  }
}
| kl0u/flink | flink-scala/src/test/scala/org/apache/flink/api/scala/extensions/impl/acceptPartialFunctions/OnCrossDataSetTest.scala | Scala | apache-2.0 | 1,894 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.examples.wordspec.fixturecontext
import collection.mutable.ListBuffer
import org.scalatest.WordSpec
/** Demonstrates fixture-context objects: each test mixes in exactly the
  * fixture traits it needs via `new Trait { ... }`. */
class ExampleSpec extends WordSpec {
  // Supplies a partially filled StringBuilder fixture.
  trait Builder {
    val builder = new StringBuilder("ScalaTest is ")
  }
  // Supplies a partially filled ListBuffer fixture.
  trait Buffer {
    val buffer = ListBuffer("ScalaTest", "is")
  }
  "Testing" should {
    // Needs only the StringBuilder fixture.
    "be productive" in new Builder {
      builder.append("productive!")
      assert(builder.toString === "ScalaTest is productive!")
    }
  }
  "Test code" should {
    // Needs only the ListBuffer[String] fixture.
    "be readable" in new Buffer {
      buffer += "readable!"
      assert(buffer === List("ScalaTest", "is", "readable!"))
    }
    // Needs both fixtures, so both traits are mixed together.
    "be clear and concise" in new Builder with Buffer {
      builder.append("clear!")
      buffer += "concise!"
      assert(builder.toString === "ScalaTest is clear!")
      assert(buffer === List("ScalaTest", "is", "concise!"))
    }
  }
}
| dotty-staging/scalatest | examples/src/test/scala/org/scalatest/examples/wordspec/fixturecontext/ExampleSpec.scala | Scala | apache-2.0 | 1,641 |
package spark.cassandra
import spark.SparkContext
import spark.SparkContext._
import spark.Logging
import java.nio.ByteBuffer
import spark.RDD
import spark.SparkException
import spark.TaskContext
import org.apache.hadoop.mapred.HadoopWriter
/** Convenience wrapper around a SparkContext for reading Cassandra CQL3 tables. */
class CassContext(sc_ : SparkContext) extends Logging {
  initLogging()
  if (sc_ == null) {
    throw new Exception("CassContext cannot be initialized with SparkContext as null")
  }
  // Cassandra's Hadoop input format yields rows as Map[String, ByteBuffer],
  // used both as the key class and the value class below.
  private val rowClass = classOf[java.util.Map[java.lang.String, ByteBuffer]]
  /** Fetches a whole column family. */
  def fetchFromCassandra(keyspace: String, columnFamily: String): CassResult =
    fetchFromCassandra(CassConfig.cql3.table(keyspace, columnFamily))
  /** Fetches a column family restricted by a CQL WHERE clause. */
  def fetchFromCassandra(keyspace: String, columnFamily: String, where: String): CassResult =
    fetchFromCassandra(CassConfig.cql3.table(keyspace, columnFamily).where(where))
  /** Fetches from an explicit host/port, optionally projecting columns and filtering. */
  def fetchFromCassandra(host: String, port: Int, keyspace: String, columnFamily: String, columns: String = null, where: String = null): CassResult =
    fetchFromCassandra(CassConfig.cql3.host(host).port(port).table(keyspace, columnFamily).columns(columns).where(where))
  /** Runs the configured Hadoop input job and wraps the resulting RDD. */
  def fetchFromCassandra(config: Cql3Config): CassResult =
    new CassResult(sc_.newAPIHadoopRDD(config.getConf(), config.getInputFormatClass(), rowClass, rowClass))
}
| kernel164/spark-cassandra | src/main/scala/spark/cassandra/CassContext.scala | Scala | apache-2.0 | 1,299 |
package controllers
import javax.inject._
import actions._
import akka.stream.Materializer
import be.objectify.deadbolt.scala.DeadboltActions
import com.feth.play.module.pa.PlayAuthenticate
import com.nappin.play.recaptcha.{RecaptchaVerifier, WidgetHelper}
import constants.{SecurityRoleKey, SessionKey}
import org.webjars.play._
import play.api.{Configuration, Environment}
import play.api.mvc._
import services.{GoogleAuthService, UserService}
import play.api.i18n._
import play.api.routing.JavaScriptReverseRouter
import play.core.j.JavaHelpers
import providers.MyAuthProvider
import support.LangLookupSupport
import views.form._
import views.html.recaptcha
/** Main application controller: public pages, login/signup flows (with
  * optional Google Authenticator 2FA), and profile management.
  *
  * Action chains follow a fixed pattern: WithJContextSupportAction installs
  * the Play-Authenticate Java context, TryCookieAuthAction attempts
  * cookie-based re-authentication, and Deadbolt enforces authorization.
  */
@Singleton
class Application @Inject() (implicit
  val verifier: RecaptchaVerifier,
  config: Configuration,
  env: Environment,
  mat: Materializer,
  indexView: views.html.index,
  restrictedView: views.html.restricted,
  profileView: views.html.profile,
  loginView: views.html.login,
  restrictedForbidCookieView: views.html.restricted_forbid_cookie,
  reloginView: views.html.relogin,
  googleAuthenticationView: views.html.google_authentication,
  signupView: views.html.signup,
  widgetHelper: WidgetHelper,
  webJarUtil: WebJarsUtil,
  deadbolt: DeadboltActions,
  auth: PlayAuthenticate,
  userService: UserService,
  authProvider: MyAuthProvider,
  formContext: FormContext,
  googleAuthService: GoogleAuthService,
  recaptchaWidget: recaptcha.recaptchaWidget,
  bodyParsers: PlayBodyParsers) extends InjectedController with I18nSupport with LangLookupSupport {
  import scala.concurrent._
  // NOTE(review): the global ExecutionContext is used for all actions here;
  // consider injecting a dedicated context — confirm before changing.
  import ExecutionContext.Implicits.global
  //-------------------------------------------------------------------
  // public
  //-------------------------------------------------------------------
  /** Landing page; accessible to everyone. */
  def index =
    WithJContextSupportAction { implicit jContext =>
      TryCookieAuthAction {
        deadbolt.WithAuthRequest()() { implicit authRequest =>
          Future {
            Ok(indexView(userService))
          }
        }
      }
    }
  //-------------------------------------------------------------------
  /** Page restricted to users holding USER_ROLE. */
  def restricted =
    WithJContextSupportAction { implicit jContext =>
      TryCookieAuthAction {
        deadbolt.Restrict(List(Array(SecurityRoleKey.USER_ROLE.toString)))() { implicit authRequest =>
          Future {
            val localUser = userService.findInSession(jContext.session)
            Ok(restrictedView(userService, localUser))
          }
        }
      }
    }
  //-------------------------------------------------------------------
  /** Profile page for the logged-in user (USER_ROLE required).
    * NOTE(review): localUser.get assumes the session user always resolves
    * after Deadbolt's Restrict passed — confirm that invariant. */
  def profile =
    WithJContextSupportAction { implicit jContext =>
      TryCookieAuthAction {
        deadbolt.Restrict(List(Array(SecurityRoleKey.USER_ROLE.toString)))() { implicit authRequest =>
          Future {
            val localUser = userService.findInSession(jContext.session)
            Ok(profileView(auth, localUser.get, googleAuthService))
          }
        }
      }
    }
  //-------------------------------------------------------------------
  /** Renders the login form. */
  def login =
    WithJContextSupportAction { implicit jContext =>
      TryCookieAuthAction {
        deadbolt.WithAuthRequest()() { implicit authRequest =>
          Future {
            Ok(loginView(auth, userService, formContext.loginForm.Instance))
          }
        }
      }
    }
  //-------------------------------------------------------------------
  /** Like restricted, but additionally rejects cookie-based authentication
    * (forces a fresh credential login via SudoForbidCookieAuthAction). */
  def restrictedForbidCookie =
    WithJContextSupportAction { implicit jContext =>
      TryCookieAuthAction {
        SudoForbidCookieAuthAction {
          deadbolt.Restrict(List(Array(SecurityRoleKey.USER_ROLE.toString)))() { implicit authRequest =>
            Future {
              val localUser = userService.findInSession(jContext.session)
              Ok(restrictedForbidCookieView(userService, localUser))
            }
          }
        }
      }
    }
  //-------------------------------------------------------------------
  /** Re-login page: pre-fills the email of the current session user so only
    * the password has to be re-entered. */
  def relogin =
    NoCacheAction {
      WithJContextSupportAction { implicit jContext =>
        TryCookieAuthAction {
          deadbolt.WithAuthRequest()() { implicit authRequest =>
            Future {
              // taking chances here
              val authUser = userService.findInSession(jContext.session).get
              // partially initialize the Login form to only miss the password
              val updatedForm = formContext.loginForm.Instance.fill(views.form.Login(
                email = authUser.email.toString, password = "", isRememberMe = true))
              // everything was filled
              Ok(reloginView(auth, userService, updatedForm))
            }
          }
        }
      }
    }
  //-------------------------------------------------------------------
  /** Handles login form submission.
    * After a successful credential check: if the user has Google
    * Authenticator enabled, a valid TOTP code or recovery token is required
    * before the login result is released; otherwise the user is redirected
    * to the stored REDIRECT_TO_URI (if any) or the default result. */
  def doLogin =
    NoCacheAction {
      WithJContextSupportAction { implicit jContext =>
        TryCookieAuthAction {
          deadbolt.WithAuthRequest()() { implicit authRequest =>
            Future {
              formContext.loginForm.Instance.bindFromRequest.fold(
                formWithErrors => {
                  // user did not fill everything properly
                  BadRequest(loginView(auth, userService, formWithErrors))
                },
                formSuccess => {
                  // everything was filled
                  val result = JavaHelpers.createResult(jContext, authProvider.handleLogin(jContext, formSuccess.isRememberMe))
                  // Redirect to the URI stored before login, if one exists.
                  def authorize(): Result =
                    Option(jContext.session().remove(SessionKey.REDIRECT_TO_URI_KEY)).map { uri =>
                      result.withHeaders(LOCATION -> uri)
                    }.getOrElse(result)
                  auth.getUser(jContext) match {
                    case null =>
                      result
                    case user if googleAuthService.isEnabled(user.getId) =>
                      // 2FA is enabled: require a valid TOTP code or recovery token.
                      (formSuccess.gauthCode, formSuccess.recoveryCode) match {
                        case (Some(gauthCode), _) if googleAuthService.isValidGAuthCode(user.getId, gauthCode) =>
                          authorize()
                        case (_, Some(recoveryCode)) if googleAuthService.tryAuthenticateWithRecoveryToken(user.getId, recoveryCode) =>
                          authorize()
                        case _ =>
                          // Invalid or missing second factor: log out again and
                          // re-show the 2FA form with an appropriate error.
                          val form = formContext.loginForm.Instance.fill(formSuccess)
                          val formWithError =
                            if (formSuccess.gauthCode.isDefined) {
                              form.withGlobalError(messagesApi("playauthenticate.gauthCode.login.invalid_code"))
                            } else if (formSuccess.recoveryCode.exists(_.nonEmpty)) {
                              form.withGlobalError(messagesApi("playauthenticate.recoveryToken.login.invalid_token"))
                            } else {
                              form
                            }
                          auth.logout(jContext)
                          Ok(googleAuthenticationView(auth, userService, formWithError))
                      }
                    case user => authorize()
                  }
                }
              )
            }
          }
        }
      }
    }
  //-------------------------------------------------------------------
  /** Renders the signup form. */
  def signup =
    WithJContextSupportAction { implicit jContext =>
      TryCookieAuthAction {
        deadbolt.WithAuthRequest()() { implicit authRequest =>
          Future {
            Ok(signupView(auth, userService, formContext.signupForm.Instance))
          }
        }
      }
    }
  //-------------------------------------------------------------------
  /** Handles signup form submission, including reCAPTCHA verification. */
  def doSignup =
    NoCacheAction {
      WithJContextSupportAction { implicit jContext =>
        TryCookieAuthAction {
          deadbolt.WithAuthRequest()() { implicit authRequest =>
            verifier.bindFromRequestAndVerify(formContext.signupForm.Instance).map { form =>
              form.fold(
                formWithErrors => {
                  // user did not fill everything properly
                  BadRequest(signupView(auth, userService, formWithErrors))
                },
                _ => {
                  // everything was filled:
                  // do something with your part of the form before handling the user signup
                  JavaHelpers.createResult(jContext, authProvider.handleSignup(jContext))
                }
              )
            }
          }
        }
      }
    }
  /** Regenerates the user's Google Authenticator key and shows the profile
    * page with the new secrets visible. */
  def enableGoogleAuthenticator =
    NoCacheAction {
      WithJContextSupportAction { implicit jContext =>
        TryCookieAuthAction {
          deadbolt.WithAuthRequest()() { implicit authRequest =>
            Future {
              userService.findInSession(jContext.session) match {
                case Some(user) =>
                  googleAuthService.regenerateKey(user.id)
                  Ok(profileView(auth, user, googleAuthService, showSecrets = true))
                case None =>
                  Ok("Current user not found")
              }
            }
          }
        }
      }
    }
  /** Disables Google Authenticator for the current user and returns to the
    * profile page.
    * NOTE(review): action nesting order differs from enableGoogleAuthenticator
    * (NoCacheAction is innermost here) — confirm whether that is intentional. */
  def disableGoogleAuthenticator =
    WithJContextSupportAction { implicit jContext =>
      TryCookieAuthAction {
        NoCacheAction {
          deadbolt.WithAuthRequest()() { implicit authRequest =>
            Future {
              userService.findInSession(jContext.session) match {
                case Some(user) =>
                  googleAuthService.disable(user.id)
                  Redirect(routes.Application.profile)
                case None =>
                  Ok("Current user not found")
              }
            }
          }
        }
      }
    }
  //-------------------------------------------------------------------
  /** Exposes selected reverse routes to client-side JavaScript. */
  def jsRoutes =
    deadbolt.WithAuthRequest()() { implicit authRequest =>
      Future {
        Ok(JavaScriptReverseRouter("jsRoutes")(routes.javascript.Signup.forgotPassword)).
          as("text/javascript")
      }
    }
} | bravegag/play-authenticate-usage-scala | app/controllers/Application.scala | Scala | apache-2.0 | 10,534 |
package picasso.model.integer
import scala.collection.GenSeq
import picasso.graph._
import picasso.utils._
import picasso.utils.tools.armc._
/** Integer Program.
 * Integer programs are used during the termination proof of DBP.
 * Right now their purpose is just this termination check (not safety).
 */
class Program(initPC: String, trs: GenSeq[Transition]) extends picasso.math.TransitionSystem {
  type S = State
  //transition type
  type T = Transition
  /** Initial control location of the program. */
  def initialPC = initPC
  def transitions: GenSeq[T] = trs
  // All control locations: the initial one plus every transition endpoint.
  lazy val pcs = (Set(initPC) /: trs)((acc, t) => acc + t.sourcePC + t.targetPC)
  // Every variable mentioned by any transition.
  lazy val variables: Set[Variable] = {
    trs.aggregate(Set[Variable]())(_ ++ _.variables, _ ++ _)
  }
  // Per location: variables in the domain of outgoing transitions plus
  // variables in the range of incoming transitions.
  lazy val varsByLoc: Map[String, Set[Variable]] = {
    val bySrc = trs.groupBy(_.sourcePC)
    val byTrg = trs.groupBy(_.targetPC)
    pcs.iterator.map( l => {
      val v1 = bySrc.get(l).map(_.flatMap(_.domain).seq)
      val v2 = byTrg.get(l).map(_.flatMap(_.range).seq)
      (l, (v1 ++ v2).flatten.toSet)
    }).toMap
  }
  /** Prints the program in ARMC syntax without predicates. */
  def printForARMCnoPreds(writer: java.io.BufferedWriter) {
    ARMCPrinter(writer, this, false)
    writer.flush
  }
  /** Prints the program in ARMC syntax (with predicates). */
  def printForARMC(writer: java.io.BufferedWriter) {
    ARMCPrinter(writer, this)
    writer.flush
  }
  /** Convenience overload: renders the ARMC output into a String. */
  def printForARMC: String = {
    val str = new java.io.StringWriter()
    val writer = new java.io.BufferedWriter(str)
    printForARMC(writer)
    writer.close
    str.toString
  }
  /** Simplification pipeline run before the termination check:
    * variable elimination, constant propagation, variable merging,
    * path compaction and duplicate-transition removal. */
  def simplifyForTermination = {
    Logger("integer.Program", LogDebug, "unsimplified program:")
    Logger("integer.Program", LogDebug, writer => printForARMCnoPreds(writer) )
    Logger("integer.Program", LogInfo, "eliminating variables: " + Config.eliminateVar.mkString(", "))
    val p1 = this.eliminateVariables(Config.eliminateVar)
    Logger("integer.Program", LogDebug, writer => p1.printForARMCnoPreds(writer) )
    Logger("integer.Program", LogInfo, "propagating constants.")
    val p2 = p1.propagateCst
    Logger("integer.Program", LogDebug, writer => p2.printForARMCnoPreds(writer) )
    Logger("integer.Program", LogInfo, "merging variables.")
    val p3 = p2.reduceNumberOfVariables
    Logger("integer.Program", LogDebug, writer => p3.printForARMCnoPreds(writer) )
    //this.infos
    //p2.infos
    //p3.infos
    Logger("integer.Program", LogInfo, "compacting transitions.")
    val p4 = p3.compactPath
    Logger("integer.Program", LogDebug, writer => p4.printForARMCnoPreds(writer) )
    Logger("integer.Program", LogInfo, "removing duplicate transitions.")
    val p5 = p4.duplicateTransitions
    Logger("integer.Program", LogDebug, writer => p5.printForARMCnoPreds(writer) )
    p5
    //TODO sinks, ...
  }
  /** Removes all variables whose name starts with one of `prefixes`
    * from every transition; no-op when `prefixes` is empty. */
  def eliminateVariables(prefixes: Iterable[String]) = {
    if (prefixes.isEmpty) {
      this
    } else {
      val trs2 = trs.map(_.eliminateVariables(prefixes))
      val p2 = new Program(initialPC, trs2)
      Logger("integer.Program", LogInfo, "eliminateVariables: #variables before = " + variables.size + ", after = " + p2.variables.size)
      p2
    }
  }
  /** Returns a map of which variable has a cst value at some location
   * Type of the abstract domain: Map[Variable, Option[Int]]
   * None means it is not cst
   * Some(i) means it has value i
   * not in the map means we don't know
   */
  def constantValueMap: Map[String,Map[Variable,Option[Int]]] = {
    // Bottom element of the abstract domain: nothing is known.
    def default(s: String) = Map[Variable,Option[Int]]()
    // Abstract transformer: pushes the known constants through a transition.
    def transfer(cstMap: Map[Variable,Option[Int]], t: Transition): Map[Variable,Option[Int]] = {
      val csts = cstMap.flatMap{ case (v,c) => c.map( v -> _ ) }
      val t2 = t.propagteInputConstants(csts)
      val outCst = t2.constantVariables
      val frame = cstMap -- t.range
      val unk = t.range -- outCst.keySet
      frame ++ outCst.map{ case (v, c) => v -> Some(c) } ++ unk.iterator.map(_ -> None)
    }
    // Join: keep a constant only when both branches agree on the same value.
    def join(a: Map[Variable,Option[Int]], b: Map[Variable,Option[Int]]) = {
      val allKeys = a.keySet ++ b.keySet
      val all = allKeys.view.map( v => {
        val rhs = if(a.contains(v) && b.contains(v)) {
          (a(v), b(v)) match {
            case (Some(i1), Some(i2)) => if (i1 == i2) Some(i1) else None
            case (_,_) => None
          }
        } else {
          a.getOrElse(v, b(v))
        }
        (v -> rhs)
      })
      all.toMap
    }
    def cover(a: Map[Variable,Option[Int]], b: Map[Variable,Option[Int]]) = {
      //all keys of b should be in a and they should be equal ...
      b forall { case (k,v) => a.contains(k) && (a(k).isEmpty || a(k) == v) }
    }
    cfa.aiFixpoint(transfer, join, cover, default)
  }
  /** propagate the constants values */
  def propagateCst = {
    val map = constantValueMap
    Logger("integer.Program", LogDebug, "propagateCst: " + map)
    val trs2 = trs.par.map(t => {
      // Constants known at the source location are substituted on the input
      // side; variables constant at the target are dropped from the output.
      val preSubst = map(t.sourcePC).flatMap{ case (k, v) => v.map(i => (k, i)) }
      val postSubst = map(t.targetPC).flatMap{ case (k, v) => v.map(_ => k) }.toSet
      val t2 = t.propagteInputConstants(preSubst).propagteOutputConstants(postSubst)
      //Logger("integer.Program", LogDebug, "eliminating: " + postSubst.mkString(", ") + "\\nin " + t + "\\ngives " + t2)
      t2
    })
    val p2 = new Program(initPC, trs2)
    Logger("integer.Program", LogInfo, "propagateCst: #variables before = " + variables.size + ", after = " + p2.variables.size)
    p2
  }
  /** Merges variables that are never live at the same location by coloring
    * the conflict graph (variables co-occurring at a location conflict);
    * affinity steers the coloring towards merging related variables. */
  def reduceNumberOfVariables = {
    //TODO this creates unused and unconstrained variables
    //TODO for some reason, i does no decrease the number of variables ...
    val varsByLocButInit = varsByLoc - initialPC
    Logger("model.integer", LogDebug, "varsByLocButInit ->\\n " + varsByLocButInit.mkString("\\n "))
    //make the conflicts graph with varsByLoc
    val conflictsBase = (DiGraph.empty[GT.ULGT{type V = Variable}] /: variables)(_ + _)
    val conflicts = (conflictsBase /: varsByLocButInit.values)( (acc, grp) => {
      val edges = for (x <- grp; y <- grp if x != y) yield (x, (), y)
      acc ++ edges
    })
    //use the tranitions to compute the affinity: sum of variables CoI + name likenes
    val varToIdx = variables.toSeq.zipWithIndex.toMap
    val affinityArray = Array.ofDim[Int](variables.size, variables.size)
    for(t <- trs;
        (v1, vs) <- t.coneOfInfluence;
        v2 <- vs) {
      affinityArray(varToIdx(v1))(varToIdx(v2)) += 1
      affinityArray(varToIdx(v2))(varToIdx(v1)) += 1
    }
    def affinity(v1: Variable, v2: Variable) = {
      affinityArray(varToIdx(v1))(varToIdx(v2)) +
      Misc.commonPrefix(v1.name, v2.name)
    }
    //small coloring of conflict graph
    val largeClique = varsByLocButInit.values.maxBy(_.size)
    Logger("integer.Program", LogDebug, "reduceNumberOfVariables: largeClique has size = " + largeClique.size )
    val coloring = conflicts.smallColoring(affinity, largeClique)
    Logger("model.integer", LogDebug, "coloring ->\\n " + coloring.mkString("\\n "))
    //rename variables
    val globalSubst = (Map[Variable, Variable]() /: coloring)( (acc, grp) => {
      val repr = grp.head
      (acc /: grp)( (map, v) => map + (v -> repr) )
    })
    //-> subst for each loc
    val substByLoc = varsByLocButInit.map{ case (loc, vars) => (loc, globalSubst.filterKeys(vars contains _)) }
    //TODO initialPC
    //-> add frame cstr to transitions that gets new variables
    val trs2 = for (t <- trs) yield {
      //Logger("model.integer", LogNotice, "t -> " + t.toString)
      val srcSubst = if (t.sourcePC == initialPC) globalSubst else substByLoc(t.sourcePC)
      val trgSubst = substByLoc(t.targetPC)
      val woFrame = t.alphaPre(srcSubst).alphaPost(trgSubst)
      //Logger("model.integer", LogNotice, "srcSubst -> " + srcSubst.mkString(", "))
      //Logger("model.integer", LogNotice, "trgSubst -> " + trgSubst.mkString(", "))
      //Logger("model.integer", LogNotice, "woFrame -> " + woFrame.toString)
      val newPreVars = srcSubst.values.toSet -- woFrame.domain
      val newPostVars = trgSubst.values.toSet -- woFrame.range
      val preVars2 = woFrame.preVars ++ newPreVars.iterator.map( v => (v, Variable(Namer("NewPreVar"))) )
      val postVars2 = woFrame.postVars ++ newPostVars.iterator.map( v => (v, Variable(Namer("NewPostVar"))) )
      //val cstr = newPostVars.iterator.map( v => Eq(preVars2(v), postVars2(v)) )
      //val allCstr = (woFrame.relation /: cstr)(And(_,_))
      new Transition(
        woFrame.sourcePC,
        woFrame.targetPC,
        preVars2,
        postVars2,
        woFrame.relation,//allCstr,
        woFrame.comment
      )
    }
    val p2 = new Program(initialPC, trs2)
    Logger("integer.Program", LogInfo, "reduceNumberOfVariables: #variables before = " + variables.size + ", after = " + p2.variables.size)
    p2
  }
  /** Control-flow automaton: locations as vertices, transitions as edge labels. */
  def cfa: EdgeLabeledDiGraph[GT.ELGT{type V = String; type EL = Transition}] = {
    val emp = EdgeLabeledDiGraph.empty[GT.ELGT{type V = String; type EL = Transition}]
    emp ++ (transitions.map(t => (t.sourcePC, t, t.targetPC)).seq)
  }
  /** Fuses the transitions along each simple path of the CFA. */
  protected def compactPath = {
    val trs2 = cfa.simplePaths.toSeq.par.flatMap( path => {
      Transition.compact(path.labels)
    })
    val p2 = new Program(initPC, trs2)
    Logger("integer.Program", LogInfo, "compactPath: #transitions before = " + transitions.size + ", after = " + p2.transitions.size)
    p2
  }
  /** Removes transitions subsumed (per Transition.lteq) by another transition
    * with the same source and target locations. */
  protected def duplicateTransitions = {
    val grouped = trs.groupBy(_.sourcePC).map(_._2).flatMap(_.groupBy(_.targetPC).map(_._2))
    val pruned = grouped.map( ts => {
      (List[Transition]() /: ts.seq)( (acc, t) => {
        val acc1 = acc.filter(t2 => !Transition.lteq(t2,t))
        if (acc1 exists (Transition.lteq(t, _))) acc1 else t :: acc1
      })
    })
    val trs2 = pruned.seq.flatten.toSeq.par
    val p2 = new Program(initPC, trs2)
    Logger("integer.Program", LogInfo, "duplicateTransitions: #transitions before = " + transitions.size + ", after = " + p2.transitions.size)
    p2
  }
  //protected def flow: DiGraph[GT.ULGT{type V = Variable}] = {
  //  //try to track what is going where ...
  //  sys.error("TODO")
  //}
  /** Graph over variables with an edge per cone-of-influence dependency. */
  protected def transfers: DiGraph[GT.ULGT{type V = Variable}] = {
    val base = DiGraph.empty[GT.ULGT{type V = Variable}].addVertices(variables)
    val edges = for (t <- trs; (v1, vs) <- t.coneOfInfluence; v2 <- vs) yield (v1, (), v2)
    base ++ edges.seq
  }
  /** Logs diagnostic statistics about the program (debugging aid). */
  protected def infos {
    val lvl = LogInfo
    Logger("integer.Program", lvl, "#variables = " + variables.size )
    Logger("integer.Program", lvl, "#transitions = " + transitions.size )
    Logger("integer.Program", lvl, transfers.toGraphviz("transfers"))
    for(t <- transitions) {
      val frame = variables -- t.variables
      val unused = t.unusedVariable
      val unconstrained = t.unconstrainedVariables
      Logger(
        "integer.Program", lvl,
        "from " + t.sourcePC + " to " + t.targetPC + ": " + t.comment + "\\n" +
        "frame: " + frame.mkString(", ") + "\\n" +
        "domain: " + t.domain.mkString(", ") + "\\n" +
        "range: " + t.range.mkString(", ") + "\\n" +
        "unused: " + unused.mkString(", ") + "\\n" +
        "unconstrained: " + unconstrained.mkString(", ")
      )
    }
    //Logger("integer.Program", lvl, writer => printForARMCnoPreds(writer) )
  }
  /** Candidate ranking functions: variable sets extracted from (a bounded
    * number of) CFA cycles. */
  def candidateRankingFcts: Iterable[Set[Variable]] = {
    val cyclesIterator = cfa.enumerateSomeCycles
    val boundedIterator = if (Config.cyclesBound >= 0) cyclesIterator.take(Config.cyclesBound) else cyclesIterator
    val candidates = boundedIterator.flatMap(c => Transition.candidateRankingFcts(c.labels))
    candidates.toSet
  }
}
| dzufferey/picasso | core/src/main/scala/picasso/model/integer/Program.scala | Scala | bsd-2-clause | 11,549 |
import sbt._
/** Centralised dependency and version definitions for the sbt build. */
object Dependencies {
  // Logging Support
  // NOTE(review): the version vals below are not referenced in this file;
  // presumably they are consumed from build.sbt — confirm before removing.
  val log4j2Version = "2.6"
  val scalaLoggingVersion = "3.4.0"
  // Config HOCON Support
  val typesafeConfigVersion = "1.3.0"
  // Test Support
  // NOTE(review): testDeps below pins scalatest to "3.0.1", not this val
  // ("2.2.6") — confirm which version is intended.
  val scalaTestVersion = "2.2.6"
  // Metrics Support
  val scalaMetricsVersion = "3.5.4"
  lazy val catsVersion = "1.0.1"
  lazy val akkaVersion = "2.5.4"
  lazy val akkaHttpVersion = "10.0.10"
  // Compile-scope dependencies.
  // NOTE(review): akka-testkit is listed in compile scope here rather than
  // "test" — confirm whether that is intentional.
  val compileDeps = Seq(
    "org.typelevel" %% "cats-core" % catsVersion,
    "com.typesafe.akka" %% "akka-http" % akkaHttpVersion,
    "com.typesafe.akka" %% "akka-http-spray-json" % akkaHttpVersion,
    "com.typesafe.akka" %% "akka-testkit" % akkaVersion
  )
  // Test-scope dependencies.
  val testDeps = Seq(
    "org.scalatest" %% "scalatest" % "3.0.1" % "test",
    "ch.qos.logback" % "logback-classic" % "1.2.3" % "test"
  )
}
| IsmailMarmoush/scala-samples | scalalang/project/Dependencies.scala | Scala | agpl-3.0 | 864 |
package scala.slick.memory
import org.slf4j.LoggerFactory
import scala.collection.mutable.{ArrayBuffer, HashMap}
import scala.slick.ast._
import scala.slick.SlickException
import scala.slick.util.{SlickLogger, Logging}
import TypeUtil.typeToTypeUtil
import java.util.regex.Pattern
/** A query interpreter for the MemoryDriver and for client-side operations
* that need to be run as part of distributed queries against multiple
* backends.
*
* It uses ScalaType, ProductValue/StructValue and plain Scala collections to
* represent data. Queries are expected to be in the shape after running all
* the standard query compiler phases (but not the extra relational phases)
* and assigning ScalaTypes everywhere.
*
* @param db The in-memory database which is used for resolving Tables
* @param params The query parameters
*/
class QueryInterpreter(db: HeapBackend#Database, params: Any) extends Logging {
override protected[this] lazy val logger = new SlickLogger(LoggerFactory.getLogger(classOf[QueryInterpreter]))
import QueryInterpreter._
val scope = new HashMap[Symbol, Any]
var indent = 0
type Coll = Iterable[Any]
def logDebug(msg: String) {
logger.debug(Iterator.fill(indent)(" ").mkString("", "", msg))
}
def run(n: Node): Any = {
if(logger.isDebugEnabled) logDebug("Evaluating "+n)
indent += 1
val res = n match {
case Ref(sym) =>
scope.getOrElse(sym, throw new SlickException(s"Symbol $sym not found in scope"))
case Select(in, field) =>
val v = run(in)
field match {
case ElementSymbol(idx) => v.asInstanceOf[ProductValue].apply(idx-1)
case (_: AnonSymbol | _: FieldSymbol) => v.asInstanceOf[StructValue].getBySymbol(field)
}
case n: StructNode =>
new StructValue(n.nodeChildren.map(run), n.nodeType.asInstanceOf[StructType].symbolToIndex)
case ProductNode(ch) =>
new ProductValue(ch.map(run).toIndexedSeq)
case Pure(n, _) => Vector(run(n))
case t: TableNode =>
val dbt = db.getTable(t.tableName)
val acc = dbt.columnIndexes
dbt.rows.view.map { row => new StructValue(row, acc) }
case Bind(gen, from, sel) =>
val fromV = run(from).asInstanceOf[Coll]
val b = from.nodeType.asCollectionType.cons.canBuildFrom()
fromV.foreach { v =>
scope(gen) = v
b ++= run(sel).asInstanceOf[Coll]
}
scope.remove(gen)
b.result()
case Join(_, _, left, RangeFrom(0), JoinType.Zip, LiteralNode(true)) =>
val leftV = run(left).asInstanceOf[Coll]
leftV.zipWithIndex.map { case (l, r) => new ProductValue(Vector(l, r)) }
case Join(_, _, left, right, JoinType.Zip, LiteralNode(true)) =>
val leftV = run(left).asInstanceOf[Coll]
val rightV = run(right).asInstanceOf[Coll]
(leftV, rightV).zipped.map { (l, r) => new ProductValue(Vector(l, r)) }
case Join(leftGen, rightGen, left, right, JoinType.Inner, by) =>
val res = run(left).asInstanceOf[Coll].flatMap { l =>
scope(leftGen) = l
run(right).asInstanceOf[Coll].filter { r =>
scope(rightGen) = r
asBoolean(run(by))
}.map { r =>
new ProductValue(Vector(l, r))
}
}
scope.remove(leftGen)
scope.remove(rightGen)
res
case Join(leftGen, rightGen, left, right, JoinType.Left, by) =>
val res = run(left).asInstanceOf[Coll].flatMap { l =>
scope(leftGen) = l
val inner = run(right).asInstanceOf[Coll].filter { r =>
scope(rightGen) = r
asBoolean(run(by))
}.map { r =>
new ProductValue(Vector(l, r))
}
if(inner.headOption.isEmpty) Vector(new ProductValue(Vector(l, createNullRow(right.nodeType.asCollectionType.elementType))))
else inner
}
scope.remove(leftGen)
scope.remove(rightGen)
res
case Join(leftGen, rightGen, left, right, JoinType.Right, by) =>
val res = run(right).asInstanceOf[Coll].flatMap { r =>
scope(rightGen) = r
val inner = run(left).asInstanceOf[Coll].filter { l =>
scope(leftGen) = l
asBoolean(run(by))
}.map { l =>
new ProductValue(Vector(l, r))
}
if(inner.headOption.isEmpty) Vector(new ProductValue(Vector(createNullRow(left.nodeType.asCollectionType.elementType), r)))
else inner
}
scope.remove(leftGen)
scope.remove(rightGen)
res
case Filter(gen, from, where) =>
val res = run(from).asInstanceOf[Coll].filter { v =>
scope(gen) = v
asBoolean(run(where))
}
scope.remove(gen)
res
case First(ch) => run(ch).asInstanceOf[Coll].toIterator.next()
case SortBy(gen, from, by) =>
val fromV = run(from).asInstanceOf[Coll]
val b = from.nodeType.asCollectionType.cons.canBuildFrom()
val ords: IndexedSeq[scala.math.Ordering[Any]] = by.map { case (b, o) =>
b.nodeType.asInstanceOf[ScalaType[Any]].scalaOrderingFor(o)
}(collection.breakOut)
b ++= fromV.toSeq.sortBy { v =>
scope(gen) = v
by.map { case (b, _) => run(b) }(collection.breakOut): IndexedSeq[Any]
}(new scala.math.Ordering[IndexedSeq[Any]] {
def compare(x: IndexedSeq[Any], y: IndexedSeq[Any]): Int = {
var i = 0
while(i < ords.length) {
val v = ords(i).compare(x(i), y(i))
if(v != 0) return v
i += 1
}
0
}
})
scope.remove(gen)
b.result()
case GroupBy(gen, from, by) =>
val fromV = run(from).asInstanceOf[Coll]
val grouped = new HashMap[Any, ArrayBuffer[Any]]()
fromV.foreach { v =>
scope(gen) = v
grouped.getOrElseUpdate(run(by), new ArrayBuffer[Any]()) += v
}
scope.remove(gen)
val b = from.nodeType.asCollectionType.cons.canBuildFrom()
grouped.foreach { case (k, vs) => b += new ProductValue(Vector(k, vs)) }
b.result()
case Take(from, num) =>
val fromV = run(from).asInstanceOf[Coll]
val b = from.nodeType.asCollectionType.cons.canBuildFrom()
b ++= fromV.toIterator.take(num)
b.result()
case Drop(from, num) =>
val fromV = run(from).asInstanceOf[Coll]
val b = from.nodeType.asCollectionType.cons.canBuildFrom()
b ++= fromV.toIterator.drop(num)
b.result()
case Union(left, right, all, _, _) =>
val leftV = run(left).asInstanceOf[Coll]
val rightV = run(right).asInstanceOf[Coll]
if(all) leftV ++ rightV
else leftV ++ {
val s = leftV.toSet
rightV.filter(e => !s.contains(e))
}
case GetOrElse(ch, default) =>
run(ch).asInstanceOf[Option[Any]].getOrElse(default())
case OptionApply(ch) =>
Option(run(ch))
case ConditionalExpr(clauses, elseClause) =>
val opt = n.nodeType.asInstanceOf[ScalaType[_]].nullable
val take = clauses.find { case IfThen(pred, _) => asBoolean(run(pred)) }
take match {
case Some(IfThen(_, r)) =>
val res = run(r)
if(opt && !r.nodeType.asInstanceOf[ScalaType[_]].nullable) Option(res)
else res
case _ =>
val res = run(elseClause)
if(opt && !elseClause.nodeType.asInstanceOf[ScalaType[_]].nullable) Option(res)
else res
}
case QueryParameter(extractor, _) =>
extractor(params)
case Library.Exists(coll) =>
!run(coll).asInstanceOf[Coll].isEmpty
case Library.IfNull(cond, default) =>
val condV = run(cond)
if((condV.asInstanceOf[AnyRef] eq null) || condV == None) {
val defaultV = run(default)
if(n.nodeType.isInstanceOf[OptionType] && !default.nodeType.isInstanceOf[OptionType]) Some(defaultV)
else defaultV
} else if(n.nodeType.isInstanceOf[OptionType] && !cond.nodeType.isInstanceOf[OptionType]) Some(condV)
else condV
case Library.In(what, where) =>
val whatV = run(what)
val whereV = run(where)
val whatOpt = what.nodeType.isInstanceOf[OptionType]
if(whatOpt && (whatV.asInstanceOf[AnyRef].eq(null) || whatV == None)) None
else {
val whatBase = if(whatOpt) whatV.asInstanceOf[Option[Any]].get else whatV
where.nodeType match {
case ProductType(elTypes) =>
val p = whereV.asInstanceOf[ProductValue]
0.until(elTypes.length).iterator.map { i =>
if(elTypes(i).isInstanceOf[OptionType]) {
p(i).asInstanceOf[Option[Any]] match {
case Some(v) => whatBase == v
case None => false
}
} else whatBase == p(i)
} contains true
case ct: CollectionType =>
val (els, singleType) = unwrapSingleColumn(whereV.asInstanceOf[Coll], ct)
(if(singleType.isInstanceOf[OptionType])
els.map(_.asInstanceOf[Option[Any]] match {
case Some(v) => whatBase == v
case None => false
})
else els.map(whatBase.==)) contains true
}
}
case Library.Sum(ch) =>
val coll = run(ch).asInstanceOf[Coll]
val (it, itType) = unwrapSingleColumn(coll, ch.nodeType)
val (num, opt) = itType match {
case t: ScalaOptionType[_] => (t.elementType.asInstanceOf[ScalaNumericType[Any]].numeric, true)
case t => (t.asInstanceOf[ScalaNumericType[Any]].numeric, false)
}
foldOptionIt(it, opt, num.zero, (a, b) => num.plus(a, b))
case Library.Avg(ch) =>
val coll = run(ch).asInstanceOf[Coll]
val (it, itType) = unwrapSingleColumn(coll, ch.nodeType)
val (num, opt) = itType match {
case t: ScalaOptionType[_] => (t.elementType.asInstanceOf[ScalaNumericType[Any]].numeric, true)
case t => (t.asInstanceOf[ScalaNumericType[Any]].numeric, false)
}
foldOptionIt(it, opt, num.zero, (a, b) => num.plus(a, b)).map { sum =>
if(num.isInstanceOf[Fractional[_]]) num.asInstanceOf[Fractional[Any]].div(sum, num.fromInt(coll.size))
else num.fromInt(num.toInt(sum) / coll.size)
}
case Library.Min(ch) =>
val coll = run(ch).asInstanceOf[Coll]
val (it, itType) = unwrapSingleColumn(coll, ch.nodeType)
val (num, opt) = itType match {
case t: ScalaOptionType[_] => (t.elementType.asInstanceOf[ScalaNumericType[Any]].numeric, true)
case t => (t.asInstanceOf[ScalaNumericType[Any]].numeric, false)
}
foldOptionIt(it, opt, num.zero, (a, b) => if(num.lt(b, a)) b else a)
case Apply(sym, ch) =>
val chV = ch.map(n => (n.nodeType, run(n)))
logDebug("[chV: "+chV.mkString(", ")+"]")
// Use ternary logic for function calls
if(n.nodeType.isInstanceOf[OptionType]) {
if(chV.exists { case (t, v) => t.isInstanceOf[OptionType] && (v == None) }) None
else {
val chPlainV = chV.map {
case (t: OptionType, v) => (t.elementType, v.asInstanceOf[Option[Any]].get)
case other => other
}
logDebug("[chPlainV: "+chPlainV.mkString(", ")+"]")
Some(evalFunction(sym, chPlainV, n.nodeType.asOptionType.elementType))
}
} else evalFunction(sym, chV, n.nodeType)
//case Library.CountAll(ch) => run(ch).asInstanceOf[Coll].size
case l: LiteralNode => l.value
}
indent -= 1
if(logger.isDebugEnabled) logDebug("Result: "+res)
res
}
/** Evaluates a standard-library function symbol against already-evaluated
  * argument values.
  *
  * @param sym the Library function identifier being applied
  * @param args evaluated arguments, each paired with its declared Type
  *             (the Type is used to look up Ordering/Numeric instances)
  * @param retType the expected result type (only consulted by Cast)
  *
  * NOTE(review): the match is not exhaustive over all Library symbols, and
  * the inner Cast match only supports String/Int/Long targets; anything else
  * throws a MatchError.
  */
def evalFunction(sym: Symbol, args: Seq[(Type, Any)], retType: Type) = sym match {
  case Library.== => args(0)._2 == args(1)._2
  // Comparisons delegate to the Ordering of the first argument's type.
  case Library.< => args(0)._1.asInstanceOf[ScalaBaseType[Any]].ordering.lt(args(0)._2, args(1)._2)
  case Library.<= => args(0)._1.asInstanceOf[ScalaBaseType[Any]].ordering.lteq(args(0)._2, args(1)._2)
  case Library.> => args(0)._1.asInstanceOf[ScalaBaseType[Any]].ordering.gt(args(0)._2, args(1)._2)
  case Library.>= => args(0)._1.asInstanceOf[ScalaBaseType[Any]].ordering.gteq(args(0)._2, args(1)._2)
  // Arithmetic delegates to the Numeric of the first argument's type.
  case Library.+ => args(0)._1.asInstanceOf[ScalaNumericType[Any]].numeric.plus(args(0)._2, args(1)._2)
  case Library.- => args(0)._1.asInstanceOf[ScalaNumericType[Any]].numeric.minus(args(0)._2, args(1)._2)
  case Library.* => args(0)._1.asInstanceOf[ScalaNumericType[Any]].numeric.times(args(0)._2, args(1)._2)
  // Modulus additionally requires an Integral instance.
  case Library.% => args(0)._1.asInstanceOf[ScalaNumericType[Any]].numeric.asInstanceOf[Integral[Any]].rem(args(0)._2, args(1)._2)
  case Library.Abs => args(0)._1.asInstanceOf[ScalaNumericType[Any]].numeric.abs(args(0)._2)
  case Library.And => args(0)._2.asInstanceOf[Boolean] && args(1)._2.asInstanceOf[Boolean]
  case Library.Cast =>
    val v = args(0)._2
    // Identity cast when source and target types match; otherwise convert
    // via toString for the supported target types.
    (args(0)._1, retType) match {
      case (a, b) if a == b => v
      case (_, ScalaBaseType.stringType) => v.toString
      case (_, ScalaBaseType.intType) => v.toString.toInt
      case (_, ScalaBaseType.longType) => v.toString.toLong
    }
  case Library.Ceiling =>
    val t = args(0)._1.asInstanceOf[ScalaNumericType[Any]]
    t.fromDouble(scala.math.ceil(t.toDouble(args(0)._2)))
  case Library.Concat => args.iterator.map(_._2.toString).mkString
  case Library.CountAll => args(0)._2.asInstanceOf[Coll].size
  case Library.Count =>
    // SQL COUNT(x): count rows whose (first) column is neither null nor None.
    val CollectionType(_, elType) = args(0)._1
    val coll = args(0)._2.asInstanceOf[Coll]
    (elType match {
      case ProductType(_) =>
        coll.iterator.filter { p =>
          val v = p.asInstanceOf[ProductValue].apply(0)
          v != null && v != None
        }
      case _ =>
        coll.iterator.filter(v => v != null && v != None)
    }).size
  case Library.Database => ""
  case Library.Degrees =>
    val t = args(0)._1.asInstanceOf[ScalaNumericType[Any]]
    t.fromDouble(scala.math.toDegrees(t.toDouble(args(0)._2)))
  case Library.Floor =>
    val t = args(0)._1.asInstanceOf[ScalaNumericType[Any]]
    t.fromDouble(scala.math.floor(t.toDouble(args(0)._2)))
  case Library.LCase => args(0)._2.asInstanceOf[String].toLowerCase
  case Library.Length => args(0)._2.asInstanceOf[String].length
  case Library.Like =>
    // Optional third argument carries the escape character for the pattern.
    val pat = compileLikePattern(args(1)._2.toString, if(args.length > 2) Some(args(2)._2.toString.charAt(0)) else None)
    val mat = pat.matcher(args(0)._2.toString())
    mat.matches()
  case Library.LTrim =>
    // SQL LTRIM: strip leading spaces only; avoids allocating when unchanged.
    val s = args(0)._2.asInstanceOf[String]
    val len = s.length
    var start = 0
    while(start < len && s.charAt(start) == ' ') start += 1
    if(start == 0) s else s.substring(start)
  case Library.Not => !args(0)._2.asInstanceOf[Boolean]
  case Library.Or => args(0)._2.asInstanceOf[Boolean] || args(1)._2.asInstanceOf[Boolean]
  case Library.Pi => scala.math.Pi
  case Library.Radians =>
    val t = args(0)._1.asInstanceOf[ScalaNumericType[Any]]
    t.fromDouble(scala.math.toRadians(t.toDouble(args(0)._2)))
  case Library.RTrim =>
    // SQL RTRIM: strip trailing spaces only.
    val s = args(0)._2.asInstanceOf[String]
    var len = s.length
    while(len > 0 && s.charAt(len-1) == ' ') len -= 1
    if(len == s.length) s else s.substring(0, len)
  case Library.Sign => args(0)._1.asInstanceOf[ScalaNumericType[Any]].numeric.signum(args(0)._2)
  case Library.Trim => args(0)._2.asInstanceOf[String].trim
  case Library.UCase => args(0)._2.asInstanceOf[String].toUpperCase
  case Library.User => ""
}
/** Flattens a collection of single-column products to an iterator over the
  * bare column values; multi-column or scalar element types pass through
  * unchanged. Returns the element iterator together with the element type. */
def unwrapSingleColumn(coll: Coll, tpe: Type): (Iterator[Any], Type) =
  tpe.asCollectionType.elementType match {
    case ProductType(Seq(elTpe)) =>
      // Single-column product: project each row onto its only element.
      (coll.iterator.map(_.asInstanceOf[ProductValue].apply(0)), elTpe)
    case other =>
      (coll.iterator, other)
  }
/** Folds an iterator into an Option, the way SQL aggregates treat NULLs.
  *
  * An empty iterator yields None. When `opt` is true the elements are
  * themselves Options and a single None element makes the whole result None;
  * otherwise the elements are folded directly and wrapped in Some. */
def foldOptionIt(it: Iterator[Any], opt: Boolean, zero: Any, f: (Any, Any) => Any): Option[Any] = {
  if(it.isEmpty) None
  else if(!opt) Some(it.foldLeft(zero)(f))
  else it.foldLeft(Some(zero): Option[Any]) { (acc, el) =>
    // Propagate None: one missing element poisons the whole aggregate.
    acc.flatMap(a => el.asInstanceOf[Option[Any]].map(b => f(a, b)))
  }
}
/** Builds a "row of NULLs" of the given type, used as the padding value on
  * the outer side of left/right joins that found no matching row.
  *
  * Nullable scalar slots become None, non-nullable ones become null;
  * struct and product types are filled recursively. */
def createNullRow(tpe: Type): Any = tpe match {
  case t: ScalaType[_] => if(t.nullable) None else null
  case StructType(el) =>
    // Second argument maps each field symbol to its positional index.
    new StructValue(el.map{ case (_, tpe) => createNullRow(tpe) }(collection.breakOut),
      el.zipWithIndex.map{ case ((sym, _), idx) => (sym, idx) }(collection.breakOut): Map[Symbol, Int])
  case ProductType(el) =>
    new ProductValue(el.map(tpe => createNullRow(tpe))(collection.breakOut))
}
/** Coerces an interpreter value to a Boolean using ternary logic:
  * None and null both count as false. Any other non-Boolean value is a
  * MatchError, which would indicate an interpreter bug. */
def asBoolean(v: Any) = v match {
  case Some(b: Boolean) => b
  case b: Boolean => b
  case None | null => false
}
/** Translates a SQL LIKE pattern into a java.util.regex Pattern.
  *
  * `%` becomes `.*`; a character preceded by the escape character (when one
  * is supplied) is matched literally; every other character is quoted.
  * NOTE(review): as in the original, an escape character in final position
  * reads one character past the end of the pattern string. */
def compileLikePattern(s: String, escape: Option[Char]): Pattern = {
  val esc = escape.getOrElse('\\u0000')
  val regex = new StringBuilder("^")
  var idx = 0
  while(idx < s.length) {
    val c = s.charAt(idx)
    if(c == esc) {
      idx += 1 // consume the escape and quote the following char verbatim
      regex append Pattern.quote(String.valueOf(s.charAt(idx)))
    } else if(c == '%') regex append ".*"
    else regex append Pattern.quote(String.valueOf(c))
    idx += 1
  }
  Pattern.compile(regex.append('$').toString)
}
}
object QueryInterpreter {
  /** Interpreter runtime value for ProductType rows: an immutable,
    * index-addressable tuple of column values. */
  class ProductValue(private val data: IndexedSeq[Any]) extends (Int => Any) {
    /** Arity of the product. */
    def length: Int = data.length
    /** The value at 0-based position `idx`. */
    def apply(idx: Int): Any = data(idx)
    override def toString = data.mkString("ProductValue(", ", ", ")")
    // Structural equality: two products are equal iff their element
    // sequences are equal.
    override def equals(other: Any) = other match {
      case that: ProductValue => this.data == that.data
      case _ => false
    }
    override def hashCode = data.hashCode()
  }

  /** Interpreter runtime value for StructType rows: a ProductValue whose
    * elements can additionally be looked up by field symbol. */
  class StructValue(data: IndexedSeq[Any], symbolToIndex: (Symbol => Int)) extends ProductValue(data) {
    /** The value of the field named by `sym`. */
    def getBySymbol(sym: Symbol): Any = apply(symbolToIndex(sym))
    override def toString = data.mkString("StructValue(", ", ", ")") + "{" + symbolToIndex + "}"
  }
}
| retronym/slick | src/main/scala/scala/slick/memory/QueryInterpreter.scala | Scala | bsd-2-clause | 18,307 |
import dispatch.{Request, Http, NoLogging, StatusCode, url}
import cc.spray.json.{JsNull, JsonParser, DefaultJsonProtocol, JsValue}
import RichJsValue._
import org.apache.commons.codec.digest.DigestUtils
import org.apache.commons.codec.binary.{Hex, Base64}
import java.io.{IOException, File, FileInputStream}
import scalaz.Scalaz.{mkIdentity, ValidationNEL}
import Settings._
import sbt._
case class JsonSubmission(api_state: String, user_info: JsValue, submission_metadata: JsValue, solutions: JsValue, submission_encoding: String, submission: String)
//case class JsonQueueResult(submission: JsonSubmission)
// spray-json (de)serialization support for the queue API payloads;
// jsonFormat6 derives the format from JsonSubmission's six fields.
object SubmitJsonProtocol extends DefaultJsonProtocol {
  implicit val jsonSubmissionFormat = jsonFormat6(JsonSubmission)
// implicit val jsonQueueResultFormat = jsonFormat1(JsonQueueResult)
}
// Forwarder object that wraps the deprecated client construction so callers
// do not trigger deprecation warnings themselves.
// NOTE(review): the name contains a typo ("Deprectaion") but is referenced
// as-is by CourseraHttp, so it cannot be renamed in isolation.
object DeprectaionForwarder {
  @deprecated("", "") class FwdClass { def insecureClientForwarder(credentials: dispatch.Http.CurrentCredentials) = insecureClient(credentials) }; object FwdClass extends FwdClass
  import org.apache.http.impl.client.DefaultHttpClient
  import org.apache.http.conn.ssl._
  import org.apache.http.conn.scheme._
  import javax.net.ssl.{X509TrustManager, SSLContext}
  import java.security.cert.X509Certificate
  import org.apache.http.impl.conn.SingleClientConnManager
  import java.security.SecureRandom
  // SECURITY: every check below is a no-op, so this trust manager accepts
  // any certificate chain — TLS server authentication is effectively off.
  class NaiveTrustManager extends X509TrustManager {
    override def checkClientTrusted(arg0: Array[X509Certificate], arg1: String) {
    }
    override def checkServerTrusted(arg0: Array[X509Certificate], arg1: String) {
    }
    override def getAcceptedIssuers(): Array[X509Certificate] = {
      return null;
    }
  }
  /** Builds an Apache HttpClient whose HTTPS scheme combines the
    * NaiveTrustManager with ALLOW_ALL_HOSTNAME_VERIFIER, i.e. neither the
    * certificate nor the hostname is validated. Use only against endpoints
    * that are trusted out of band. */
  @deprecated("", "") def insecureClient(credentials: dispatch.Http.CurrentCredentials) = {
    val sslContext = SSLContext.getInstance("TLS");
    sslContext.init(null, Array(new NaiveTrustManager()), new SecureRandom());
    val sf = new SSLSocketFactory(sslContext, SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
    val httpsScheme = new Scheme("https", sf, 443);
    val schemeRegistry = new SchemeRegistry();
    schemeRegistry.register(httpsScheme);
    val dispatch_client = new dispatch.ConfiguredHttpClient(credentials)
    val params = dispatch_client.createHttpParams
    val cm = new SingleClientConnManager(params, schemeRegistry);
    val client = new DefaultHttpClient(cm, params)
    client
  }
}
/** HTTP helpers for Coursera's legacy submission and grading API.
  *
  * Every operation returns a scalaz ValidationNEL[String, _] so callers get
  * human-readable error messages instead of exceptions. URLs and limits
  * (challengeUrl, submitUrl, maxSubmitFileSize, ...) come from Settings. */
object CourseraHttp {
  // Shared dispatch executor. NOTE(review): built on the all-trusting SSL
  // client from DeprectaionForwarder, i.e. certificate validation is off.
  private lazy val http = new Http with NoLogging {
    override def make_client = DeprectaionForwarder.FwdClass.insecureClientForwarder(credentials)
  }
  /** Executes `req`, feeding the response body to `parse`; connection errors
    * and non-success HTTP status codes become failure messages. */
  private def executeRequest[T](req: Request)(parse: String => ValidationNEL[String, T]): ValidationNEL[String, T] = {
    try {
      http(req >- { res =>
        parse(res)
      })
    } catch {
      case ex: IOException =>
        ("Connection failed\\n"+ ex.toString()).failNel
      case StatusCode(code, message) =>
        ("HTTP failed with status "+ code +"\\n"+ message).failNel
    }
  }
  /******************************
   * SUBMITTING
   */
  /** Requests a login challenge for `email`; the server answers with a
    * pipe-delimited name/value string that is parsed into a Challenge. */
  def getChallenge(email: String, submitProject: ProjectDetails): ValidationNEL[String, Challenge] = {
    val baseReq = url(challengeUrl(submitProject.courseId))
    val withArgs = baseReq << Map("email_address" -> email,
      "assignment_part_sid" -> submitProject.assignmentPartId,
      "response_encoding" -> "delim")
    executeRequest(withArgs) { res =>
      // example result. there might be an `aux_data` value at the end.
      // |email_address|a@b.com|challenge_key|XXYYXXYYXXYY|state|XXYYXXYYXXYY|challenge_aux_data|
      val parts = res.split('|').filterNot(_.isEmpty)
      if (parts.length < 7)
        ("Unexpected challenge format: \\n"+ res).failNel
      else
        // name/value pairs: the values sit at the odd indices 1, 3 and 5
        Challenge(parts(1), parts(3), parts(5)).successNel
    }
  }
  /** Uploads the sources jar (Base64-encoded) as the solution for the given
    * assignment part, after checking that the jar exists, is non-empty and
    * is below maxSubmitFileSize. */
  def submitSolution(sourcesJar: File, submitProject: ProjectDetails, challenge: Challenge, chResponse: String): ValidationNEL[String, String] = {
    val fileLength = sourcesJar.length()
    if (!sourcesJar.exists()) {
      ("Sources jar archive does not exist\\n"+ sourcesJar.getAbsolutePath).failNel
    } else if (fileLength == 0l) {
      ("Sources jar archive is empty\\n"+ sourcesJar.getAbsolutePath).failNel
    } else if (fileLength > maxSubmitFileSize) {
      ("Sources jar archive is too big. Allowed size: "+
        maxSubmitFileSize +" bytes, found "+ fileLength +" bytes.\\n"+
        sourcesJar.getAbsolutePath).failNel
    } else {
      val bytes = new Array[Byte](fileLength.toInt)
      // NOTE(review): on IOException `sizeRead` is bound to a ValidationNEL
      // rather than an Int, so the size check below reports the generic
      // "size read" message instead of the underlying IO error — confirm.
      val sizeRead = try {
        val is = new FileInputStream(sourcesJar)
        val read = is.read(bytes)
        is.close()
        read
      } catch {
        case ex: IOException =>
          ("Failed to read sources jar archive\\n"+ ex.toString()).failNel
      }
      if (sizeRead != bytes.length) {
        ("Failed to read the sources jar archive, size read: "+ sizeRead).failNel
      } else {
        val fileData = encodeBase64(bytes)
        val baseReq = url(submitUrl(submitProject.courseId))
        val withArgs = baseReq << Map("assignment_part_sid" -> submitProject.assignmentPartId,
          "email_address" -> challenge.email,
          "submission" -> fileData,
          "submission_aux" -> "",
          "challenge_response" -> chResponse,
          "state" -> challenge.state)
        executeRequest(withArgs) { res =>
          // the API returns HTTP 200 even if there are failures, how impolite...
          if (res.contains("Your submission has been accepted"))
            res.successNel
          else
            res.failNel
        }
      }
    }
  }
  /** Answer to a login challenge: SHA-1 hex digest of the challenge key
    * concatenated with the one-time password. */
  def challengeResponse(challenge: Challenge, otPassword: String): String =
    shaHexDigest(challenge.challengeKey + otPassword)
  /********************************
   * DOWNLOADING SUBMISSIONS
   */
// def downloadFromQueue(queue: String, targetJar: File, apiKey: String): ValidationNEL[String, QueueResult] = {
// val baseReq = url(Settings.submitQueueUrl)
// val withArgsAndHeader = baseReq << Map("queue" -> queue) <:< Map("X-api-key" -> apiKey)
// executeRequest(withArgsAndHeader) { res =>
// extractJson(res, targetJar)
// }
// }
  /** Reads a queue-result JSON file from disk and unpacks the contained
    * submission into `targetJar`. */
  def readJsonFile(jsonFile: File, targetJar: File): ValidationNEL[String, QueueResult] = {
    extractJson(sbt.IO.read(jsonFile), targetJar)
  }
  /** Parses a queue-result JSON document, writes the Base64-decoded
    * `submission` payload to `targetJar` and returns its api_state.
    * Fails when the queue is empty (submission is JSON null). */
  def extractJson(jsonData: String, targetJar: File): ValidationNEL[String, QueueResult] = {
    import SubmitJsonProtocol._
    for {
      jsonSubmission <- {
        try {
          val parsed = JsonParser(jsonData)
          val submission = parsed \\ "submission"
          if (submission == JsNull) {
            ("Nothing to grade, queue is empty.").failNel
          } else {
            submission.convertTo[JsonSubmission].successNel
          }
        } catch {
          case e: Exception =>
            ("Could not parse submission\\n"+ jsonData +"\\n"+ fullExceptionString(e)).failNel
        }
      }
      queueResult <- {
        val encodedFile = jsonSubmission.submission
        val jarContent = decodeBase64(encodedFile)
        try {
          sbt.IO.write(targetJar, jarContent)
          QueueResult(jsonSubmission.api_state).successNel
        } catch {
          case e: IOException =>
            ("Failed to write jar file to "+ targetJar.getAbsolutePath +"\\n"+ e.toString).failNel
        }
      }
    } yield queueResult
  }
  /** Unzips `file` into `targetDirectory`; an empty archive is an error.
    * In offline mode unpack failures are printed and tolerated. */
  def unpackJar(file: File, targetDirectory: File): ValidationNEL[String, Unit] = {
    try {
      val files = sbt.IO.unzip(file, targetDirectory)
      if (files.isEmpty)
        ("No files found when unpacking jar file "+ file.getAbsolutePath).failNel
      else
        ().successNel
    } catch {
      case e: IOException =>
        val msg = "Error while unpacking the jar file "+ file.getAbsolutePath +" to "+ targetDirectory.getAbsolutePath +"\\n"+ e.toString
        if (Settings.offlineMode) {
          println("[offline mode] "+ msg)
          ().successNel
        } else {
          msg.failNel
        }
    }
  }
  /********************************
   * SUBMITTING GRADES
   */
  /** Posts a grade (score + feedback) for a previously downloaded submission
    * identified by `apiState`; the server signals acceptance with a JSON
    * status field of "202". */
  def submitGrade(feedback: String, score: String, apiState: String, apiKey: String, gradeProject: ProjectDetails, logger: Option[Logger]): ValidationNEL[String, Unit] = {
    import DefaultJsonProtocol._
    val baseReq = url(Settings.uploadFeedbackUrl(gradeProject.courseId))
    val reqArgs = Map("api_state" -> apiState, "score" -> score, "feedback" -> feedback)
    val withArgs = baseReq << reqArgs <:< Map("X-api-key" -> apiKey)
    for (l <- logger) l.debug("Submit grade arguments: \\n X-api-key: " + apiKey + " " + reqArgs + " ")
    executeRequest(withArgs) { res =>
      try {
        for (l <- logger) l.debug("Response:" + res)
        val js = JsonParser(res)
        val status = (js \\ "status").convertTo[String]
        if (status == "202")
          ().successNel
        else
          ("Unexpected result from submit request: "+ status).failNel
      } catch {
        case e: Exception =>
          ("Failed to parse response while submitting grade\\n"+ res +"\\n"+ fullExceptionString(e)).failNel
      }
    }
  }
  /*********************************
   * TOOLS AND STUFF
   */
  /** Hex-encoded SHA-1 digest of `s` (commons-codec DigestUtils.sha). */
  def shaHexDigest(s: String): String = {
    val chars = Hex.encodeHex(DigestUtils.sha(s))
    new String(chars)
  }
  /** The exception's message plus its full stack trace, one frame per line. */
  def fullExceptionString(e: Throwable) =
    e.toString +"\\n"+ e.getStackTrace.map(_.toString).mkString("\\n")
  /* Base 64 tools */
  def encodeBase64(bytes: Array[Byte]): String =
    new String(Base64.encodeBase64(bytes))
  def decodeBase64(str: String): Array[Byte] = {
    // codecs 1.4 has a version accepting a string, but not 1.2; jar hell.
    Base64.decodeBase64(str.getBytes)
  }
}
case class Challenge(email: String, challengeKey: String, state: String)
case class QueueResult(apiState: String)
| wangpeibin713/scala_study | scala_at_coursera/progfun-recfun/project/CourseraHttp.scala | Scala | gpl-2.0 | 10,049 |
package org.github.sguzman.scala.game.scalebra.util.log.help
import org.github.sguzman.scala.game.scalebra.util.config.values.App
/**
 * Per-message logging configuration: information that is only known at the
 * moment a log call is made (as opposed to static application settings) and
 * is rendered into the log message.
 *
 * @param name app name; defaults to the configured application name
 * @param mode run mode of the app, if known
 * @param `class` invoking class, if known
 * @param method invoking method, if known
 * @param line invoking line number, if known
 * @param context context of the log call, if known
 * @param custom free-form extra information, if known
 */
case class LogConfig(
  name: Option[String] = Some(App.name),
  mode: Option[String] = None,
  `class`: Option[String] = None,
  method: Option[String] = None,
  line: Option[String] = None,
  context: Option[String] = None,
  custom: Option[String] = None
) {
  /** All fields as ("label", value) pairs, in declaration order. */
  def tuple: Seq[(String, Option[String])] = Seq(
    "name" -> name,
    "mode" -> mode,
    "class" -> `class`,
    "method" -> method,
    "line" -> line,
    "context" -> context,
    "custom" -> custom
  )
}
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.controller
import kafka.api.LeaderAndIsr
import kafka.server.KafkaConfig
import kafka.utils.TestUtils
import kafka.zk.KafkaZkClient.UpdateLeaderAndIsrResult
import kafka.zk.{KafkaZkClient, TopicPartitionStateZNode}
import kafka.zookeeper.{GetDataResponse, ResponseMetadata}
import org.apache.kafka.common.TopicPartition
import org.apache.zookeeper.KeeperException.Code
import org.apache.zookeeper.data.Stat
import org.easymock.EasyMock
import org.junit.Assert._
import org.junit.{Before, Test}
import org.scalatest.junit.JUnitSuite
import scala.collection.mutable
class ReplicaStateMachineTest extends JUnitSuite {
// Collaborators are vars created fresh in setUp() so construction happens
// inside the JUnit lifecycle; the vals below are fixed fixture data shared
// by every test case.
private var controllerContext: ControllerContext = null
private var mockZkClient: KafkaZkClient = null
private var mockControllerBrokerRequestBatch: ControllerBrokerRequestBatch = null
private var mockTopicDeletionManager: TopicDeletionManager = null
private var replicaState: mutable.Map[PartitionAndReplica, ReplicaState] = null
private var replicaStateMachine: ReplicaStateMachine = null
private val brokerId = 5
private val config = KafkaConfig.fromProps(TestUtils.createBrokerConfig(brokerId, "zkConnect"))
private val controllerEpoch = 50
private val partition = new TopicPartition("t", 0)
private val partitions = Seq(partition)
private val replica = PartitionAndReplica(partition, brokerId)
private val replicas = Seq(replica)
@Before
def setUp(): Unit = {
  // Fresh context, state map and EasyMock collaborators for each test so
  // expectations never leak between cases.
  controllerContext = new ControllerContext
  controllerContext.epoch = controllerEpoch
  mockZkClient = EasyMock.createMock(classOf[KafkaZkClient])
  mockControllerBrokerRequestBatch = EasyMock.createMock(classOf[ControllerBrokerRequestBatch])
  mockTopicDeletionManager = EasyMock.createMock(classOf[TopicDeletionManager])
  replicaState = mutable.Map.empty[PartitionAndReplica, ReplicaState]
  replicaStateMachine = new ReplicaStateMachine(config, new StateChangeLogger(brokerId, true, None), controllerContext, mockTopicDeletionManager, mockZkClient,
    replicaState, mockControllerBrokerRequestBatch)
}
// NonExistentReplica is the implicit initial state (absent from the map):
// moving to NewReplica is the only legal transition; every other target must
// be rejected, leaving the replica in NonExistentReplica.
@Test
def testNonexistentReplicaToNewReplicaTransition(): Unit = {
  replicaStateMachine.handleStateChanges(replicas, NewReplica)
  assertEquals(NewReplica, replicaState(replica))
}
@Test
def testInvalidNonexistentReplicaToOnlineReplicaTransition(): Unit = {
  replicaStateMachine.handleStateChanges(replicas, OnlineReplica)
  assertEquals(NonExistentReplica, replicaState(replica))
}
@Test
def testInvalidNonexistentReplicaToOfflineReplicaTransition(): Unit = {
  replicaStateMachine.handleStateChanges(replicas, OfflineReplica)
  assertEquals(NonExistentReplica, replicaState(replica))
}
@Test
def testInvalidNonexistentReplicaToReplicaDeletionStartedTransition(): Unit = {
  replicaStateMachine.handleStateChanges(replicas, ReplicaDeletionStarted)
  assertEquals(NonExistentReplica, replicaState(replica))
}
@Test
def testInvalidNonexistentReplicaToReplicaDeletionIneligibleTransition(): Unit = {
  replicaStateMachine.handleStateChanges(replicas, ReplicaDeletionIneligible)
  assertEquals(NonExistentReplica, replicaState(replica))
}
@Test
def testInvalidNonexistentReplicaToReplicaDeletionSuccessfulTransition(): Unit = {
  replicaStateMachine.handleStateChanges(replicas, ReplicaDeletionSuccessful)
  assertEquals(NonExistentReplica, replicaState(replica))
}
// Going back from NewReplica to NonExistentReplica is illegal; the
// testInvalidTransition helper (defined further down) checks the rejection.
@Test
def testInvalidNewReplicaToNonexistentReplicaTransition(): Unit = {
  testInvalidTransition(NewReplica, NonExistentReplica)
}
@Test
def testNewReplicaToOnlineReplicaTransition(): Unit = {
  // With the replica assigned to this broker, NewReplica -> OnlineReplica is
  // a plain state update (no leadership info exists yet, so no requests).
  replicaState.put(replica, NewReplica)
  controllerContext.partitionReplicaAssignment.put(partition, Seq(brokerId))
  replicaStateMachine.handleStateChanges(replicas, OnlineReplica)
  assertEquals(OnlineReplica, replicaState(replica))
}
@Test
def testNewReplicaToOfflineReplicaTransition(): Unit = {
  // Taking a NewReplica offline sends a StopReplica (deletePartition=false);
  // note the final assertion: the tracked state remains NewReplica here.
  replicaState.put(replica, NewReplica)
  EasyMock.expect(mockControllerBrokerRequestBatch.newBatch())
  EasyMock.expect(mockControllerBrokerRequestBatch.addStopReplicaRequestForBrokers(EasyMock.eq(Seq(brokerId)),
    EasyMock.eq(partition), EasyMock.eq(false), EasyMock.anyObject()))
  EasyMock.expect(mockControllerBrokerRequestBatch.sendRequestsToBrokers(controllerEpoch))
  EasyMock.replay(mockControllerBrokerRequestBatch)
  replicaStateMachine.handleStateChanges(replicas, OfflineReplica)
  EasyMock.verify(mockControllerBrokerRequestBatch)
  assertEquals(NewReplica, replicaState(replica))
}
// Illegal transitions out of NewReplica (deletion states) and OnlineReplica
// (back to non-existent/new); testInvalidTransition checks the rejection.
@Test
def testInvalidNewReplicaToReplicaDeletionStartedTransition(): Unit = {
  testInvalidTransition(NewReplica, ReplicaDeletionStarted)
}
@Test
def testInvalidNewReplicaToReplicaDeletionIneligibleTransition(): Unit = {
  testInvalidTransition(NewReplica, ReplicaDeletionIneligible)
}
@Test
def testInvalidNewReplicaToReplicaDeletionSuccessfulTransition(): Unit = {
  testInvalidTransition(NewReplica, ReplicaDeletionSuccessful)
}
@Test
def testInvalidOnlineReplicaToNonexistentReplicaTransition(): Unit = {
  testInvalidTransition(OnlineReplica, NonExistentReplica)
}
@Test
def testInvalidOnlineReplicaToNewReplicaTransition(): Unit = {
  testInvalidTransition(OnlineReplica, NewReplica)
}
@Test
def testOnlineReplicaToOnlineReplicaTransition(): Unit = {
  // Online -> Online keeps the state but re-sends the current LeaderAndIsr
  // to the replica's broker.
  replicaState.put(replica, OnlineReplica)
  controllerContext.partitionReplicaAssignment.put(partition, Seq(brokerId))
  val leaderIsrAndControllerEpoch = LeaderIsrAndControllerEpoch(LeaderAndIsr(brokerId, List(brokerId)), controllerEpoch)
  controllerContext.partitionLeadershipInfo.put(partition, leaderIsrAndControllerEpoch)
  EasyMock.expect(mockControllerBrokerRequestBatch.newBatch())
  EasyMock.expect(mockControllerBrokerRequestBatch.addLeaderAndIsrRequestForBrokers(Seq(brokerId),
    partition, leaderIsrAndControllerEpoch, Seq(brokerId), isNew = false))
  EasyMock.expect(mockControllerBrokerRequestBatch.sendRequestsToBrokers(controllerEpoch))
  EasyMock.replay(mockZkClient, mockControllerBrokerRequestBatch)
  replicaStateMachine.handleStateChanges(replicas, OnlineReplica)
  EasyMock.verify(mockZkClient, mockControllerBrokerRequestBatch)
  assertEquals(OnlineReplica, replicaState(replica))
}
@Test
def testOnlineReplicaToOfflineReplicaTransition(): Unit = {
  // Taking the current leader offline: a StopReplica goes to this broker,
  // the partition state is read from ZK, leadership moves off the broker
  // (NoLeader) with a shrunken ISR, the updated LeaderAndIsr is written back
  // and then propagated to the surviving broker.
  val otherBrokerId = brokerId + 1
  val replicaIds = List(brokerId, otherBrokerId)
  replicaState.put(replica, OnlineReplica)
  controllerContext.partitionReplicaAssignment.put(partition, replicaIds)
  val leaderAndIsr = LeaderAndIsr(brokerId, replicaIds)
  val leaderIsrAndControllerEpoch = LeaderIsrAndControllerEpoch(leaderAndIsr, controllerEpoch)
  controllerContext.partitionLeadershipInfo.put(partition, leaderIsrAndControllerEpoch)
  val stat = new Stat(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
  EasyMock.expect(mockControllerBrokerRequestBatch.newBatch())
  EasyMock.expect(mockControllerBrokerRequestBatch.addStopReplicaRequestForBrokers(EasyMock.eq(Seq(brokerId)),
    EasyMock.eq(partition), EasyMock.eq(false), EasyMock.anyObject()))
  // Expected ZK update: leaderless partition with only the other broker in
  // the ISR; the write bumps the zkVersion by one.
  val adjustedLeaderAndIsr = leaderAndIsr.newLeaderAndIsr(LeaderAndIsr.NoLeader, List(otherBrokerId))
  val updatedLeaderAndIsr = adjustedLeaderAndIsr.withZkVersion(adjustedLeaderAndIsr .zkVersion + 1)
  val updatedLeaderIsrAndControllerEpoch = LeaderIsrAndControllerEpoch(updatedLeaderAndIsr, controllerEpoch)
  EasyMock.expect(mockZkClient.getTopicPartitionStatesRaw(partitions)).andReturn(
    Seq(GetDataResponse(Code.OK, null, Some(partition),
      TopicPartitionStateZNode.encode(leaderIsrAndControllerEpoch), stat, ResponseMetadata(0, 0))))
  EasyMock.expect(mockZkClient.updateLeaderAndIsr(Map(partition -> adjustedLeaderAndIsr), controllerEpoch))
    .andReturn(UpdateLeaderAndIsrResult(Map(partition -> updatedLeaderAndIsr), Seq.empty, Map.empty))
  EasyMock.expect(mockTopicDeletionManager.isPartitionToBeDeleted(partition)).andReturn(false)
  EasyMock.expect(mockControllerBrokerRequestBatch.addLeaderAndIsrRequestForBrokers(Seq(otherBrokerId),
    partition, updatedLeaderIsrAndControllerEpoch, replicaIds, isNew = false))
  EasyMock.expect(mockControllerBrokerRequestBatch.sendRequestsToBrokers(controllerEpoch))
  EasyMock.replay(mockZkClient, mockControllerBrokerRequestBatch, mockTopicDeletionManager)
  replicaStateMachine.handleStateChanges(replicas, OfflineReplica)
  EasyMock.verify(mockZkClient, mockControllerBrokerRequestBatch, mockTopicDeletionManager)
  assertEquals(updatedLeaderIsrAndControllerEpoch, controllerContext.partitionLeadershipInfo(partition))
  assertEquals(OfflineReplica, replicaState(replica))
}
// Illegal transitions out of OnlineReplica (deletion states) and
// OfflineReplica (back to non-existent/new).
@Test
def testInvalidOnlineReplicaToReplicaDeletionStartedTransition(): Unit = {
  testInvalidTransition(OnlineReplica, ReplicaDeletionStarted)
}
@Test
def testInvalidOnlineReplicaToReplicaDeletionIneligibleTransition(): Unit = {
  testInvalidTransition(OnlineReplica, ReplicaDeletionIneligible)
}
@Test
def testInvalidOnlineReplicaToReplicaDeletionSuccessfulTransition(): Unit = {
  testInvalidTransition(OnlineReplica, ReplicaDeletionSuccessful)
}
@Test
def testInvalidOfflineReplicaToNonexistentReplicaTransition(): Unit = {
  testInvalidTransition(OfflineReplica, NonExistentReplica)
}
@Test
def testInvalidOfflineReplicaToNewReplicaTransition(): Unit = {
  testInvalidTransition(OfflineReplica, NewReplica)
}
@Test
def testOfflineReplicaToOnlineReplicaTransition(): Unit = {
  // Bringing an offline replica back online re-sends the current
  // LeaderAndIsr to its broker and marks the replica OnlineReplica.
  replicaState.put(replica, OfflineReplica)
  controllerContext.partitionReplicaAssignment.put(partition, Seq(brokerId))
  val leaderIsrAndControllerEpoch = LeaderIsrAndControllerEpoch(LeaderAndIsr(brokerId, List(brokerId)), controllerEpoch)
  controllerContext.partitionLeadershipInfo.put(partition, leaderIsrAndControllerEpoch)
  EasyMock.expect(mockControllerBrokerRequestBatch.newBatch())
  EasyMock.expect(mockControllerBrokerRequestBatch.addLeaderAndIsrRequestForBrokers(Seq(brokerId),
    partition, leaderIsrAndControllerEpoch, Seq(brokerId), isNew = false))
  EasyMock.expect(mockControllerBrokerRequestBatch.sendRequestsToBrokers(controllerEpoch))
  EasyMock.replay(mockZkClient, mockControllerBrokerRequestBatch)
  replicaStateMachine.handleStateChanges(replicas, OnlineReplica)
  EasyMock.verify(mockZkClient, mockControllerBrokerRequestBatch)
  assertEquals(OnlineReplica, replicaState(replica))
}
@Test
def testOfflineReplicaToReplicaDeletionStartedTransition(): Unit = {
  // Starting deletion from Offline sends StopReplica with
  // deletePartition=true and the supplied response callback.
  val callbacks = new Callbacks()
  replicaState.put(replica, OfflineReplica)
  EasyMock.expect(mockControllerBrokerRequestBatch.newBatch())
  EasyMock.expect(mockControllerBrokerRequestBatch.addStopReplicaRequestForBrokers(Seq(brokerId),
    partition, true, callbacks.stopReplicaResponseCallback))
  EasyMock.expect(mockControllerBrokerRequestBatch.sendRequestsToBrokers(controllerEpoch))
  EasyMock.replay(mockZkClient, mockControllerBrokerRequestBatch)
  replicaStateMachine.handleStateChanges(replicas, ReplicaDeletionStarted, callbacks)
  EasyMock.verify(mockZkClient, mockControllerBrokerRequestBatch)
  assertEquals(ReplicaDeletionStarted, replicaState(replica))
}
@Test
def testInvalidOfflineReplicaToReplicaDeletionIneligibleTransition(): Unit = {
testInvalidTransition(OfflineReplica, ReplicaDeletionIneligible)
}
@Test
def testInvalidOfflineReplicaToReplicaDeletionSuccessfulTransition(): Unit = {
testInvalidTransition(OfflineReplica, ReplicaDeletionSuccessful)
}
@Test
def testInvalidReplicaDeletionStartedToNonexistentReplicaTransition(): Unit = {
testInvalidTransition(ReplicaDeletionStarted, NonExistentReplica)
}
@Test
def testInvalidReplicaDeletionStartedToNewReplicaTransition(): Unit = {
testInvalidTransition(ReplicaDeletionStarted, NewReplica)
}
@Test
def testInvalidReplicaDeletionStartedToOnlineReplicaTransition(): Unit = {
testInvalidTransition(ReplicaDeletionStarted, OnlineReplica)
}
@Test
def testInvalidReplicaDeletionStartedToOfflineReplicaTransition(): Unit = {
testInvalidTransition(ReplicaDeletionStarted, OfflineReplica)
}
  // Deletion outcome transitions: no broker requests are expected here,
  // only in-memory state updates.
  @Test
  def testReplicaDeletionStartedToReplicaDeletionIneligibleTransition(): Unit = {
    replicaState.put(replica, ReplicaDeletionStarted)
    replicaStateMachine.handleStateChanges(replicas, ReplicaDeletionIneligible)
    assertEquals(ReplicaDeletionIneligible, replicaState(replica))
  }
  @Test
  def testReplicaDeletionStartedToReplicaDeletionSuccessfulTransition(): Unit = {
    replicaState.put(replica, ReplicaDeletionStarted)
    replicaStateMachine.handleStateChanges(replicas, ReplicaDeletionSuccessful)
    assertEquals(ReplicaDeletionSuccessful, replicaState(replica))
  }
  // Successful deletion -> NonExistentReplica removes the replica from the
  // partition assignment and drops its state entry entirely.
  @Test
  def testReplicaDeletionSuccessfulToNonexistentReplicaTransition(): Unit = {
    replicaState.put(replica, ReplicaDeletionSuccessful)
    controllerContext.partitionReplicaAssignment.put(partition, Seq(brokerId))
    replicaStateMachine.handleStateChanges(replicas, NonExistentReplica)
    assertEquals(Seq.empty, controllerContext.partitionReplicaAssignment(partition))
    assertEquals(None, replicaState.get(replica))
  }
  // A successfully deleted replica may only be forgotten, never revived.
  @Test
  def testInvalidReplicaDeletionSuccessfulToNewReplicaTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionSuccessful, NewReplica)
  }
  @Test
  def testInvalidReplicaDeletionSuccessfulToOnlineReplicaTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionSuccessful, OnlineReplica)
  }
  @Test
  def testInvalidReplicaDeletionSuccessfulToOfflineReplicaTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionSuccessful, OfflineReplica)
  }
  @Test
  def testInvalidReplicaDeletionSuccessfulToReplicaDeletionStartedTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionSuccessful, ReplicaDeletionStarted)
  }
  @Test
  def testInvalidReplicaDeletionSuccessfulToReplicaDeletionIneligibleTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionSuccessful, ReplicaDeletionIneligible)
  }
  @Test
  def testInvalidReplicaDeletionIneligibleToNonexistentReplicaTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionIneligible, NonExistentReplica)
  }
  @Test
  def testInvalidReplicaDeletionIneligibleToNewReplicaTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionIneligible, NewReplica)
  }
  // ReplicaDeletionIneligible -> OnlineReplica: retrying a failed deletion
  // brings the replica back online via a fresh LeaderAndIsr request.
  @Test
  def testReplicaDeletionIneligibleToOnlineReplicaTransition(): Unit = {
    replicaState.put(replica, ReplicaDeletionIneligible)
    controllerContext.partitionReplicaAssignment.put(partition, Seq(brokerId))
    val leaderIsrAndControllerEpoch = LeaderIsrAndControllerEpoch(LeaderAndIsr(brokerId, List(brokerId)), controllerEpoch)
    controllerContext.partitionLeadershipInfo.put(partition, leaderIsrAndControllerEpoch)
    EasyMock.expect(mockControllerBrokerRequestBatch.newBatch())
    EasyMock.expect(mockControllerBrokerRequestBatch.addLeaderAndIsrRequestForBrokers(Seq(brokerId),
      partition, leaderIsrAndControllerEpoch, Seq(brokerId), isNew = false))
    EasyMock.expect(mockControllerBrokerRequestBatch.sendRequestsToBrokers(controllerEpoch))
    EasyMock.replay(mockZkClient, mockControllerBrokerRequestBatch)
    replicaStateMachine.handleStateChanges(replicas, OnlineReplica)
    EasyMock.verify(mockZkClient, mockControllerBrokerRequestBatch)
    assertEquals(OnlineReplica, replicaState(replica))
  }
  @Test
  def testInvalidReplicaDeletionIneligibleToReplicaDeletionStartedTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionIneligible, ReplicaDeletionStarted)
  }
  @Test
  def testInvalidReplicaDeletionIneligibleToReplicaDeletionSuccessfulTransition(): Unit = {
    testInvalidTransition(ReplicaDeletionIneligible, ReplicaDeletionSuccessful)
  }
  // Helper: an invalid transition must leave the replica's state unchanged.
  private def testInvalidTransition(fromState: ReplicaState, toState: ReplicaState): Unit = {
    replicaState.put(replica, fromState)
    replicaStateMachine.handleStateChanges(replicas, toState)
    assertEquals(fromState, replicaState(replica))
  }
}
| MyPureCloud/kafka | core/src/test/scala/unit/kafka/controller/ReplicaStateMachineTest.scala | Scala | apache-2.0 | 16,824 |
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.action.builder
import io.gatling.core.action.{ Action, TryMax }
import io.gatling.core.session.Expression
import io.gatling.core.structure.{ ChainBuilder, ScenarioContext }
private[core] final class TryMaxBuilder(times: Expression[Int], counterName: String, loopNext: ChainBuilder) extends ActionBuilder {

  /**
   * Builds a [[TryMax]] action whose retried chain loops back into it:
   * the loop body is built with the TryMax itself as its terminal action,
   * and the TryMax is then initialized with the head of that chain.
   */
  override def build(ctx: ScenarioContext, next: Action): Action = {
    val components = ctx.coreComponents
    val tryMax = new TryMax(times, counterName, components.statsEngine, components.clock, next)
    tryMax.initialize(loopNext.build(ctx, tryMax))
    tryMax
  }
}
| gatling/gatling | gatling-core/src/main/scala/io/gatling/core/action/builder/TryMaxBuilder.scala | Scala | apache-2.0 | 1,274 |
package com.larry.da.jobs.userdigest
import org.apache.spark.{SparkContext, SparkConf}
/**
* Created by larry on 14/12/15.
*/
object UserdigestTest {
  // Entry point: configures a SparkContext with Kryo serialization registered
  // for the job's message classes, then runs the channel-id merge.
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("channelid-merge")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.kryoserializer.buffer.mb","128")
//    conf.set("spark.kryo.registrationRequired", "true")
    conf.registerKryoClasses(Array(
      classOf[com.larry.da.jobs.idmap.Person],
      classOf[com.larry.da.jobs.userdigest.UserMapping],//userdigest
      classOf[scala.collection.mutable.WrappedArray.ofRef[_]]
    ))
    val sc = new SparkContext(conf)
    // NOTE(review): the date is hard-coded; presumably it should come from
    // `args` for production runs — confirm.
    ChannelIdMerge.mergeIdMap(sc,"2016-01-17")
  }
}
| larry88/spark_da | src/main/scala/com/larry/da/jobs/userdigest/UserdigestTest.scala | Scala | gpl-2.0 | 769 |
package com.github.wildprairie.common.actors.world
import akka.actor.Props
/**
* Created by jacek on 20.05.17.
*/
object CharacterIdentifierSupply {
  def props: Props = Props(classOf[CharacterIdentifierSupply])
  // Persistent state: the next id to hand out and all names reserved so far.
  final case class State(idCounter: Long, reservedNames: List[String])
  // Commands accepted by the actor.
  sealed trait Cmd
  final case class ReserveCharacter(name: String) extends Cmd
  // Events persisted to the journal.
  sealed trait Evt
  final case class ReservedCharacter(name: String) extends Evt
  // Replies sent back to the requester.
  sealed trait ReservationResult
  final case class Success(id: Long) extends ReservationResult
  case object NameIsTaken extends ReservationResult
  // NOTE(review): NameIsInvalid is never produced by the actor below —
  // presumably name validation is not implemented yet; confirm.
  case object NameIsInvalid extends ReservationResult
}
// Persistent supplier of unique character ids. Names are reserved
// case-insensitively; each successful reservation consumes one id.
class CharacterIdentifierSupply extends SemiPersistentActor {
  import CharacterIdentifierSupply._
  override type State = CharacterIdentifierSupply.State
  override type Event = CharacterIdentifierSupply.Evt
  override def initialState: State =
    State(0, List())
  override def persistenceId: String = "character-id-supply"
  // Pure state transition applied for each persisted/replayed event.
  override def updateState(
      st: State,
      ev: Event
  ): State = ev match {
    case ReservedCharacter(name) =>
      st.copy(
        idCounter = st.idCounter + 1,
        reservedNames = name :: st.reservedNames
      )
  }
  override def elseReceiveCommand: Receive = {
    case ReserveCharacter(name) =>
      // Case-insensitive uniqueness check against all reserved names.
      if (getState.reservedNames.exists(_.compareToIgnoreCase(name) == 0)) {
        sender() ! NameIsTaken
      } else {
        // NOTE(review): the reply reads getState.idCounter right after
        // persist(); whether that is the pre- or post-event counter depends
        // on SemiPersistentActor's persist semantics — confirm.
        persist(ReservedCharacter(name))
        sender() ! Success(getState.idCounter)
      }
  }
}
| OpenWakfu/wildprairie | common/src/main/scala/com/github/wildprairie/common/actors/world/CharacterIdentifierSupply.scala | Scala | mit | 1,537 |
package org.scalameta
class FileLine(val file: sourcecode.File, val line: sourcecode.Line) {

  /** Renders as a green "ShortFileName:line" (directory and .scala suffix stripped). */
  override def toString: String = {
    val name = file.value.replaceAll("(.*/|\\.scala)", "")
    s"${Console.GREEN}$name:${line.value}${Console.RESET}"
  }
}
object FileLine {
  // Implicitly materializes the caller's file/line via sourcecode macros,
  // so `implicit fileLine: FileLine` parameters capture the call site.
  implicit def generate(implicit file: sourcecode.File, line: sourcecode.Line): FileLine =
    new FileLine(file, line)
}
object logger {

  /** Same as println except includes the file+line number of the call site. */
  def debug(x: Any)(implicit fileLine: FileLine): Unit =
    println(s"$fileLine $x")

  /** Replaces whitespace characters with visible non-whitespace stand-ins. */
  def revealWhitespace(s: String): String =
    s.map {
      case '\t'  => '†'
      case '\n'  => '¶'
      case ' '   => '∙'
      case other => other
    }

  /** Prints each value along with its source-code representation.
   *
   * Example: logger.elem(x) // prints "MyFile:24 [x]: 42"
   **/
  def elem(values: sourcecode.Text[Any]*)(implicit fileLine: FileLine): Unit =
    values.foreach { t =>
      val str = s"${t.value}"
      // Put multi-line values on their own lines for readability.
      val rendered = if (str.contains("\n")) s"\n$str" else str
      println(s"$fileLine [${t.source}]: $rendered")
    }
}
| DavidDudson/scalameta | scalameta/common/shared/src/main/scala/org/scalameta/logger.scala | Scala | bsd-3-clause | 1,242 |
package algorithms.machineLearning
/**
* Created by yujieshui on 2016/7/25.
*/
object CorrelationRegression {

  /**
   * Pearson product-moment correlation coefficient of two series, in [-1, 1].
   *
   * The two series should have equal length; if they differ, the product sum
   * is truncated to the shorter one (zip semantics, matching the original
   * implementation) while each mean/norm uses its full series.
   * Returns NaN when either series has zero variance.
   */
  def pearsonProduct(x: Seq[Double], y: Seq[Double]): Double = {
    val xMean = x.sum / x.size
    val yMean = y.sum / y.size
    // Deviations from the mean, computed once instead of three times.
    val xDev = x.map(_ - xMean)
    val yDev = y.map(_ - yMean)
    val covarianceSum = xDev.zip(yDev).map { case (a, b) => a * b }.sum
    val xNorm = math.sqrt(xDev.map(d => d * d).sum)
    val yNorm = math.sqrt(yDev.map(d => d * d).sum)
    covarianceSum / (xNorm * yNorm)
  }

  // Demo entry point: prints the correlation of a small sample data set.
  def main(args: Array[String]): Unit = {
    println(
      pearsonProduct(
        x = Seq(15, 12, 8, 8, 7, 7, 7, 6, 5, 3),
        y = Seq(10, 25, 17, 11, 13, 17, 20, 13, 9, 15)
      )
    )
  }
}
| 1178615156/hackerrank | src/main/scala/algorithms/machineLearning/CorrelationRegression.scala | Scala | apache-2.0 | 671 |
package scavlink.link.parameter
import scavlink.link._
import scavlink.link.operation._
import scavlink.message.{From, Packet}
import scavlink.message.common.{ParamRequestRead, ParamValue}
/** Parameter operation that retrieves a specific set of named parameters from the vehicle. */
case class GetNamedParameters(names: Set[String]) extends ParameterOp {
  // nonEmpty is the idiomatic emptiness check; the message documents the contract.
  require(names.nonEmpty, "at least one parameter name must be requested")
  val actorType = classOf[GetNamedParametersActor]
}
/** Result of a [[GetNamedParameters]] operation; `isAll` is false because only
  * the requested subset of parameters is included (possibly partial on timeout). */
case class GetNamedParametersResult(vehicle: Vehicle, op: GetNamedParameters, params: Parameters) extends ParameterOpResult {
  val isAll = false
}
/**
* Retrieves named parameters from the vehicle.
* @author Nick Rossi
*/
class GetNamedParametersActor(vehicle: Vehicle) extends VehicleOpActor[GetNamedParameters](vehicle) {
  // Retry state: parameters received so far plus the retry counter. On each
  // retry only the still-missing names are re-requested; on timeout the
  // partial result collected so far is returned.
  private case class ParamData(received: Parameters, tries: Int = 0) extends ActionRetry with DefaultRetrySettings {
    // tries == 0 is the initial send done in Idle, so the first retry action is a no-op.
    def action() = if (tries > 0) requestNames(op.names.diff(received.keySet))
    def increment(): ActionRetry = copy(tries = tries + 1)
    override def timeoutResult: OpResult = GetNamedParametersResult(vehicle, op, received)
  }
  // Accept only named ParamValue packets from this vehicle (paramIndex == -1
  // marks a by-name response rather than a bulk-index one).
  val matcher = SubscribeTo.complete {
    case Packet(From(`id`, _, _), msg: ParamValue) if msg.paramId != "" && msg.paramIndex == -1 => true
  }
  // Sends one ParamRequestRead per requested name.
  def requestNames(names: Set[String]) =
    names foreach { name =>
      link.send(ParamRequestRead(targetSystem, targetComponent, name, -1))
    }
  // FSM states
  when(Idle) {
    case Event(op: GetNamedParameters, Uninitialized) =>
      start(op, sender())
      link.events.subscribe(self, matcher)
      requestNames(op.names)
      goto(Active) using ParamData(Map.empty)
  }
  when(Active) {
    case Event(Packet(_, msg: ParamValue), ParamData(received, _)) =>
      val updated = received + (msg.paramId -> msg.paramValue)
      // Finish once every requested name has been answered.
      if (updated.size < op.names.size) {
        stay using ParamData(updated)
      } else {
        stop using Finish(GetNamedParametersResult(vehicle, op, updated))
      }
  }
}
| nickolasrossi/scavlink | src/main/scala/scavlink/link/parameter/GetNamedParameters.scala | Scala | mit | 1,895 |
/*
* A real-time collaborative tool to develop files over the network.
* Copyright (C) 2010 Mauro Ciancio and Leandro Gilioli
* {maurociancio,legilioli} at gmail dot com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ar.noxit.paralleleditor.common.operation
/** Edit operation that does nothing when executed (no-op). All instances are
  * interchangeable and compare equal. */
class NullOperation extends EditOperation {
  def executeOn(documentData: DocumentData) = {}
  override def toString = "NullOperation"
  override def equals(obj: Any) = obj.isInstanceOf[NullOperation]
  // Bug fix: equals was overridden without hashCode, breaking the
  // equals/hashCode contract (equal instances must share a hash code).
  override def hashCode: Int = classOf[NullOperation].hashCode
}
| maurociancio/parallel-editor | src/parallel-editor-common/src/main/scala/ar/noxit/paralleleditor/common/operation/NullOperation.scala | Scala | gpl-3.0 | 1,121 |
package daos
import scala.concurrent.Future
import javax.inject.Inject
import models.{Category, CategoryTable}
import play.api.db.slick.DatabaseConfigProvider
import slick.driver.JdbcProfile
import scala.concurrent.ExecutionContext.Implicits.global
/** Implementation of CategoryDao that uses slick for db access */
class CategoryDaoSlick @Inject() (
  val dcp: DatabaseConfigProvider
) extends CategoryDao with CategoryTable
{
  import CategoryDao.CategoryInfo
  override val dc = dcp.get[JdbcProfile]
  import dc.driver.api._
  private[this] val db = dc.db
  /* Saves the category and returns it with its database-generated id
   * (the returned value is a copy of the inserted row, not the input). */
  override def save(c: CategoryInfo): Future[Category] = {
    val cat = Category(0, c.name)
    // `returning ... into` fetches the auto-generated id and copies it in.
    val query = ( categories returning categories.map(_.id) ) into ((cat, id) => cat.copy(id = id))
    db.run( query += cat )
  }
  /* Lists all categories. */
  override def all(): Future[Seq[Category]] = db.run( categories.result )
  /* Deletes the category with the given id; completes even if no row matched. */
  override def remove(id: Int): Future[Unit] =
    db.run( categories.filter(_.id === id).delete ).map( _ => () )
}
| kdoomsday/doomcart | app/daos/CategoryDaoSlick.scala | Scala | unlicense | 1,034 |
package org.apache.spark.sql.snowflake
import net.snowflake.spark.snowflake.Utils
import org.apache.spark.SparkContext
import org.apache.spark.sql.{DataFrame, Encoder, SparkSession}
import scala.reflect.runtime.universe.TypeTag
import scala.util.Random
// SparkSession wrapper for integration tests: every created DataFrame is
// round-tripped through Snowflake (write then read back), exercising both
// the connector's write and read code paths.
class SFTestWrapperSparkSession(sc: SparkContext, sfConfigs: Map[String, String])
    extends SparkSession(sc) {
  // Builds a DataFrame from a Seq and round-trips it through Snowflake.
  def seqToDF[T: Encoder](data: Seq[T]): DataFrame = {
    createSnowflakeDFFromSparkDF(super.createDataset(data).toDF())
  }
  // Same as above with explicit column names.
  def seqToDF[T: Encoder](data: Seq[T], colNames: String*): DataFrame = {
    createSnowflakeDFFromSparkDF(super.createDataset(data).toDF(colNames: _*))
  }
  private def createSnowflakeDFFromSparkDF(df: DataFrame): DataFrame = {
    // Write this DataFrame to Snowflake, in effect testing the connector's write-out
    // code path, and then read it back as a Snowflake DataFrame. This uses
    // the temporary schema defined in the run properties, tempTestSchema
    // NOTE(review): the randomly-named table is not dropped here — presumably
    // cleaned up with the temp schema; confirm.
    val tableName = s"tempTestTable_${Math.abs(Random.nextLong()).toString}"
    df.write
      .format(Utils.SNOWFLAKE_SOURCE_SHORT_NAME)
      .options(sfConfigs)
      .option("dbtable", tableName)
      .save()
    read
      .format(Utils.SNOWFLAKE_SOURCE_SHORT_NAME)
      .options(sfConfigs)
      .option("dbtable", tableName)
      .load()
  }
  // Overrides the standard factory so plain createDataFrame also round-trips.
  override def createDataFrame[A <: Product: TypeTag](data: Seq[A]): DataFrame = {
    createSnowflakeDFFromSparkDF(super.createDataFrame(data))
  }
}
object SFTestWrapperSparkSession {
  // Convenience factory mirroring the class constructor.
  def apply(context: SparkContext, configs: Map[String, String]): SFTestWrapperSparkSession =
    new SFTestWrapperSparkSession(context, configs)
}
| snowflakedb/spark-snowflake | src/it/scala/org/apache/spark/sql/snowflake/SFTestWrapperSparkSession.scala | Scala | apache-2.0 | 1,654 |
package epic.preprocess
import java.io.{File, FilenameFilter, StringReader}
import breeze.util.Iterators
import epic.corpora.MascSlab
import epic.slab._
import epic.trees.Span
import scala.collection.mutable.ArrayBuffer
@SerialVersionUID(1L)
class TreebankTokenizer() extends Tokenizer with Serializable {
  // Adds a Token layer to the slab by running the underlying tokenizer over
  // each Sentence span. Token offsets are shifted by the sentence start so
  // they are expressed in whole-document coordinates.
  override def apply[In <: Sentence](slab: StringSlab[In]): StringSlab[In with Token] = {
    slab.addLayer[Token](slab.iterator[Sentence].flatMap { s =>
      val content = slab.spanned(s._1)
      val impl = new TreebankTokenizerImpl(new StringReader(content))
      Iterators.fromProducer{
        try {
          // getNextToken() returns null at end of input; Option terminates the producer.
          Option(impl.getNextToken()).map { case (region, token) =>
            val res = Span(region.begin + s._1.begin, region.end + s._1.begin) -> token
            res
          }
        } catch {
          case e: Throwable => throw new RuntimeException("Could not tokenize " + s, e)
        }
      }
    })
  }
}
object TreebankTokenizer extends TreebankTokenizer {

  /** Maps a Penn Treebank escape (e.g. "-LRB-") back to its surface form ("("). */
  def treebankTokenToToken(s: String): String = reverseTreebankMappings.getOrElse(s, s)

  // Bracket characters and their Penn Treebank escape sequences.
  private val treebankMappings = Map("(" -> "-LRB-", ")" -> "-RRB-", "{" -> "-LCB-", "}" -> "-RCB-", "[" -> "-LSB-", "]" -> "-RSB-")
  private val reverseTreebankMappings = treebankMappings.map(_.swap)

  /** Replaces symbols like ( with their penn treebank equivalent.
    * Straight double quotes toggle between opening (``) and closing ('') forms. */
  def tokensToTreebankTokens(toks: Seq[String]): IndexedSeq[String] = {
    // have to deal with quotes, so we can't just use map.
    val output = new ArrayBuffer[String]()
    var inOpenQuote = false
    for(t <- toks) t match {
      case "“" => inOpenQuote = true; output += "``"
      case "‘" => inOpenQuote = true; output += "`"
      // Bug fix: curly *closing* quotes must map to the PTB closing forms
      // (' and ''); the previous code emitted the opening forms (` and ``).
      case "’" => inOpenQuote = false; output += "'"
      case "”" => inOpenQuote = false; output += "''"
      case "\"" if inOpenQuote => inOpenQuote = false; output += "''"
      case "\"" => inOpenQuote = true; output += "``"
      case _ => output += treebankMappings.getOrElse(t, t)
    }
    output
  }

  // Just to check how the tokenizer does: compares the MASC gold segmentation
  // against this tokenizer's output and prints sentences that disagree after
  // normalizing quote styles.
  def main(args: Array[String]) = {
    val mascDir = new java.io.File(args(0))
    val comps = for(dir <- new File(new File(mascDir,"data"), "written").listFiles();
        f <- dir.listFiles(new FilenameFilter {
          override def accept(dir: File, name: String): Boolean = name.endsWith(".txt")
        })) yield {
      val slab: StringSlab[Source] = MascSlab(f.toURI.toURL)
      val slabWithSentences: Slab[String, Span, Source with Sentence] = MascSlab.s[Source](slab)
      val slabWithTokens = MascSlab.seg(slabWithSentences)
      slabWithTokens.iterator[Sentence].map{sent =>
        val gold = slabWithTokens.covered[Segment](sent._1).map { case (span, tok) => slab.spanned(span)}
        val guess = TreebankTokenizer(slab.spanned(sent._1))
        (gold, guess, slab.spanned(sent._1))
      }
    }
    for( (gold, guess, orig) <- comps.iterator.flatten if gold != guess) {
      // Normalize both sides to plain quotes before comparing.
      val gg = gold.map(treebankMappings.withDefault(identity[String])).mkString(" ").replaceAll("”","\"").replaceAll("“", "\"")
      val gs = guess.mkString(" ").replaceAll("(``|'')","\"").replaceAll("`","'")
      if (gg != gs) {
        println(gg)
        println(gs)
        println(orig)
        println("=====================")
      }
    }
  }
}
| briantopping/epic | src/main/scala/epic/preprocess/TreebankTokenizer.scala | Scala | apache-2.0 | 3,411 |
package com.datamountaineer.streamreactor.connect.redis.sink.writer
/*
* Copyright 2017 Datamountaineer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util
import com.datamountaineer.streamreactor.connect.redis.sink.RedisSinkTask
import com.datamountaineer.streamreactor.connect.redis.sink.config.{RedisConfig, RedisConfigConstants, RedisConnectionInfo, RedisSinkSettings}
import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}
import org.apache.kafka.connect.sink.SinkRecord
import org.mockito.MockitoSugar
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import redis.clients.jedis.{Jedis, StreamEntryID}
import scala.collection.JavaConverters._
// Unit test for the Redis Streams writer: Jedis is mocked, so no live Redis
// instance is needed (the embedded-server setup below is commented out).
class RedisStreamTest extends AnyWordSpec with Matchers with BeforeAndAfterAll with MockitoSugar {
  //
  //  val redisServer = new RedisServer(6379)
  //
  //  override def beforeAll() = redisServer.start()
  //
  //  override def afterAll() = redisServer.stop()
  "Redis Stream writer" should {
    "write Kafka records to a Redis Stream" in {
      val TOPIC = "cpuTopic"
      val KCQL = s"INSERT INTO stream1 SELECT * from $TOPIC STOREAS STREAM"
      println("Testing KCQL : " + KCQL)
      val props = Map(
        RedisConfigConstants.REDIS_HOST->"localhost",
        RedisConfigConstants.REDIS_PORT->"6379",
        RedisConfigConstants.KCQL_CONFIG->KCQL,
        RedisConfigConstants.REDIS_PASSWORD -> ""
      ).asJava
      val config = RedisConfig(props)
      // NOTE(review): connectionInfo is never used below — confirm whether it
      // can be removed or was meant to feed the settings.
      val connectionInfo = new RedisConnectionInfo("localhost", 6379, None)
      val settings = RedisSinkSettings(config)
      val writer = new RedisStreams(settings)
      val schema = SchemaBuilder.struct().name("com.example.Cpu")
        .field("type", Schema.STRING_SCHEMA)
        .field("temperature", Schema.FLOAT64_SCHEMA)
        .field("voltage", Schema.FLOAT64_SCHEMA)
        .field("ts", Schema.INT64_SCHEMA).build()
      val struct1 = new Struct(schema).put("type", "Xeon").put("temperature", 60.4).put("voltage", 90.1).put("ts", 1482180657010L)
      val sinkRecord1 = new SinkRecord(TOPIC, 0, null, null, schema, struct1, 1)
      // Inject the mocked Jedis client so no network I/O happens.
      val jedis = mock[Jedis]
      writer.jedis = jedis
      // Expected field map for the xadd call (all values stringified).
      val map = new util.HashMap[String, String]()
      map.put("type", "Xeon")
      map.put("temperature", "60.4")
      map.put("voltage", "90.1")
      map.put("ts", 1482180657010L.toString)
      when(jedis.auth("")).isLenient()
      when(jedis.xadd("stream1", null, map)).thenReturn(mock[StreamEntryID])
      writer.initialize(1, settings.errorPolicy)
      // NOTE(review): the stubbed xadd is never verified, so this test only
      // checks that write() completes without throwing — confirm intent.
      writer.write(Seq(sinkRecord1))
    }
  }
}
| datamountaineer/stream-reactor | kafka-connect-redis/src/test/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisStreamTest.scala | Scala | apache-2.0 | 3,232 |
package com.twitter.finagle
import com.twitter.conversions.time._
import com.twitter.finagle.builder.Cluster
import com.twitter.finagle.service.Backoff
import com.twitter.finagle.util.DefaultTimer
import com.twitter.util.{Closable, Future, Duration, Timer, Var}
import java.net.SocketAddress
import java.util.concurrent.atomic.AtomicReference
/**
* A group is a dynamic set of `T`-typed values. It is used to
* represent dynamic hosts and operations over such lists. Its
* flexibility is derived from the ability to ''map'', creating
* derived groups. The map operation ensures that each element is
* mapped over exactly once, allowing side-effecting operations to
* rely on this to implement safe semantics.
*
* '''Note:''' querying groups is nonblocking, which means that
* derived groups are effectively eventually consistent.
*
* '''Note:''' `T`s must be hashable, definining `hashCode` and
* `equals` to ensure that maps have exactly-once semantics.
*
* '''Note:''' Groups are invariant because Scala's Sets are. In
* the case of sets, this is an implementation artifact, and is
* unfortunate, but it's better to keep things simpler and
* consistent.
*/
@deprecated("Var[Addr], Name", "6.7.x")
trait Group[T] { outer =>
  // Group is needlessly complex due to it transitioning to
  // deprecation. In order to provide reasonable compatibility with
  // forthcoming structures, we base the group implementation on Var
  // while retaining its two key semantics:
  //
  // (1) unchanged objects retain identity;
  // (2) collect & map are idempotent.
  //
  // The following are semi-internal, to be accessed only by Finagle
  // itself.
  protected[finagle] val set: Var[Set[T]]
  // We use the ref here to preserve group semantics. IE: retain object
  // identity to repeated calls to Group.members
  final protected[finagle] lazy val ref = {
    val r = new AtomicReference[Set[T]]()
    set.observeTo(r)
    r
  }
  /**
   * Create a new group by mapping each element of this group
   * with `f`. `f` is guaranteed to be invoked exactly once for each
   * element of the groups, even for dynamic groups.
   */
  def map[U](f: T => U): Group[U] = collect { case e => f(e) }
  /**
   * Create a new group by collecting each element of this group
   * with `f`. `f` is guaranteed to be invoked exactly once for each
   * element of the group, even for dynamic groups.
   */
  def collect[U](f: PartialFunction[T, U]): Group[U] = new Group[U] {
    // Memoizes f per element so re-evaluations never re-apply f (idempotence).
    var mapped = Map[T, U]()
    var last = Set[T]()
    protected[finagle] val set = outer.set map { set =>
      synchronized {
        // Apply f only to newly-added elements; drop mappings for removed ones.
        mapped ++= (set &~ last) collect {
          case el if f.isDefinedAt(el) => el -> f(el)
        }
        mapped --= last &~ set
        last = set
      }
      mapped.values.toSet
    }
  }
  /**
   * The current members of this group. If the group has not
   * changed, the same object is returned. This allows a simple
   * object identity check to be performed to see if the Group has
   * been updated.
   */
  final def members: Set[T] = ref.get
  final def apply(): Set[T] = members
  /**
   * Name the group `n`.
   *
   * @return `this` mixed in with `LabelledGroup`, named `n`
   */
  def named(n: String): Group[T] = LabelledGroup(this, n)
  /** The union of this group with `other`, tracking changes in both. */
  def +(other: Group[T]): Group[T] = new Group[T] {
    protected[finagle] val set = for { a <- outer.set; b <- other.set } yield a++b
  }
  override def toString = "Group(%s)".format(this() mkString ", ")
}
/**
* A group that simply contains a name. Getting at the set binds the
* name, but mostly this is to ship names under the cover of old
* APIs. (And hopefully will be deprecated soon enough.)
*/
private[finagle] case class NameGroup(name: Name.Bound)
  extends Group[SocketAddress] {
  // Binding is lazy: the name's address is only observed on first access.
  // Unbound/failed/pending addresses surface as the empty set.
  protected[finagle] lazy val set: Var[Set[SocketAddress]] = name.addr map {
    case Addr.Bound(set) => set
    case _ => Set()
  }
}
/** A group whose membership can be replaced wholesale via `update`. */
trait MutableGroup[T] extends Group[T] {
  def update(newMembers: Set[T])
}
/**
* A mixin trait to assign a ``name`` to the group. This is used
* to assign labels to groups that ascribe meaning to them.
*/
case class LabelledGroup[T](underlying: Group[T], name: String) extends Group[T] {
  // Pure decorator: membership is delegated unchanged to `underlying`.
  protected[finagle] lazy val set: Var[Set[T]] = underlying.set
}
object Group {
  /**
   * Construct a `T`-typed static group from the given elements.
   *
   * @param staticMembers the members of the returned static group
   */
  def apply[T](staticMembers: T*): Group[T] = new Group[T] {
    protected[finagle] val set = Var(Set(staticMembers:_*))
  }
  /**
   * A group of socket addresses tracking the given address variable;
   * unbound/pending/failed addresses appear as the empty set.
   */
  def fromVarAddr(va: Var[Addr]): Group[SocketAddress] = new Group[SocketAddress] {
    protected[finagle] val set = va map {
      case Addr.Bound(sockaddrs) => sockaddrs
      case _ => Set[SocketAddress]()
    }
  }
  /** A group backed directly by the given set variable. */
  def fromVar[T](v: Var[Set[T]]): Group[T] = new Group[T] {
    protected[finagle] val set = v
  }
  /**
   * The empty group of type `T`.
   */
  def empty[T]: Group[T] = Group()
  /**
   * Creates a mutable group of type `T`.
   *
   * @param initial the initial elements of the group
   */
  def mutable[T](initial: T*): MutableGroup[T] = new MutableGroup[T] {
    protected[finagle] val set = Var(Set(initial:_*))
    def update(newMembers: Set[T]) { set() = newMembers }
  }
  /**
   * Construct a (dynamic) `Group` from the given
   * [[com.twitter.finagle.builder.Cluster]]. Note that clusters
   * are deprecated, so this constructor acts as a temporary
   * bridge.
   */
  def fromCluster[T](underlying: Cluster[T]): Group[T] = {
    val (snap, edits) = underlying.snap
    new Group[T] {
      protected[finagle] val set = Var(snap.toSet)
      // Replay cluster edits into the Var as they arrive.
      edits foreach { spool =>
        spool foreach {
          case Cluster.Add(t) => set() += t
          case Cluster.Rem(t) => set() -= t
        }
      }
    }
  }
}
| yancl/finagle-6.22.0 | finagle-core/src/main/scala/com/twitter/finagle/Group.scala | Scala | apache-2.0 | 5,814 |
/*
* Copyright 2016 Nikolay Donets
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.nikdon.telepooz.engine
import java.util.concurrent.ArrayBlockingQueue
import akka.NotUsed
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.Http.ServerBinding
import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server._
import akka.stream._
import akka.stream.scaladsl.Source
import akka.stream.stage.{GraphStage, GraphStageLogic, OutHandler}
import cats.implicits._
import com.github.nikdon.telepooz.api._
import com.github.nikdon.telepooz.json.CirceDecoders
import com.github.nikdon.telepooz.model.methods.SetWebhook
import com.github.nikdon.telepooz.model.{Response, Update}
import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport._
import scala.concurrent.{Future, Promise}
import scala.util.Failure
/**
*
* @param endpoint HTTPS url's endpoint to send updates to. Use an empty string to remove webhook integration.
* Ex.: "example-endpoint"
* @param scheme HTTPS
* @param interface Host
* @param port Ports currently supported for Webhooks: 443, 80, 88, 8443.
* @param max_connections Maximum allowed number of simultaneous HTTPS connections to the webhook for update delivery,
* 1-100. Defaults to 40. Use lower values to limit the load on your bot‘s server, and higher
* values to increase your bot’s throughput.
* @param allowed_updates List the types of updates you want your bot to receive.
* For example, specify [“message”, “edited_channel_post”, “callback_query”] to only receive
* updates of these types. See Update for a complete list of available update types.
* Specify an empty list to receive all updates regardless of type (default).
* If not specified, the previous setting will be used.
* @param bufferSize Size of buffer in element count
*/
class Webhook(
    endpoint: String,
    scheme: String = "https",
    interface: String = "::0",
    port: Int = 443,
    max_connections: Option[Int] = None,
    allowed_updates: Option[List[String]] = None,
    bufferSize: Int = 10000)(implicit are: ApiRequestExecutor, system: ActorSystem, materializer: ActorMaterializer) {
  // Source of incoming Telegram updates. The webhook registration and the
  // HTTP server are started when the stage is materialized (see WebHookSource).
  val source: Source[Update, NotUsed] =
    Source.fromGraph(
      new WebHookSource(endpoint, scheme, interface, port, max_connections, allowed_updates, bufferSize))
}
/**
 * [[GraphStage]] that registers a Telegram webhook via [[SetWebhook]], binds
 * an HTTP server for incoming updates, and emits each received [[Update]]
 * downstream. Updates are buffered in a bounded queue; when the queue is full
 * the request is rejected so Telegram re-delivers the update later.
 */
class WebHookSource(
    endpoint: String,
    scheme: String,
    interface: String,
    port: Int,
    max_connections: Option[Int],
    allowed_updates: Option[List[String]],
    bufferSize: Int)(implicit are: ApiRequestExecutor, ast: ActorSystem, materializer: ActorMaterializer)
    extends GraphStage[SourceShape[Update]]
    with CirceDecoders {
  // Bug fix: the message previously said "> 1" although the check is
  // (correctly) > 0 — ArrayBlockingQueue only requires capacity >= 1.
  require(bufferSize > 0, "Param `bufferSize` should be > 0.")
  import ast.dispatcher
  val out: Outlet[Update] = Outlet("Webhook.Updates")
  override def shape: SourceShape[Update] = SourceShape(out)
  // Completed once the HTTP server binding succeeds or fails; a failed
  // binding logs the error and terminates the process.
  val serverBindingP = Promise[ServerBinding]
  serverBindingP.future.onComplete {
    case Failure(ex) ⇒
      println(s"[ERROR] ${ex.getMessage}")
      sys.exit(1)
    case _ ⇒ // ignore
  }
  // Route accepting POSTed updates on /<endpoint>. `fn` enqueues the update
  // and returns false when the buffer is full, in which case we reject.
  def route(fn: Update ⇒ Boolean): Route = path(endpoint) {
    entity(as[Update]) {
      case update if fn(update) ⇒ complete(OK)
      case _ ⇒ reject()
    }
  }
  // Registers the webhook with Telegram, then binds the local HTTP server.
  def http(fn: Update ⇒ Boolean): Future[ServerBinding] = {
    SetWebhook(Uri.from(scheme, host = interface, port = port, path = s"/$endpoint").toString(),
               max_connections,
               allowed_updates)
      .foldMap(are)
      .flatMap {
        case Response(true, Some(true), _, _) ⇒
          val f = Http().bindAndHandle(route(fn), "::0", port)
          serverBindingP.completeWith(f)
          f
        case response ⇒
          val f = Future.failed(new IllegalStateException(s"Can't set webhook: $response"))
          serverBindingP.completeWith(f)
          f
      }
  }
  override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) {
    val blockingQueue = new ArrayBlockingQueue[Update](bufferSize)
    // Binding starts on materialization; offer() is non-blocking and returns
    // false on a full queue, which route() surfaces as a rejection.
    val serverBindingF = http(blockingQueue.offer)
    setHandler(
      out,
      new OutHandler {
        override def onPull(): Unit = {
          // NOTE(review): take() blocks until an element is available and
          // never returns null, so the None branch is effectively dead —
          // confirm whether an async callback would be preferable here.
          Option(blockingQueue.take()) match {
            case Some(element) ⇒ push(out, element)
            case None ⇒ // do nothing as we waiting for the element
          }
        }
        override def onDownstreamFinish(): Unit = {
          super.onDownstreamFinish()
          serverBindingF.map(_.unbind())(ast.dispatcher)
        }
      }
    )
    override def postStop(): Unit = {
      super.postStop()
      serverBindingF.map(_.unbind())(ast.dispatcher)
    }
  }
}
| nikdon/telepooz | src/main/scala/com/github/nikdon/telepooz/engine/Webhook.scala | Scala | apache-2.0 | 5,586 |
package com.twitter.finagle.http.filter
import com.twitter.conversions.DurationOps._
import com.twitter.finagle.Service
import com.twitter.finagle.http.{Method, Request, Response, Status}
import com.twitter.util.{Await, Future}
import org.scalatest.FunSuite
/** Tests for [[MethodRequiredFilter]]: requests whose method is outside the
  * allowed set must be answered with 405 Method Not Allowed plus an `Allow`
  * header listing the permitted methods; allowed methods must pass through.
  */
class MethodRequiredFilterTest extends FunSuite {

  // Backing service: echoes a status code taken from the "code" param
  // (defaulting to 200 OK) and can simulate a service failure via "exception".
  val dummyService = new Service[Request, Response] {
    def apply(request: Request): Future[Response] = {
      val response = Response()
      request.params
        .get("exception")
        .foreach { _ =>
          response.write("exception thrown")
          throw new Exception()
        }
      request.params.get("code") match {
        case Some(code) => response.statusCode = code.toInt
        case None => response.status = Status.Ok
      }
      Future.value(response)
    }
  }

  val filter = new MethodRequiredFilter[Request](Set(Method.Post))

  // "Method Not Allowed" is HTTP 405 (RFC 7231 §6.5.5); the original test name
  // incorrectly said 407 (Proxy Authentication Required) while asserting 405.
  test("return 405 when disallowed method is used") {
    val request = Request()
    request.method = Method.Get
    val response = Await.result(filter(request, dummyService), 1.second)
    assert(response.status == Status.MethodNotAllowed)
    assert(response.headerMap.get("Allow") == Some("POST"))
  }

  test("return 200 when allowed method is used") {
    val request = Request()
    request.method = Method.Post
    val response = Await.result(filter(request, dummyService), 1.second)
    assert(response.status == Status.Ok)
  }
}
| luciferous/finagle | finagle-http/src/test/scala/com/twitter/finagle/http/filter/MethodRequiredFilterTest.scala | Scala | apache-2.0 | 1,415 |
package com.teambytes.inflatable.raft.cluster
import com.teambytes.inflatable.raft.protocol._
import org.scalatest.{BeforeAndAfterAll, Matchers, FlatSpecLike}
import akka.remote.testkit.{MultiNodeConfig, MultiNodeSpec}
import akka.util.Timeout
import akka.actor.{Address, RootActorPath, ActorRef}
import scala.concurrent.{Future, Await}
import akka.pattern.ask
import org.scalatest.concurrent.Eventually
/** Base class for multi-JVM Raft cluster specs: provides helpers to locate the
  * raft member actors running on cluster nodes and to query / await their FSM
  * states (Follower / Candidate / Leader).
  */
abstract class RaftClusterSpec(config: MultiNodeConfig) extends MultiNodeSpec(config)
  with Eventually with ClusterPatience
  with FlatSpecLike with Matchers with BeforeAndAfterAll {
  import concurrent.duration._
  import scala.concurrent.ExecutionContext.Implicits.global
  // Concrete specs must supply the timeout used by the ask/await helpers below.
  implicit val defaultTimeout: Timeout
  /** Resolves any `raft-member-*` actor on the given node, or None when
    * resolution does not succeed within 1 second.
    */
  def selectActorRefMaybe(nodeAddress: Address): Option[ActorRef] = {
    val selection = system.actorSelection(RootActorPath(nodeAddress) / "user" / s"raft-member-*")
    try {
      Some(Await.result(selection.resolveOne(1.second), 1.second))
    } catch {
      case ex: Exception =>
        None
    }
  }
  /** Resolves the actor for member number `memberNr` on the given node;
    * throws if it cannot be resolved within 1 second.
    */
  def selectActorRef(nodeAddress: Address, memberNr: Int): ActorRef = {
    val selection = system.actorSelection(RootActorPath(nodeAddress) / "user" / s"raft-member-$memberNr*")
    Await.result(selection.resolveOne(1.second), 1.second)
  }
  // todo can be implemented by listening to remote eventStream: http://doc.akka.io/docs/akka/snapshot/java/remoting.html#remote-events
  // Polls member states every 20ms until exactly one leader is observed,
  // failing the test after `defaultTimeout`.
  def awaitLeaderElected(members: List[ActorRef]) {
    val start = System.currentTimeMillis()
    awaitCond(
      askMembersForState(members).maybeLeader().isDefined,
      defaultTimeout.duration,
      20.millis,
      "Leader election did not succeed happen within given time range!"
    )
    info(s"Waited for Leader election for ${System.currentTimeMillis() - start}ms")
  }
  def askMemberForState(refs: ActorRef): MemberAndState =
    askMembersForState(refs).members.head
  def askMembersForState(refs: List[ActorRef]): MemberCounter =
    askMembersForState(refs: _*)
  // Asks every member for its FSM state in parallel and blocks (up to
  // `defaultTimeout`) for the combined answer.
  def askMembersForState(refs: ActorRef*): MemberCounter = {
    val stateFutures = refs map { ref => (ref ? AskForState).mapTo[IAmInState] }
    val statesFuture = Future.sequence(stateFutures)
    val states = Await.result(statesFuture, atMost = defaultTimeout.duration).zip(refs) map { case (state, ref) =>
      MemberAndState(ref, state.state)
    }
    MemberCounter(states.toList)
  }
  /** A member actor paired with the Raft state it reported. */
  case class MemberAndState(member: ActorRef, state: RaftState) {
    def simpleName = member.path.elements.last
  }
  /** Query helpers over a snapshot of member states. */
  case class MemberCounter(members: List[MemberAndState]) {
    // NOTE(review): states are compared via toString rather than `==` —
    // presumably to sidestep cross-JVM instance identity in multi-node runs; confirm.
    def followers() = members.filter(_.state.toString == Follower.toString)
    def candidates() = members.filter(_.state.toString == Candidate.toString)
    def leaders() = members.filter(_.state.toString == Leader.toString)
    def leader() = maybeLeader().getOrElse { throw new Exception("Unable to find leader! Members: " + members) }
    // At most one leader may exist; returns it if present.
    def maybeLeader() = {
      val leads = leaders()
      require(leads.size < 2, s"Must have 1 or 0 leaders, yet found ${leads.size}! Members: $members")
      leads.headOption
    }
    def infoMemberStates() {
      info(s"Members: ${members.map(m => s"""${m.simpleName}[${m.state}]""").mkString(", ")}")
    }
  }
  def simpleName(ref: ActorRef): String = ref.path.elements.last
}
| grahamar/inflatable | src/multi-jvm/scala/com/teambytes/inflatable/raft/cluster/RaftClusterSpec.scala | Scala | apache-2.0 | 3,300 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala
/** A partial function of type `PartialFunction[A, B]` is a unary function
* where the domain does not necessarily include all values of type `A`.
* The function `isDefinedAt` allows to test dynamically if a value is in
* the domain of the function.
*
* Even if `isDefinedAt` returns true for an `a: A`, calling `apply(a)` may
* still throw an exception, so the following code is legal:
*
* {{{
* val f: PartialFunction[Int, Any] = { case _ => 1/0 }
* }}}
*
* It is the responsibility of the caller to call `isDefinedAt` before
* calling `apply`, because if `isDefinedAt` is false, it is not guaranteed
* `apply` will throw an exception to indicate an error condition. If an
* exception is not thrown, evaluation may result in an arbitrary value.
*
* The main distinction between `PartialFunction` and [[scala.Function1]] is
* that the user of a `PartialFunction` may choose to do something different
* with input that is declared to be outside its domain. For example:
*
* {{{
* val sample = 1 to 10
* val isEven: PartialFunction[Int, String] = {
* case x if x % 2 == 0 => x+" is even"
* }
*
* // the method collect can use isDefinedAt to select which members to collect
* val evenNumbers = sample collect isEven
*
* val isOdd: PartialFunction[Int, String] = {
* case x if x % 2 == 1 => x+" is odd"
* }
*
* // the method orElse allows chaining another partial function to handle
* // input outside the declared domain
* val numbers = sample map (isEven orElse isOdd)
* }}}
*
*
* @author Martin Odersky, Pavel Pavlov, Adriaan Moors
* @version 1.0, 16/07/2003
*/
trait PartialFunction[-A, +B] extends (A => B) { self =>
  // Brings the companion's fallback-marker helpers (checkFallback / fallbackOccurred) into scope.
  import PartialFunction._
  /** Checks if a value is contained in the function's domain.
   *
   * @param x the value to test
   * @return `'''true'''`, iff `x` is in the domain of this function, `'''false'''` otherwise.
   */
  def isDefinedAt(x: A): Boolean
  /** Composes this partial function with a fallback partial function which
   * gets applied where this partial function is not defined.
   *
   * @param that the fallback function
   * @tparam A1 the argument type of the fallback function
   * @tparam B1 the result type of the fallback function
   * @return a partial function which has as domain the union of the domains
   * of this partial function and `that`. The resulting partial function
   * takes `x` to `this(x)` where `this` is defined, and to `that(x)` where it is not.
   */
  def orElse[A1 <: A, B1 >: B](that: PartialFunction[A1, B1]): PartialFunction[A1, B1] =
    new OrElse[A1, B1] (this, that)
  //TODO: why not overload it with orElse(that: F1): F1?
  /** Composes this partial function with a transformation function that
   * gets applied to results of this partial function.
   * @param k the transformation function
   * @tparam C the result type of the transformation function.
   * @return a partial function with the same domain as this partial function, which maps
   * arguments `x` to `k(this(x))`.
   */
  override def andThen[C](k: B => C): PartialFunction[A, C] =
    new AndThen[A, B, C] (this, k)
  /** Turns this partial function into a plain function returning an `Option` result.
   * @see Function.unlift
   * @return a function that takes an argument `x` to `Some(this(x))` if `this`
   * is defined for `x`, and to `None` otherwise.
   */
  def lift: A => Option[B] = new Lifted(this)
  /** Applies this partial function to the given argument when it is contained in the function domain.
   * Applies fallback function where this partial function is not defined.
   *
   * Note that expression `pf.applyOrElse(x, default)` is equivalent to
   * {{{ if(pf isDefinedAt x) pf(x) else default(x) }}}
   * except that `applyOrElse` method can be implemented more efficiently.
   * For all partial function literals the compiler generates an `applyOrElse` implementation which
   * avoids double evaluation of pattern matchers and guards.
   * This makes `applyOrElse` the basis for the efficient implementation for many operations and scenarios, such as:
   *
   * - combining partial functions into `orElse`/`andThen` chains does not lead to
   * excessive `apply`/`isDefinedAt` evaluation
   * - `lift` and `unlift` do not evaluate source functions twice on each invocation
   * - `runWith` allows efficient imperative-style combining of partial functions
   * with conditionally applied actions
   *
   * For non-literal partial function classes with nontrivial `isDefinedAt` method
   * it is recommended to override `applyOrElse` with custom implementation that avoids
   * double `isDefinedAt` evaluation. This may result in better performance
   * and more predictable behavior w.r.t. side effects.
   *
   * @param x the function argument
   * @param default the fallback function
   * @return the result of this function or fallback function application.
   * @since 2.10
   */
  def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 =
    if (isDefinedAt(x)) apply(x) else default(x)
  /** Composes this partial function with an action function which
   * gets applied to results of this partial function.
   * The action function is invoked only for its side effects; its result is ignored.
   *
   * Note that expression `pf.runWith(action)(x)` is equivalent to
   * {{{ if(pf isDefinedAt x) { action(pf(x)); true } else false }}}
   * except that `runWith` is implemented via `applyOrElse` and thus potentially more efficient.
   * Using `runWith` avoids double evaluation of pattern matchers and guards for partial function literals.
   * @see `applyOrElse`.
   *
   * @param action the action function
   * @return a function which maps arguments `x` to `isDefinedAt(x)`. The resulting function
   * runs `action(this(x))` where `this` is defined.
   * @since 2.10
   */
  def runWith[U](action: B => U): A => Boolean = { x =>
    // `z` is either the real result or the companion's unique fallback marker
    // (see the long comment in object PartialFunction).
    val z = applyOrElse(x, checkFallback[B])
    if (!fallbackOccurred(z)) { action(z); true } else false
  }
}
/** A few handy operations which leverage the extra bit of information
* available in partial functions. Examples:
* {{{
* import PartialFunction._
*
* def strangeConditional(other: Any): Boolean = cond(other) {
* case x: String if x == "abc" || x == "def" => true
* case x: Int => true
* }
* def onlyInt(v: Any): Option[Int] = condOpt(v) { case x: Int => x }
* }}}
*
* @author Paul Phillips
* @since 2.8
*/
object PartialFunction {
  /** Composite function produced by `PartialFunction#orElse` method
   */
  private class OrElse[-A, +B] (f1: PartialFunction[A, B], f2: PartialFunction[A, B])
    extends scala.runtime.AbstractPartialFunction[A, B] with Serializable {
    def isDefinedAt(x: A) = f1.isDefinedAt(x) || f2.isDefinedAt(x)
    override def apply(x: A): B = f1.applyOrElse(x, f2)
    override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = {
      // Probe f1 once via the fallback marker so its matchers/guards run only once.
      val z = f1.applyOrElse(x, checkFallback[B])
      if (!fallbackOccurred(z)) z else f2.applyOrElse(x, default)
    }
    override def orElse[A1 <: A, B1 >: B](that: PartialFunction[A1, B1]) =
      new OrElse[A1, B1] (f1, f2 orElse that)
    override def andThen[C](k: B => C) =
      new OrElse[A, C] (f1 andThen k, f2 andThen k)
  }
  /** Composite function produced by `PartialFunction#andThen` method
   */
  private class AndThen[-A, B, +C] (pf: PartialFunction[A, B], k: B => C) extends PartialFunction[A, C] with Serializable {
    def isDefinedAt(x: A) = pf.isDefinedAt(x)
    def apply(x: A): C = k(pf(x))
    override def applyOrElse[A1 <: A, C1 >: C](x: A1, default: A1 => C1): C1 = {
      // Only apply `k` when `pf` was actually defined at `x`.
      val z = pf.applyOrElse(x, checkFallback[B])
      if (!fallbackOccurred(z)) k(z) else default(x)
    }
  }
  /** To implement patterns like {{{ if(pf isDefinedAt x) f1(pf(x)) else f2(x) }}} efficiently
   * the following trick is used:
   *
   * To avoid double evaluation of pattern matchers & guards `applyOrElse` method is used here
   * instead of `isDefinedAt`/`apply` pair.
   *
   * After call to `applyOrElse` we need both the function result it returned and
   * the fact if the function's argument was contained in its domain. The only degree of freedom we have here
   * to achieve this goal is tweaking with the continuation argument (`default`) of `applyOrElse` method.
   * The obvious way is to throw an exception from `default` function and to catch it after
   * calling `applyOrElse` but I consider this somewhat inefficient.
   *
   * I know only one way how you can do this task efficiently: `default` function should return unique marker object
   * which never may be returned by any other (regular/partial) function. This way after calling `applyOrElse` you need
   * just one reference comparison to distinguish if `pf isDefined x` or not.
   *
   * This correctly interacts with specialization as return type of `applyOrElse`
   * (which is parameterized upper bound) can never be specialized.
   *
   * Here `fallback_pf` is used as both unique marker object and special fallback function that returns it.
   */
  private[this] val fallback_pf: PartialFunction[Any, Any] = { case _ => fallback_pf }
  private def checkFallback[B] = fallback_pf.asInstanceOf[PartialFunction[Any, B]]
  private def fallbackOccurred[B](x: B) = (fallback_pf eq x.asInstanceOf[AnyRef])
  // Wrapper produced by `lift`; `Unlifted` below is its inverse, and `unlifted`
  // unwraps a `Lifted` directly to avoid double wrapping.
  private class Lifted[-A, +B] (val pf: PartialFunction[A, B])
    extends scala.runtime.AbstractFunction1[A, Option[B]] with Serializable {
    def apply(x: A): Option[B] = {
      val z = pf.applyOrElse(x, checkFallback[B])
      if (!fallbackOccurred(z)) Some(z) else None
    }
  }
  private class Unlifted[A, B] (f: A => Option[B]) extends scala.runtime.AbstractPartialFunction[A, B] with Serializable {
    def isDefinedAt(x: A): Boolean = f(x).isDefined
    override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = {
      val z = f(x)
      if (!z.isEmpty) z.get else default(x)
    }
    override def lift = f
  }
  private[scala] def unlifted[A, B](f: A => Option[B]): PartialFunction[A, B] = f match {
    case lf: Lifted[A, B] => lf.pf
    case ff => new Unlifted(ff)
  }
  /** Converts ordinary function to partial one
   * @since 2.10
   */
  def apply[A, B](f: A => B): PartialFunction[A, B] = { case x => f(x) }
  // Shared constant predicate: avoids allocating a fresh function per `cond` /
  // `empty_pf.runWith` call.
  private[this] val constFalse: Any => Boolean = { _ => false}
  private[this] val empty_pf: PartialFunction[Any, Nothing] = new PartialFunction[Any, Nothing] with Serializable {
    def isDefinedAt(x: Any) = false
    def apply(x: Any) = throw new MatchError(x)
    override def orElse[A1, B1](that: PartialFunction[A1, B1]) = that
    override def andThen[C](k: Nothing => C) = this
    override val lift = (x: Any) => None
    override def runWith[U](action: Nothing => U) = constFalse
  }
  /** The partial function with empty domain.
   * Any attempt to invoke empty partial function leads to throwing [[scala.MatchError]] exception.
   * @since 2.10
   */
  def empty[A, B] : PartialFunction[A, B] = empty_pf
  /** Creates a Boolean test based on a value and a partial function.
   * It behaves like a 'match' statement with an implied 'case _ => false'
   * following the supplied cases.
   *
   * @param x the value to test
   * @param pf the partial function
   * @return true, iff `x` is in the domain of `pf` and `pf(x) == true`.
   */
  def cond[T](x: T)(pf: PartialFunction[T, Boolean]): Boolean = pf.applyOrElse(x, constFalse)
  /** Transforms a PartialFunction[T, U] `pf` into Function1[T, Option[U]] `f`
   * whose result is `Some(x)` if the argument is in `pf`'s domain and `None`
   * otherwise, and applies it to the value `x`. In effect, it is a
   * `'''match'''` statement which wraps all case results in `Some(_)` and
   * adds `'''case''' _ => None` to the end.
   *
   * @param x the value to test
   * @param pf the PartialFunction[T, U]
   * @return `Some(pf(x))` if `pf isDefinedAt x`, `None` otherwise.
   */
  def condOpt[T,U](x: T)(pf: PartialFunction[T, U]): Option[U] = pf.lift(x)
}
| felixmulder/scala | src/library/scala/PartialFunction.scala | Scala | bsd-3-clause | 12,746 |
package com.v_standard.vsp
/**
 * Template extension trait (currently a marker with no members of its own).
 */
trait TemplateExtension {
}
/**
 * Default template extension (adds no behavior beyond [[TemplateExtension]]).
 */
class DefaultTemplateExtension extends TemplateExtension {
}
| VanishStandard/vsp | src/main/scala/com/v_standard/vsp/TemplateExtension.scala | Scala | bsd-3-clause | 237 |
package net.al3x.blog
import java.io.File
import scala.collection.{immutable, mutable}
/** Renders the blog's front page: the HTML bodies of `posts`, concatenated in
 * order, wrapped in the site template and written to [[Config.indexPath]].
 */
class Index(posts: Seq[Post]) extends FileHelpers {
  /** HTML bodies of all posts, concatenated in the given order.
   * (Replaces a `var` + `+=` loop, which built O(n^2) intermediate strings.)
   */
  def indexBody: String = posts.map(_.htmlBody).mkString

  /** The fully templatized index page; computed once, on first access. */
  lazy val index = templatizeFile(new File(Config.template),
                                  immutable.Map("XTITLE" -> "Home",
                                                "XBODY" -> indexBody))

  /** Writes the rendered index page to [[Config.indexPath]]. */
  def write = writeFile(new File(Config.indexPath), index)
}
| al3x/simple-scala-blog | src/Index.scala | Scala | mit | 513 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler.AccumulableInfo
import org.apache.spark.shuffle.FetchFailedException
import org.apache.spark.util.{AccumulatorContext, AccumulatorV2}
/** Tests for Spark's internal (per-stage) accumulators: their presence on a
 * `TaskContext`, how their values aggregate across tasks, stages and stage
 * retries, and their registration with the context cleaner.
 */
class InternalAccumulatorSuite extends SparkFunSuite with LocalSparkContext {
  import InternalAccumulator._
  // Reset the shared accumulator registry after each test so accumulator IDs
  // do not leak between tests.
  override def afterEach(): Unit = {
    try {
      AccumulatorContext.clear()
    } finally {
      super.afterEach()
    }
  }
  // A fresh TaskContext must already carry the internal accumulators,
  // including the test accumulator used throughout this suite.
  test("internal accumulators in TaskContext") {
    val taskContext = TaskContext.empty()
    val accumUpdates = taskContext.taskMetrics.accumulators()
    assert(accumUpdates.size > 0)
    val testAccum = taskContext.taskMetrics.testAccum.get
    assert(accumUpdates.exists(_.id == testAccum.id))
  }
  test("internal accumulators in a stage") {
    val listener = new SaveInfoListener
    val numPartitions = 10
    sc = new SparkContext("local", "test")
    sc.addSparkListener(listener)
    // Have each task add 1 to the internal accumulator
    val rdd = sc.parallelize(1 to 100, numPartitions).mapPartitions { iter =>
      TaskContext.get().taskMetrics().testAccum.get.add(1)
      iter
    }
    // Register asserts in job completion callback to avoid flakiness
    listener.registerJobCompletionCallback { () =>
      val stageInfos = listener.getCompletedStageInfos
      val taskInfos = listener.getCompletedTaskInfos
      assert(stageInfos.size === 1)
      assert(taskInfos.size === numPartitions)
      // The accumulator values should be merged in the stage
      val stageAccum = findTestAccum(stageInfos.head.accumulables.values)
      assert(stageAccum.value.get.asInstanceOf[Long] === numPartitions)
      // The accumulator should be updated locally on each task
      val taskAccumValues = taskInfos.map { taskInfo =>
        val taskAccum = findTestAccum(taskInfo.accumulables)
        assert(taskAccum.update.isDefined)
        assert(taskAccum.update.get.asInstanceOf[Long] === 1L)
        taskAccum.value.get.asInstanceOf[Long]
      }
      // Each task should keep track of the partial value on the way, i.e. 1, 2, ... numPartitions
      assert(taskAccumValues.sorted === (1L to numPartitions))
    }
    rdd.count()
    listener.awaitNextJobCompletion()
  }
  test("internal accumulators in multiple stages") {
    val listener = new SaveInfoListener
    val numPartitions = 10
    sc = new SparkContext("local", "test")
    sc.addSparkListener(listener)
    // Each stage creates its own set of internal accumulators so the
    // values for the same metric should not be mixed up across stages
    val rdd = sc.parallelize(1 to 100, numPartitions)
      .map { i => (i, i) }
      .mapPartitions { iter =>
        TaskContext.get().taskMetrics().testAccum.get.add(1)
        iter
      }
      .reduceByKey { (x, y) => x + y }
      .mapPartitions { iter =>
        TaskContext.get().taskMetrics().testAccum.get.add(10)
        iter
      }
      .repartition(numPartitions * 2)
      .mapPartitions { iter =>
        TaskContext.get().taskMetrics().testAccum.get.add(100)
        iter
      }
    // Register asserts in job completion callback to avoid flakiness
    listener.registerJobCompletionCallback { () =>
      // We ran 3 stages, and the accumulator values should be distinct
      val stageInfos = listener.getCompletedStageInfos
      assert(stageInfos.size === 3)
      val (firstStageAccum, secondStageAccum, thirdStageAccum) =
        (findTestAccum(stageInfos(0).accumulables.values),
          findTestAccum(stageInfos(1).accumulables.values),
          findTestAccum(stageInfos(2).accumulables.values))
      assert(firstStageAccum.value.get.asInstanceOf[Long] === numPartitions)
      assert(secondStageAccum.value.get.asInstanceOf[Long] === numPartitions * 10)
      assert(thirdStageAccum.value.get.asInstanceOf[Long] === numPartitions * 2 * 100)
    }
    rdd.count()
  }
  test("internal accumulators in resubmitted stages") {
    val listener = new SaveInfoListener
    val numPartitions = 10
    sc = new SparkContext("local", "test")
    sc.addSparkListener(listener)
    // Simulate fetch failures in order to trigger a stage retry. Here we run 1 job with
    // 2 stages. On the second stage, we trigger a fetch failure on the first stage attempt.
    // This should retry both stages in the scheduler. Note that we only want to fail the
    // first stage attempt because we want the stage to eventually succeed.
    val x = sc.parallelize(1 to 100, numPartitions)
      .mapPartitions { iter => TaskContext.get().taskMetrics().testAccum.get.add(1); iter }
      .groupBy(identity)
    val sid = x.dependencies.head.asInstanceOf[ShuffleDependency[_, _, _]].shuffleHandle.shuffleId
    val rdd = x.mapPartitionsWithIndex { case (i, iter) =>
      // Fail the first stage attempt. Here we use the task attempt ID to determine this.
      // This job runs 2 stages, and we're in the second stage. Therefore, any task attempt
      // ID that's < 2 * numPartitions belongs to the first attempt of this stage.
      val taskContext = TaskContext.get()
      val isFirstStageAttempt = taskContext.taskAttemptId() < numPartitions * 2L
      if (isFirstStageAttempt) {
        throw new FetchFailedException(
          SparkEnv.get.blockManager.blockManagerId,
          sid,
          taskContext.partitionId(),
          taskContext.partitionId(),
          taskContext.partitionId(),
          "simulated fetch failure")
      } else {
        iter
      }
    }
    // Register asserts in job completion callback to avoid flakiness
    listener.registerJobCompletionCallback { () =>
      val stageInfos = listener.getCompletedStageInfos
      assert(stageInfos.size === 4) // 1 shuffle map stage + 1 result stage, both are retried
      val mapStageId = stageInfos.head.stageId
      val mapStageInfo1stAttempt = stageInfos.head
      val mapStageInfo2ndAttempt = {
        stageInfos.tail.find(_.stageId == mapStageId).getOrElse {
          fail("expected two attempts of the same shuffle map stage.")
        }
      }
      val stageAccum1stAttempt = findTestAccum(mapStageInfo1stAttempt.accumulables.values)
      val stageAccum2ndAttempt = findTestAccum(mapStageInfo2ndAttempt.accumulables.values)
      // Both map stages should have succeeded, since the fetch failure happened in the
      // result stage, not the map stage. This means we should get the accumulator updates
      // from all partitions.
      assert(stageAccum1stAttempt.value.get.asInstanceOf[Long] === numPartitions)
      assert(stageAccum2ndAttempt.value.get.asInstanceOf[Long] === numPartitions)
      // Because this test resubmitted the map stage with all missing partitions, we should have
      // created a fresh set of internal accumulators in the 2nd stage attempt. Assert this is
      // the case by comparing the accumulator IDs between the two attempts.
      // Note: it would be good to also test the case where the map stage is resubmitted where
      // only a subset of the original partitions are missing. However, this scenario is very
      // difficult to construct without potentially introducing flakiness.
      assert(stageAccum1stAttempt.id != stageAccum2ndAttempt.id)
    }
    rdd.count()
    listener.awaitNextJobCompletion()
  }
  test("internal accumulators are registered for cleanups") {
    sc = new SparkContext("local", "test") {
      private val myCleaner = new SaveAccumContextCleaner(this)
      override def cleaner: Option[ContextCleaner] = Some(myCleaner)
    }
    assert(AccumulatorContext.numAccums == 0)
    sc.parallelize(1 to 100).map { i => (i, i) }.reduceByKey { _ + _ }.count()
    val numInternalAccums = TaskMetrics.empty.internalAccums.length
    // We ran 2 stages, so we should have 2 sets of internal accumulators, 1 for each stage
    assert(AccumulatorContext.numAccums === numInternalAccums * 2)
    val accumsRegistered = sc.cleaner match {
      case Some(cleaner: SaveAccumContextCleaner) => cleaner.accumsRegisteredForCleanup
      case _ => Seq.empty[Long]
    }
    // Make sure the same set of accumulators is registered for cleanup
    assert(accumsRegistered.size === numInternalAccums * 2)
    assert(accumsRegistered.toSet.size === AccumulatorContext.numAccums)
    accumsRegistered.foreach(id => assert(AccumulatorContext.get(id) != None))
  }
  /**
   * Return the accumulable info that matches the specified name.
   */
  private def findTestAccum(accums: Iterable[AccumulableInfo]): AccumulableInfo = {
    accums.find { a => a.name == Some(TEST_ACCUM) }.getOrElse {
      fail(s"unable to find internal accumulator called $TEST_ACCUM")
    }
  }
  /**
   * A special [[ContextCleaner]] that saves the IDs of the accumulators registered for cleanup.
   */
  private class SaveAccumContextCleaner(sc: SparkContext) extends
    ContextCleaner(sc, null) {
    private val accumsRegistered = new ArrayBuffer[Long]
    override def registerAccumulatorForCleanup(a: AccumulatorV2[_, _]): Unit = {
      accumsRegistered += a.id
      super.registerAccumulatorForCleanup(a)
    }
    def accumsRegisteredForCleanup: Seq[Long] = accumsRegistered.toSeq
  }
}
| maropu/spark | core/src/test/scala/org/apache/spark/InternalAccumulatorSuite.scala | Scala | apache-2.0 | 10,002 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.classification
import org.scalatest.Assertions._
import org.apache.spark.ml.attribute.NominalAttribute
import org.apache.spark.ml.classification.LogisticRegressionSuite._
import org.apache.spark.ml.feature.LabeledPoint
import org.apache.spark.ml.feature.StringIndexer
import org.apache.spark.ml.linalg.{Vector, Vectors}
import org.apache.spark.ml.param.{ParamMap, ParamsSuite}
import org.apache.spark.ml.util.{DefaultReadWriteTest, MetadataUtils, MLTest, MLTestingUtils}
import org.apache.spark.ml.util.TestingUtils._
import org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
import org.apache.spark.mllib.evaluation.MulticlassMetrics
import org.apache.spark.mllib.linalg.{Vectors => OldVectors}
import org.apache.spark.mllib.regression.{LabeledPoint => OldLabeledPoint}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Dataset
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.Metadata
class OneVsRestSuite extends MLTest with DefaultReadWriteTest {
import testImplicits._
@transient var dataset: Dataset[_] = _
@transient var rdd: RDD[LabeledPoint] = _
override def beforeAll(): Unit = {
super.beforeAll()
val nPoints = 1000
// The following coefficients and xMean/xVariance are computed from iris dataset with lambda=0.2
// As a result, we are drawing samples from probability distribution of an actual model.
val coefficients = Array(
-0.57997, 0.912083, -0.371077, -0.819866, 2.688191,
-0.16624, -0.84355, -0.048509, -0.301789, 4.170682)
val xMean = Array(5.843, 3.057, 3.758, 1.199)
val xVariance = Array(0.6856, 0.1899, 3.116, 0.581)
rdd = sc.parallelize(generateMultinomialLogisticInput(
coefficients, xMean, xVariance, true, nPoints, 42), 2)
dataset = rdd.toDF()
}
test("params") {
ParamsSuite.checkParams(new OneVsRest)
val lrModel = new LogisticRegressionModel("lr", Vectors.dense(0.0), 0.0)
val model = new OneVsRestModel("ovr", Metadata.empty, Array(lrModel))
ParamsSuite.checkParams(model)
}
test("one-vs-rest: default params") {
val numClasses = 3
val ova = new OneVsRest()
.setClassifier(new LogisticRegression)
assert(ova.getLabelCol === "label")
assert(ova.getPredictionCol === "prediction")
assert(ova.getRawPredictionCol === "rawPrediction")
val ovaModel = ova.fit(dataset)
MLTestingUtils.checkCopyAndUids(ova, ovaModel)
assert(ovaModel.numClasses === numClasses)
val transformedDataset = ovaModel.transform(dataset)
checkNominalOnDF(transformedDataset, "prediction", ovaModel.numClasses)
checkVectorSizeOnDF(transformedDataset, "rawPrediction", ovaModel.numClasses)
// check for label metadata in prediction col
val predictionColSchema = transformedDataset.schema(ovaModel.getPredictionCol)
assert(MetadataUtils.getNumClasses(predictionColSchema) === Some(3))
val lr = new LogisticRegressionWithLBFGS().setIntercept(true).setNumClasses(numClasses)
lr.optimizer.setRegParam(0.1).setNumIterations(100)
val model = lr.run(rdd.map(OldLabeledPoint.fromML))
val results = model.predict(rdd.map(p => OldVectors.fromML(p.features))).zip(rdd.map(_.label))
// determine the #confusion matrix in each class.
// bound how much error we allow compared to multinomial logistic regression.
val expectedMetrics = new MulticlassMetrics(results)
testTransformerByGlobalCheckFunc[(Double, Vector)](dataset.toDF(), ovaModel,
"prediction", "label") { rows =>
val ovaResults = rows.map { row => (row.getDouble(0), row.getDouble(1)) }
val ovaMetrics = new MulticlassMetrics(sc.makeRDD(ovaResults))
assert(expectedMetrics.confusionMatrix.asML ~== ovaMetrics.confusionMatrix.asML absTol 400)
}
}
test("one-vs-rest: tuning parallelism does not change output") {
val ovaPar1 = new OneVsRest()
.setClassifier(new LogisticRegression)
val ovaModelPar1 = ovaPar1.fit(dataset)
val transformedDatasetPar1 = ovaModelPar1.transform(dataset)
val ovaResultsPar1 = transformedDatasetPar1.select("prediction", "label").rdd.map {
row => (row.getDouble(0), row.getDouble(1))
}
val ovaPar2 = new OneVsRest()
.setClassifier(new LogisticRegression)
.setParallelism(2)
val ovaModelPar2 = ovaPar2.fit(dataset)
val transformedDatasetPar2 = ovaModelPar2.transform(dataset)
val ovaResultsPar2 = transformedDatasetPar2.select("prediction", "label").rdd.map {
row => (row.getDouble(0), row.getDouble(1))
}
val metricsPar1 = new MulticlassMetrics(ovaResultsPar1)
val metricsPar2 = new MulticlassMetrics(ovaResultsPar2)
assert(metricsPar1.confusionMatrix == metricsPar2.confusionMatrix)
ovaModelPar1.models.zip(ovaModelPar2.models).foreach {
case (lrModel1: LogisticRegressionModel, lrModel2: LogisticRegressionModel) =>
assert(lrModel1.coefficients ~== lrModel2.coefficients relTol 1E-3)
assert(lrModel1.intercept ~== lrModel2.intercept relTol 1E-3)
case other =>
fail("Loaded OneVsRestModel expected model of type LogisticRegressionModel " +
s"but found ${other.getClass.getName}")
}
}
test("one-vs-rest: pass label metadata correctly during train") {
val numClasses = 3
val ova = new OneVsRest()
ova.setClassifier(new MockLogisticRegression)
val labelMetadata = NominalAttribute.defaultAttr.withName("label").withNumValues(numClasses)
val labelWithMetadata = dataset("label").as("label", labelMetadata.toMetadata())
val features = dataset("features").as("features")
val datasetWithLabelMetadata = dataset.select(labelWithMetadata, features)
ova.fit(datasetWithLabelMetadata)
}
test("SPARK-8092: ensure label features and prediction cols are configurable") {
val labelIndexer = new StringIndexer()
.setInputCol("label")
.setOutputCol("indexed")
val indexedDataset = labelIndexer
.fit(dataset)
.transform(dataset)
.drop("label")
.withColumnRenamed("features", "f")
val ova = new OneVsRest()
ova.setClassifier(new LogisticRegression())
.setLabelCol(labelIndexer.getOutputCol)
.setFeaturesCol("f")
.setPredictionCol("p")
val ovaModel = ova.fit(indexedDataset)
val transformedDataset = ovaModel.transform(indexedDataset)
val outputFields = transformedDataset.schema.fieldNames.toSet
assert(outputFields.contains("p"))
}
test("SPARK-18625 : OneVsRestModel should support setFeaturesCol and setPredictionCol") {
val ova = new OneVsRest().setClassifier(new LogisticRegression)
val ovaModel = ova.fit(dataset)
val dataset2 = dataset.select(col("label").as("y"), col("features").as("fea"))
ovaModel.setFeaturesCol("fea")
ovaModel.setPredictionCol("pred")
ovaModel.setRawPredictionCol("")
val transformedDataset = ovaModel.transform(dataset2)
val outputFields = transformedDataset.schema.fieldNames.toSet
assert(outputFields === Set("y", "fea", "pred"))
}
test("SPARK-8049: OneVsRest shouldn't output temp columns") {
val logReg = new LogisticRegression()
.setMaxIter(1)
val ovr = new OneVsRest()
.setClassifier(logReg)
val output = ovr.fit(dataset).transform(dataset)
assert(output.schema.fieldNames.toSet
=== Set("label", "features", "prediction", "rawPrediction"))
}
test("SPARK-21306: OneVsRest should support setWeightCol") {
val dataset2 = dataset.withColumn("weight", lit(1))
// classifier inherits hasWeightCol
val ova = new OneVsRest().setWeightCol("weight").setClassifier(new LogisticRegression())
assert(ova.fit(dataset2) !== null)
// classifier doesn't inherit hasWeightCol
val ova2 = new OneVsRest().setWeightCol("weight").setClassifier(new DecisionTreeClassifier())
assert(ova2.fit(dataset2) !== null)
}
test("OneVsRest.copy and OneVsRestModel.copy") {
val lr = new LogisticRegression()
.setMaxIter(1)
val ovr = new OneVsRest()
withClue("copy with classifier unset should work") {
ovr.copy(ParamMap(lr.maxIter -> 10))
}
ovr.setClassifier(lr)
val ovr1 = ovr.copy(ParamMap(lr.maxIter -> 10))
require(ovr.getClassifier.getOrDefault(lr.maxIter) === 1, "copy should have no side-effects")
require(ovr1.getClassifier.getOrDefault(lr.maxIter) === 10,
"copy should handle extra classifier params")
val ovrModel = ovr1.fit(dataset).copy(ParamMap(lr.thresholds -> Array(0.9, 0.1)))
ovrModel.models.foreach { case m: LogisticRegressionModel =>
require(m.getThreshold === 0.1, "copy should handle extra model params")
}
}
test("read/write: OneVsRest") {
val lr = new LogisticRegression().setMaxIter(10).setRegParam(0.01)
val ova = new OneVsRest()
.setClassifier(lr)
.setLabelCol("myLabel")
.setFeaturesCol("myFeature")
.setPredictionCol("myPrediction")
val ova2 = testDefaultReadWrite(ova, testParams = false)
assert(ova.uid === ova2.uid)
assert(ova.getFeaturesCol === ova2.getFeaturesCol)
assert(ova.getLabelCol === ova2.getLabelCol)
assert(ova.getPredictionCol === ova2.getPredictionCol)
ova2.getClassifier match {
case lr2: LogisticRegression =>
assert(lr.uid === lr2.uid)
assert(lr.getMaxIter === lr2.getMaxIter)
assert(lr.getRegParam === lr2.getRegParam)
case other =>
fail("Loaded OneVsRest expected classifier of type LogisticRegression" +
s" but found ${other.getClass.getName}")
}
}
test("read/write: OneVsRestModel") {
def checkModelData(model: OneVsRestModel, model2: OneVsRestModel): Unit = {
assert(model.uid === model2.uid)
assert(model.getFeaturesCol === model2.getFeaturesCol)
assert(model.getLabelCol === model2.getLabelCol)
assert(model.getPredictionCol === model2.getPredictionCol)
val classifier = model.getClassifier.asInstanceOf[LogisticRegression]
model2.getClassifier match {
case lr2: LogisticRegression =>
assert(classifier.uid === lr2.uid)
assert(classifier.getMaxIter === lr2.getMaxIter)
assert(classifier.getRegParam === lr2.getRegParam)
case other =>
fail("Loaded OneVsRestModel expected classifier of type LogisticRegression" +
s" but found ${other.getClass.getName}")
}
assert(model.labelMetadata === model2.labelMetadata)
model.models.zip(model2.models).foreach {
case (lrModel1: LogisticRegressionModel, lrModel2: LogisticRegressionModel) =>
assert(lrModel1.uid === lrModel2.uid)
assert(lrModel1.coefficients === lrModel2.coefficients)
assert(lrModel1.intercept === lrModel2.intercept)
case other =>
fail(s"Loaded OneVsRestModel expected model of type LogisticRegressionModel" +
s" but found ${other.getClass.getName}")
}
}
val lr = new LogisticRegression().setMaxIter(10).setRegParam(0.01)
val ova = new OneVsRest().setClassifier(lr)
val ovaModel = ova.fit(dataset)
val newOvaModel = testDefaultReadWrite(ovaModel, testParams = false)
checkModelData(ovaModel, newOvaModel)
}
test("should ignore empty output cols") {
val lr = new LogisticRegression().setMaxIter(1)
val ovr = new OneVsRest().setClassifier(lr)
val ovrModel = ovr.fit(dataset)
val output1 = ovrModel.setPredictionCol("").setRawPredictionCol("")
.transform(dataset)
assert(output1.schema.fieldNames.toSet ===
Set("label", "features"))
val output2 = ovrModel.setPredictionCol("prediction").setRawPredictionCol("")
.transform(dataset)
assert(output2.schema.fieldNames.toSet ===
Set("label", "features", "prediction"))
val output3 = ovrModel.setPredictionCol("").setRawPredictionCol("rawPrediction")
.transform(dataset)
assert(output3.schema.fieldNames.toSet ===
Set("label", "features", "rawPrediction"))
val output4 = ovrModel.setPredictionCol("prediction").setRawPredictionCol("rawPrediction")
.transform(dataset)
assert(output4.schema.fieldNames.toSet ===
Set("label", "features", "prediction", "rawPrediction"))
}
test("should support all NumericType labels and not support other types") {
val ovr = new OneVsRest().setClassifier(new LogisticRegression().setMaxIter(1))
MLTestingUtils.checkNumericTypes[OneVsRestModel, OneVsRest](
ovr, spark) { (expected, actual) =>
val expectedModels = expected.models.map(m => m.asInstanceOf[LogisticRegressionModel])
val actualModels = actual.models.map(m => m.asInstanceOf[LogisticRegressionModel])
assert(expectedModels.length === actualModels.length)
expectedModels.zip(actualModels).foreach { case (e, a) =>
assert(e.intercept === a.intercept)
assert(e.coefficients.toArray === a.coefficients.toArray)
}
}
}
}
// Stub classifier that asserts, inside train(), that binary (numClasses == 2)
// label metadata was attached to each subproblem before delegating to the real trainer.
private class MockLogisticRegression(uid: String) extends LogisticRegression(uid) {
def this() = this("mockLogReg")
setMaxIter(1)
override protected[spark] def train(dataset: Dataset[_]): LogisticRegressionModel = {
val labelSchema = dataset.schema($(labelCol))
// check for label attribute propagation.
assert(MetadataUtils.getNumClasses(labelSchema).forall(_ == 2))
super.train(dataset)
}
}
| jkbradley/spark | mllib/src/test/scala/org/apache/spark/ml/classification/OneVsRestSuite.scala | Scala | apache-2.0 | 14,152 |
package org.locationtech.geomesa.plugin
import org.apache.wicket.behavior.SimpleAttributeModifier
import org.apache.wicket.markup.html.form.{Form, FormComponent}
import org.apache.wicket.markup.html.panel.Panel
import org.apache.wicket.model.{IModel, ResourceModel}
import org.geoserver.web.data.store.StoreEditPanel
import org.geoserver.web.data.store.panel.{CheckBoxParamPanel, ParamPanel, PasswordParamPanel, TextParamPanel}
import org.geoserver.web.util.MapModel
import org.geotools.data.DataAccessFactory.Param
/***********************************************************************
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
abstract class GeoMesaStoreEditPanel (componentId: String, storeEditForm: Form[_])
  extends StoreEditPanel(componentId, storeEditForm) {

  /** Resource-bundle lookup key for a parameter, scoped to the concrete panel class. */
  private def resourceKeyOf(param: Param): String = getClass.getSimpleName + "." + param.key

  /** Wicket model that reads/writes a single entry of the connection-parameter map. */
  private def paramModel(paramsModel: IModel[_], param: Param): IModel[_] =
    new MapModel(paramsModel, param.key).asInstanceOf[IModel[_]]

  /** Adds a plain text input for the given parameter. */
  def addTextPanel(paramsModel: IModel[_], param: Param): FormComponent[_] = {
    val key = resourceKeyOf(param)
    val panel = new TextParamPanel(param.key, paramModel(paramsModel, param),
      new ResourceModel(key, param.key), param.required)
    addPanel(panel, param, key)
  }

  /** Adds a masked password input for the given parameter. */
  def addPasswordPanel(paramsModel: IModel[_], param: Param): FormComponent[_] = {
    val key = resourceKeyOf(param)
    val panel = new PasswordParamPanel(param.key, paramModel(paramsModel, param),
      new ResourceModel(key, param.key), param.required)
    addPanel(panel, param, key)
  }

  /** Adds a checkbox input for the given (boolean-valued) parameter. */
  def addCheckBoxPanel(paramsModel: IModel[_], param: Param): FormComponent[_] = {
    val key = resourceKeyOf(param)
    val panel = new CheckBoxParamPanel(param.key, paramModel(paramsModel, param),
      new ResourceModel(key, param.key))
    addPanel(panel, param, key)
  }

  /** Registers a parameter panel, attaching a "title" tooltip resolved from the resource bundle. */
  def addPanel(paramPanel: Panel with ParamPanel, param: Param, resourceKey: String): FormComponent[_] = {
    paramPanel.getFormComponent.setType(classOf[String])
    val defaultTitle = String.valueOf(param.description)
    val titleModel = new ResourceModel(resourceKey + ".title", defaultTitle)
    val title = String.valueOf(titleModel.getObject)
    paramPanel.add(new SimpleAttributeModifier("title", title))
    add(paramPanel)
    paramPanel.getFormComponent
  }
}
| mcharles/geomesa | geomesa-plugin/src/main/scala/org/locationtech/geomesa/plugin/GeoMesaStoreEditPanel.scala | Scala | apache-2.0 | 2,919 |
/* Copyright 2009-2021 EPFL, Lausanne */
import stainless.lang._
import stainless.proof._
// Stainless verification example: a functional list, map, and an inductive proof
// of map fusion: map(xs, g compose f) == map(map(xs, f), g).
object Lists4 {
sealed abstract class List[T]
case class Cons[T](head: T, tail: List[T]) extends List[T]
case class Nil[T]() extends List[T]
// Structurally recursive map over the custom list type.
def map[F,T](list: List[F], f: F => T): List[T] = list match {
case Cons(head, tail) => Cons(f(head), map(tail, f))
case Nil() => Nil()
}
// Statement of the fusion property for one list and a pair of functions.
def map_lemma[A,B,C](list: List[A], f: A => B, g: B => C): Boolean = {
map(list, (x: A) => g(f(x))) == map(map(list, f), g)
}
// Proof by structural induction on the list; `.holds` asks Stainless to verify it,
// and `because` supplies the inductive hypothesis for the Cons case.
def map_lemma_induct[D,E,F](list: List[D], f: D => E, g: E => F): Boolean = {
map_lemma(list, f, g) because (list match {
case Nil() => true
case Cons(head, tail) => map_lemma_induct(tail, f, g)
})
}.holds
}
// vim: set ts=4 sw=4 et:
| epfl-lara/stainless | frontends/benchmarks/verification/valid/MicroTests/Lists4.scala | Scala | apache-2.0 | 786 |
import gh2011.models.Repository
import gh2013.models.{CommitRepository}
import net.liftweb.json._
import org.scalatest.{FlatSpec, Matchers}
// Parses two captured GitHub REST API repository payloads (api.github.com URLs)
// and checks that CommitRepository accepts both, i.e. returns Some(...).
class CommitRepositoryTest extends FlatSpec with Matchers
{
// Fixture: repository payload for tnm/rugged.
"A valid EventBody" must "be correctly parsed" in {
val json = parse(
"""
| {
|
| "events_url":"https://api.github.com/repos/tnm/rugged/events",
| "comments_url":"https://api.github.com/repos/tnm/rugged/comments{/number}",
| "full_name":"tnm/rugged",
| "open_issues_count":0,
| "hooks_url":"https://api.github.com/repos/tnm/rugged/hooks",
| "git_commits_url":"https://api.github.com/repos/tnm/rugged/git/commits{/sha}",
| "created_at":"2012-12-17T22:00:58Z",
| "git_refs_url":"https://api.github.com/repos/tnm/rugged/git/refs{/sha}",
| "git_url":"git://github.com/tnm/rugged.git",
| "size":248,
| "pushed_at":"2012-12-31T23:19:30Z",
| "url":"https://api.github.com/repos/tnm/rugged",
| "open_issues":0,
| "merges_url":"https://api.github.com/repos/tnm/rugged/merges",
| "statuses_url":"https://api.github.com/repos/tnm/rugged/statuses/{sha}",
| "id":7213068,
| "git_tags_url":"https://api.github.com/repos/tnm/rugged/git/tags{/sha}",
| "contributors_url":"https://api.github.com/repos/tnm/rugged/contributors",
| "has_downloads":true,
| "stargazers_url":"https://api.github.com/repos/tnm/rugged/stargazers",
| "subscription_url":"https://api.github.com/repos/tnm/rugged/subscription",
| "has_issues":false,
| "compare_url":"https://api.github.com/repos/tnm/rugged/compare/{base}...{head}",
| "forks_count":0,
| "clone_url":"https://github.com/tnm/rugged.git",
| "contents_url":"https://api.github.com/repos/tnm/rugged/contents/{+path}",
| "master_branch":"development",
| "mirror_url":null,
| "fork":true,
| "blobs_url":"https://api.github.com/repos/tnm/rugged/git/blobs{/sha}",
| "branches_url":"https://api.github.com/repos/tnm/rugged/branches{/branch}",
| "trees_url":"https://api.github.com/repos/tnm/rugged/git/trees{/sha}",
| "private":false,
| "watchers":1,
| "teams_url":"https://api.github.com/repos/tnm/rugged/teams",
| "downloads_url":"https://api.github.com/repos/tnm/rugged/downloads",
| "default_branch":"development",
| "forks_url":"https://api.github.com/repos/tnm/rugged/forks",
| "assignees_url":"https://api.github.com/repos/tnm/rugged/assignees{/user}",
| "svn_url":"https://github.com/tnm/rugged",
| "archive_url":"https://api.github.com/repos/tnm/rugged/{archive_format}{/ref}",
| "subscribers_url":"https://api.github.com/repos/tnm/rugged/subscribers",
| "commits_url":"https://api.github.com/repos/tnm/rugged/commits{/sha}",
| "issues_url":"https://api.github.com/repos/tnm/rugged/issues{/number}",
| "html_url":"https://github.com/tnm/rugged",
| "tags_url":"https://api.github.com/repos/tnm/rugged/tags{/tag}",
| "updated_at":"2012-12-31T23:19:33Z",
| "milestones_url":"https://api.github.com/repos/tnm/rugged/milestones{/number}",
| "watchers_count":1,
| "languages_url":"https://api.github.com/repos/tnm/rugged/languages",
| "notifications_url":"https://api.github.com/repos/tnm/rugged/notifications{?since,all,participating}",
| "ssh_url":"git@github.com:tnm/rugged.git",
| "name":"rugged",
| "description":"ruby bindings to libgit2",
| "labels_url":"https://api.github.com/repos/tnm/rugged/labels{/name}",
| "language":"C",
| "collaborators_url":"https://api.github.com/repos/tnm/rugged/collaborators{/collaborator}",
| "homepage":"",
| "keys_url":"https://api.github.com/repos/tnm/rugged/keys{/key_id}",
| "issue_events_url":"https://api.github.com/repos/tnm/rugged/issues/events{/number}",
| "forks":0,
| "pulls_url":"https://api.github.com/repos/tnm/rugged/pulls{/number}",
| "issue_comment_url":"https://api.github.com/repos/tnm/rugged/issues/comments/{number}",
| "owner":{
| "events_url":"https://api.github.com/users/tnm/events{/privacy}",
| "repos_url":"https://api.github.com/users/tnm/repos",
| "url":"https://api.github.com/users/tnm",
| "organizations_url":"https://api.github.com/users/tnm/orgs",
| "id":126905,
| "avatar_url":"https://secure.gravatar.com/avatar/803993dc11765d89aee137f29aa05d3a?d=https://a248.e.akamai.net/assets.github.com%2Fimages%2Fgravatars%2Fgravatar-user-420.png",
| "gravatar_id":"803993dc11765d89aee137f29aa05d3a",
| "type":"User",
| "received_events_url":"https://api.github.com/users/tnm/received_events",
| "subscriptions_url":"https://api.github.com/users/tnm/subscriptions",
| "starred_url":"https://api.github.com/users/tnm/starred{/owner}{/repo}",
| "login":"tnm",
| "followers_url":"https://api.github.com/users/tnm/followers",
| "following_url":"https://api.github.com/users/tnm/following",
| "gists_url":"https://api.github.com/users/tnm/gists{/gist_id}"
| },
| "has_wiki":false
|
|}
""".stripMargin)
CommitRepository(json) shouldBe 'defined
}
// Fixture: repository payload for apneadiving/Google-Maps-for-Rails
// (different key ordering than the first fixture).
"An other valid CommitRepository" must "be correctly parsed" in {
val json = parse(
"""
| {
| "fork":false,
| "contributors_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/contributors",
| "archive_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/{archive_format}{/ref}",
| "watchers_count":1095,
| "clone_url":"https://github.com/apneadiving/Google-Maps-for-Rails.git",
| "issue_events_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/issues/events{/number}",
| "contents_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/contents/{+path}",
| "teams_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/teams",
| "downloads_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/downloads",
| "collaborators_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/collaborators{/collaborator}",
| "updated_at":"2013-01-01T18:25:02Z",
| "milestones_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/milestones{/number}",
| "git_url":"git://github.com/apneadiving/Google-Maps-for-Rails.git",
| "git_commits_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/git/commits{/sha}",
| "forks":163,
| "subscription_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/subscription",
| "language":"JavaScript",
| "tags_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/tags{/tag}",
| "branches_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/branches{/branch}",
| "issue_comment_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/issues/comments/{number}",
| "mirror_url":null,
| "assignees_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/assignees{/user}",
| "issues_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/issues{/number}",
| "notifications_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/notifications{?since,all,participating}",
| "id":1367656,
| "open_issues_count":31,
| "labels_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/labels{/name}",
| "keys_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/keys{/key_id}",
| "forks_count":163,
| "private":false,
| "html_url":"https://github.com/apneadiving/Google-Maps-for-Rails",
| "open_issues":31,
| "subscribers_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/subscribers",
| "homepage":"https://rubygems.org/gems/gmaps4rails",
| "compare_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/compare/{base}...{head}",
| "trees_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/git/trees{/sha}",
| "name":"Google-Maps-for-Rails",
| "url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails",
| "merges_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/merges",
| "has_downloads":true,
| "git_tags_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/git/tags{/sha}",
| "comments_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/comments{/number}",
| "svn_url":"https://github.com/apneadiving/Google-Maps-for-Rails",
| "commits_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/commits{/sha}",
| "created_at":"2011-02-15T01:05:14Z",
| "statuses_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/statuses/{sha}",
| "owner":{
| "followers_url":"https://api.github.com/users/apneadiving/followers",
| "gravatar_id":"808bec1c640143bd7091888d9edfb2f2",
| "id":247583,
| "subscriptions_url":"https://api.github.com/users/apneadiving/subscriptions",
| "received_events_url":"https://api.github.com/users/apneadiving/received_events",
| "following_url":"https://api.github.com/users/apneadiving/following",
| "url":"https://api.github.com/users/apneadiving",
| "starred_url":"https://api.github.com/users/apneadiving/starred{/owner}{/repo}",
| "avatar_url":"https://secure.gravatar.com/avatar/808bec1c640143bd7091888d9edfb2f2?d=https://a248.e.akamai.net/assets.github.com%2Fimages%2Fgravatars%2Fgravatar-user-420.png",
| "login":"apneadiving",
| "events_url":"https://api.github.com/users/apneadiving/events{/privacy}",
| "organizations_url":"https://api.github.com/users/apneadiving/orgs",
| "gists_url":"https://api.github.com/users/apneadiving/gists{/gist_id}",
| "repos_url":"https://api.github.com/users/apneadiving/repos",
| "type":"User"
| },
| "size":200,
| "full_name":"apneadiving/Google-Maps-for-Rails",
| "has_wiki":true,
| "ssh_url":"git@github.com:apneadiving/Google-Maps-for-Rails.git",
| "git_refs_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/git/refs{/sha}",
| "hooks_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/hooks",
| "events_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/events",
| "description":"Enables easy display of items (taken from a Rails 3 model) with Google Map, OpenLayers, Bing or Mapquest. Geocoding + Directions included. Provides much options: markers customization, infowindows, auto-adjusted zoom, polylines, polygons, circles etc... See wiki for full description and examples.",
| "pushed_at":"2013-01-01T18:25:02Z",
| "stargazers_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/stargazers",
| "forks_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/forks",
| "has_issues":true,
| "languages_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/languages",
| "watchers":1095,
| "pulls_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/pulls{/number}",
| "blobs_url":"https://api.github.com/repos/apneadiving/Google-Maps-for-Rails/git/blobs{/sha}"
| }
""".stripMargin)
CommitRepository(json) shouldBe 'defined
}
}
| mgoeminne/github_etl | src/test/scala/gh/test/gh2013/model/CommitRepositoryTest.scala | Scala | mit | 14,027 |
package com.dominikgruber.fpinscala.chapter03
import org.scalatest._
// Exercise 3.3: List.setHead replaces the head of a list; on Nil it yields a
// single-element list.
class Exercise03Spec extends FlatSpec with Matchers {
"setHead" should "return List(1) if 1 gets set on Nil" in {
List.setHead(1, Nil) should be (List(1))
}
it should "return List(0, 2, 3) if 0 gets set on List(1, 2, 3)" in {
List.setHead(0, List(1, 2, 3)) should be (List(0, 2, 3))
}
}
} | TheDom/functional-programming-in-scala | src/test/scala/com/dominikgruber/fpinscala/chapter03/Exercise03Spec.scala | Scala | mit | 375 |
package net.softler.server
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.coding.{Deflate, Encoder, Gzip}
import akka.http.scaladsl.model.HttpMethods._
import akka.http.scaladsl.model.TransferEncodings.{deflate, gzip}
import akka.http.scaladsl.model.headers.`Access-Control-Allow-Methods`
import akka.http.scaladsl.model.{ContentTypes, HttpResponse, StatusCodes}
import akka.http.scaladsl.server.Route
import akka.stream.ActorMaterializer
import net.softler.marshalling.Models.User
import net.softler.marshalling.{JsonSupport, Models}
import scala.concurrent.{ExecutionContextExecutor, Future}
// In-process akka-http server exposing fixed endpoints for exercising an HTTP client.
// NOTE(review): `user` comes from the mixed-in Models trait — confirm its shape there.
trait HttpServer extends JsonSupport with Models {
// Localhost port to bind; supplied by the concrete test class.
def port: Int
import akka.http.scaladsl.server.Directives._
implicit val system: ActorSystem = ActorSystem("test-actor-system")
implicit val materializer: ActorMaterializer = ActorMaterializer()
implicit val executionContext: ExecutionContextExecutor = system.dispatcher
// Routes: /test (GET user), /error (500 with "Test Error"), /post (echo body as 201),
// /all (echo User body, any method), /delete, /head, /options (allowed-methods header),
// /encoded (gzip/deflate-compressed "Hello World").
lazy val routes: Route = path("test") {
pathEndOrSingleSlash {
get {
complete(user)
}
}
} ~ path("error") {
get {
complete(HttpResponse(StatusCodes.InternalServerError, entity = "Test Error"))
}
} ~ path("post") {
post {
entity(as[User]) { user =>
complete(StatusCodes.Created -> user)
}
}
} ~ path("all") {
entity(as[User]) { u =>
complete(u)
}
} ~ path("delete") {
delete {
complete(user)
}
} ~ path("head") {
head {
complete(StatusCodes.OK)
}
} ~ path("options") {
options {
complete(
HttpResponse(200).withHeaders(
`Access-Control-Allow-Methods`(OPTIONS, POST, PUT, GET, DELETE, HEAD, PATCH)
))
}
} ~ path("encoded") {
encodeResponseWith(Gzip, Deflate) {
complete("Hello World")
}
}
// Binding starts eagerly as soon as the trait is mixed in and initialized.
val server: Future[Http.ServerBinding] = Http().bindAndHandle(routes, "localhost", port)
}
| Freshwood/akka-http-rest-client | client/akka-http/src/test/scala/net/softler/server/HttpServer.scala | Scala | mit | 1,940 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.scheduler
import org.apache.spark.Logging
import scala.collection.mutable.{SynchronizedBuffer, ArrayBuffer}
import java.util.concurrent.LinkedBlockingQueue
/**
 * Asynchronously passes StreamingListenerEvents to registered StreamingListeners.
 *
 * Events are appended to a bounded queue by [[post]] (potentially from multiple
 * threads) and drained by a single daemon thread that invokes listener callbacks.
 */
private[spark] class StreamingListenerBus() extends Logging {
  private val listeners = new ArrayBuffer[StreamingListener]() with SynchronizedBuffer[StreamingListener]

  /* Cap the capacity of the SparkListenerEvent queue so we get an explicit error (rather than
   * an OOM exception) if it's perpetually being added to more quickly than it's being drained. */
  private val EVENT_QUEUE_CAPACITY = 10000
  private val eventQueue = new LinkedBlockingQueue[StreamingListenerEvent](EVENT_QUEUE_CAPACITY)

  // Ensures the queue-full error is logged only once. @volatile because post() may be
  // invoked from multiple threads; without it the flag update might not be visible
  // across threads and the error could be logged repeatedly.
  @volatile private var queueFullErrorMessageLogged = false

  // Daemon thread that drains the queue and dispatches events until it sees the
  // StreamingListenerShutdown sentinel.
  val listenerThread = new Thread("StreamingListenerBus") {
    setDaemon(true)
    override def run() {
      while (true) {
        val event = eventQueue.take
        event match {
          case batchStarted: StreamingListenerBatchStarted =>
            listeners.foreach(_.onBatchStarted(batchStarted))
          case batchCompleted: StreamingListenerBatchCompleted =>
            listeners.foreach(_.onBatchCompleted(batchCompleted))
          case StreamingListenerShutdown =>
            // Get out of the while loop and shutdown the daemon thread
            return
          case _ =>
        }
      }
    }
  }

  /** Starts the dispatching thread. Must be called before events are expected to flow. */
  def start() {
    listenerThread.start()
  }

  /** Registers a listener; the backing buffer is synchronized, so this is thread-safe. */
  def addListener(listener: StreamingListener) {
    listeners += listener
  }

  /** Enqueues an event without blocking; drops it (logging an error once) if the queue is full. */
  def post(event: StreamingListenerEvent) {
    val eventAdded = eventQueue.offer(event)
    if (!eventAdded && !queueFullErrorMessageLogged) {
      logError("Dropping StreamingListenerEvent because no remaining room in event queue. " +
        "This likely means one of the StreamingListeners is too slow and cannot keep up with the " +
        "rate at which events are being started by the scheduler.")
      queueFullErrorMessageLogged = true
    }
  }

  /**
   * Waits until there are no more events in the queue, or until the specified time has elapsed.
   * Used for testing only. Returns true if the queue has emptied and false is the specified time
   * elapsed before the queue emptied.
   */
  def waitUntilEmpty(timeoutMillis: Int): Boolean = {
    val finishTime = System.currentTimeMillis + timeoutMillis
    while (!eventQueue.isEmpty) {
      if (System.currentTimeMillis > finishTime) {
        return false
      }
      /* Sleep rather than using wait/notify, because this is used only for testing and wait/notify
       * add overhead in the general case. */
      Thread.sleep(10)
    }
    true
  }

  /** Posts the shutdown sentinel; the listener thread exits after draining earlier events. */
  def stop(): Unit = post(StreamingListenerShutdown)
}
| dotunolafunmiloye/spark | streaming/src/main/scala/org/apache/spark/streaming/scheduler/StreamingListenerBus.scala | Scala | apache-2.0 | 3,567 |
package benchmarks
import lift.arithmetic.SizeVar
import ir._
import ir.ast._
import opencl.ir._
import opencl.ir.pattern._
@deprecated("Uses an old benchmark infrastructure", "")
class SumAbsoluteValues(override val name: String,
override val defaultInputSizes: Seq[Int],
override val delta: Float,
override val f: Seq[(String, Array[Lambda])]) extends DeprecatedBenchmark(name, defaultInputSizes, f, delta) {
override def runScala(inputs: Any*): Array[Float] = {
Array(inputs(0).asInstanceOf[Array[Float]].sum)
}
override def generateInputs(): Seq[Any] = {
val inputSize = inputSizes().head
val inputData = Array.fill(inputSize)(util.Random.nextInt(2).toFloat)
Seq(inputData)
}
}
object SumAbsoluteValues {
// Hand-derived Lift expressions for the reduction, one family per vendor-tuned
// reference kernel. Each `_`-suffixed val is the same expression rewritten in
// pipeline notation (:>> / >>>) instead of function composition (o).
val intelDerivedNoWarp1 = fun(ArrayTypeWSWC(Float, SizeVar("N")), (in) => {
Join() o MapWrg(
asScalar() o Join() o MapLcl(
toGlobal(MapSeq(id.vectorize(4))) o ReduceSeq(absAndSumUp.vectorize(4), Value(0.0f).vectorize(4))
) o Split(8192) o asVector(4)
) o Split(32768) $ in
})
val intelDerivedNoWarp1_ = fun(ArrayTypeWSWC(Float, SizeVar("N")), (in) => {
in :>>
Split(32768) :>>
MapWrg(
asVector(4) >>>
Split(8192) >>>
MapLcl(
ReduceSeq(absAndSumUp.vectorize(4), Value(0.0f).vectorize(4)) >>>
toGlobal(MapSeq(id.vectorize(4)))
) >>>
Join() >>>
asScalar()
) :>>
Join()
})
// Second-stage reduction of the per-group partial sums.
val intelDerived2 = fun(ArrayTypeWSWC(Float, SizeVar("N")), (in) => {
Join() o MapWrg(
Join() o MapLcl(
toGlobal(MapSeq(id)) o ReduceSeq(add, 0.0f)
) o Split(2048)
) o Split(2048) $ in
})
val intelDerived2_ = fun(ArrayTypeWSWC(Float, SizeVar("N")), (in) => {
in :>>
Split(2048) :>>
MapWrg(
Split(2048) >>>
MapLcl(
ReduceSeq(add, 0.0f) >>>
toGlobal(MapSeq(id))
) >>>
Join()
) :>>
Join()
})
val nvidiaDerived1 = fun(ArrayTypeWSWC(Float, SizeVar("N")), (in) => {
// the original derived one does not generate correct code ...
Join() o MapWrg( Join() o
MapLcl(toGlobal(MapSeq(id)) o ReduceSeq(add, 0.0f)) o Split(2048) o ReorderStride(128)
// toGlobal(MapLcl(Iterate(7)(MapSeq(id) o ReduceSeq(sumUp, 0.0f)) o ReduceSeq(sumUp, 0.0f))) o ReorderStride()
) o Split(2048*128) $ in
})
val nvidiaDerived1_ = fun(ArrayTypeWSWC(Float, SizeVar("N")), (in) => {
in :>>
Split(2048*128) :>>
MapWrg(
ReorderStride(128) >>>
Split(2048) >>>
MapLcl(
ReduceSeq(add, 0.0f) >>>
toGlobal(MapSeq(id))
) >>>
Join()
) :>>
Join()
})
// Second stage with an iterative tree reduction in local memory (Iterate(6) halves
// the element count per step via Split(2)).
val amdNvidiaDerived2 = fun(ArrayTypeWSWC(Float, SizeVar("N")), (in) => {
Join() o MapWrg(
Join() o toGlobal(MapLcl(MapSeq(id))) o Split(1) o
Iterate(6)( Join() o MapLcl(toLocal(MapSeq(id)) o ReduceSeq(add, 0.0f)) o Split(2) ) o
Join() o toLocal(MapLcl(toLocal(MapSeq(id)) o ReduceSeq(add, 0.0f))) o Split(128)
) o Split(8192) $ in
})
val amdNvidiaDerived2_ = fun(ArrayTypeWSWC(Float, SizeVar("N")), (in) => {
in :>>
Split(8192) :>>
MapWrg(
Split(128) >>>
toLocal(MapLcl(
ReduceSeq(add, 0.0f) >>>
toLocal(MapSeq(id))
)) >>>
Join() >>>
Iterate(6)(
Split(2) >>>
MapLcl(
ReduceSeq(add, 0.0f) >>>
toLocal(MapSeq(id))
) >>>
Join()
) >>>
Split(1) >>>
toGlobal(MapLcl(MapSeq(id))) >>>
Join()
) :>>
Join()
})
val amdDerived1 = fun(ArrayTypeWSWC(Float, SizeVar("N")), (in) => {
Join() o MapWrg(
asScalar() o Join() o
MapLcl(toGlobal(MapSeq(id.vectorize(2))) o ReduceSeq(add.vectorize(2), Value(0.0f).vectorize(2)))
o Split(2048) o ReorderStride(64) o asVector(2)
) o Split(4096*128) $ in
})
val amdDerived1_ = fun(ArrayTypeWSWC(Float, SizeVar("N")), (in) => {
in :>>
Split(4096*128) :>>
MapWrg(
asVector(2) >>>
ReorderStride(64) >>>
Split(2048) >>>
MapLcl(
ReduceSeq(add.vectorize(2), Value(0.0f).vectorize(2)) >>>
toGlobal(MapSeq(id.vectorize(2)))
) >>>
Join() >>>
asScalar()
) :>>
Join()
})
// Wires the three kernel families (first stage + matching second stage) into the harness.
def apply() = new SumAbsoluteValues("Sum of absolute values",
Seq(16777216),
0.001f,
Seq(("INTEL_DERIVED_NO_WARP", Array[Lambda](intelDerivedNoWarp1, intelDerived2)),
("NVIDIA_DERIVED", Array[Lambda](nvidiaDerived1, amdNvidiaDerived2)),
("AMD_DERIVED", Array[Lambda](amdDerived1, amdNvidiaDerived2))))
def main(args: Array[String]): Unit = {
SumAbsoluteValues().run(args)
}
}
| lift-project/lift | src/main/benchmarks/SumAbsoluteValues.scala | Scala | mit | 4,680 |
package net.ssanj.dabble
import scalaz._
/** Process exit status carried by an action's outcome; `code` is the value handed back to the OS. */
sealed trait ExitCode { val code: Int }
/** The action completed successfully (conventional exit code 0). */
case object SuccessfulAction extends ExitCode { val code = 0}
/** The action failed (non-zero exit code). */
case object UnsuccessfulAction extends ExitCode { val code = -1 }
//TODO: Replace with ER2
//TODO: Make this a list of messages
/** Legacy result type: an optional message plus a raw integer exit code. */
case class ExecutionResult(message: Option[String], code: Int)
//type X[F, W, S] ValidationNel[F, SuccessResult[W, S]]
//type ExecutionResult[A] = X[String, String, A]
//String \\/ A => X[String, SuccessResult[W, A]]
/** Successor to [[ExecutionResult]]: the exit code is the typed [[ExitCode]] ADT instead of a bare Int. */
case class ExecutionResult2(message: Option[String], code: ExitCode)
object ExecutionResult2 {
  /** Builds a message-less result carrying only the given exit code. */
  def withResult(ec: ExitCode): ExecutionResult2 = ExecutionResult2(None, ec)
}
/** Outcome of running dabble: either a success (possibly with warnings) or a failure. */
sealed trait DabbleResult extends Product with Serializable
/** Successful run; `warnings` may be empty. */
final case class DabbleSuccess(warnings: Seq[String]) extends DabbleResult
/** Failed run; `failures` is non-empty by construction (scalaz NonEmptyList). */
final case class DabbleFailure(failures: NonEmptyList[String]) extends DabbleResult
//We could encode the errors in the type system instead of using Strings.
//sealed trait DabbleError
//sealed trait IoErrorType
//final case class FileNotFoundError(filename: String, message: String) extends IoErrorType
//final case class ReadError(filename: String, message: String, e: Option[Throwable]) extends IoErrorType
//final case class WriteError(filename: String, content: Seq[String], message: String, e: Option[Throwable]) extends IoErrorType
//final case class CallError(executable: String, arguments: String, message: String, e: Option[Throwable]) extends IoErrorType
//final case class IoError(error: IoErrorType) extends DabbleError
object DabbleResult {
  /** Builds a successful result carrying the given warnings. */
  def dabbleSuccess(warnings: Seq[String]): DabbleResult = DabbleSuccess(warnings)
  // NOTE(review): a val and a def share the name `dabbleSuccess` here; confirm the
  // project's scalac version accepts this overload — consider renaming the val otherwise.
  /** A successful result with no warnings. */
  val dabbleSuccess: DabbleResult = DabbleSuccess(Seq.empty)
  /** Builds a failed result from one mandatory failure message plus any further messages. */
  def dabbleFailure(failure: String, others: String *): DabbleResult =
    DabbleFailure(NonEmptyList(failure, others:_*))
}
| ssanj/dabble | src/main/scala/net/ssanj/dabble/Models.scala | Scala | mit | 1,829 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.crossdata.kryo
import com.esotericsoftware.kryo.Kryo
import org.apache.spark.serializer.KryoRegistrator
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
import org.apache.spark.sql.types._
class CrossdataRegistrator extends KryoRegistrator{

  /**
   * Registers the Spark SQL / Catalyst types Crossdata serialises with Kryo.
   * The registration order matches the original hand-written list so the
   * class ids Kryo assigns stay stable.
   */
  override def registerClasses(kryo: Kryo): Unit = {
    val classesToRegister: Seq[Class[_]] = Seq(
      Nil.getClass,
      StringType.getClass,
      DoubleType.getClass,
      IntegerType.getClass,
      BooleanType.getClass,
      LongType.getClass,
      classOf[ArrayType],
      classOf[StructType],
      classOf[StructField],
      classOf[Metadata],
      classOf[GenericRowWithSchema],
      classOf[Array[Object]],
      classOf[Array[Row]],
      classOf[scala.collection.immutable.Map$EmptyMap$],
      classOf[scala.collection.immutable.$colon$colon[_]],
      classOf[Array[org.apache.spark.sql.types.StructField]]
    )
    classesToRegister.foreach(c => kryo.register(c))
  }
}
| darroyocazorla/crossdata | server/src/main/scala/com/stratio/crossdata/kryo/CrossdataRegistrator.scala | Scala | apache-2.0 | 1,720 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import scala.collection.mutable.ArrayBuffer
import org.apache.commons.lang3.StringUtils
import org.apache.hadoop.fs.{BlockLocation, FileStatus, LocatedFileStatus, Path}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.{InternalRow, TableIdentifier}
import org.apache.spark.sql.catalyst.catalog.BucketSpec
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.physical.{HashPartitioning, Partitioning, UnknownPartitioning}
import org.apache.spark.sql.execution.datasources._
import org.apache.spark.sql.execution.datasources.parquet.{ParquetFileFormat => ParquetSource}
import org.apache.spark.sql.execution.metric.SQLMetrics
import org.apache.spark.sql.sources.{BaseRelation, Filter}
import org.apache.spark.sql.types.StructType
import org.apache.spark.util.Utils
import org.apache.spark.util.collection.BitSet
/**
 * Base physical operator for data-source scans. Provides the common node
 * naming, metadata rendering and string redaction shared by the row-based
 * and file-based scan nodes below.
 */
trait DataSourceScanExec extends LeafExecNode with CodegenSupport {
  val relation: BaseRelation
  val tableIdentifier: Option[TableIdentifier]
  protected val nodeNamePrefix: String = ""
  override val nodeName: String = {
    s"Scan $relation ${tableIdentifier.map(_.unquotedString).getOrElse("")}"
  }
  // Metadata that describes more details of this scan.
  protected def metadata: Map[String, String]
  override def simpleString: String = {
    // Each metadata value is redacted and truncated to 100 chars for display.
    val metadataEntries = metadata.toSeq.sorted.map {
      case (key, value) =>
        key + ": " + StringUtils.abbreviate(redact(value), 100)
    }
    val metadataStr = Utils.truncatedString(metadataEntries, " ", ", ", "")
    s"$nodeNamePrefix$nodeName${Utils.truncatedString(output, "[", ",", "]")}$metadataStr"
  }
  override def verboseString: String = redact(super.verboseString)
  override def treeString(verbose: Boolean, addSuffix: Boolean): String = {
    redact(super.treeString(verbose, addSuffix))
  }
  /**
   * Shorthand for calling redactString() without specifying redacting rules
   */
  private def redact(text: String): String = {
    Utils.redact(sqlContext.sessionState.conf.stringRedactionPattern, text)
  }
}
/** Physical plan node for scanning data from a relation. */
case class RowDataSourceScanExec(
    fullOutput: Seq[Attribute],
    requiredColumnsIndex: Seq[Int],
    filters: Set[Filter],
    handledFilters: Set[Filter],
    rdd: RDD[InternalRow],
    @transient relation: BaseRelation,
    override val tableIdentifier: Option[TableIdentifier])
  extends DataSourceScanExec {
  // Output is the projection of the full relation output onto the required column indices.
  def output: Seq[Attribute] = requiredColumnsIndex.map(fullOutput)
  override lazy val metrics =
    Map("numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))
  protected override def doExecute(): RDD[InternalRow] = {
    val numOutputRows = longMetric("numOutputRows")
    rdd.mapPartitionsWithIndexInternal { (index, iter) =>
      // Convert incoming rows to UnsafeRow while counting them.
      val proj = UnsafeProjection.create(schema)
      proj.initialize(index)
      iter.map( r => {
        numOutputRows += 1
        proj(r)
      })
    }
  }
  override def inputRDDs(): Seq[RDD[InternalRow]] = {
    rdd :: Nil
  }
  // Whole-stage codegen: emit a loop that reads rows from the single input
  // iterator, bumps the metric and hands each row to the downstream consumer.
  override protected def doProduce(ctx: CodegenContext): String = {
    val numOutputRows = metricTerm(ctx, "numOutputRows")
    // PhysicalRDD always just has one input
    val input = ctx.addMutableState("scala.collection.Iterator", "input", v => s"$v = inputs[0];")
    val exprRows = output.zipWithIndex.map{ case (a, i) =>
      BoundReference(i, a.dataType, a.nullable)
    }
    val row = ctx.freshName("row")
    ctx.INPUT_ROW = row
    ctx.currentVars = null
    val columnsRowInput = exprRows.map(_.genCode(ctx))
    s"""
       |while ($input.hasNext()) {
       |  InternalRow $row = (InternalRow) $input.next();
       |  $numOutputRows.add(1);
       |  ${consume(ctx, columnsRowInput).trim}
       |  if (shouldStop()) return;
       |}
     """.stripMargin
  }
  override val metadata: Map[String, String] = {
    // Filters pushed down to (and handled by) the source are marked with a leading '*'.
    val markedFilters = for (filter <- filters) yield {
      if (handledFilters.contains(filter)) s"*$filter" else s"$filter"
    }
    Map(
      "ReadSchema" -> output.toStructType.catalogString,
      "PushedFilters" -> markedFilters.mkString("[", ", ", "]"))
  }
  // Don't care about `rdd` and `tableIdentifier` when canonicalizing.
  override def doCanonicalize(): SparkPlan =
    copy(
      fullOutput.map(QueryPlan.normalizeExprId(_, fullOutput)),
      rdd = null,
      tableIdentifier = None)
}
/**
 * Physical plan node for scanning data from HadoopFsRelations.
 *
 * @param relation The file-based relation to scan.
 * @param output Output attributes of the scan, including data attributes and partition attributes.
 * @param requiredSchema Required schema of the underlying relation, excluding partition columns.
 * @param partitionFilters Predicates to use for partition pruning.
 * @param optionalBucketSet Bucket ids for bucket pruning
 * @param dataFilters Filters on non-partition columns.
 * @param tableIdentifier identifier for the table in the metastore.
 */
case class FileSourceScanExec(
    @transient relation: HadoopFsRelation,
    output: Seq[Attribute],
    requiredSchema: StructType,
    partitionFilters: Seq[Expression],
    optionalBucketSet: Option[BitSet],
    dataFilters: Seq[Expression],
    override val tableIdentifier: Option[TableIdentifier])
  extends DataSourceScanExec with ColumnarBatchScan {
  // Note that some vals referring the file-based relation are lazy intentionally
  // so that this plan can be canonicalized on executor side too. See SPARK-23731.
  override lazy val supportsBatch: Boolean = {
    relation.fileFormat.supportBatch(relation.sparkSession, schema)
  }
  // Parquet's vectorized reader yields UnsafeRow-incompatible rows, so a
  // conversion step is needed when it is enabled.
  private lazy val needsUnsafeRowConversion: Boolean = {
    if (relation.fileFormat.isInstanceOf[ParquetSource]) {
      SparkSession.getActiveSession.get.sessionState.conf.parquetVectorizedReaderEnabled
    } else {
      false
    }
  }
  override def vectorTypes: Option[Seq[String]] =
    relation.fileFormat.vectorTypes(
      requiredSchema = requiredSchema,
      partitionSchema = relation.partitionSchema,
      relation.sparkSession.sessionState.conf)
  // Lists the partitions/files surviving partition pruning, recording the
  // file count and metadata-listing time as driver-side metrics.
  @transient private lazy val selectedPartitions: Seq[PartitionDirectory] = {
    val optimizerMetadataTimeNs = relation.location.metadataOpsTimeNs.getOrElse(0L)
    val startTime = System.nanoTime()
    val ret = relation.location.listFiles(partitionFilters, dataFilters)
    val timeTakenMs = ((System.nanoTime() - startTime) + optimizerMetadataTimeNs) / 1000 / 1000
    metrics("numFiles").add(ret.map(_.files.size.toLong).sum)
    metrics("metadataTime").add(timeTakenMs)
    val executionId = sparkContext.getLocalProperty(SQLExecution.EXECUTION_ID_KEY)
    SQLMetrics.postDriverMetricUpdates(sparkContext, executionId,
      metrics("numFiles") :: metrics("metadataTime") :: Nil)
    ret
  }
  override lazy val (outputPartitioning, outputOrdering): (Partitioning, Seq[SortOrder]) = {
    val bucketSpec = if (relation.sparkSession.sessionState.conf.bucketingEnabled) {
      relation.bucketSpec
    } else {
      None
    }
    bucketSpec match {
      case Some(spec) =>
        // For bucketed columns:
        // -----------------------
        // `HashPartitioning` would be used only when:
        // 1. ALL the bucketing columns are being read from the table
        //
        // For sorted columns:
        // ---------------------
        // Sort ordering should be used when ALL these criteria's match:
        // 1. `HashPartitioning` is being used
        // 2. A prefix (or all) of the sort columns are being read from the table.
        //
        // Sort ordering would be over the prefix subset of `sort columns` being read
        // from the table.
        // eg.
        // Assume (col0, col2, col3) are the columns read from the table
        // If sort columns are (col0, col1), then sort ordering would be considered as (col0)
        // If sort columns are (col1, col0), then sort ordering would be empty as per rule #2
        // above
        def toAttribute(colName: String): Option[Attribute] =
          output.find(_.name == colName)
        val bucketColumns = spec.bucketColumnNames.flatMap(n => toAttribute(n))
        if (bucketColumns.size == spec.bucketColumnNames.size) {
          val partitioning = HashPartitioning(bucketColumns, spec.numBuckets)
          val sortColumns =
            spec.sortColumnNames.map(x => toAttribute(x)).takeWhile(x => x.isDefined).map(_.get)
          val sortOrder = if (sortColumns.nonEmpty) {
            // In case of bucketing, its possible to have multiple files belonging to the
            // same bucket in a given relation. Each of these files are locally sorted
            // but those files combined together are not globally sorted. Given that,
            // the RDD partition will not be sorted even if the relation has sort columns set
            // Current solution is to check if all the buckets have a single file in it
            val files = selectedPartitions.flatMap(partition => partition.files)
            val bucketToFilesGrouping =
              files.map(_.getPath.getName).groupBy(file => BucketingUtils.getBucketId(file))
            val singleFilePartitions = bucketToFilesGrouping.forall(p => p._2.length <= 1)
            if (singleFilePartitions) {
              // TODO Currently Spark does not support writing columns sorting in descending order
              // so using Ascending order. This can be fixed in future
              sortColumns.map(attribute => SortOrder(attribute, Ascending))
            } else {
              Nil
            }
          } else {
            Nil
          }
          (partitioning, sortOrder)
        } else {
          (UnknownPartitioning(0), Nil)
        }
      case _ =>
        (UnknownPartitioning(0), Nil)
    }
  }
  // Data filters translated into source-level Filter objects (untranslatable
  // predicates are dropped here and evaluated after the scan).
  @transient
  private val pushedDownFilters = dataFilters.flatMap(DataSourceStrategy.translateFilter)
  logInfo(s"Pushed Filters: ${pushedDownFilters.mkString(",")}")
  override lazy val metadata: Map[String, String] = {
    def seqToString(seq: Seq[Any]) = seq.mkString("[", ", ", "]")
    val location = relation.location
    val locationDesc =
      location.getClass.getSimpleName + seqToString(location.rootPaths)
    val metadata =
      Map(
        "Format" -> relation.fileFormat.toString,
        "ReadSchema" -> requiredSchema.catalogString,
        "Batched" -> supportsBatch.toString,
        "PartitionFilters" -> seqToString(partitionFilters),
        "PushedFilters" -> seqToString(pushedDownFilters),
        "DataFilters" -> seqToString(dataFilters),
        "Location" -> locationDesc)
    // Only partitioned relations report a partition count.
    val withOptPartitionCount =
      relation.partitionSchemaOption.map { _ =>
        metadata + ("PartitionCount" -> selectedPartitions.size.toString)
      } getOrElse {
        metadata
      }
    // Bucketed relations additionally report how many buckets survived pruning.
    val withSelectedBucketsCount = relation.bucketSpec.map { spec =>
      val numSelectedBuckets = optionalBucketSet.map { b =>
        b.cardinality()
      } getOrElse {
        spec.numBuckets
      }
      withOptPartitionCount + ("SelectedBucketsCount" ->
        s"$numSelectedBuckets out of ${spec.numBuckets}")
    } getOrElse {
      withOptPartitionCount
    }
    withSelectedBucketsCount
  }
  // Builds the reader function from the file format and dispatches to the
  // bucketed or non-bucketed RDD construction.
  private lazy val inputRDD: RDD[InternalRow] = {
    val readFile: (PartitionedFile) => Iterator[InternalRow] =
      relation.fileFormat.buildReaderWithPartitionValues(
        sparkSession = relation.sparkSession,
        dataSchema = relation.dataSchema,
        partitionSchema = relation.partitionSchema,
        requiredSchema = requiredSchema,
        filters = pushedDownFilters,
        options = relation.options,
        hadoopConf = relation.sparkSession.sessionState.newHadoopConfWithOptions(relation.options))
    relation.bucketSpec match {
      case Some(bucketing) if relation.sparkSession.sessionState.conf.bucketingEnabled =>
        createBucketedReadRDD(bucketing, readFile, selectedPartitions, relation)
      case _ =>
        createNonBucketedReadRDD(readFile, selectedPartitions, relation)
    }
  }
  override def inputRDDs(): Seq[RDD[InternalRow]] = {
    inputRDD :: Nil
  }
  override lazy val metrics =
    Map("numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"),
      "numFiles" -> SQLMetrics.createMetric(sparkContext, "number of files"),
      "metadataTime" -> SQLMetrics.createMetric(sparkContext, "metadata time (ms)"),
      "scanTime" -> SQLMetrics.createTimingMetric(sparkContext, "scan time"))
  protected override def doExecute(): RDD[InternalRow] = {
    if (supportsBatch) {
      // in the case of fallback, this batched scan should never fail because of:
      // 1) only primitive types are supported
      // 2) the number of columns should be smaller than spark.sql.codegen.maxFields
      WholeStageCodegenExec(this)(codegenStageId = 0).execute()
    } else {
      val numOutputRows = longMetric("numOutputRows")
      if (needsUnsafeRowConversion) {
        inputRDD.mapPartitionsWithIndexInternal { (index, iter) =>
          val proj = UnsafeProjection.create(schema)
          proj.initialize(index)
          iter.map( r => {
            numOutputRows += 1
            proj(r)
          })
        }
      } else {
        inputRDD.map { r =>
          numOutputRows += 1
          r
        }
      }
    }
  }
  override val nodeNamePrefix: String = "File"
  /**
   * Create an RDD for bucketed reads.
   * The non-bucketed variant of this function is [[createNonBucketedReadRDD]].
   *
   * The algorithm is pretty simple: each RDD partition being returned should include all the files
   * with the same bucket id from all the given Hive partitions.
   *
   * @param bucketSpec the bucketing spec.
   * @param readFile a function to read each (part of a) file.
   * @param selectedPartitions Hive-style partition that are part of the read.
   * @param fsRelation [[HadoopFsRelation]] associated with the read.
   */
  private def createBucketedReadRDD(
      bucketSpec: BucketSpec,
      readFile: (PartitionedFile) => Iterator[InternalRow],
      selectedPartitions: Seq[PartitionDirectory],
      fsRelation: HadoopFsRelation): RDD[InternalRow] = {
    logInfo(s"Planning with ${bucketSpec.numBuckets} buckets")
    // Bucket id is recovered from each file's name; files are never split here.
    val filesGroupedToBuckets =
      selectedPartitions.flatMap { p =>
        p.files.map { f =>
          val hosts = getBlockHosts(getBlockLocations(f), 0, f.getLen)
          PartitionedFile(p.values, f.getPath.toUri.toString, 0, f.getLen, hosts)
        }
      }.groupBy { f =>
        BucketingUtils
          .getBucketId(new Path(f.filePath).getName)
          .getOrElse(sys.error(s"Invalid bucket file ${f.filePath}"))
      }
    // Apply bucket pruning when the optimizer provided a bucket BitSet.
    val prunedFilesGroupedToBuckets = if (optionalBucketSet.isDefined) {
      val bucketSet = optionalBucketSet.get
      filesGroupedToBuckets.filter {
        f => bucketSet.get(f._1)
      }
    } else {
      filesGroupedToBuckets
    }
    val filePartitions = Seq.tabulate(bucketSpec.numBuckets) { bucketId =>
      FilePartition(bucketId, prunedFilesGroupedToBuckets.getOrElse(bucketId, Nil))
    }
    new FileScanRDD(fsRelation.sparkSession, readFile, filePartitions)
  }
  /**
   * Create an RDD for non-bucketed reads.
   * The bucketed variant of this function is [[createBucketedReadRDD]].
   *
   * @param readFile a function to read each (part of a) file.
   * @param selectedPartitions Hive-style partition that are part of the read.
   * @param fsRelation [[HadoopFsRelation]] associated with the read.
   */
  private def createNonBucketedReadRDD(
      readFile: (PartitionedFile) => Iterator[InternalRow],
      selectedPartitions: Seq[PartitionDirectory],
      fsRelation: HadoopFsRelation): RDD[InternalRow] = {
    val defaultMaxSplitBytes =
      fsRelation.sparkSession.sessionState.conf.filesMaxPartitionBytes
    val openCostInBytes = fsRelation.sparkSession.sessionState.conf.filesOpenCostInBytes
    val defaultParallelism = fsRelation.sparkSession.sparkContext.defaultParallelism
    val totalBytes = selectedPartitions.flatMap(_.files.map(_.getLen + openCostInBytes)).sum
    val bytesPerCore = totalBytes / defaultParallelism
    // Split size: capped by the configured max, but never below the per-file open cost.
    val maxSplitBytes = Math.min(defaultMaxSplitBytes, Math.max(openCostInBytes, bytesPerCore))
    logInfo(s"Planning scan with bin packing, max size: $maxSplitBytes bytes, " +
      s"open cost is considered as scanning $openCostInBytes bytes.")
    // Split splittable files into maxSplitBytes-sized chunks; keep others whole,
    // then sort all chunks largest-first for the bin packing below.
    val splitFiles = selectedPartitions.flatMap { partition =>
      partition.files.flatMap { file =>
        val blockLocations = getBlockLocations(file)
        if (fsRelation.fileFormat.isSplitable(
            fsRelation.sparkSession, fsRelation.options, file.getPath)) {
          (0L until file.getLen by maxSplitBytes).map { offset =>
            val remaining = file.getLen - offset
            val size = if (remaining > maxSplitBytes) maxSplitBytes else remaining
            val hosts = getBlockHosts(blockLocations, offset, size)
            PartitionedFile(
              partition.values, file.getPath.toUri.toString, offset, size, hosts)
          }
        } else {
          val hosts = getBlockHosts(blockLocations, 0, file.getLen)
          Seq(PartitionedFile(
            partition.values, file.getPath.toUri.toString, 0, file.getLen, hosts))
        }
      }
    }.toArray.sortBy(_.length)(implicitly[Ordering[Long]].reverse)
    val partitions = new ArrayBuffer[FilePartition]
    val currentFiles = new ArrayBuffer[PartitionedFile]
    var currentSize = 0L
    /** Close the current partition and move to the next. */
    def closePartition(): Unit = {
      if (currentFiles.nonEmpty) {
        val newPartition =
          FilePartition(
            partitions.size,
            currentFiles.toArray.toSeq) // Copy to a new Array.
        partitions += newPartition
      }
      currentFiles.clear()
      currentSize = 0
    }
    // Assign files to partitions using "Next Fit Decreasing"
    splitFiles.foreach { file =>
      if (currentSize + file.length > maxSplitBytes) {
        closePartition()
      }
      // Add the given file to the current partition.
      currentSize += file.length + openCostInBytes
      currentFiles += file
    }
    closePartition()
    new FileScanRDD(fsRelation.sparkSession, readFile, partitions)
  }
  // Only LocatedFileStatus carries block locations; anything else yields none.
  private def getBlockLocations(file: FileStatus): Array[BlockLocation] = file match {
    case f: LocatedFileStatus => f.getBlockLocations
    case f => Array.empty[BlockLocation]
  }
  // Given locations of all blocks of a single file, `blockLocations`, and an `(offset, length)`
  // pair that represents a segment of the same file, find out the block that contains the largest
  // fraction the segment, and returns location hosts of that block. If no such block can be found,
  // returns an empty array.
  private def getBlockHosts(
      blockLocations: Array[BlockLocation], offset: Long, length: Long): Array[String] = {
    val candidates = blockLocations.map {
      // The fragment starts from a position within this block
      case b if b.getOffset <= offset && offset < b.getOffset + b.getLength =>
        b.getHosts -> (b.getOffset + b.getLength - offset).min(length)
      // The fragment ends at a position within this block
      // NOTE(review): this guard compares `offset + length` against `b.getLength`
      // alone; for blocks with a non-zero offset it looks like it should be
      // `offset + length < b.getOffset + b.getLength` — confirm. Worst case the
      // fragment falls through to the 0-length case and only locality hints are lost.
      case b if offset <= b.getOffset && offset + length < b.getLength =>
        b.getHosts -> (offset + length - b.getOffset).min(length)
      // The fragment fully contains this block
      case b if offset <= b.getOffset && b.getOffset + b.getLength <= offset + length =>
        b.getHosts -> b.getLength
      // The fragment doesn't intersect with this block
      case b =>
        b.getHosts -> 0L
    }.filter { case (hosts, size) =>
      size > 0L
    }
    if (candidates.isEmpty) {
      Array.empty[String]
    } else {
      val (hosts, _) = candidates.maxBy { case (_, size) => size }
      hosts
    }
  }
  // Canonicalization normalizes expression ids and drops the table identifier
  // so semantically-equal scans compare equal.
  override def doCanonicalize(): FileSourceScanExec = {
    FileSourceScanExec(
      relation,
      output.map(QueryPlan.normalizeExprId(_, output)),
      requiredSchema,
      QueryPlan.normalizePredicates(partitionFilters, output),
      optionalBucketSet,
      QueryPlan.normalizePredicates(dataFilters, output),
      None)
  }
}
| ahnqirage/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/DataSourceScanExec.scala | Scala | apache-2.0 | 21,130 |
/*
* Licensed to Intel Corporation under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* Intel Corporation licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.models
import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.nn._
import com.intel.analytics.bigdl.numeric.NumericDouble
import com.intel.analytics.bigdl.utils.{T, Table}
/**
 * The models in this file are only used for gradient checking.
 */
@com.intel.analytics.bigdl.tags.Serial
object GoogleNet_v1_test {
  // Builds a GoogLeNet v1 graph with the two auxiliary classifier heads.
  // Dropout layers are commented out on purpose: this variant exists only for
  // gradient checking, and dropout's randomness would break the check.
  def apply(classNum: Int): Module[Double] = {
    val feature1 = Sequential()
    feature1.add(SpatialConvolution(3, 64, 7, 7, 2, 2, 3, 3, 1, true).setInitMethod(Xavier)
      .setName("conv1/7x7_s2"))
    feature1.add(ReLU(true).setName("conv1/relu_7x7"))
    feature1.add(SpatialMaxPooling(3, 3, 2, 2).ceil().setName("pool1/3x3_s2"))
    feature1.add(SpatialCrossMapLRN(5, 0.0001, 0.75).setName("pool1/norm1"))
    feature1.add(SpatialConvolution(64, 64, 1, 1, 1, 1).setInitMethod(Xavier)
      .setName("conv2/3x3_reduce"))
    feature1.add(ReLU(true).setName("conv2/relu_3x3_reduce"))
    feature1.add(SpatialConvolution(64, 192, 3, 3, 1, 1, 1, 1).setInitMethod(Xavier)
      .setName("conv2/3x3"))
    feature1.add(ReLU(true).setName("conv2/relu_3x3"))
    feature1.add(SpatialCrossMapLRN(5, 0.0001, 0.75). setName("conv2/norm2"))
    feature1.add(SpatialMaxPooling(3, 3, 2, 2).ceil().setName("pool2/3x3_s2"))
    feature1.add(inception(192, T(T(64), T(96, 128), T(16, 32), T(32)), "inception_3a/"))
    feature1.add(inception(256, T(T(128), T(128, 192), T(32, 96), T(64)), "inception_3b/"))
    feature1.add(SpatialMaxPooling(3, 3, 2, 2).ceil().setName("pool3/3x3_s2"))
    feature1.add(inception(480, T(T(192), T(96, 208), T(16, 48), T(64)), "inception_4a/"))
    // First auxiliary classifier head (loss1), attached after inception_4a.
    val output1 = Sequential()
    output1.add(SpatialAveragePooling(5, 5, 3, 3).ceil().setName("loss1/ave_pool"))
    output1.add(SpatialConvolution(512, 128, 1, 1, 1, 1).setName("loss1/conv"))
    output1.add(ReLU(true).setName("loss1/relu_conv"))
    output1.add(View(128 * 4 * 4).setNumInputDims(3))
    output1.add(Linear(128 * 4 * 4, 1024).setName("loss1/fc"))
    output1.add(ReLU(true).setName("loss1/relu_fc"))
    // output1.add(Dropout(0.7).setName("loss1/drop_fc"))
    output1.add(Linear(1024, classNum).setName("loss1/classifier"))
    output1.add(LogSoftMax().setName("loss1/loss"))
    val feature2 = Sequential()
    feature2.add(inception(512, T(T(160), T(112, 224), T(24, 64), T(64)), "inception_4b/"))
    feature2.add(inception(512, T(T(128), T(128, 256), T(24, 64), T(64)), "inception_4c/"))
    feature2.add(inception(512, T(T(112), T(144, 288), T(32, 64), T(64)), "inception_4d/"))
    // Second auxiliary classifier head (loss2), attached after inception_4d.
    val output2 = Sequential()
    output2.add(SpatialAveragePooling(5, 5, 3, 3).setName("loss2/ave_pool"))
    output2.add(SpatialConvolution(528, 128, 1, 1, 1, 1).setName("loss2/conv"))
    output2.add(ReLU(true).setName("loss2/relu_conv"))
    output2.add(View(128 * 4 * 4).setNumInputDims(3))
    output2.add(Linear(128 * 4 * 4, 1024).setName("loss2/fc"))
    output2.add(ReLU(true).setName("loss2/relu_fc"))
    // output2.add(Dropout(0.7).setName("loss2/drop_fc"))
    output2.add(Linear(1024, classNum).setName("loss2/classifier"))
    output2.add(LogSoftMax().setName("loss2/loss"))
    // Main classifier head (loss3) continuing through inception_4e..5b.
    val output3 = Sequential()
    output3.add(inception(528, T(T(256), T(160, 320), T(32, 128), T(128)), "inception_4e/"))
    output3.add(SpatialMaxPooling(3, 3, 2, 2).ceil().setName("pool4/3x3_s2"))
    output3.add(inception(832, T(T(256), T(160, 320), T(32, 128), T(128)), "inception_5a/"))
    output3.add(inception(832, T(T(384), T(192, 384), T(48, 128), T(128)), "inception_5b/"))
    output3.add(SpatialAveragePooling(7, 7, 1, 1).setName("pool5/7x7_s1"))
    // output3.add(Dropout(0.4).setName("pool5/drop_7x7_s1"))
    output3.add(View(1024).setNumInputDims(3))
    output3.add(Linear(1024, classNum).setInitMethod(Xavier).setName("loss3/classifier"))
    output3.add(LogSoftMax().setName("loss3/loss3"))
    // Concatenate the three heads' outputs so one forward pass yields all losses.
    val split2 = Concat(2).setName("split2")
    split2.add(output3)
    split2.add(output2)
    val mainBranch = Sequential()
    mainBranch.add(feature2)
    mainBranch.add(split2)
    val split1 = Concat(2).setName("split1")
    split1.add(mainBranch)
    split1.add(output1)
    val model = Sequential()
    model.add(feature1)
    model.add(split1)
    model.reset()
    model
  }
  // Builds one inception module. `config` holds four branch configs:
  // (1x1), (3x3 reduce, 3x3), (5x5 reduce, 5x5) and (pool projection) channel counts.
  def inception(inputSize: Int, config: Table, namePrefix : String = "") : Module[Double] = {
    val concat = Concat(2)
    val conv1 = Sequential()
    conv1.add(SpatialConvolution(inputSize,
      config[Table](1)(1), 1, 1, 1, 1).setInitMethod(Xavier).setName(namePrefix + "1x1"))
    conv1.add(ReLU(true).setName(namePrefix + "relu_1x1"))
    concat.add(conv1)
    val conv3 = Sequential()
    conv3.add(SpatialConvolution(inputSize,
      config[Table](2)(1), 1, 1, 1, 1).setInitMethod(Xavier).setName(namePrefix + "3x3_reduce"))
    conv3.add(ReLU(true).setName(namePrefix + "relu_3x3_reduce"))
    conv3.add(SpatialConvolution(config[Table](2)(1),
      config[Table](2)(2), 3, 3, 1, 1, 1, 1).setInitMethod(Xavier).setName(namePrefix + "3x3"))
    conv3.add(ReLU(true).setName(namePrefix + "relu_3x3"))
    concat.add(conv3)
    val conv5 = Sequential()
    conv5.add(SpatialConvolution(inputSize,
      config[Table](3)(1), 1, 1, 1, 1).setInitMethod(Xavier).setName(namePrefix + "5x5_reduce"))
    conv5.add(ReLU(true).setName(namePrefix + "relu_5x5_reduce"))
    conv5.add(SpatialConvolution(config[Table](3)(1),
      config[Table](3)(2), 5, 5, 1, 1, 2, 2).setInitMethod(Xavier).setName(namePrefix + "5x5"))
    conv5.add(ReLU(true).setName(namePrefix + "relu_5x5"))
    concat.add(conv5)
    val pool = Sequential()
    pool.add(SpatialMaxPooling(3, 3, 1, 1, 1, 1).ceil().setName(namePrefix + "pool"))
    pool.add(SpatialConvolution(inputSize,
      config[Table](4)(1), 1, 1, 1, 1).setInitMethod(Xavier).setName(namePrefix + "pool_proj"))
    pool.add(ReLU(true).setName(namePrefix + "relu_pool_proj"))
    concat.add(pool).setName(namePrefix + "output")
    concat
  }
}
/**
 * Test-only GoogLeNet v2 (Inception-BN) definitions.
 *
 * `apply` builds the batch-normalized variant, `applyNoBn` the same topology
 * without batch normalization. Both attach two auxiliary classifiers
 * (loss1/loss2) next to the main one (loss3) via nested `Concat` splits.
 */
object GoogleNet_v2_test {
  /** Full Inception-BN network producing `classNum` log-probabilities per head. */
  def apply(classNum: Int): Module[Double] = {
    // Stem (conv1/conv2 + pooling) and inception 3a-3c, shared by all heads.
    val features1 = Sequential()
    features1.add(SpatialConvolution(3, 64, 7, 7, 2, 2, 3, 3, 1, true)
      .setName("conv1/7x7_s2"))
    features1.add(SpatialBatchNormalization(64, 1e-3).setInit().setName("conv1/7x7_s2/bn"))
    features1.add(ReLU(true).setName("conv1/7x7_s2/bn/sc/relu"))
    features1.add(SpatialMaxPooling(3, 3, 2, 2).ceil().setName("pool1/3x3_s2"))
    features1.add(SpatialConvolution(64, 64, 1, 1).setName("conv2/3x3_reduce"))
    features1.add(SpatialBatchNormalization(64, 1e-3).setInit().setName("conv2/3x3_reduce/bn"))
    features1.add(ReLU(true).setName("conv2/3x3_reduce/bn/sc/relu"))
    features1.add(SpatialConvolution(64, 192, 3, 3, 1, 1, 1, 1).setName("conv2/3x3"))
    features1.add(SpatialBatchNormalization(192, 1e-3).setInit().setName("conv2/3x3/bn"))
    features1.add(ReLU(true).setName("conv2/3x3/bn/sc/relu"))
    features1.add(SpatialMaxPooling(3, 3, 2, 2).ceil().setName("pool2/3x3_s2"))
    features1.add(inception(192, T(T(64), T(64, 64), T(64, 96), T("avg", 32)), "inception_3a/"))
    features1.add(inception(256, T(T(64), T(64, 96), T(64, 96), T("avg", 64)), "inception_3b/"))
    features1.add(inception(320, T(T(0), T(128, 160), T(64, 96), T("max", 0)), "inception_3c/"))
    // First auxiliary classifier, attached after inception 3c.
    val output1 = Sequential()
    output1.add(SpatialAveragePooling(5, 5, 3, 3).ceil().setName("pool3/5x5_s3"))
    output1.add(SpatialConvolution(576, 128, 1, 1, 1, 1).setName("loss1/conv"))
    output1.add(SpatialBatchNormalization(128, 1e-3).setInit().setName("loss1/conv/bn"))
    output1.add(ReLU(true).setName("loss1/conv/bn/sc/relu"))
    output1.add(View(128 * 4 * 4).setNumInputDims(3))
    output1.add(Linear(128 * 4 * 4, 1024).setName("loss1/fc"))
    output1.add(ReLU(true).setName("loss1/fc/bn/sc/relu"))
    output1.add(Linear(1024, classNum).setName("loss1/classifier"))
    output1.add(LogSoftMax().setName("loss1/loss"))
    // Middle trunk: inception 4a-4e.
    val features2 = Sequential()
    features2
      .add(inception(576, T(T(224), T(64, 96), T(96, 128), T("avg", 128)), "inception_4a/"))
      .add(inception(576, T(T(192), T(96, 128), T(96, 128), T("avg", 128)), "inception_4b/"))
      .add(inception(576, T(T(160), T(128, 160), T(128, 160), T("avg", 96)), "inception_4c/"))
      .add(inception(576, T(T(96), T(128, 192), T(160, 192), T("avg", 96)), "inception_4d/"))
      .add(inception(576, T(T(0), T(128, 192), T(192, 256), T("max", 0)), "inception_4e/"))
    // Second auxiliary classifier, attached after inception 4e.
    val output2 = Sequential()
    output2.add(SpatialAveragePooling(5, 5, 3, 3).ceil().setName("pool4/5x5_s3"))
    output2.add(SpatialConvolution(1024, 128, 1, 1, 1, 1).setName("loss2/conv"))
    output2.add(SpatialBatchNormalization(128, 1e-3).setInit().setName("loss2/conv/bn"))
    output2.add(ReLU(true).setName("loss2/conv/bn/sc/relu"))
    output2.add(View(128 * 2 * 2).setNumInputDims(3))
    output2.add(Linear(128 * 2 * 2, 1024).setName("loss2/fc"))
    output2.add(ReLU(true).setName("loss2/fc/bn/sc/relu"))
    output2.add(Linear(1024, classNum).setName("loss2/classifier"))
    output2.add(LogSoftMax().setName("loss2/loss"))
    // Main classifier: inception 5a/5b plus global pooling.
    val output3 = Sequential()
    output3.add(inception(1024, T(T(352), T(192, 320), T(160, 224), T("avg", 128)),
      "inception_5a/"))
    output3.add(inception(1024, T(T(352), T(192, 320), T(192, 224), T("max", 128)),
      "inception_5b/"))
    output3.add(SpatialAveragePooling(7, 7, 1, 1).ceil().setName("pool5/7x7_s1"))
    output3.add(View(1024).setNumInputDims(3))
    output3.add(Linear(1024, classNum).setName("loss3/classifier"))
    output3.add(LogSoftMax().setName("loss3/loss"))
    // Merge the three heads: (features1 -> (output1 | features2 -> (output3 | output2))).
    val split2 = Concat(2)
    split2.add(output3)
    split2.add(output2)
    val mainBranch = Sequential()
    mainBranch.add(features2)
    mainBranch.add(split2)
    val split1 = Concat(2)
    split1.add(mainBranch)
    split1.add(output1)
    val model = Sequential()
    model.add(features1)
    model.add(split1)
    model.reset()
    model
  }
  /** Same topology as [[apply]] but with every batch-norm layer omitted. */
  def applyNoBn(classNum: Int): Module[Double] = {
    val features1 = Sequential()
    features1.add(SpatialConvolution(3, 64, 7, 7, 2, 2, 3, 3, 1, true)
      .setName("conv1/7x7_s2"))
    features1.add(ReLU(true).setName("conv1/7x7_s2/bn/sc/relu"))
    features1.add(SpatialMaxPooling(3, 3, 2, 2).ceil().setName("pool1/3x3_s2"))
    features1.add(SpatialConvolution(64, 64, 1, 1).setName("conv2/3x3_reduce"))
    features1.add(ReLU(true).setName("conv2/3x3_reduce/bn/sc/relu"))
    features1.add(SpatialConvolution(64, 192, 3, 3, 1, 1, 1, 1).setName("conv2/3x3"))
    features1.add(ReLU(true).setName("conv2/3x3/bn/sc/relu"))
    features1.add(SpatialMaxPooling(3, 3, 2, 2).ceil().setName("pool2/3x3_s2"))
    features1.add(inceptionNoBn(192, T(T(64), T(64, 64), T(64, 96), T("avg", 32)),
      "inception_3a/"))
    features1.add(inceptionNoBn(256, T(T(64), T(64, 96), T(64, 96), T("avg", 64)),
      "inception_3b/"))
    features1.add(inceptionNoBn(320, T(T(0), T(128, 160), T(64, 96), T("max", 0)),
      "inception_3c/"))
    val output1 = Sequential()
    output1.add(SpatialAveragePooling(5, 5, 3, 3).ceil().setName("pool3/5x5_s3"))
    output1.add(SpatialConvolution(576, 128, 1, 1, 1, 1).setName("loss1/conv"))
    output1.add(ReLU(true).setName("loss1/conv/bn/sc/relu"))
    output1.add(View(128 * 4 * 4).setNumInputDims(3))
    output1.add(Linear(128 * 4 * 4, 1024).setName("loss1/fc"))
    output1.add(ReLU(true).setName("loss1/fc/bn/sc/relu"))
    output1.add(Linear(1024, classNum).setName("loss1/classifier"))
    output1.add(LogSoftMax().setName("loss1/loss"))
    val features2 = Sequential()
    features2
      .add(inceptionNoBn(576, T(T(224), T(64, 96), T(96, 128), T("avg", 128)),
        "inception_4a/"))
      .add(inceptionNoBn(576, T(T(192), T(96, 128), T(96, 128), T("avg", 128)),
        "inception_4b/"))
      .add(inceptionNoBn(576, T(T(160), T(128, 160), T(128, 160), T("avg", 96)),
        "inception_4c/"))
      .add(inceptionNoBn(576, T(T(96), T(128, 192), T(160, 192), T("avg", 96)),
        "inception_4d/"))
      .add(inceptionNoBn(576, T(T(0), T(128, 192), T(192, 256), T("max", 0)),
        "inception_4e/"))
    val output2 = Sequential()
    output2.add(SpatialAveragePooling(5, 5, 3, 3).ceil().setName("pool4/5x5_s3"))
    output2.add(SpatialConvolution(1024, 128, 1, 1, 1, 1).setName("loss2/conv"))
    output2.add(ReLU(true).setName("loss2/conv/bn/sc/relu"))
    output2.add(View(128 * 2 * 2).setNumInputDims(3))
    output2.add(Linear(128 * 2 * 2, 1024).setName("loss2/fc"))
    output2.add(ReLU(true).setName("loss2/fc/bn/sc/relu"))
    output2.add(Linear(1024, classNum).setName("loss2/classifier"))
    output2.add(LogSoftMax().setName("loss2/loss"))
    val output3 = Sequential()
    output3.add(inceptionNoBn(1024, T(T(352), T(192, 320), T(160, 224), T("avg", 128)),
      "inception_5a/"))
    output3.add(inceptionNoBn(1024, T(T(352), T(192, 320), T(192, 224), T("max", 128)),
      "inception_5b/"))
    output3.add(SpatialAveragePooling(7, 7, 1, 1).ceil().setName("pool5/7x7_s1"))
    output3.add(View(1024).setNumInputDims(3))
    output3.add(Linear(1024, classNum).setName("loss3/classifier"))
    output3.add(LogSoftMax().setName("loss3/loss"))
    val split2 = Concat(2)
    split2.add(output3)
    split2.add(output2)
    val mainBranch = Sequential()
    mainBranch.add(features2)
    mainBranch.add(split2)
    val split1 = Concat(2)
    split1.add(mainBranch)
    split1.add(output1)
    val model = Sequential()
    model.add(features1)
    model.add(split1)
    model.reset()
    model
  }
  /**
   * Inception-BN module. `config` is a 4-entry table: (1) 1x1 width (0 skips
   * the branch), (2) 3x3 reduce/width, (3) double-3x3 reduce/width,
   * (4) pool type ("max"/"avg") and projection width (0 = stride-2
   * downsampling variant with no projection).
   */
  def inception(inputSize: Int, config: Table, namePrefix : String): Module[Double] = {
    val concat = Concat(2)
    if (config[Table](1)[Int](1) != 0) {
      val conv1 = Sequential()
      conv1.add(SpatialConvolution(inputSize, config[Table](1)(1), 1, 1, 1, 1)
        .setName(namePrefix + "1x1"))
      conv1.add(SpatialBatchNormalization(config[Table](1)(1), 1e-3).setInit()
        .setName(namePrefix + "1x1/bn"))
      conv1.add(ReLU(true).setName(namePrefix + "1x1/bn/sc/relu"))
      concat.add(conv1)
    }
    val conv3 = Sequential()
    conv3.add(SpatialConvolution(inputSize, config[Table](2)(1), 1, 1, 1, 1)
      .setName(namePrefix + "3x3_reduce"))
    conv3.add(SpatialBatchNormalization(config[Table](2)(1), 1e-3).setInit()
      .setName(namePrefix + "3x3_reduce/bn"))
    conv3.add(ReLU(true). setName(namePrefix + "3x3_reduce/bn/sc/relu"))
    // Downsampling modules ("max", 0) use stride 2 in the 3x3 convolutions.
    if(config[Table](4)[String](1) == "max" && config[Table](4)[Int](2) == 0) {
      conv3.add(SpatialConvolution(config[Table](2)(1),
        config[Table](2)(2), 3, 3, 2, 2, 1, 1).setName(namePrefix + "3x3"))
    } else {
      conv3.add(SpatialConvolution(config[Table](2)(1),
        config[Table](2)(2), 3, 3, 1, 1, 1, 1).setName(namePrefix + "3x3"))
    }
    conv3.add(SpatialBatchNormalization(config[Table](2)(2), 1e-3).setInit()
      .setName(namePrefix + "3x3/bn"))
    conv3.add(ReLU(true).setName(namePrefix + "3x3/bn/sc/relu"))
    concat.add(conv3)
    val conv3xx = Sequential()
    conv3xx.add(SpatialConvolution(inputSize, config[Table](3)(1), 1, 1, 1, 1)
      .setName(namePrefix + "double3x3_reduce"))
    conv3xx.add(SpatialBatchNormalization(config[Table](3)(1), 1e-3).setInit()
      .setName(namePrefix + "double3x3_reduce/bn"))
    conv3xx.add(ReLU(true).setName(namePrefix + "double3x3_reduce/bn/sc/relu"))
    conv3xx.add(SpatialConvolution(config[Table](3)(1),
      config[Table](3)(2), 3, 3, 1, 1, 1, 1).setName(namePrefix + "double3x3a"))
    conv3xx.add(SpatialBatchNormalization(config[Table](3)(2), 1e-3).setInit()
      .setName(namePrefix + "double3x3a/bn"))
    conv3xx.add(ReLU(true).setName(namePrefix + "double3x3a/bn/sc/relu"))
    if(config[Table](4)[String](1) == "max" && config[Table](4)[Int](2) == 0) {
      conv3xx.add(SpatialConvolution(config[Table](3)(2),
        config[Table](3)(2), 3, 3, 2, 2, 1, 1).setName(namePrefix + "double3x3b"))
    } else {
      conv3xx.add(SpatialConvolution(config[Table](3)(2),
        config[Table](3)(2), 3, 3, 1, 1, 1, 1).setName(namePrefix + "double3x3b"))
    }
    conv3xx.add(SpatialBatchNormalization(config[Table](3)(2), 1e-3).setInit()
      .setName(namePrefix + "double3x3b/bn"))
    conv3xx.add(ReLU(true).setName(namePrefix + "double3x3b/bn/sc/relu"))
    concat.add(conv3xx)
    val pool = Sequential()
    config[Table](4)[String](1) match {
      case "max" =>
        if (config[Table](4)[Int](2) != 0) {
          pool.add(SpatialMaxPooling(3, 3, 1, 1, 1, 1).ceil().setName(namePrefix + "pool"))
        } else {
          pool.add(SpatialMaxPooling(3, 3, 2, 2).ceil().setName(namePrefix + "pool"))
        }
      case "avg" => pool.add(SpatialAveragePooling(3, 3, 1, 1, 1, 1).ceil()
        .setName(namePrefix + "pool"))
      case _ => throw new IllegalArgumentException
    }
    if (config[Table](4)[Int](2) != 0) {
      pool.add(SpatialConvolution(inputSize, config[Table](4)[Int](2), 1, 1, 1, 1)
        .setName(namePrefix + "pool_proj"))
      pool.add(SpatialBatchNormalization(config[Table](4)(2), 1e-3).setInit()
        .setName(namePrefix + "pool_proj/bn"))
      pool.add(ReLU(true).setName(namePrefix + "pool_proj/bn/sc/relu"))
    }
    concat.add(pool)
    concat.setName(namePrefix + "output")
  }
  /** Same module as [[inception]] with all batch-norm layers omitted. */
  def inceptionNoBn(inputSize: Int, config: Table, namePrefix : String): Module[Double] = {
    val concat = Concat(2)
    if (config[Table](1)[Int](1) != 0) {
      val conv1 = Sequential()
      conv1.add(SpatialConvolution(inputSize, config[Table](1)(1), 1, 1, 1, 1)
        .setName(namePrefix + "1x1"))
      conv1.add(ReLU(true).setName(namePrefix + "1x1/bn/sc/relu"))
      concat.add(conv1)
    }
    val conv3 = Sequential()
    conv3.add(SpatialConvolution(inputSize, config[Table](2)(1), 1, 1, 1, 1)
      .setName(namePrefix + "3x3_reduce"))
    conv3.add(ReLU(true). setName(namePrefix + "3x3_reduce/bn/sc/relu"))
    if(config[Table](4)[String](1) == "max" && config[Table](4)[Int](2) == 0) {
      conv3.add(SpatialConvolution(config[Table](2)(1),
        config[Table](2)(2), 3, 3, 2, 2, 1, 1).setName(namePrefix + "3x3"))
    } else {
      conv3.add(SpatialConvolution(config[Table](2)(1),
        config[Table](2)(2), 3, 3, 1, 1, 1, 1).setName(namePrefix + "3x3"))
    }
    conv3.add(ReLU(true).setName(namePrefix + "3x3/bn/sc/relu"))
    concat.add(conv3)
    val conv3xx = Sequential()
    conv3xx.add(SpatialConvolution(inputSize, config[Table](3)(1), 1, 1, 1, 1)
      .setName(namePrefix + "double3x3_reduce"))
    conv3xx.add(ReLU(true).setName(namePrefix + "double3x3_reduce/bn/sc/relu"))
    conv3xx.add(SpatialConvolution(config[Table](3)(1),
      config[Table](3)(2), 3, 3, 1, 1, 1, 1).setName(namePrefix + "double3x3a"))
    conv3xx.add(ReLU(true).setName(namePrefix + "double3x3a/bn/sc/relu"))
    if(config[Table](4)[String](1) == "max" && config[Table](4)[Int](2) == 0) {
      conv3xx.add(SpatialConvolution(config[Table](3)(2),
        config[Table](3)(2), 3, 3, 2, 2, 1, 1).setName(namePrefix + "double3x3b"))
    } else {
      conv3xx.add(SpatialConvolution(config[Table](3)(2),
        config[Table](3)(2), 3, 3, 1, 1, 1, 1).setName(namePrefix + "double3x3b"))
    }
    conv3xx.add(ReLU(true).setName(namePrefix + "double3x3b/bn/sc/relu"))
    concat.add(conv3xx)
    val pool = Sequential()
    config[Table](4)[String](1) match {
      case "max" =>
        if (config[Table](4)[Int](2) != 0) {
          pool.add(SpatialMaxPooling(3, 3, 1, 1, 1, 1).ceil().setName(namePrefix + "pool"))
        } else {
          pool.add(SpatialMaxPooling(3, 3, 2, 2).ceil().setName(namePrefix + "pool"))
        }
      case "avg" => pool.add(SpatialAveragePooling(3, 3, 1, 1, 1, 1).ceil()
        .setName(namePrefix + "pool"))
      case _ => throw new IllegalArgumentException
    }
    if (config[Table](4)[Int](2) != 0) {
      pool.add(SpatialConvolution(inputSize, config[Table](4)[Int](2), 1, 1, 1, 1)
        .setName(namePrefix + "pool_proj"))
      pool.add(ReLU(true).setName(namePrefix + "pool_proj/bn/sc/relu"))
    }
    concat.add(pool)
    concat.setName(namePrefix + "output")
  }
}
object VggLike_test {
  /**
   * VGG-style network: five convolutional stages (each ending with a 2x2
   * max-pool) followed by a two-layer fully connected classifier emitting
   * `classNum` log-probabilities. Dropout layers from the reference recipe
   * are intentionally left out (as in the original).
   */
  def apply(classNum: Int): Module[Double] = {
    val net = Sequential[Double]()
    // Appends one conv -> batch-norm -> ReLU unit to the network.
    def addConvBnRelu(inPlanes: Int, outPlanes: Int): Unit = {
      net.add(SpatialConvolution(inPlanes, outPlanes, 3, 3, 1, 1, 1, 1))
      net.add(SpatialBatchNormalization[Double](outPlanes, 1e-3).setInit())
      net.add(ReLU(true))
    }
    // Channel progression of the five convolutional stages.
    val stages = Seq(
      Seq((3, 64), (64, 64)),
      Seq((64, 128), (128, 128)),
      Seq((128, 256), (256, 256), (256, 256)),
      Seq((256, 512), (512, 512), (512, 512)),
      Seq((512, 512), (512, 512), (512, 512)))
    stages.foreach { stage =>
      stage.foreach { case (in, out) => addConvBnRelu(in, out) }
      net.add(SpatialMaxPooling[Double](2, 2, 2, 2).ceil())
    }
    net.add(View(512))
    // Fully connected classifier head.
    val classifier = Sequential[Double]()
    classifier.add(Linear(512, 512))
    classifier.add(BatchNormalization[Double](512).setInit())
    classifier.add(ReLU(true))
    classifier.add(Linear(512, classNum))
    classifier.add(LogSoftMax())
    net.add(classifier)
    net
  }
}
object LeNet5_test {
  /**
   * LeNet-5-style network for 28x28 single-channel input, ending in a
   * LogSoftMax over `classNum` classes. Layer order (including the Tanh
   * placed directly after the first pooling layer) matches the original.
   */
  def apply(classNum: Int): Module[Double] = {
    val net = Sequential()
    net
      .add(Reshape(Array(1, 28, 28)))
      .add(SpatialConvolution(1, 6, 5, 5))
      .add(Tanh())
      .add(SpatialMaxPooling(2, 2, 2, 2))
      .add(Tanh())
      .add(SpatialConvolution(6, 12, 5, 5))
      .add(SpatialMaxPooling(2, 2, 2, 2))
      .add(Reshape(Array(12 * 4 * 4)))
      .add(Linear(12 * 4 * 4, 100))
      .add(Tanh())
      .add(Linear(100, classNum))
      .add(LogSoftMax())
    net
  }
}
object SimpleCNN_test {
  val rowN = 28
  val colN = 28
  val featureSize = rowN * colN
  /**
   * Small two-conv-layer CNN for 28x28 single-channel input, ending in a
   * LogSoftMax over `classNum` classes.
   */
  def apply(classNum: Int): Module[Double] = {
    // Flattened feature-map size after the two conv/pool stages.
    val linearInputNum = 64 * 2 * 2
    val hiddenNum = 200
    val net = Sequential()
    net
      .add(Reshape(Array(1, rowN, colN)))
      .add(SpatialConvolution(1, 32, 5, 5))
      .add(Tanh())
      .add(SpatialMaxPooling(3, 3, 3, 3))
      .add(SpatialConvolution(32, 64, 5, 5))
      .add(Tanh())
      .add(SpatialMaxPooling(2, 2, 2, 2))
      .add(Reshape(Array(linearInputNum)))
      .add(Linear(linearInputNum, hiddenNum))
      .add(Tanh())
      .add(Linear(hiddenNum, classNum))
      .add(LogSoftMax())
    net
  }
}
| zhichao-li/BigDL | dl/src/test/scala/com/intel/analytics/bigdl/models/ModelforCheck.scala | Scala | apache-2.0 | 23,628 |
/**
* Copyright 2015, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.deeplang.params
import spray.json._
class PrefixBasedColumnCreatorParamSpec
  extends AbstractParamSpec[String, PrefixBasedColumnCreatorParam] {

  override def className: String = "PrefixBasedColumnCreatorParam"

  /** A sample parameter paired with the JSON it is expected to serialize to. */
  override def paramFixture: (PrefixBasedColumnCreatorParam, JsValue) = {
    val desc = "Prefix based column creator description"
    val param = PrefixBasedColumnCreatorParam(
      name = "Prefix based column creator name",
      description = Some(desc))
    val expectedJson = JsObject(Map(
      "type" -> JsString("prefixBasedCreator"),
      "name" -> JsString(param.name),
      "description" -> JsString(desc),
      "isGriddable" -> JsFalse,
      "default" -> (JsNull: JsValue)))
    (param, expectedJson)
  }

  /** A sample value paired with its JSON representation. */
  override def valueFixture: (String, JsValue) = ("abc", JsString("abc"))
}
| deepsense-io/seahorse-workflow-executor | deeplang/src/test/scala/io/deepsense/deeplang/params/PrefixBasedColumnCreatorParamSpec.scala | Scala | apache-2.0 | 1,482 |
package org.bitcoins.bundle.gui
import akka.actor.ActorSystem
import com.typesafe.config._
import grizzled.slf4j.Logging
import org.bitcoins.bundle.gui.BundleGUI._
import org.bitcoins.commons.config.AppConfig
import org.bitcoins.commons.util.{DatadirUtil, ServerArgParser}
import org.bitcoins.core.api.node.InternalImplementationNodeType
import org.bitcoins.gui._
import org.bitcoins.core.api.node.NodeType._
import org.bitcoins.server._
import scalafx.beans.property.ObjectProperty
import scalafx.stage.Window
import java.nio.file.Files
import scala.concurrent._
import scala.concurrent.duration.DurationInt
/**
 * Backing model for the landing pane: persists the chosen bundle config and
 * starts the wallet/server, switching to the wallet GUI scene on success.
 */
class LandingPaneModel(serverArgParser: ServerArgParser)(implicit
    system: ActorSystem)
    extends Logging {
  // Injected after construction by the GUI wiring; must be set before launchWallet is called.
  var taskRunner: TaskRunner = _
  // Sadly, it is a Java "pattern" to pass null into
  // constructors to signal that you want some default
  val parentWindow: ObjectProperty[Window] = {
    ObjectProperty[Window](null.asInstanceOf[Window])
  }
  /**
   * Writes `bundleConf` to disk, resolves the network (querying bitcoind when
   * possible), starts [[BitcoinSServerMain]] and switches to the wallet GUI.
   * Runs as a TaskRunner task; blocks that task for up to 120 seconds.
   */
  def launchWallet(bundleConf: Config, appConfig: BitcoinSAppConfig): Unit = {
    taskRunner.run(
      "Launching Wallet",
      op = {
        import system.dispatcher
        // Persist the user's choices so the next launch starts from them.
        val file =
          appConfig.nodeConf.baseDatadir.resolve("bitcoin-s-bundle.conf")
        val bundleConfStr = AppConfig.configToString(bundleConf)
        logger.info(s"Writing bundle config to $file")
        Files.write(file, bundleConfStr.getBytes)
        // Determine the network: ask bitcoind when we can reach it directly,
        // otherwise leave it to the config defaults.
        val networkConfigF: Future[Config] = {
          val tmpConf =
            BitcoinSAppConfig.fromConfig(
              bundleConf.withFallback(appConfig.config))
          val netConfF: Future[Config] = tmpConf.nodeConf.nodeType match {
            case _: InternalImplementationNodeType =>
              // If we are connecting to a node we cannot
              // know what network it is on now
              Future.successful(ConfigFactory.empty())
            case BitcoindBackend =>
              if (!appConfig.torConf.enabled) {
                val bitcoindF = tmpConf.bitcoindRpcConf.clientF
                bitcoindF
                  .flatMap(_.getBlockChainInfo)
                  .map { info =>
                    val networkStr =
                      DatadirUtil.networkStrToDirName(info.chain.name)
                    ConfigFactory.parseString(
                      s"bitcoin-s.network = $networkStr")
                  }
              } else {
                //we cannot connect to bitcoind and determine
                //the network over tor since tor isn't started
                //yet
                Future.successful(ConfigFactory.empty())
              }
          }
          // Precedence: CLI args > detected network > bundle config.
          netConfF.map { netConf =>
            serverArgParser.toConfig
              .withFallback(netConf)
              .withFallback(bundleConf)
          }
        }
        // Launch wallet
        val promise = Promise[Unit]()
        val startedF: Future[Unit] = networkConfigF.flatMap { networkConfig =>
          val finalAppConfig =
            BitcoinSAppConfig.fromDatadir(appConfig.nodeConf.baseDatadir,
                                          networkConfig)
          // use class base constructor to share the actor system
          GlobalData.setBitcoinNetwork(
            finalAppConfig.nodeConf.network,
            finalAppConfig.nodeConf.socks5ProxyParams.isDefined)
          val runF =
            new BitcoinSServerMain(serverArgParser)(system, finalAppConfig)
              .run()
          runF.map { _ =>
            fetchStartingData()
            changeToWalletGUIScene()
            promise.success(())
          }
        }
        // Rethrow startup failures so the TaskRunner surfaces them.
        startedF.failed.foreach { case err =>
          throw err
        }
        Await.result(promise.future, 120.seconds)
      }
    )
  }
}
| bitcoin-s/bitcoin-s | app/bundle/src/main/scala/org/bitcoins/bundle/gui/LandingPaneModel.scala | Scala | mit | 3,727 |
package examples
// Placeholder for monad-transformer example code; intentionally left empty.
object monadtransformer1 {
}
| adilakhter/scalaznoob | src/main/scala/examples/monadtransformer1.scala | Scala | apache-2.0 | 48 |
package vggames.webdev
import vggames.shared.task.{ Tasks, TaskGroup }
import vggames.shared.GameEngine
import vggames.shared.task.TaskGroup
import vggames.shared.task.Task
import vggames.shared.task.status.Ok
/**
 * Game definition for the "Desenvolvimento Web" course: an ordered list of
 * GitHub-project task groups covering design, HTML, CSS, JS and two projects.
 * Task titles/keys are user-facing data and localized in Portuguese.
 */
class WebdevGame extends GameEngine {
  // Overall task order: design fundamentals, then HTML/CSS with a site
  // generator project in between, then JS, finishing with a code-review project.
  val tasks = new Tasks(
    design ++
      html ++
      siteGenerator ++
      css ++
      siteGeneratorCss ++
      js ++
      codeReview : _*)
  // Programming-fundamentals warm-up tasks.
  private def design = Seq(
    new GithubProject("Setup inicial", "design.setup"),
    new GithubProject("Tartaruga", "design.logo.turtle"),
    new GithubProject("Tartaruga com obstáculos", "design.logo.obstacles"),
    new GithubProject("Imutabilidade", "design.imutability"),
    new GithubProject("Mapear coleções", "design.collection.map"),
    new GithubProject("Filtrar coleções", "design.collection.filter"),
    new GithubProject("Testes e TDD", "design.test.tdd"),
    new GithubProject("Conversão de Linguagem (__, // e **)", "design.markdown.text.decoration"),
    new GithubProject("Conversão de Linguagem (imagens e links)", "design.markdown.image.link"),
    new GithubProject("Conversão de Linguagem (lista)", "design.markdown.list"),
    new GithubProject("Api de Requisições web", "design.request.api"))
  // Static HTML page-building exercises.
  private def html = Seq(
    new GithubProject("Página de autores", "html.authors"),
    new GithubProject("Uma receita", "html.recipe"),
    new GithubProject("Um post de um blog", "html.post"),
    new GithubProject("Uma tabela", "html.table"),
    new GithubProject("Uma página completa", "html.page"))
  // First project milestone: a static-site generator.
  private def siteGenerator = Seq(
    new GithubProject("Um gerador de sites", "project.generator"))
  // CSS styling exercises.
  private def css = Seq(
    new GithubProject("Uma página com formulário", "css.form"),
    new GithubProject("A página de um blog", "css.blog"),
    new GithubProject("Uma página com menu", "css.menu"),
    new GithubProject("Outra página com menu", "css.other.menu"))
  // Styling tasks applied to the site-generator project.
  // NOTE(review): "genrado" below looks like a typo for "gerado"; it is a
  // runtime string, so it is left unchanged here — confirm with the content owners.
  private def siteGeneratorCss = Seq(
    new GithubProject("Melhorar a cara do site genrado", "project.generator.css"),
    new GithubProject("Layout do site gerado", "project.generator.layout"))
  // JavaScript exercises. NOTE(review): the jQuery tasks at the end reuse the
  // same keys as the plain "Criar/Inserir/Remover Tags" tasks — presumably
  // intentional, but worth confirming since keys look like unique identifiers.
  private def js = Seq(
    new GithubProject("Setup do Jasmine", "js.jasmine.setup"),
    new GithubProject("Matchers do Jasmine", "js.jasmine.matchers"),
    new GithubProject("Declarar Funções", "js.declarar.funcoes"),
    new GithubProject("If", "js.if"),
    new GithubProject("`For`...in", "js.for.in"),
    new GithubProject("Funções construtoras", "js.constructor.function"),
    new GithubProject("Hashes", "js.hash"),
    new GithubProject("`For` em um objeto", "js.for.object"),
    new GithubProject("`For` em um hash", "js.for.hash"),
    new GithubProject("Filter", "js.filter"),
    new GithubProject("Map", "js.map"),
    new GithubProject("Reduce", "js.reduce"),
    new GithubProject("Criar Tags", "js.criar.tags"),
    new GithubProject("Inserir Tags", "js.inserir.tags"),
    new GithubProject("Remover Tags", "js.remover.tags"),
    new GithubProject("Criar Tags com JQuery", "js.criar.tags"),
    new GithubProject("Inserir Tags com JQuery", "js.inserir.tags"),
    new GithubProject("Remover Tags com JQuery", "js.remover.tags"))
  // Final project milestone: a code-review tool.
  private def codeReview = Seq(
    new GithubProject("Code Review", "project.review.setup"),
    new GithubProject("Tela que recebe diretório", "project.review.dir"),
    new GithubProject("Exibir os arquivos", "project.review.show.files"),
    new GithubProject("Syntax Highlighting", "project.review.highlight"),
    new GithubProject("Anotações", "project.review.annotation"),
    new GithubProject("Exibir o resultado do code review", "project.review.result"))
  def description = "Aprenda a construir aplicações web usando os frameworks que quiser"
  override def hasTutor = true
  override def name = "Desenvolvimento Web"
}
| vidageek/games | games/webdev/src/main/scala/vggames/webdev/WebdevGame.scala | Scala | gpl-3.0 | 3,824 |
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// Licence: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.indexer
import java.sql.SQLException
import akka.actor._
import akka.event.slf4j.SLF4JLogging
import org.apache.commons.vfs2._
import org.ensime.api._
import org.ensime.indexer.DatabaseService._
import org.ensime.util.file._
import scala.util.Failure
import scala.util.Success
import scala.concurrent._
import scala.concurrent.duration._
/**
* Provides methods to perform ENSIME-specific indexing tasks,
* receives events that require an index update, and provides
* searches against the index.
*
* We have an H2 database for storing relational information
* and Lucene for advanced indexing.
*/
class SearchService(
    config: EnsimeConfig,
    resolver: SourceResolver
)(
    implicit
    actorSystem: ActorSystem,
    vfs: EnsimeVFS
) extends ClassfileIndexer
    with FileChangeListener
    with SLF4JLogging {

  // Upper bound for the synchronous query methods below (searchClasses etc.).
  private val QUERY_TIMEOUT = 30 seconds

  /**
   * Changelog:
   *
   * 1.0 - reverted index due to negative impact to startup time. The
   *       workaround to large scale deletions is to just nuke the
   *       .ensime_cache.
   *
   * 1.1 - added index to FileCheck.file to speed up delete.
   *
   * 1.0 - initial schema
   */
  private val version = "1.0"

  // Lucene index (free-form search) and H2 database (relational lookups);
  // both are versioned on disk so schema changes force a clean rebuild.
  private val index = new IndexService(config.cacheDir / ("index-" + version))
  private val db = new DatabaseService(config.cacheDir / ("sql-" + version))

  implicit val workerEC = actorSystem.dispatchers.lookup("akka.search-service-dispatcher")

  // Lists the classfiles under a directory; findFiles returns null (not empty)
  // when nothing matches, hence the null guard.
  private def scan(f: FileObject) = f.findFiles(EnsimeVFS.ClassfileSelector) match {
    case null => Nil
    case res => res.toList
  }

  /**
   * Indexes everything, making best endeavours to avoid scanning what
   * is unnecessary (e.g. we already know that a jar or classfile has
   * been indexed).
   *
   * @return the number of rows (removed, indexed) from the database.
   */
  def refresh(): Future[(Int, Int)] = {
    // it is much faster during startup to obtain the full list of
    // known files from the DB then and check against the disk, than
    // check each file against DatabaseService.outOfDate
    def findStaleFileChecks(checks: Seq[FileCheck]): List[FileCheck] = {
      log.info("findStaleFileChecks")
      for {
        check <- checks
        name = check.file.getName.getURI
        if !check.file.exists || check.changed
      } yield check
    }.toList

    // delete the stale data before adding anything new
    // returns number of rows deleted
    def deleteReferences(checks: List[FileCheck]): Future[Int] = {
      log.info(s"removing ${checks.size} stale files from the index")
      deleteInBatches(checks.map(_.file))
    }

    // a snapshot of everything that we want to index: module targets,
    // dependency jars and the JDK libraries
    def findBases(): Set[FileObject] = {
      log.info("findBases")
      config.modules.flatMap {
        case (name, m) =>
          m.targetDirs.flatMap {
            case d if !d.exists() => Nil
            case d if d.isJar => List(vfs.vfile(d))
            case d => scan(vfs.vfile(d))
          } ::: m.testTargetDirs.flatMap {
            case d if !d.exists() => Nil
            case d if d.isJar => List(vfs.vfile(d))
            case d => scan(vfs.vfile(d))
          } :::
            m.compileJars.map(vfs.vfile) ::: m.testJars.map(vfs.vfile)
      }
    }.toSet ++ config.javaLibs.map(vfs.vfile)

    // index one base unless its FileCheck proves it is unchanged;
    // the Lucene commit is deferred to the single commitIndex() below
    def indexBase(base: FileObject, fileCheck: Option[FileCheck]): Future[Option[Int]] = {
      val outOfDate = fileCheck.map(_.changed).getOrElse(true)
      if (!outOfDate) Future.successful(None)
      else {
        val check = FileCheck(base)
        extractSymbolsFromClassOrJar(base).flatMap(persist(check, _, commitIndex = false))
      }
    }

    // index all the given bases and return number of rows written
    def indexBases(bases: Set[FileObject], checks: Seq[FileCheck]): Future[Int] = {
      log.info("Indexing bases...")
      val checksLookup: Map[String, FileCheck] = checks.map(check => (check.filename -> check)).toMap
      val basesWithChecks: Set[(FileObject, Option[FileCheck])] = bases.map { base =>
        (base, checksLookup.get(base.getName().getURI()))
      }
      Future.sequence(basesWithChecks.map { case (file, check) => indexBase(file, check) }).map(_.flatten.sum)
    }

    def commitIndex(): Future[Unit] = Future {
      blocking {
        log.debug("committing index to disk...")
        index.commit()
        log.debug("...done committing index")
      }
    }

    // chain together all the future tasks
    for {
      checks <- db.knownFiles()
      stale = findStaleFileChecks(checks)
      deletes <- deleteReferences(stale)
      bases = findBases()
      added <- indexBases(bases, checks)
      _ <- commitIndex()
    } yield (deletes, added)
  }

  /** Re-reads the source roots so resolve() sees newly added source files. */
  def refreshResolver(): Unit = resolver.update()

  /** Writes symbols to both Lucene and H2; the H2 row count is returned. */
  def persist(check: FileCheck, symbols: List[FqnSymbol], commitIndex: Boolean): Future[Option[Int]] = {
    val iwork = Future { blocking { index.persist(check, symbols, commitIndex) } }
    val dwork = db.persist(check, symbols)
    iwork.flatMap { _ => dwork }
  }

  /** Extracts all public symbols from a single classfile or from every classfile in a jar. */
  def extractSymbolsFromClassOrJar(file: FileObject): Future[List[FqnSymbol]] = file match {
    case classfile if classfile.getName.getExtension == "class" =>
      // too noisy to log individual classfiles
      Future {
        blocking {
          try extractSymbols(classfile, classfile)
          finally classfile.close()
        }
      }
    case jar =>
      log.debug(s"indexing $jar")
      Future {
        blocking {
          val vJar = vfs.vjar(jar)
          try scan(vJar) flatMap (extractSymbols(jar, _))
          finally vJar.close()
        }
      }
  }

  // JDK-internal packages we never index, and compiler-generated name
  // fragments whose symbols are filtered out of the results.
  private val blacklist = Set("sun/", "sunw/", "com/sun/")
  private val ignore = Set("$$anon$", "$$anonfun$", "$worker$")
  import org.ensime.util.RichFileObject._
  private def extractSymbols(container: FileObject, f: FileObject): List[FqnSymbol] = {
    f.pathWithinArchive match {
      case Some(relative) if blacklist.exists(relative.startsWith) => Nil
      case _ =>
        val name = container.getName.getURI
        val path = f.getName.getURI
        val (clazz, refs) = indexClassfile(f)

        val depickler = new ClassfileDepickler(f)

        val source = resolver.resolve(clazz.name.pack, clazz.source)
        val sourceUri = source.map(_.getName.getURI)

        // TODO: other types of visibility when we get more sophisticated
        if (clazz.access != Public) Nil
        else FqnSymbol(None, name, path, clazz.name.fqnString, None, None, sourceUri, clazz.source.line) ::
          clazz.methods.toList.filter(_.access == Public).map { method =>
            val descriptor = method.descriptor.descriptorString
            FqnSymbol(None, name, path, method.name.fqnString, Some(descriptor), None, sourceUri, method.line)
          } ::: clazz.fields.toList.filter(_.access == Public).map { field =>
            val internal = field.clazz.internalString
            FqnSymbol(None, name, path, field.name.fqnString, None, Some(internal), sourceUri, clazz.source.line)
          } ::: depickler.getTypeAliases.toList.filter(_.access == Public).map { rawType =>
            FqnSymbol(None, name, path, rawType.fqnString, None, None, sourceUri, None)
          }
    }
  }.filterNot(sym => ignore.exists(sym.fqn.contains))

  // TODO: provide context (user's current module and main/test)
  /** free-form search for classes */
  def searchClasses(query: String, max: Int): List[FqnSymbol] = {
    val fqns = index.searchClasses(query, max)
    Await.result(db.find(fqns), QUERY_TIMEOUT) take max
  }

  /** free-form search for classes and methods */
  def searchClassesMethods(terms: List[String], max: Int): List[FqnSymbol] = {
    val fqns = index.searchClassesMethods(terms, max)
    Await.result(db.find(fqns), QUERY_TIMEOUT) take max
  }

  /** only for exact fqns */
  def findUnique(fqn: String): Option[FqnSymbol] = Await.result(db.find(fqn), QUERY_TIMEOUT)

  /* DELETE then INSERT in H2 is ridiculously slow, so we put all modifications
   * into a blocking queue and dedicate a thread to block on draining the queue.
   * This has the effect that we always react to a single change on disc but we
   * will work through backlogs in bulk.
   *
   * We always do a DELETE, even if the entries are new, but only INSERT if
   * the list of symbols is non-empty.
   */
  val backlogActor = actorSystem.actorOf(Props(new IndexingQueueActor(this)), "ClassfileIndexer")

  // deletion in both Lucene and H2 is really slow, batching helps
  def deleteInBatches(
    files: List[FileObject],
    batchSize: Int = 1000
  ): Future[Int] = {
    val removing = files.grouped(batchSize).map(delete)
    Future.sequence(removing).map(_.sum)
  }

  // returns number of rows removed
  def delete(files: List[FileObject]): Future[Int] = {
    // this doesn't speed up Lucene deletes, but it means that we
    // don't wait for Lucene before starting the H2 deletions.
    val iwork = Future { blocking { index.remove(files) } }
    val dwork = db.removeFiles(files)
    iwork.flatMap(_ => dwork)
  }

  // FileChangeListener: any change, removal or addition triggers a re-index
  // request, debounced and batched by the backlog actor.
  def fileChanged(f: FileObject): Unit = backlogActor ! IndexFile(f)
  def fileRemoved(f: FileObject): Unit = fileChanged(f)
  def fileAdded(f: FileObject): Unit = fileChanged(f)

  def shutdown(): Future[Unit] = {
    db.shutdown()
  }
}
// Message asking the indexing queue actor to (re-)index the given file.
case class IndexFile(f: FileObject)
/**
 * Debounces and batches file-change notifications, then drives the
 * delete-then-insert cycle against the search service. Batching exists
 * because DELETE/INSERT in H2 (and deletes in Lucene) are far faster in
 * bulk than per-file.
 */
class IndexingQueueActor(searchService: SearchService) extends Actor with ActorLogging {
  import context.system

  import scala.concurrent.duration._

  // Self-sent tick telling us to drain (part of) the backlog.
  case object Process

  // De-dupes files that have been updated since we were last told to
  // index them. No need to aggregate values: the latest wins. Key is
  // the URI because FileObject doesn't implement equals
  var todo = Map.empty[String, FileObject]

  // debounce and give us a chance to batch (which is *much* faster)
  var worker: Cancellable = _

  /** (Re)schedules the batch processor, cancelling any pending tick. */
  private def debounce(): Unit = {
    Option(worker).foreach(_.cancel())
    import context.dispatcher
    worker = system.scheduler.scheduleOnce(5 seconds, self, Process)
  }

  override def receive: Receive = {
    case IndexFile(f) =>
      todo += f.getName.getURI -> f
      debounce()

    case Process if todo.isEmpty => // nothing to do

    case Process =>
      // Work through the backlog in bounded batches; reschedule ourselves
      // if anything remains.
      val (batch, remaining) = todo.splitAt(500)
      todo = remaining
      if (remaining.nonEmpty)
        debounce()

      import searchService.workerEC

      log.debug(s"Indexing ${batch.size} files")
      Future.sequence(batch.map {
        case (_, f) =>
          // A deleted file contributes no symbols, but still needs its
          // stale rows removed below.
          if (!f.exists()) Future.successful(f -> Nil)
          // FIX: was `.map(f -> )`, which is a syntax error — the `_`
          // placeholder pairing the file with its extracted symbols was
          // missing.
          else searchService.extractSymbolsFromClassOrJar(f).map(f -> _)
      }).onComplete {
        case Failure(t) =>
          log.error(s"failed to index batch of ${batch.size} files", t)
        case Success(indexed) =>
          // Always DELETE stale entries first, then INSERT only files that
          // actually produced symbols.
          searchService.delete(indexed.map(_._1)(collection.breakOut)).onComplete {
            case Failure(t) => log.error(s"failed to remove stale entries in ${batch.size} files", t)
            case Success(_) => indexed.collect {
              case (file, syms) if syms.isEmpty => // nothing to persist
              case (file, syms) =>
                searchService.persist(FileCheck(file), syms, commitIndex = true).onComplete {
                  case Failure(t) => log.error(s"failed to persist entries in $file", t)
                  case Success(_) =>
                }
            }
          }
      }
  }
}
| j-mckitrick/ensime-sbt | src/sbt-test/ensime-sbt/ensime-server/core/src/main/scala/org/ensime/indexer/SearchService.scala | Scala | apache-2.0 | 11,496 |
package reactivemongo.api.bson.collection
import reactivemongo.api.{
CollectionMetaCommands,
DB,
FailoverStrategy,
ReadPreference,
Serialization
}
import reactivemongo.api.collections.GenericCollection
/**
* A Collection that interacts with the BSON library.
*/
private[reactivemongo] final class CollectionImpl(
  val db: DB,
  val name: String,
  val failoverStrategy: FailoverStrategy,
  override val readPreference: ReadPreference) extends GenericCollection[Serialization.Pack] with CollectionMetaCommands { self =>

  // Serialization pack used by the default (BSON) collection implementation.
  val pack: Serialization.Pack = Serialization.internalSerializationPack

  /** Returns a copy of this collection that reads with the given preference. */
  def withReadPreference(pref: ReadPreference): Serialization.DefaultCollection = new CollectionImpl(db, name, failoverStrategy, pref)
}
| ReactiveMongo/ReactiveMongo | driver/src/main/scala-2.13-/api/bson/collection/CollectionImpl.scala | Scala | apache-2.0 | 747 |
package org.kimbasoft.akka.dispatcher
import akka.actor.{Actor, Props}
import org.kimbasoft.akka.dispatcher.DispatcherActor.Exceptions.IllegalRequestException
import org.kimbasoft.akka.dispatcher.DispatcherActor.Messages.{DispatcherResponse, DispatcherRequest}
import scala.util.{Failure, Try}
/**
* Missing documentation.
*
* @author <a href="steffen.krause@soabridge.com">Steffen Krause</a>
* @since 1.0
*/
/**
 * Demo actor that prints incoming requests together with the dispatcher
 * it runs on, and rejects anything it does not understand.
 */
class DispatcherActor extends Actor {

  // Dispatcher executing this actor; included in each printed line.
  val dispatcher = context.dispatcher
  // Short actor name taken from its path.
  val name = self.path.name

  def receive: Receive = {
    case DispatcherRequest(text) => handleRequest(text)
    case _                       => sender ! DispatcherResponse(Failure(IllegalRequestException))
  }

  // Simulates slow work, then prints which dispatcher processed the message.
  private def handleRequest(text: String): Unit = {
    Thread.sleep(1500)
    println(s"$name [$dispatcher]: $text")
  }
}
object DispatcherActor {
  // Props for creating this actor via its no-argument constructor.
  val props = Props[DispatcherActor]

  // Failure causes carried inside DispatcherResponse.
  object Exceptions {
    case object IllegalRequestException extends RuntimeException
  }

  // Message protocol understood by DispatcherActor.
  object Messages {
    // Request carrying the text for the actor to print.
    case class DispatcherRequest(message: String)
    // Reply wrapping either a result or the failure cause.
    case class DispatcherResponse(response: Try[String])
  }
}
| kimba74/sandbox-scala | src/main/scala/org/kimbasoft/akka/dispatcher/DispatcherActor.scala | Scala | gpl-3.0 | 1,042 |
package com.stovokor.editor.state
import com.jme3.app.state.AppStateManager
import com.jme3.app.Application
import com.jme3.math.Vector3f
import com.jme3.input.controls.ActionListener
import com.jme3.input.KeyInput
import com.jme3.input.controls.KeyTrigger
import com.simsilica.lemur.input.AnalogFunctionListener
import com.simsilica.lemur.input.StateFunctionListener
import com.simsilica.lemur.input.FunctionId
import com.simsilica.lemur.input.InputState
import com.stovokor.editor.input.InputFunction
import com.jme3.input.InputManager
import com.jme3.input.FlyByCamera
import com.jme3.input.CameraInput
import com.jme3.input.controls.MouseAxisTrigger
import com.jme3.input.controls.MouseButtonTrigger
import com.jme3.input.MouseInput
import com.jme3.light.AmbientLight
import com.jme3.math.ColorRGBA
import com.jme3.post.filters.FogFilter
import com.jme3.post.ssao.SSAOFilter
import com.jme3.post.FilterPostProcessor
import com.stovokor.util.EditorEventListener
import com.stovokor.util.ToggleEffects
import com.stovokor.util.EditorEvent
import com.stovokor.util.EventBus
// App state that toggles post-processing effects (SSAO + fog) on the
// editor's viewport in response to ToggleEffects editor events.
class EffectsState extends BaseState
    with CanMapInput
    with AnalogFunctionListener
    with StateFunctionListener
    with EditorEventListener {

  // Filter chain holding the effects; built once in initialize.
  var fpp: FilterPostProcessor = null
  // Tracks whether the chain is currently attached to the viewport.
  var effectsEnabled = false

  override def initialize(stateManager: AppStateManager, simpleApp: Application) {
    super.initialize(stateManager, simpleApp)
    // Ambient occlusion plus a black fog filter.
    fpp = new FilterPostProcessor(assetManager)
    fpp.addFilter(new SSAOFilter(4, 1.2f, 0.2f, 0.1f))
    fpp.addFilter(new FogFilter(ColorRGBA.Black, 2f, 100))
    EventBus.subscribe(this, ToggleEffects())
    setupInput
    setEffectsEnabled(true)
  }

  override def cleanup() {
    EventBus.removeFromAll(this)
    // Detach the processor from the viewport on teardown.
    setEffectsEnabled(false)
  }

  // Flips the effect chain on/off when a ToggleEffects event arrives.
  def onEvent(event: EditorEvent) = event match {
    case ToggleEffects() => setEffectsEnabled(!effectsEnabled)
    case _ =>
  }

  def setEffectsEnabled(value: Boolean) {
    effectsEnabled = value
    if (value) {
      app.getViewPort.addProcessor(fpp)
    } else {
      // Guard: cleanup can run even if initialize never built the chain.
      if (fpp != null) app.getViewPort.removeProcessor(fpp)
    }
  }

  override def update(tpf: Float) {
  }

  // Input mapping intentionally empty: this state reacts only to editor events.
  def setupInput() = {
  }

  def valueActive(func: FunctionId, value: Double, tpf: Double) {
  }

  def valueChanged(func: FunctionId, value: InputState, tpf: Double) {
  }
} | jcfandino/leveleditor | src/main/scala/com/stovokor/editor/state/EffectsState.scala | Scala | bsd-3-clause | 2352 |
package akka.http.extensions.utils
import scala.collection.immutable.Map
object BiMap {

  /**
   * Builds a BiMap from key/value pairs. When pairs collide, the last
   * occurrence wins independently in each direction.
   */
  def apply[A, B](elems: (A, B)*): BiMap[A, B] = {
    val forward = elems.toMap
    val backward = elems.map { case (key, value) => value -> key }.toMap
    DirectBiMap(forward, backward)
  }

  /** A BiMap with no entries. */
  def empty[A, B]: BiMap[A, B] = DirectBiMap(Map.empty[A, B], Map.empty[B, A])

  /** Wraps two pre-built maps (expected to be mutual inverses) as a BiMap. */
  def apply[A, B](forward: Map[A, B], backward: Map[B, A]): BiMap[A, B] = DirectBiMap(forward, backward)

  /** Extracts the forward and backward maps from a BiMap. */
  def unapply[A, B](mp: BiMap[A, B]): Option[(Map[A, B], Map[B, A])] = Some((mp.forward, mp.backward))

  /** Canonical BiMap backed directly by the two maps. */
  case class DirectBiMap[A, B](forward: Map[A, B], backward: Map[B, A]) extends BiMap[A, B] {
    lazy val inverse: BiMap[B, A] = InverseBiMap(this)
  }

  /** View of another BiMap with the two directions swapped. */
  case class InverseBiMap[A, B](inverse: BiMap[B, A]) extends BiMap[A, B] {
    override def forward: Map[A, B] = inverse.backward
    override def backward: Map[B, A] = inverse.forward
  }
}
/**
* This allows a bi-directional map to be created from any two maps.
* These maps must be the inverse of each other to work.
*/
trait BiMap[A,B] extends Map[A,B] {
  self=>
  // The two directions; implementations must keep them mutually inverse.
  def forward:Map[A,B]
  def backward:Map[B,A]
  /** The same mapping viewed in the opposite direction. */
  def inverse: BiMap[B,A]
  // Adds a pair to both directions. NOTE(review): the asInstanceOf cast is
  // a documented compiler workaround (see inline comment) — leave as-is.
  override def +[B1 >: B](kv: (A, B1)): BiMap[A,B1] = {
    val binv = backward.toMap[B1,A] + (kv._2, kv._1).asInstanceOf[(B1,A)] //does not compile without this
    BiMap[A,B1](self.forward + kv, binv)
  }
  // Lookup and iteration delegate to the forward direction.
  override def get(key: A): Option[B] = forward.get(key)
  override def iterator: Iterator[(A, B)] = forward.iterator
  /** True if some key maps to the given value (reverse-direction lookup). */
  def containsValue(value: B): Boolean = inverse.contains(value)
  // Removing a key also removes its value from the backward map; removing
  // an absent key returns this map unchanged.
  override def -(key: A): BiMap[A, B] = self.forward.get(key) match {
    case Some(value)=>
      val b = self.backward-value
      BiMap[A,B](self.forward -key, b)
    case None=> this
  }
}
| denigma/akka-http-extensions | extensions/src/main/scala/akka/http/extensions/utils/BiMap.scala | Scala | mpl-2.0 | 1,716 |
package com.github.ldaniels528.trifecta.sjs.services
import io.scalajs.npm.angularjs.Service
import io.scalajs.npm.angularjs.http.{Http, HttpResponse}
import scala.scalajs.js
/**
* Configuration Service
* @author lawrence.daniels@gmail.com
*/
/**
 * Angular service that exposes the server-side configuration.
 * @author lawrence.daniels@gmail.com
 */
class ConfigService($http: Http) extends Service {

  // REST endpoint serving the application configuration.
  private val configEndpoint = "/api/config"

  /**
   * Fetches the current configuration from the server.
   * @return an HTTP response wrapping the [[js.Dictionary configuration]]
   */
  def getConfig: HttpResponse[js.Dictionary[js.Any]] =
    $http.get[js.Dictionary[js.Any]](configEndpoint)
}
| ldaniels528/trifecta | app-js/src/main/scala/com/github/ldaniels528/trifecta/sjs/services/ConfigService.scala | Scala | apache-2.0 | 569 |
package extruder.metrics.dropwizard.keyed
import extruder.cats.effect.EvalValidation
import extruder.core.Encode
import extruder.metrics.data.Metrics
import io.dropwizard.metrics5.MetricRegistry
trait DropwizardKeyedEncoder extends Encode { self: DropwizardKeyedDataSource =>
  // Intermediate representation produced while encoding.
  override type EncodeData = Metrics
  // Final output: metrics registered into a Dropwizard registry.
  override type OutputData = MetricRegistry
  // Default effect for encoding; presumably combines lazy evaluation with
  // accumulated validation errors — confirm against extruder.cats.effect.
  override type EncodeDefault[A] = EvalValidation[A]
}
| janstenpickle/extruder | metrics/dropwizard/src/main/scala/extruder/metrics/dropwizard/keyed/DropwizardKeyedEncoder.scala | Scala | mit | 414 |
package metermen.client.util
import java.io.PrintWriter
import java.nio.file.Paths
import com.jeff.dsl.util.Util._
import scala.collection.mutable.ListBuffer
/**
* Class to manage writing data down to a csv file.
*/
object CSVMan {

  /**
   * Writes the given named series to a CSV file: one header row with the
   * series names, then one row per index with the corresponding values
   * from each series, comma-separated.
   *
   * Fixes over the previous version:
   *  - the PrintWriter is now closed in a finally block (it leaked if any
   *    write threw);
   *  - empty `results` no longer crashes on `iters.head` — only the
   *    (empty) header row is written;
   *  - the project-specific `loop` helper is replaced with standard
   *    ranges (its use here required 0-until-n iteration, as the values
   *    are used directly as collection indices).
   *
   * Note: all series are assumed to have at least as many values as the
   * first one; shorter series will raise IndexOutOfBoundsException, as
   * before.
   *
   * @param name    path of the CSV file to create/overwrite
   * @param results list of (series name, series values) pairs
   */
  def write(name: String, results: List[(String, List[Double])]): Unit = {
    val file = Paths.get(name).toFile
    file.createNewFile()
    file.setWritable(true)
    val writer = new PrintWriter(file)
    try {
      // Header row: series names joined by commas.
      writer.write(results.map(_._1).mkString(","))
      writer.write("\n")
      val columns = results.map(_._2)
      if (columns.nonEmpty) {
        // One row per index of the first series; each row joins the i-th
        // value of every series.
        for (row <- columns.head.indices) {
          writer.write(columns.map(col => col(row)).mkString(","))
          writer.write("\n")
        }
      }
      writer.flush()
    } finally {
      writer.close()
    }
  }
}
| jregistr/Academia | CSC445-Computer-Networks/HW1/client/src/main/scala/metermen/client/util/CSVMan.scala | Scala | mit | 911 |
/**
* Copyright (C) 2009-2017 Lightbend Inc. <http://www.lightbend.com>
*/
package akka.actor
import akka.actor.Deploy.{ NoDispatcherGiven, NoMailboxGiven }
import akka.dispatch._
import akka.routing._
import scala.collection.immutable
import scala.language.existentials
import scala.reflect.ClassTag
/**
* Factory for Props instances.
*
* Props is a ActorRef configuration object, that is immutable, so it is thread safe and fully sharable.
*
* Used when creating new actors through <code>ActorSystem.actorOf</code> and <code>ActorContext.actorOf</code>.
*/
object Props extends AbstractProps {

  /**
   * The defaultCreator, simply throws an UnsupportedOperationException when applied, which is used when creating a Props
   */
  final val defaultCreator: () ⇒ Actor = () ⇒ throw new UnsupportedOperationException("No actor creator specified!")

  /**
   * The defaultRoutedProps is NoRouter which is used when creating a Props
   */
  final val defaultRoutedProps: RouterConfig = NoRouter

  /**
   * The default Deploy instance which is used when creating a Props
   */
  final val defaultDeploy = Deploy()

  /**
   * A Props instance whose creator will create an actor that doesn't respond to any message
   */
  final val empty = Props[EmptyActor]

  /**
   * The default Props instance, uses the settings from the Props object starting with default*.
   */
  final val default = Props(defaultDeploy, classOf[CreatorFunctionConsumer], List(defaultCreator))

  /**
   * INTERNAL API
   *
   * (Not because it is so immensely complicated, only because we might remove it if no longer needed internally)
   */
  private[akka] class EmptyActor extends Actor {
    def receive = Actor.emptyBehavior
  }

  /**
   * Scala API: Returns a Props that has default values except for "creator" which will be a function that creates an instance
   * of the supplied type using the default constructor.
   */
  def apply[T <: Actor: ClassTag](): Props = apply(defaultDeploy, implicitly[ClassTag[T]].runtimeClass, List.empty)

  /**
   * Scala API: Returns a Props that has default values except for "creator" which will be a function that creates an instance
   * using the supplied thunk.
   *
   * CAVEAT: Required mailbox type cannot be detected when using anonymous mixin composition
   * when creating the instance. For example, the following will not detect the need for
   * `DequeBasedMessageQueueSemantics` as defined in `Stash`:
   * {{{
   * 'Props(new Actor with Stash { ... })
   * }}}
   * Instead you must create a named class that mixin the trait,
   * e.g. `class MyActor extends Actor with Stash`.
   */
  def apply[T <: Actor: ClassTag](creator: ⇒ T): Props =
    mkProps(implicitly[ClassTag[T]].runtimeClass, () ⇒ creator)

  // Wraps the by-name creator in a TypedCreatorFunctionConsumer, keeping the
  // concrete actor class available alongside the thunk.
  private def mkProps(classOfActor: Class[_], ctor: () ⇒ Actor): Props =
    Props(classOf[TypedCreatorFunctionConsumer], classOfActor, ctor)

  /**
   * Scala API: create a Props given a class and its constructor arguments.
   */
  def apply(clazz: Class[_], args: Any*): Props = apply(defaultDeploy, clazz, args.toList)
}
/**
* Props is a configuration object using in creating an [[Actor]]; it is
* immutable, so it is thread-safe and fully shareable.
*
* Examples on Scala API:
* {{{
* val props = Props.empty
* val props = Props[MyActor]
* val props = Props(classOf[MyActor], arg1, arg2)
*
* val otherProps = props.withDispatcher("dispatcher-id")
* val otherProps = props.withDeploy(<deployment info>)
* }}}
*
* Examples on Java API:
* {{{
* final Props props = Props.empty();
* final Props props = Props.create(MyActor.class, arg1, arg2);
*
* final Props otherProps = props.withDispatcher("dispatcher-id");
* final Props otherProps = props.withDeploy(<deployment info>);
* }}}
*/
@SerialVersionUID(2L)
final case class Props(deploy: Deploy, clazz: Class[_], args: immutable.Seq[Any]) {

  // Validates `clazz` eagerly at construction time.
  Props.validate(clazz)

  // derived property, does not need to be serialized
  @transient
  private[this] var _producer: IndirectActorProducer = _

  // derived property, does not need to be serialized
  @transient
  private[this] var _cachedActorClass: Class[_ <: Actor] = _

  /**
   * INTERNAL API
   */
  private[akka] def producer: IndirectActorProducer = {
    // Lazily (re)built because the field is transient and therefore null
    // after deserialization.
    if (_producer eq null)
      _producer = IndirectActorProducer(clazz, args)
    _producer
  }

  private[this] def cachedActorClass: Class[_ <: Actor] = {
    if (_cachedActorClass eq null)
      _cachedActorClass = producer.actorClass
    _cachedActorClass
  }

  // validate producer constructor signature; throws IllegalArgumentException if invalid
  producer

  /**
   * Convenience method for extracting the dispatcher information from the
   * contained [[Deploy]] instance.
   */
  def dispatcher: String = deploy.dispatcher match {
    case NoDispatcherGiven ⇒ Dispatchers.DefaultDispatcherId
    case x ⇒ x
  }

  /**
   * Convenience method for extracting the mailbox information from the
   * contained [[Deploy]] instance.
   */
  def mailbox: String = deploy.mailbox match {
    case NoMailboxGiven ⇒ Mailboxes.DefaultMailboxId
    case x ⇒ x
  }

  /**
   * Convenience method for extracting the router configuration from the
   * contained [[Deploy]] instance.
   */
  def routerConfig: RouterConfig = deploy.routerConfig

  /**
   * Returns a new Props with the specified dispatcher set.
   */
  def withDispatcher(d: String): Props = deploy.dispatcher match {
    case NoDispatcherGiven ⇒ copy(deploy = deploy.copy(dispatcher = d))
    // Avoids allocating a copy when the dispatcher is already the same.
    case x ⇒ if (x == d) this else copy(deploy = deploy.copy(dispatcher = d))
  }

  /**
   * Returns a new Props with the specified mailbox set.
   */
  def withMailbox(m: String): Props = deploy.mailbox match {
    case NoMailboxGiven ⇒ copy(deploy = deploy.copy(mailbox = m))
    // Avoids allocating a copy when the mailbox is already the same.
    case x ⇒ if (x == m) this else copy(deploy = deploy.copy(mailbox = m))
  }

  /**
   * Returns a new Props with the specified router config set.
   */
  def withRouter(r: RouterConfig): Props = copy(deploy = deploy.copy(routerConfig = r))

  /**
   * Returns a new Props with the specified deployment configuration.
   * The given deploy takes precedence; the existing one serves as fallback.
   */
  def withDeploy(d: Deploy): Props = copy(deploy = d withFallback deploy)

  /**
   * Obtain an upper-bound approximation of the actor class which is going to
   * be created by these Props. In other words, the actor factory method will
   * produce an instance of this class or a subclass thereof. This is used by
   * the actor system to select special dispatchers or mailboxes in case
   * dependencies are encoded in the actor type.
   */
  def actorClass(): Class[_ <: Actor] = cachedActorClass

  /**
   * INTERNAL API
   *
   * Create a new actor instance. This method is only useful when called during
   * actor creation by the ActorSystem, i.e. for user-level code it can only be
   * used within the implementation of [[IndirectActorProducer#produce]].
   */
  private[akka] def newActor(): Actor = {
    producer.produce()
  }
}
| rorygraves/perf_tester | corpus/akka/akka-actor/src/main/scala/akka/actor/Props.scala | Scala | apache-2.0 | 6,968 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of code snippets matching specific criteria, giving a quick overview of the dataset's contents without deeper analysis.