code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1
value | license stringclasses 15
values | size int64 5 1M |
|---|---|---|---|---|---|
package com.geishatokyo.tezcatlipoca.assign
import com.geishatokyo.tezcatlipoca.description.ClassDesc
/**
 * Factory abstraction for producing an [[AssignTemplate]] that maps values of
 * one class onto another, based on their class descriptions.
 *
 * User: takeshita
 * Create: 12/07/01 19:10
 */
trait AssignTemplateBuilder {
  /**
   * Builds an assignment template from the source class description to the
   * target class description.
   *
   * @param from description of the source class
   * @param to   description of the destination class
   * @return a template capable of assigning values from `from` to `to`
   */
  def buildAssignTemplate( from : ClassDesc[_], to : ClassDesc[_]) : AssignTemplate[_,_]
}
| takezoux2/tezcatlipoca | src/main/scala/com/geishatokyo/tezcatlipoca/assign/AssignTemplateBuilder.scala | Scala | mit | 284 |
package sclib.ops
import org.scalatest.{FunSuite, Matchers}
import sclib.ops.list._
/** Exercises the list anamorphisms provided by `sclib.ops.list.ListOps`. */
class ListOpsSuite extends FunSuite with Matchers {

  test("unfoldRight") {
    // Generate 0..5 by growing the seed until it exceeds 5.
    val generated = ListOps.unfoldRight(0) { seed =>
      if (seed > 5) None
      else Some(seed -> (seed + 1))
    }
    generated should be(0.to(5).toList)
  }

  test("unfoldLeft") {
    // Same seeds, but unfoldLeft accumulates in reverse: 5 down to 0.
    val generated = ListOps.unfoldLeft(0) { seed =>
      if (seed > 5) None
      else Some((seed + 1) -> seed)
    }
    generated should be(5.to(0, -1).toList)
  }
}
| j-keck/sclib | src/test/scala/sclib/ops/ListOpsSuite.scala | Scala | mit | 432 |
package epic.sequences
import java.io._
import breeze.config.{Configuration, CommandLineParser}
import epic.ontonotes.{NerType, ConllOntoReader}
import collection.mutable.ArrayBuffer
import breeze.linalg.DenseVector
import epic.framework.{Example, ModelObjective}
import breeze.optimize._
import breeze.util.Encoder
import epic.trees.Span
import breeze.optimize.FirstOrderMinimizer.OptParams
import breeze.util.Implicits._
import com.typesafe.scalalogging.slf4j.LazyLogging
import epic.preprocess.TreebankTokenizer
import epic.corpora.CONLLSequenceReader
import epic.framework.Example
/**
*
* @author dlwh
*/
/**
 * Trains a semi-CRF NER model from OntoNotes CoNLL files, periodically
 * evaluating on a held-out split, and serializes the resulting CRF.
 *
 * @author dlwh
 */
object SemiNerPipeline extends LazyLogging {

  // Command-line configuration; `opt` carries the optimizer settings.
  case class Params(path: File,
                    modelOut: File = new File("ner.model.gz"),
                    nfiles: Int = 100000,
                    iterPerEval: Int = 20,
                    nthreads: Int = -1,
                    opt: OptParams,
                    checkGradient: Boolean = false,
                    lowerCaseAndStripPunct: Boolean = false,
                    exclude: String = "")

  def main(args: Array[String]) {
    val params = CommandLineParser.readIn[Params](args)
    logger.info("Command line arguments for recovery:\n" + Configuration.fromObject(params).toCommandLineString)
    // NER label types to drop entirely, given as a comma-separated list.
    val excludeSet = params.exclude.split(",").map(NerType.fromString(_)).toSet
    val (train, test) = {
      // One segmentation instance per sentence across all requested files.
      var instances = for {
        file <- params.path.listFiles take params.nfiles
        doc <- ConllOntoReader.readDocuments(file)
        s <- doc.sentences
      } yield s.nerSegmentation
      instances = instances.map(_.filterLabels(x => !excludeSet(x)))
      if (params.lowerCaseAndStripPunct) {
        // Drop punctuation-only tokens and normalize case.
        instances = instances.map(_.filterWords(_.exists(_.isLetterOrDigit)).mapWords(_.toLowerCase))
      }
      // 90/10 train/test split, in file order.
      instances.splitAt(instances.length * 9 / 10)
    }
    val gazetteer = None//Gazetteer.ner("en")
    // build feature Index
    val model = new SegmentationModelFactory(gazetteer = gazetteer).makeModel(train)
    val obj = new ModelObjective(model, train, params.nthreads)
    val cached = new CachedBatchDiffFunction(obj)
    if (params.checkGradient) {
      // Sanity-check analytic gradients against finite differences.
      GradientTester.test(cached, obj.initialWeightVector(true), toString = {(x: Int) => model.featureIndex.get(x).toString})
    }
    // Evaluate the current weights on the held-out split.
    def eval(state: FirstOrderMinimizer[DenseVector[Double], BatchDiffFunction[DenseVector[Double]]]#State) {
      val crf = model.extractCRF(state.x)
      println("Eval + " + (state.iter+1) + " " + SegmentationEval.eval(crf, test))
    }
    // Optimize, evaluating every `iterPerEval` iterations via the tee side channel.
    val finalState = params.opt.iterations(cached, obj.initialWeightVector(randomize=false)).tee(state => if ((state.iter +1) % params.iterPerEval == 0) eval(state)).take(params.opt.maxIterations).last
    eval(finalState)
    breeze.util.writeObject(params.modelOut, model.extractCRF(finalState.x))
  }
}
/**
 * Trains a semi-CRF NER model from CoNLL sequence files (separate train/test
 * files), dumping learned feature weights and the final CRF to disk.
 */
object SemiConllNerPipeline extends LazyLogging {

  /**
   * Converts a BIO-tagged CoNLL example into a [[Segmentation]] of labeled spans.
   *
   * Tags are inspected by their first character: 'O' closes any open span,
   * 'B' closes an open span and starts a new one, 'I' continues (or starts) a span.
   * The "X-" prefix is stripped from emitted labels via `replaceAll(".-","")`.
   *
   * @param ex example whose label is the tag sequence and whose features hold
   *           one column of tokens per position (column 0 is the word)
   * @return segmentation of labeled spans over the example's words
   */
  def makeSegmentation(ex: Example[IndexedSeq[String],IndexedSeq[IndexedSeq[String]]]): Segmentation[String, String] = {
    val labels = ex.label
    val words = ex.features.map(_ apply 0)
    assert(labels.length == words.length)
    val out = new ArrayBuffer[(String, Span)]()
    // `start` marks the beginning of the currently open span; labels.length == "no open span".
    var start = labels.length
    var i = 0
    while (i < labels.length) {
      val l = labels(i)
      l(0) match {
        case 'O' =>
          if (start < i)
            out += (labels(start).replaceAll(".-","").intern -> Span(start, i))
          // out += ("O".intern -> Span(i, i+1))
          start = i + 1
        case 'B' =>
          if (start < i)
            out += (labels(start).replaceAll(".-","").intern -> Span(start, i))
          start = i
        case 'I' =>
          if (start >= i) {
            start = i
          } else if (labels(start) != l){
            // NOTE(review): "B-X" followed by "I-X" also differs as a string and
            // therefore closes the span here — confirm this matches the intended
            // IOB variant of the input data.
            out += (labels(start).replaceAll(".-","").intern -> Span(start, i))
            start = i
          } // else, still in a field, do nothing.
        case _ =>
          sys.error("weird label?!?" + l)
      }
      i += 1
    }
    // Flush a span left open at the end of the sequence.
    if (start < i)
      out += (labels(start).replaceAll(".-","").intern -> Span(start, i))
    // assert(out.nonEmpty && out.last._2.end == words.length, out + " " + words + " " + labels)
    Segmentation(out, words, ex.id)
  }

  // Command-line configuration; `opt` carries the optimizer settings.
  case class Params(train: File,
                    test: File,
                    nsents: Int = 100000,
                    nthreads: Int = -1,
                    iterPerEval: Int = 20,
                    modelOut: File = new File("ner-conll.ser.gz"),
                    opt: OptParams,
                    checkGradient: Boolean = false)

  def main(args: Array[String]) {
    val params:Params = CommandLineParser.readIn[Params](args)
    logger.info("Command line arguments for recovery:\n" + Configuration.fromObject(params).toCommandLineString)
    val (train,test) = {
      val standardTrain = CONLLSequenceReader.readTrain(new FileInputStream(params.train), params.train.getName).toIndexedSeq
      // Bug fix: the test stream was previously labeled with the *train* file's name.
      val standardTest = CONLLSequenceReader.readTrain(new FileInputStream(params.test), params.test.getName).toIndexedSeq

      standardTrain.take(params.nsents).map(makeSegmentation) -> standardTest.map(makeSegmentation)
    }

    // build feature Index
    val model: SemiCRFModel[String, String] = new SegmentationModelFactory(/*, gazetteer = Gazetteer.ner("en" )*/).makeModel(train)
    val obj = new ModelObjective(model, train, params.nthreads)
    val cached = new CachedBatchDiffFunction(obj)
    if (params.checkGradient) {
      // Sanity-check analytic gradients against finite differences.
      GradientTester.test(cached, obj.initialWeightVector(true), toString={(i: Int) => model.featureIndex.get(i).toString})
    }

    //
    // Evaluates current weights on the test set and dumps significant feature
    // weights (|w| > 1e-4, sorted by magnitude) to weights.txt.
    def eval(state: FirstOrderMinimizer[DenseVector[Double], BatchDiffFunction[DenseVector[Double]]]#State) = {
      val out = new PrintWriter(new BufferedOutputStream(new FileOutputStream("weights.txt")))
      Encoder.fromIndex(model.featureIndex).decode(state.x).iterator.toIndexedSeq.sortBy(-_._2.abs).takeWhile(_._2.abs > 1E-4) foreach {case (x, v) => out.println(v + "\t" + x)}
      val crf: SemiCRF[String, String] = model.extractCRF(state.x)
      println("Eval + " + (state.iter+1))
      val stats = SegmentationEval.eval(crf, test)
      println("Final: " + stats)
      out.close()
      stats
    }

    // Optimize, evaluating every `iterPerEval` iterations via the tee side channel.
    val weights = params.opt.iterations(cached, obj.initialWeightVector(randomize=false)).tee(state => if ((state.iter +1) % params.iterPerEval == 0) eval(state)).take(params.opt.maxIterations).last
    val stats = eval(weights)
    breeze.util.writeObject(params.modelOut, model.extractCRF(weights.x))
    println(stats)
  }
}
| jovilius/epic | src/main/scala/epic/sequences/SemiNERPipeline.scala | Scala | apache-2.0 | 6,560 |
package de.kaufhof.hajobs
import java.util.concurrent.CountDownLatch
import akka.actor.ActorSystem
import de.kaufhof.hajobs.testutils.CassandraSpec
import org.quartz.Scheduler
import org.scalatest.mock.MockitoSugar
import play.api.test._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Promise, Future}
import scala.language.postfixOps
/**
 * Integration test verifying that the JobManager's Cassandra-backed locking
 * prevents two jobs that share a LockType from running concurrently.
 */
class JobManagerIntegrationSpec extends CassandraSpec with DefaultAwaitTimeout with FutureAwaits with MockitoSugar {

  import JobManagerIntegrationSpec._

  private lazy val jobStatusRepository = new JobStatusRepository(session, jobTypes = JobManagerIntegrationSpec.TestJobTypes)
  private lazy val lockRepository = new LockRepository(session, TestLockTypes)
  private lazy val actorSystem = ActorSystem("JobManagerIntegrationSpec")

  // Start each test from a clean lock table.
  override protected def beforeEach(): Unit = await(lockRepository.clear())

  override protected def afterAll(): Unit = actorSystem.terminate()

  "JobManager locking" should {
    "should prevent 2 jobs sharing the same LockType from running in parallel" in {
      // Job1 blocks on this latch, keeping its lock held for the whole test.
      val cdl = new CountDownLatch(1)
      val mockedScheduler = mock[Scheduler]
      val manager = new JobManager(Seq(new Job1(jobStatusRepository, cdl), new Job12(jobStatusRepository)), lockRepository, jobStatusRepository, actorSystem, mockedScheduler, false)

      await(manager.triggerJob(JobType1)) should be (a[Started])
      // Wait until Job1 has actually acquired the lock before triggering Job12.
      eventually {
        await(lockRepository.getIdForType(JobType1)) should be ('defined)
      }
      // Job12 shares JobType1's lock type, so it must be rejected while Job1 runs.
      await(manager.triggerJob(JobType12)) should be (a[LockedStatus])
      // Release Job1 so it can finish.
      cdl.countDown()
    }
  }
}
/** Test fixtures: two jobs whose job types share the same lock type. */
object JobManagerIntegrationSpec {

  // Runs until the supplied latch is released, so its lock stays held.
  class Job1(jobStatusRepository: JobStatusRepository, cdl: CountDownLatch) extends Job(JobType1, 3) {
    override def run()(implicit context: JobContext): JobExecution = new JobExecution() {
      private val promise = Promise[Unit]()
      override val result = promise.future

      Future {
        cdl.await()
        promise.success(())
      }

      override def cancel(): Unit = ()
    }
  }

  // Deliberately reuses JobType1's lock type to provoke lock contention.
  object JobType12 extends JobType("testJob12", JobType1.lockType)

  // Completes immediately; only used to attempt a trigger while Job1 holds the lock.
  class Job12(jobStatusRepository: JobStatusRepository) extends Job(JobType12, 3) {
    override def run()(implicit context: JobContext): JobExecution = new JobExecution() {
      override val result = Future.successful(())
      override def cancel(): Unit = ()
    }
  }

  val TestJobTypes = JobTypes(JobType1, JobType12)
}
| bryanriddle/ha-jobs | ha-jobs-core/src/test/scala/de/kaufhof/hajobs/JobManagerIntegrationSpec.scala | Scala | apache-2.0 | 2,446 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.stream.table
import java.lang.{Boolean => JBool, Integer => JInt, Long => JLong}
import org.apache.calcite.runtime.SqlFunctions.{internalToTimestamp => toTimestamp}
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.typeutils.{GenericTypeInfo, RowTypeInfo}
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.datastream.DataStream
import org.apache.flink.streaming.api.environment.{StreamExecutionEnvironment => JExecEnv}
import org.apache.flink.streaming.api.functions.ProcessFunction
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.util.StreamingMultipleProgramsTestBase
import org.apache.flink.table.api.{TableEnvironment, TableException, TableSchema, Types}
import org.apache.flink.table.api.scala._
import org.apache.flink.table.runtime.utils.{CommonTestData, StreamITCase}
import org.apache.flink.table.sources.StreamTableSource
import org.apache.flink.table.utils._
import org.apache.flink.types.Row
import org.apache.flink.util.Collector
import org.junit.Assert._
import org.junit.Test
import scala.collection.JavaConverters._
import scala.collection.mutable
class TableSourceITCase extends StreamingMultipleProgramsTestBase {
  /** A table source whose DataStream type info contradicts getReturnType must be rejected. */
  @Test(expected = classOf[TableException])
  def testInvalidDatastreamType(): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val tEnv = TableEnvironment.getTableEnvironment(env)

    val tableSource = new StreamTableSource[Row]() {
      private val fieldNames: Array[String] = Array("name", "id", "value")
      private val fieldTypes: Array[TypeInformation[_]] = Array(Types.STRING, Types.LONG, Types.INT)
        .asInstanceOf[Array[TypeInformation[_]]]

      override def getDataStream(execEnv: JExecEnv): DataStream[Row] = {
        val data = List(Row.of("Mary", new JLong(1L), new JInt(1))).asJava
        // return DataStream[Row] with GenericTypeInfo
        execEnv.fromCollection(data, new GenericTypeInfo[Row](classOf[Row]))
      }
      override def getReturnType: TypeInformation[Row] = new RowTypeInfo(fieldTypes, fieldNames)
      override def getTableSchema: TableSchema = new TableSchema(fieldNames, fieldTypes)
    }
    tEnv.registerTableSource("T", tableSource)
    tEnv.scan("T")
      .select('value, 'name)
      .addSink(new StreamITCase.StringSink[Row])
    env.execute()
    // test should fail because type info of returned DataStream does not match type return type
    // info.
  }
  /** Filter + projection over the shared CSV table source. */
  @Test
  def testCsvTableSource(): Unit = {

    val csvTable = CommonTestData.getCsvTableSource
    StreamITCase.testResults = mutable.MutableList()

    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val tEnv = TableEnvironment.getTableEnvironment(env)

    tEnv.registerTableSource("csvTable", csvTable)
    tEnv.scan("csvTable")
      .where('id > 4)
      .select('last, 'score * 2)
      .toAppendStream[Row]
      .addSink(new StreamITCase.StringSink[Row])

    env.execute()

    val expected = Seq(
      "Williams,69.0",
      "Miller,13.56",
      "Smith,180.2",
      "Williams,4.68")
    assertEquals(expected.sorted, StreamITCase.testResults.sorted)
  }
  /** Predicate push-down: filterable table source evaluated with a string expression. */
  @Test
  def testCsvTableSourceWithFilterable(): Unit = {
    StreamITCase.testResults = mutable.MutableList()
    val tableName = "MyTable"

    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val tEnv = TableEnvironment.getTableEnvironment(env)

    tEnv.registerTableSource(tableName, TestFilterableTableSource())
    tEnv.scan(tableName)
      .where("amount > 4 && price < 9")
      .select("id, name")
      .addSink(new StreamITCase.StringSink[Row])

    env.execute()

    val expected = Seq("5,Record_5", "6,Record_6", "7,Record_7", "8,Record_8")
    assertEquals(expected.sorted, StreamITCase.testResults.sorted)
  }
  /** Row-typed source exposing a rowtime attribute, consumed by a tumbling event-time window. */
  @Test
  def testRowtimeRowTableSource(): Unit = {
    StreamITCase.testResults = mutable.MutableList()
    val tableName = "MyTable"
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    val tEnv = TableEnvironment.getTableEnvironment(env)

    val data = Seq(
      Row.of("Mary", new JLong(1L), new JInt(10)),
      Row.of("Bob", new JLong(2L), new JInt(20)),
      Row.of("Mary", new JLong(2L), new JInt(30)),
      Row.of("Liz", new JLong(2001L), new JInt(40)))

    val fieldNames = Array("name", "rtime", "amount")
    // Schema declares rtime as SQL_TIMESTAMP while the physical type is LONG.
    val schema = new TableSchema(fieldNames, Array(Types.STRING, Types.SQL_TIMESTAMP, Types.INT))
    val rowType = new RowTypeInfo(
      Array(Types.STRING, Types.LONG, Types.INT).asInstanceOf[Array[TypeInformation[_]]],
      fieldNames)

    val tableSource = new TestTableSourceWithTime(schema, rowType, data, "rtime", null)
    tEnv.registerTableSource(tableName, tableSource)

    tEnv.scan(tableName)
      .window(Tumble over 1.second on 'rtime as 'w)
      .groupBy('name, 'w)
      .select('name, 'w.start, 'amount.sum)
      .addSink(new StreamITCase.StringSink[Row])
    env.execute()

    val expected = Seq(
      "Mary,1970-01-01 00:00:00.0,40",
      "Bob,1970-01-01 00:00:00.0,20",
      "Liz,1970-01-01 00:00:02.0,40")
    assertEquals(expected.sorted, StreamITCase.testResults.sorted)
  }
  /** Row-typed source exposing an appended proctime attribute, used only in a filter. */
  // NOTE(review): sets EventTime characteristic although this exercises a
  // proctime attribute — confirm this is intended.
  @Test
  def testProctimeRowTableSource(): Unit = {
    StreamITCase.testResults = mutable.MutableList()
    val tableName = "MyTable"
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    val tEnv = TableEnvironment.getTableEnvironment(env)

    val data = Seq(
      Row.of("Mary", new JLong(1L), new JInt(10)),
      Row.of("Bob", new JLong(2L), new JInt(20)),
      Row.of("Mary", new JLong(2L), new JInt(30)),
      Row.of("Liz", new JLong(2001L), new JInt(40)))

    val fieldNames = Array("name", "rtime", "amount")
    // "ptime" exists only in the table schema, not in the physical row type.
    val schema = new TableSchema(
      fieldNames :+ "ptime",
      Array(Types.STRING, Types.LONG, Types.INT, Types.SQL_TIMESTAMP))
    val rowType = new RowTypeInfo(
      Array(Types.STRING, Types.LONG, Types.INT).asInstanceOf[Array[TypeInformation[_]]],
      fieldNames)

    val tableSource = new TestTableSourceWithTime(schema, rowType, data, null, "ptime")
    tEnv.registerTableSource(tableName, tableSource)

    tEnv.scan(tableName)
      .where('ptime.cast(Types.LONG) > 0L)
      .select('name, 'amount)
      .addSink(new StreamITCase.StringSink[Row])
    env.execute()

    val expected = Seq(
      "Mary,10",
      "Bob,20",
      "Mary,30",
      "Liz,40")
    assertEquals(expected.sorted, StreamITCase.testResults.sorted)
  }
  /** Source exposing both rowtime and proctime; windows on rowtime. */
  @Test
  def testRowtimeProctimeRowTableSource(): Unit = {
    StreamITCase.testResults = mutable.MutableList()
    val tableName = "MyTable"
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    val tEnv = TableEnvironment.getTableEnvironment(env)

    val data = Seq(
      Row.of("Mary", new JLong(1L), new JInt(10)),
      Row.of("Bob", new JLong(2L), new JInt(20)),
      Row.of("Mary", new JLong(2L), new JInt(30)),
      Row.of("Liz", new JLong(2001L), new JInt(40)))

    val fieldNames = Array("name", "rtime", "amount")
    val schema = new TableSchema(
      fieldNames :+ "ptime",
      Array(Types.STRING, Types.SQL_TIMESTAMP, Types.INT, Types.SQL_TIMESTAMP))
    val rowType = new RowTypeInfo(
      Array(Types.STRING, Types.LONG, Types.INT).asInstanceOf[Array[TypeInformation[_]]],
      fieldNames)

    val tableSource = new TestTableSourceWithTime(schema, rowType, data, "rtime", "ptime")
    tEnv.registerTableSource(tableName, tableSource)

    tEnv.scan(tableName)
      .window(Tumble over 1.second on 'rtime as 'w)
      .groupBy('name, 'w)
      .select('name, 'w.start, 'amount.sum)
      .addSink(new StreamITCase.StringSink[Row])
    env.execute()

    val expected = Seq(
      "Mary,1970-01-01 00:00:00.0,40",
      "Bob,1970-01-01 00:00:00.0,20",
      "Liz,1970-01-01 00:00:02.0,40")
    assertEquals(expected.sorted, StreamITCase.testResults.sorted)
  }
  /** Rowtime whose physical representation is already SQL_TIMESTAMP (not LONG). */
  @Test
  def testRowtimeAsTimestampRowTableSource(): Unit = {
    StreamITCase.testResults = mutable.MutableList()
    val tableName = "MyTable"
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    val tEnv = TableEnvironment.getTableEnvironment(env)

    val data = Seq(
      Row.of("Mary", toTimestamp(1L), new JInt(10)),
      Row.of("Bob", toTimestamp(2L), new JInt(20)),
      Row.of("Mary", toTimestamp(2L), new JInt(30)),
      Row.of("Liz", toTimestamp(2001L), new JInt(40)))

    val fieldNames = Array("name", "rtime", "amount")
    val schema = new TableSchema(fieldNames, Array(Types.STRING, Types.SQL_TIMESTAMP, Types.INT))
    val rowType = new RowTypeInfo(
      Array(Types.STRING, Types.SQL_TIMESTAMP, Types.INT).asInstanceOf[Array[TypeInformation[_]]],
      fieldNames)

    val tableSource = new TestTableSourceWithTime(schema, rowType, data, "rtime", null)
    tEnv.registerTableSource(tableName, tableSource)

    tEnv.scan(tableName)
      .window(Tumble over 1.second on 'rtime as 'w)
      .groupBy('name, 'w)
      .select('name, 'w.start, 'amount.sum)
      .addSink(new StreamITCase.StringSink[Row])
    env.execute()

    val expected = Seq(
      "Mary,1970-01-01 00:00:00.0,40",
      "Bob,1970-01-01 00:00:00.0,20",
      "Liz,1970-01-01 00:00:02.0,40")
    assertEquals(expected.sorted, StreamITCase.testResults.sorted)
  }
  /** Atomic LONG source: the long value itself becomes the rowtime attribute. */
  @Test
  def testRowtimeLongTableSource(): Unit = {
    StreamITCase.testResults = mutable.MutableList()
    val tableName = "MyTable"
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    val tEnv = TableEnvironment.getTableEnvironment(env)

    val data = Seq(new JLong(1L), new JLong(2L), new JLong(2L), new JLong(2001L), new JLong(4001L))

    val schema = new TableSchema(Array("rtime"), Array(Types.SQL_TIMESTAMP))
    val returnType = Types.LONG

    val tableSource = new TestTableSourceWithTime(schema, returnType, data, "rtime", null)
    tEnv.registerTableSource(tableName, tableSource)

    tEnv.scan(tableName)
      .window(Tumble over 1.second on 'rtime as 'w)
      .groupBy('w)
      .select('w.start, 1.count)
      .addSink(new StreamITCase.StringSink[Row])
    env.execute()

    val expected = Seq(
      "1970-01-01 00:00:00.0,3",
      "1970-01-01 00:00:02.0,1",
      "1970-01-01 00:00:04.0,1")
    assertEquals(expected.sorted, StreamITCase.testResults.sorted)
  }
  /** Atomic STRING source with an appended proctime attribute used in a filter. */
  @Test
  def testProctimeStringTableSource(): Unit = {
    StreamITCase.testResults = mutable.MutableList()
    val tableName = "MyTable"
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    val tEnv = TableEnvironment.getTableEnvironment(env)

    val data = Seq("Mary", "Peter", "Bob", "Liz")

    val schema = new TableSchema(Array("name", "ptime"), Array(Types.STRING, Types.SQL_TIMESTAMP))
    val returnType = Types.STRING

    val tableSource = new TestTableSourceWithTime(schema, returnType, data, null, "ptime")
    tEnv.registerTableSource(tableName, tableSource)

    tEnv.scan(tableName)
      .where('ptime.cast(Types.LONG) > 1)
      .select('name)
      .addSink(new StreamITCase.StringSink[Row])
    env.execute()

    val expected = Seq("Mary", "Peter", "Bob", "Liz")
    assertEquals(expected.sorted, StreamITCase.testResults.sorted)
  }
  /** Atomic LONG source exposing both rowtime and proctime attributes. */
  @Test
  def testRowtimeProctimeLongTableSource(): Unit = {
    StreamITCase.testResults = mutable.MutableList()
    val tableName = "MyTable"
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    val tEnv = TableEnvironment.getTableEnvironment(env)

    val data = Seq(new JLong(1L), new JLong(2L), new JLong(2L), new JLong(2001L), new JLong(4001L))

    val schema = new TableSchema(
      Array("rtime", "ptime"),
      Array(Types.SQL_TIMESTAMP, Types.SQL_TIMESTAMP))
    val returnType = Types.LONG

    val tableSource = new TestTableSourceWithTime(schema, returnType, data, "rtime", "ptime")
    tEnv.registerTableSource(tableName, tableSource)

    tEnv.scan(tableName)
      .where('ptime.cast(Types.LONG) > 1)
      .window(Tumble over 1.second on 'rtime as 'w)
      .groupBy('w)
      .select('w.start, 1.count)
      .addSink(new StreamITCase.StringSink[Row])
    env.execute()

    val expected = Seq(
      "1970-01-01 00:00:00.0,3",
      "1970-01-01 00:00:02.0,1",
      "1970-01-01 00:00:04.0,1")
    assertEquals(expected.sorted, StreamITCase.testResults.sorted)
  }
  /** Logical schema fields mapped onto differently-named physical fields (f0/f1/f2). */
  @Test
  def testFieldMappingTableSource(): Unit = {
    StreamITCase.testResults = mutable.MutableList()
    val tableName = "MyTable"
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    val tEnv = TableEnvironment.getTableEnvironment(env)

    val data = Seq(
      Row.of("Mary", new JLong(1L), new JInt(10)),
      Row.of("Bob", new JLong(2L), new JInt(20)),
      Row.of("Mary", new JLong(2L), new JInt(30)),
      Row.of("Liz", new JLong(2001L), new JInt(40)))

    val schema = new TableSchema(
      Array("ptime", "amount", "name", "rtime"),
      Array(Types.SQL_TIMESTAMP, Types.INT, Types.STRING, Types.SQL_TIMESTAMP))
    val returnType = new RowTypeInfo(Types.STRING, Types.LONG, Types.INT)
    // logical field name -> physical field name
    val mapping = Map("amount" -> "f2", "name" -> "f0", "rtime" -> "f1")

    val source = new TestTableSourceWithTime(schema, returnType, data, "rtime", "ptime", mapping)
    tEnv.registerTableSource(tableName, source)

    tEnv.scan(tableName)
      .window(Tumble over 1.second on 'rtime as 'w)
      .groupBy('name, 'w)
      .select('name, 'w.start, 'amount.sum)
      .addSink(new StreamITCase.StringSink[Row])
    env.execute()

    val expected = Seq(
      "Mary,1970-01-01 00:00:00.0,40",
      "Bob,1970-01-01 00:00:00.0,20",
      "Liz,1970-01-01 00:00:02.0,40")
    assertEquals(expected.sorted, StreamITCase.testResults.sorted)
  }
  /** Projection push-down: neither time attribute is selected. */
  @Test
  def testProjectWithoutRowtimeProctime(): Unit = {
    StreamITCase.testResults = mutable.MutableList()
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    val tEnv = TableEnvironment.getTableEnvironment(env)

    val data = Seq(
      Row.of(new JInt(1), "Mary", new JLong(10L), new JLong(1)),
      Row.of(new JInt(2), "Bob", new JLong(20L), new JLong(2)),
      Row.of(new JInt(3), "Mike", new JLong(30L), new JLong(2)),
      Row.of(new JInt(4), "Liz", new JLong(40L), new JLong(2001)))

    val tableSchema = new TableSchema(
      Array("id", "rtime", "val", "ptime", "name"),
      Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
    val returnType = new RowTypeInfo(
      Array(Types.INT, Types.STRING, Types.LONG, Types.LONG)
        .asInstanceOf[Array[TypeInformation[_]]],
      Array("id", "name", "val", "rtime"))

    tEnv.registerTableSource(
      "T",
      new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime"))

    tEnv.scan("T")
      .select('name, 'val, 'id)
      .addSink(new StreamITCase.StringSink[Row])
    env.execute()

    val expected = Seq(
      "Mary,10,1",
      "Bob,20,2",
      "Mike,30,3",
      "Liz,40,4")
    assertEquals(expected.sorted, StreamITCase.testResults.sorted)
  }
  /** Projection push-down: rowtime is selected but proctime is dropped. */
  @Test
  def testProjectWithoutProctime(): Unit = {
    StreamITCase.testResults = mutable.MutableList()
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    val tEnv = TableEnvironment.getTableEnvironment(env)

    val data = Seq(
      Row.of(new JInt(1), "Mary", new JLong(10L), new JLong(1)),
      Row.of(new JInt(2), "Bob", new JLong(20L), new JLong(2)),
      Row.of(new JInt(3), "Mike", new JLong(30L), new JLong(2)),
      Row.of(new JInt(4), "Liz", new JLong(40L), new JLong(2001)))

    val tableSchema = new TableSchema(
      Array("id", "rtime", "val", "ptime", "name"),
      Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
    val returnType = new RowTypeInfo(
      Array(Types.INT, Types.STRING, Types.LONG, Types.LONG)
        .asInstanceOf[Array[TypeInformation[_]]],
      Array("id", "name", "val", "rtime"))

    tEnv.registerTableSource(
      "T",
      new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime"))

    tEnv.scan("T")
      .select('rtime, 'name, 'id)
      .addSink(new StreamITCase.StringSink[Row])
    env.execute()

    val expected = Seq(
      "1970-01-01 00:00:00.001,Mary,1",
      "1970-01-01 00:00:00.002,Bob,2",
      "1970-01-01 00:00:00.002,Mike,3",
      "1970-01-01 00:00:02.001,Liz,4")
    assertEquals(expected.sorted, StreamITCase.testResults.sorted)
  }
  /** Projection push-down: proctime used in a filter, rowtime dropped. */
  @Test
  def testProjectWithoutRowtime(): Unit = {
    StreamITCase.testResults = mutable.MutableList()
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    val tEnv = TableEnvironment.getTableEnvironment(env)

    val data = Seq(
      Row.of(new JInt(1), "Mary", new JLong(10L), new JLong(1)),
      Row.of(new JInt(2), "Bob", new JLong(20L), new JLong(2)),
      Row.of(new JInt(3), "Mike", new JLong(30L), new JLong(2)),
      Row.of(new JInt(4), "Liz", new JLong(40L), new JLong(2001)))

    val tableSchema = new TableSchema(
      Array("id", "rtime", "val", "ptime", "name"),
      Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
    val returnType = new RowTypeInfo(
      Array(Types.INT, Types.STRING, Types.LONG, Types.LONG)
        .asInstanceOf[Array[TypeInformation[_]]],
      Array("id", "name", "val", "rtime"))

    tEnv.registerTableSource(
      "T",
      new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime"))

    tEnv.scan("T")
      .filter('ptime.cast(Types.LONG) > 0)
      .select('name, 'id)
      .addSink(new StreamITCase.StringSink[Row])
    env.execute()

    val expected = Seq(
      "Mary,1",
      "Bob,2",
      "Mike,3",
      "Liz,4")
    assertEquals(expected.sorted, StreamITCase.testResults.sorted)
  }
def testProjectOnlyProctime(): Unit = {
StreamITCase.testResults = mutable.MutableList()
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val tEnv = TableEnvironment.getTableEnvironment(env)
val data = Seq(
Row.of(new JInt(1), new JLong(1), new JLong(10L), "Mary"),
Row.of(new JInt(2), new JLong(2L), new JLong(20L), "Bob"),
Row.of(new JInt(3), new JLong(2L), new JLong(30L), "Mike"),
Row.of(new JInt(4), new JLong(2001L), new JLong(30L), "Liz"))
val tableSchema = new TableSchema(
Array("id", "rtime", "val", "ptime", "name"),
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.INT, Types.LONG, Types.LONG, Types.STRING)
.asInstanceOf[Array[TypeInformation[_]]],
Array("id", "rtime", "val", "name"))
tEnv.registerTableSource(
"T",
new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime"))
tEnv.scan("T")
.select('ptime > 0)
.select(1.count)
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq("4")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
def testProjectOnlyRowtime(): Unit = {
StreamITCase.testResults = mutable.MutableList()
val env = StreamExecutionEnvironment.getExecutionEnvironment
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
val tEnv = TableEnvironment.getTableEnvironment(env)
val data = Seq(
Row.of(new JInt(1), new JLong(1), new JLong(10L), "Mary"),
Row.of(new JInt(2), new JLong(2L), new JLong(20L), "Bob"),
Row.of(new JInt(3), new JLong(2L), new JLong(30L), "Mike"),
Row.of(new JInt(4), new JLong(2001L), new JLong(30L), "Liz"))
val tableSchema = new TableSchema(
Array("id", "rtime", "val", "ptime", "name"),
Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
val returnType = new RowTypeInfo(
Array(Types.INT, Types.LONG, Types.LONG, Types.STRING)
.asInstanceOf[Array[TypeInformation[_]]],
Array("id", "rtime", "val", "name"))
tEnv.registerTableSource(
"T",
new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime"))
tEnv.scan("T")
.select('rtime)
.addSink(new StreamITCase.StringSink[Row])
env.execute()
val expected = Seq(
"1970-01-01 00:00:00.001",
"1970-01-01 00:00:00.002",
"1970-01-01 00:00:00.002",
"1970-01-01 00:00:02.001")
assertEquals(expected.sorted, StreamITCase.testResults.sorted)
}
  /** Projection push-down combined with logical-to-physical field name mapping. */
  @Test
  def testProjectWithMapping(): Unit = {
    StreamITCase.testResults = mutable.MutableList()
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    val tEnv = TableEnvironment.getTableEnvironment(env)

    val data = Seq(
      Row.of(new JLong(1), new JInt(1), "Mary", new JLong(10)),
      Row.of(new JLong(2), new JInt(2), "Bob", new JLong(20)),
      Row.of(new JLong(2), new JInt(3), "Mike", new JLong(30)),
      Row.of(new JLong(2001), new JInt(4), "Liz", new JLong(40)))

    val tableSchema = new TableSchema(
      Array("id", "rtime", "val", "ptime", "name"),
      Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
    val returnType = new RowTypeInfo(
      Array(Types.LONG, Types.INT, Types.STRING, Types.LONG)
        .asInstanceOf[Array[TypeInformation[_]]],
      Array("p-rtime", "p-id", "p-name", "p-val"))
    // logical field name -> physical ("p-"-prefixed) field name
    val mapping = Map("rtime" -> "p-rtime", "id" -> "p-id", "val" -> "p-val", "name" -> "p-name")

    tEnv.registerTableSource(
      "T",
      new TestProjectableTableSource(tableSchema, returnType, data, "rtime", "ptime", mapping))

    tEnv.scan("T")
      .select('name, 'rtime, 'val)
      .addSink(new StreamITCase.StringSink[Row])
    env.execute()

    val expected = Seq(
      "Mary,1970-01-01 00:00:00.001,10",
      "Bob,1970-01-01 00:00:00.002,20",
      "Mike,1970-01-01 00:00:00.002,30",
      "Liz,1970-01-01 00:00:02.001,40")
    assertEquals(expected.sorted, StreamITCase.testResults.sorted)
  }
  /** Nested-field projection push-down over rows containing two levels of nested rows. */
  @Test
  def testNestedProject(): Unit = {
    StreamITCase.testResults = mutable.MutableList()
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    val tEnv = TableEnvironment.getTableEnvironment(env)

    // Each record: (id, deepNested(nested1(name, value), nested2(num, flag)), nested(name, value), name)
    val data = Seq(
      Row.of(new JLong(1),
        Row.of(
          Row.of("Sarah", new JInt(100)),
          Row.of(new JInt(1000), new JBool(true))
        ),
        Row.of("Peter", new JInt(10000)),
        "Mary"),
      Row.of(new JLong(2),
        Row.of(
          Row.of("Rob", new JInt(200)),
          Row.of(new JInt(2000), new JBool(false))
        ),
        Row.of("Lucy", new JInt(20000)),
        "Bob"),
      Row.of(new JLong(3),
        Row.of(
          Row.of("Mike", new JInt(300)),
          Row.of(new JInt(3000), new JBool(true))
        ),
        Row.of("Betty", new JInt(30000)),
        "Liz"))

    val nested1 = new RowTypeInfo(
      Array(Types.STRING, Types.INT).asInstanceOf[Array[TypeInformation[_]]],
      Array("name", "value")
    )
    val nested2 = new RowTypeInfo(
      Array(Types.INT, Types.BOOLEAN).asInstanceOf[Array[TypeInformation[_]]],
      Array("num", "flag")
    )
    val deepNested = new RowTypeInfo(
      Array(nested1, nested2).asInstanceOf[Array[TypeInformation[_]]],
      Array("nested1", "nested2")
    )
    val tableSchema = new TableSchema(
      Array("id", "deepNested", "nested", "name"),
      Array(Types.LONG, deepNested, nested1, Types.STRING))

    val returnType = new RowTypeInfo(
      Array(Types.LONG, deepNested, nested1, Types.STRING).asInstanceOf[Array[TypeInformation[_]]],
      Array("id", "deepNested", "nested", "name"))

    tEnv.registerTableSource(
      "T",
      new TestNestedProjectableTableSource(tableSchema, returnType, data))

    tEnv
      .scan("T")
      .select('id,
        'deepNested.get("nested1").get("name") as 'nestedName,
        'nested.get("value") as 'nestedValue,
        'deepNested.get("nested2").get("flag") as 'nestedFlag,
        'deepNested.get("nested2").get("num") as 'nestedNum)
      .addSink(new StreamITCase.StringSink[Row])
    env.execute()

    val expected = Seq(
      "1,Sarah,10000,true,1000",
      "2,Rob,20000,false,2000",
      "3,Mike,30000,true,3000")
    assertEquals(expected.sorted, StreamITCase.testResults.sorted)
  }
  /**
   * Verifies that a rowtime table source's own timestamps and watermarks
   * survive filtering and projection: each emitted row is paired with the
   * watermark observed in a downstream ProcessFunction.
   */
  @Test
  def testRowtimeTableSourcePreserveWatermarks(): Unit = {
    StreamITCase.testResults = mutable.MutableList()
    val tableName = "MyTable"
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    val tEnv = TableEnvironment.getTableEnvironment(env)
    // rows with timestamps and watermarks
    // Left(ts, row) = element carrying a timestamp; Right(wm) = watermark.
    val data = Seq(
      Right(1L),
      Left(5L, Row.of(new JInt(1), new JLong(5), "A")),
      Left(2L, Row.of(new JInt(2), new JLong(1), "B")),
      Right(10L),
      Left(8L, Row.of(new JInt(6), new JLong(8), "C")),
      Right(20L),
      Left(21L, Row.of(new JInt(6), new JLong(21), "D")),
      Right(30L)
    )
    val fieldNames = Array("id", "rtime", "name")
    val schema = new TableSchema(fieldNames, Array(Types.INT, Types.SQL_TIMESTAMP, Types.STRING))
    val rowType = new RowTypeInfo(
      Array(Types.INT, Types.LONG, Types.STRING).asInstanceOf[Array[TypeInformation[_]]],
      fieldNames)
    val tableSource = new TestPreserveWMTableSource(schema, rowType, data, "rtime")
    tEnv.registerTableSource(tableName, tableSource)
    tEnv.scan(tableName)
      .where('rtime.cast(Types.LONG) > 3L)
      .select('id, 'name)
      .toAppendStream[Row]
      // append current watermark to each row to verify that original watermarks were preserved
      .process(new ProcessFunction[Row, (Row, Long)] {
        override def processElement(
          value: Row,
          ctx: ProcessFunction[Row, (Row, Long)]#Context,
          out: Collector[(Row, Long)]): Unit = {
          out.collect(value, ctx.timerService().currentWatermark())
        }
      })
      .addSink(new StreamITCase.StringSink[(Row, Long)])
    env.execute()
    // Row "B" (ts=2) is filtered out by rtime > 3; watermarks 1/10/20 are observed.
    val expected = Seq("(1,A,1)", "(6,C,10)", "(6,D,20)")
    assertEquals(expected.sorted, StreamITCase.testResults.sorted)
  }
}
| zimmermatt/flink | flink-libraries/flink-table/src/test/scala/org/apache/flink/table/runtime/stream/table/TableSourceITCase.scala | Scala | apache-2.0 | 27,793 |
/*
* Copyright (C) 2010 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.tool.cache
import scala.collection.mutable.{ HashMap, WeakHashMap }
/**
 * A two-level cache keyed by an association object and a key of type `K`.
 * The outer map has weak keys, so an association's whole sub-cache is
 * discarded once the association object becomes garbage-collectable.
 *
 * @tparam K key type within one association
 * @tparam T cached value type
 */
class AssociativeCache[K, T] {
  // association object -> (key -> value); weak outer keys tie sub-cache
  // lifetime to the association object's lifetime
  val cacheMaps = new WeakHashMap[Object, HashMap[K, T]]
  /** Removes `key` from the association's sub-cache, dropping the sub-cache entirely when it becomes empty. */
  def invalidateCache(cacheAssociation: Object, key: K) = cacheMaps.synchronized {
    for { cache ← cacheMaps.get(cacheAssociation) } {
      cache -= key
      if (cache.isEmpty) cacheMaps.remove(cacheAssociation)
    }
  }
  /** Returns the value cached under (association, key), or None if absent. */
  def cached(cacheAssociation: Object, key: K): Option[T] = cacheMaps.synchronized {
    cacheMaps.get(cacheAssociation) match {
      case None ⇒ None
      case Some(map) ⇒ map.synchronized { map.get(key) }
    }
  }
  /**
   * Returns the value cached under (association, key), computing and storing
   * it with `cacheable` if absent.
   *
   * When `preCompute` is true the value is computed *before* taking the
   * `cacheMaps` lock (so `cacheable` may run without holding any lock, but the
   * computation is wasted if another thread cached the key first); when false,
   * `cacheable` runs while holding the `cacheMaps` lock.
   *
   * NOTE(review): there is a benign race between the `cached` lookup below and
   * the locked `getOrElseUpdate` — the stored value stays consistent, but
   * `cacheable` can be invoked more than once for the same key.
   */
  def cache(cacheAssociation: Object, key: K, preCompute: Boolean = true)(cacheable: K ⇒ T): T = {
    def cache = {
      val computedCache = if (preCompute) Some(cacheable(key)) else None
      cacheMaps.synchronized {
        def cacheMap(cacheAssociation: Object): HashMap[K, T] =
          cacheMaps.getOrElseUpdate(cacheAssociation, new HashMap[K, T])
        val cache = cacheMap(cacheAssociation)
        cache.getOrElseUpdate(key, computedCache.getOrElse(cacheable(key)))
      }
    }
    cached(cacheAssociation, key).getOrElse(cache)
  }
}
| openmole/openmole | openmole/third-parties/org.openmole.tool.cache/src/main/scala/org/openmole/tool/cache/AssociativeCache.scala | Scala | agpl-3.0 | 1,912 |
/*
* Copyright 2015-2020 Noel Welsh
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package doodle
package java2d
package effect
import cats.effect.IO
import doodle.core.{BoundingBox, Normalized, Transform}
import doodle.java2d.algebra.{Algebra, Java2D}
import doodle.java2d.algebra.reified.Reified
import java.awt.{Dimension, Graphics, Graphics2D}
import java.util.concurrent.{LinkedBlockingQueue, TimeUnit}
import javax.swing.{JPanel, SwingUtilities}
import scala.collection.mutable.ArrayBuffer
final class Java2DPanel(frame: Frame) extends JPanel {
  import Java2DPanel.RenderRequest
  /**
   * The channel communicates between the Swing thread and outside threads.
   * Capacity is 1, so `render` blocks in `put` until `paintComponent` has
   * polled the previous request off the queue.
   */
  private val channel: LinkedBlockingQueue[RenderRequest[_]] =
    new LinkedBlockingQueue(1)
  /**
   * The pictures we've rendered, along with the bounding box for each picture.
   * Ordered so the last element is the most recent picture (which should be
   * rendered last).
   *
   * Default size is 1 as the most common case is being asked to render only one
   * picture.
   *
   * As an optimization we check the [[Redraw]] property of the [[Frame]], and
   * if we use an opaque color to redraw we only keep the last element around.
   * See [[opaqueRedraw]].
   */
  private val pictures: ArrayBuffer[(BoundingBox, List[Reified])] =
    new ArrayBuffer(1)
  /**
   * True if the redraw is an opaque color and hence we don't need to keep
   * earlier pictures around.
   */
  private val opaqueRedraw =
    frame.redraw match {
      case Redraw.ClearToBackground =>
        frame.background match {
          case None => true
          case Some(c) => c.alpha == Normalized.MaxValue
        }
      case Redraw.ClearToColor(c) =>
        c.alpha == Normalized.MaxValue
    }
  /**
   * Resizes the panel's preferred size and re-packs the enclosing window.
   * Called from `paintComponent`, i.e. on the Swing event dispatch thread.
   */
  def resize(width: Double, height: Double): Unit = {
    setPreferredSize(new Dimension(width.toInt, height.toInt))
    SwingUtilities.windowForComponent(this).pack()
  }
  /**
   * Queues a render request and schedules a repaint. Blocks while a previous
   * request is still pending (the channel has capacity 1).
   */
  def render[A](request: RenderRequest[A]): Unit = {
    channel.put(request)
    // println("Java2DPanel put in the channel")
    this.repaint()
    // println("Java2DPanel repaint request sent")
  }
  /**
   * Draw all images this [[Java2DPanel]] has received. We assume the Graphics2D
   * parameter has already been setup.
   */
  def draw(gc: Graphics2D): Unit = {
    // Clear to background
    frame.background.foreach{ c =>
      gc.setColor(Java2D.toAwtColor(c))
      gc.fillRect(0, 0, getWidth(), getHeight())
    }
    pictures.size match {
      case 0 =>
        // Nothing to do
        ()
      case 1 =>
        val (bb, reified) = pictures(0)
        val tx = Java2d.transform(
          bb,
          getWidth.toDouble,
          getHeight.toDouble,
          frame.center
        )
        Java2d.render(gc, reified, tx)
      case _ =>
        val (bb, reified) = pictures(0)
        val tx = Java2d.transform(
          bb,
          getWidth.toDouble,
          getHeight.toDouble,
          frame.center
        )
        Java2d.render(gc, reified, tx)
        // Draw remaining images, redrawing *before* each image
        // NOTE(review): the loop below starts at i = 0, so pictures(0) —
        // rendered just above — is redrawn over and painted again by the first
        // iteration; with a translucent redraw color the first picture
        // contributes twice. Confirm this double-draw is intended.
        var i = 0
        while(i < pictures.size) {
          frame.redraw match {
            case Redraw.ClearToBackground =>
              frame.background.foreach{ c =>
                gc.setColor(Java2D.toAwtColor(c))
                gc.fillRect(0, 0, getWidth(), getHeight())
              }
            case Redraw.ClearToColor(c) =>
              gc.setColor(Java2D.toAwtColor(c))
              gc.fillRect(0, 0, getWidth(), getHeight())
          }
          val (bb, reified) = pictures(i)
          val tx = Java2d.transform(
            bb,
            getWidth.toDouble,
            getHeight.toDouble,
            frame.center
          )
          Java2d.render(gc, reified, tx)
          i = i + 1
        }
    }
  }
  /**
   * Swing paint callback: briefly polls for a pending render request (10 ms),
   * interprets it if present (storing or replacing the resulting picture,
   * depending on [[opaqueRedraw]]), then draws all retained pictures.
   */
  override def paintComponent(context: Graphics): Unit = {
    // println("Java2DPanel painting")
    val gc = context.asInstanceOf[Graphics2D]
    Java2d.setup(gc)
    val algebra = Algebra(gc)
    val rr = channel.poll(10L, TimeUnit.MILLISECONDS)
    if (rr == null) ()
    else {
      val result = rr.render(algebra).unsafeRunSync()
      val bb = result.boundingBox
      val picture = result.reified
      resize(result.width, result.height)
      if(opaqueRedraw && pictures.size > 0)
        pictures.update(0, (bb, picture))
      else
        pictures += ((bb, picture))
    }
    draw(gc)
  }
}
object Java2DPanel {
  /**
   * The outcome of rendering one picture: its bounding box, the canvas size it
   * requested, the reified drawing operations, and the picture's result value.
   */
  final case class RenderResult[A](
    boundingBox: BoundingBox,
    width: Double,
    height: Double,
    reified: List[Reified],
    value: A
  )
  /**
   * A request to render `picture` with the given `frame` settings. `cb` is
   * invoked with the result when rendering succeeds.
   *
   * NOTE(review): `render` only ever invokes `cb` with a `Right`; if the IO
   * fails, no `Left` is delivered here — confirm failures are surfaced by the
   * caller running the IO.
   */
  final case class RenderRequest[A](
    picture: Picture[A],
    frame: Frame,
    cb: Either[Throwable, RenderResult[A]] => Unit
  ) {
    /** Interprets the picture with `algebra`, sizes the canvas, collects the reified operations, and reports the result via `cb`. */
    def render(algebra: Algebra): IO[RenderResult[A]] = {
      IO {
        val drawing = picture(algebra)
        val (bb, rdr) = drawing.runA(List.empty).value
        val (w, h) = Java2d.size(bb, frame.size)
        val (_, fa) = rdr.run(Transform.identity).value
        val (reified, a) = fa.run.value
        val result = RenderResult(bb, w, h, reified, a)
        cb(Right(result))
        result
      }
    }
  }
}
| underscoreio/doodle | java2d/src/main/scala/doodle/java2d/effect/Java2DPanel.scala | Scala | apache-2.0 | 5,705 |
/*
* Copyright (c) 2010 Thorsten Berger <berger@informatik.uni-leipzig.de>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package gsd.buildanalysis.linux.test
import org.scalatest.FunSuite
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class FuzzyParserTest extends FunSuite{
  // Placeholder spec: asserts trivially until real fuzzy-parser tests exist.
  test("placeholder"){
    // to be implemented...
    assert( true )
  }
} | ckaestne/KBuildMiner | src/test/scala/gsd/buildanalysis/linux/test/FuzzyParserTest.scala | Scala | gpl-3.0 | 1,017 |
package no.skytteren.elasticala.index
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import scala.concurrent.Promise
import org.elasticsearch.action.ActionListener
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequestBuilder
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse
import org.elasticsearch.client.{ Client => EsClient }
import no.skytteren.elasticala.Executor
import no.skytteren.elasticala.Index
import no.skytteren.elasticala.Request
import no.skytteren.elasticala.Response
case class ExistsIndexRequest(index: Index) extends Request
case class ExistsIndexResponse(exists: Boolean) extends Response
class ExistsIndexExecutor extends Executor[ExistsIndexRequest, ExistsIndexResponse] {
def execute(req: ExistsIndexRequest, client: EsClient)(implicit ec: ExecutionContext): Future[ExistsIndexResponse] = {
val promise = Promise[ExistsIndexResponse]()
val listener = new ActionListener[IndicesExistsResponse]{
override def onResponse(response: IndicesExistsResponse): Unit = {
promise.success(ExistsIndexResponse(response.isExists()))
}
def onFailure(e: Throwable): Unit = promise.failure(e)
}
client.admin().indices().prepareExists(req.index.value).execute(listener)
promise.future
}
} | skytteren/elasticala | src/main/scala/no/skytteren/elasticala/index/Exists.scala | Scala | apache-2.0 | 1,336 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.api
import slamdata.Predef._
import quasar.contrib.pathy._
import org.http4s.dsl.{Path => HPath}
import pathy.Path._
import pathy.scalacheck.PathyArbitrary._
/**
 * Round-trip tests: a pathy `Path` printed with `UriPathCodec` and wrapped in
 * an http4s `HPath` must be decodable by the `AsFilePath`/`AsDirPath`
 * extractors back to the original path.
 */
class AsPathSpec extends quasar.Qspec {
  "AsPath" should {
    "decode any Path we can throw at it" >> {
      "AbsFile" >> prop { file: AFile =>
        val httpPath = HPath(UriPathCodec.printPath(file))
        AsFilePath.unapply(httpPath) must_== Some(file)
      }
      "AbsDir" >> prop { dir : ADir =>
        val httpPath = HPath(UriPathCodec.printPath(dir))
        AsDirPath.unapply(httpPath) must_== Some(dir)
      }
    }
    // "/" alone decodes to the root directory
    "decode root" in {
      val httpPath = HPath("/")
      AsDirPath.unapply(httpPath) must_== Some(rootDir)
    }
    // a percent-encoded '/' must remain part of a single segment name
    "decode escaped /" in {
      val httpPath = HPath("/foo%2Fbar/baz/")
      AsDirPath.unapply(httpPath) must beSome(rootDir </> dir("foo/bar") </> dir("baz"))
    }
  }
}
| drostron/quasar | web/src/test/scala/quasar/api/AsPathSpec.scala | Scala | apache-2.0 | 1,510 |
package coursier.graph
import coursier.core.{Module, Parse, Resolution, Version, VersionConstraint, VersionInterval}
import coursier.util.Print.Colors
import coursier.util.{Print, Tree}
import coursier.util.Print.compatibleVersions
import dataclass.data
/**
 * A dependency-version conflict: `dependeeModule:dependeeVersion` asked for
 * `wantedVersion` of `module`, but resolution selected `version` instead.
 * `wasExcluded` is true when the dependency edge was excluded rather than
 * resolved to a different version.
 */
@data class Conflict(
  module: Module,
  version: String,
  wantedVersion: String,
  wasExcluded: Boolean,
  dependeeModule: Module,
  dependeeVersion: String
) {
  /** Human-readable one-line description of this conflict. */
  def repr: String =
    // FIXME Say something about wasExcluded?
    s"$module:$version selected, but $dependeeModule:$dependeeVersion wanted $wantedVersion"
}
object Conflict {
  /**
   * A node of the reverse dependency tree that represents one conflict, with
   * projections to the flat [[Conflict]] record and a colored tree rendering.
   */
  @data class Conflicted(tree: ReverseModuleTree) {
    /** Projects this tree node onto the flat [[Conflict]] record. */
    def conflict: Conflict =
      Conflict(
        tree.dependsOnModule,
        tree.dependsOnReconciledVersion,
        tree.dependsOnVersion,
        tree.excludedDependsOn,
        tree.module,
        tree.reconciledVersion
      )
    /** Renders the conflicted module and its dependees as a colored tree. */
    def repr: String = {
      val colors0 = Colors.get(coursier.core.compatibility.coloredOutput)
      val tree0 = Tree(
        Seq(tree).toVector.sortBy(t =>
          (t.module.organization.value, t.module.name.value, t.module.nameWithAttributes)
        )
      )(_.dependees)
      val treeRepr = tree0.render { node =>
        if (node.excludedDependsOn)
          s"${colors0.yellow}(excluded by)${colors0.reset} ${node.module}:${node.reconciledVersion}"
        else if (node.dependsOnVersion != node.dependsOnReconciledVersion) {
          val assumeCompatibleVersions =
            compatibleVersions(node.dependsOnVersion, node.dependsOnReconciledVersion)
          s"${node.module}:${node.reconciledVersion} " +
            (if (assumeCompatibleVersions) colors0.yellow else colors0.red) +
            s"wants ${node.dependsOnModule}:${node.dependsOnVersion}" +
            colors0.reset
        }
        else
          s"${node.module}:${node.reconciledVersion}"
      }
      val assumeCompatibleVersions =
        Print.compatibleVersions(tree.dependsOnVersion, tree.dependsOnReconciledVersion)
      System.lineSeparator() + s"${tree.dependsOnModule.repr}:" +
        s"${if (assumeCompatibleVersions) colors0.yellow
        else colors0.red}${tree.dependsOnReconciledVersion}${colors0.reset} " +
        s"(${tree.dependsOnVersion} wanted)" + System.lineSeparator() + treeRepr
    }
  }
  /**
   * Finds all version conflicts in a resolution, both among transitive
   * dependencies and between root dependencies and their reconciled versions.
   *
   * @param semVer when true, versions whose first two segments agree are
   *               treated as compatible
   */
  def conflicted(
    resolution: Resolution,
    withExclusions: Boolean = false,
    semVer: Boolean = false
  ): Seq[Conflicted] = {
    val tree = ReverseModuleTree(resolution, withExclusions = withExclusions)
    // True when `selected` satisfies the `wanted` constraint (exact match,
    // preferred-version match, semver prefix match, or interval membership).
    def compatible(wanted: String, selected: String): Boolean =
      wanted == selected || {
        val c = Parse.versionConstraint(wanted)
        val v = Version(selected)
        if (c.interval == VersionInterval.zero) {
          if (semVer)
            c.preferred.exists(_.items.take(2) == v.items.take(2))
          else
            c.preferred.contains(v)
        }
        else
          c.interval.contains(v)
      }
    // NOTE(review): the arguments below look swapped relative to
    // compatible(wanted, selected) — the reconciled version is passed as the
    // `wanted` constraint, while the fromRoots call further down passes
    // (dep.version, reconciled). Harmless for plain versions (the checks are
    // symmetric) but asymmetric for interval constraints; verify intent.
    val transitive = tree.flatMap { t =>
      t.dependees.collect {
        case d
          if !d.excludedDependsOn &&
            !compatible(d.dependsOnReconciledVersion, d.dependsOnVersion) =>
          Conflicted(d)
      }
    }
    val fromRoots = resolution.rootDependencies.flatMap { dep =>
      val version = resolution
        .reconciledVersions
        .getOrElse(dep.module, dep.version)
      val matches = compatible(dep.version, version)
      if (matches)
        Nil
      else {
        // Synthesize a tree node whose dependee is the root dependency itself.
        val node = ReverseModuleTree.Node(
          dep.module,
          dep.version,
          dep.version,
          dep.module,
          dep.version,
          version,
          excludedDependsOn = false,
          Map.empty,
          Map.empty
        )
        Seq(Conflicted(node))
      }
    }
    fromRoots ++ transitive
  }
  /** Like [[conflicted]], but returns the flat [[Conflict]] records. */
  def apply(
    resolution: Resolution,
    withExclusions: Boolean = false,
    semVer: Boolean = false
  ): Seq[Conflict] =
    conflicted(resolution, withExclusions, semVer)
      .map(_.conflict)
}
| alexarchambault/coursier | modules/core/shared/src/main/scala/coursier/graph/Conflict.scala | Scala | apache-2.0 | 4,005 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.fuberlin.wiwiss.silk.plugins.transformer.substring
import de.fuberlin.wiwiss.silk.linkagerule.input.SimpleTransformer
import de.fuberlin.wiwiss.silk.runtime.plugin.Plugin
/**
* Returns a substring between 'beginIndex' (inclusive) and 'endIndex' (exclusive).
*
* @author Robert Isele
*/
@Plugin(
  id = "substring",
  categories = Array("Substring"),
  label = "Substring",
  description =
    "Returns a substring between 'beginIndex' (inclusive) and 'endIndex' (exclusive)." +
    "If 'endIndex' is 0 (default), it is ignored and the entire string beginning with 'beginIndex' is returned."
)
case class SubstringTransformer(beginIndex: Int = 0, endIndex: Int = 0) extends SimpleTransformer {
  /**
   * Cuts the configured substring out of `value`. An `endIndex` of 0 means
   * "up to the end of the string"; otherwise it is the exclusive end index.
   */
  override def evaluate(value: String) = endIndex match {
    case 0   => value.substring(beginIndex)
    case end => value.substring(beginIndex, end)
  }
}
| fusepoolP3/p3-silk | silk-core/src/main/scala/de/fuberlin/wiwiss/silk/plugins/transformer/substring/SubstringTransformer.scala | Scala | apache-2.0 | 1,434 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.evaluation
import org.apache.spark.SparkFunSuite
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.mllib.util.TestingUtils._
/**
 * Tests for BinaryClassificationMetrics: threshold-indexed precision, recall
 * and F-measure, ROC and PR curves (with and without instance weights),
 * degenerate all-positive / all-negative label distributions, and curve
 * downsampling via `numBins`.
 */
class BinaryClassificationMetricsSuite extends SparkFunSuite with MLlibTestSparkContext {
  // Element-wise approximate equality for sequences of doubles.
  private def assertSequencesMatch(actual: Seq[Double], expected: Seq[Double]): Unit = {
    actual.zip(expected).foreach { case (a, e) => assert(a ~== e absTol 1.0e-5) }
  }
  // Element-wise approximate equality for curves (sequences of points).
  private def assertTupleSequencesMatch(actual: Seq[(Double, Double)],
      expected: Seq[(Double, Double)]): Unit = {
    actual.zip(expected).foreach { case ((ax, ay), (ex, ey)) =>
      assert(ax ~== ex absTol 1.0e-5)
      assert(ay ~== ey absTol 1.0e-5)
    }
  }
  // Checks every public curve/metric of `metrics` against expected values.
  private def validateMetrics(metrics: BinaryClassificationMetrics,
      expectedThresholds: Seq[Double],
      expectedROCCurve: Seq[(Double, Double)],
      expectedPRCurve: Seq[(Double, Double)],
      expectedFMeasures1: Seq[Double],
      expectedFmeasures2: Seq[Double],
      expectedPrecisions: Seq[Double],
      expectedRecalls: Seq[Double]): Unit = {
    assertSequencesMatch(metrics.thresholds().collect(), expectedThresholds)
    assertTupleSequencesMatch(metrics.roc().collect(), expectedROCCurve)
    assert(metrics.areaUnderROC() ~== AreaUnderCurve.of(expectedROCCurve) absTol 1E-5)
    assertTupleSequencesMatch(metrics.pr().collect(), expectedPRCurve)
    assert(metrics.areaUnderPR() ~== AreaUnderCurve.of(expectedPRCurve) absTol 1E-5)
    assertTupleSequencesMatch(metrics.fMeasureByThreshold().collect(),
      expectedThresholds.zip(expectedFMeasures1))
    assertTupleSequencesMatch(metrics.fMeasureByThreshold(2.0).collect(),
      expectedThresholds.zip(expectedFmeasures2))
    assertTupleSequencesMatch(metrics.precisionByThreshold().collect(),
      expectedThresholds.zip(expectedPrecisions))
    assertTupleSequencesMatch(metrics.recallByThreshold().collect(),
      expectedThresholds.zip(expectedRecalls))
  }
  test("binary evaluation metrics") {
    val scoreAndLabels = sc.parallelize(
      Seq((0.1, 0.0), (0.1, 1.0), (0.4, 0.0), (0.6, 0.0), (0.6, 1.0), (0.6, 1.0), (0.8, 1.0)), 2)
    val metrics = new BinaryClassificationMetrics(scoreAndLabels)
    // Expected counts computed by hand for each distinct score threshold.
    val thresholds = Seq(0.8, 0.6, 0.4, 0.1)
    val numTruePositives = Seq(1, 3, 3, 4)
    val numFalsePositives = Seq(0, 1, 2, 3)
    val numPositives = 4
    val numNegatives = 3
    val precisions = numTruePositives.zip(numFalsePositives).map { case (t, f) =>
      t.toDouble / (t + f)
    }
    val recalls = numTruePositives.map(t => t.toDouble / numPositives)
    val fpr = numFalsePositives.map(f => f.toDouble / numNegatives)
    val rocCurve = Seq((0.0, 0.0)) ++ fpr.zip(recalls) ++ Seq((1.0, 1.0))
    val pr = recalls.zip(precisions)
    val prCurve = Seq((0.0, 1.0)) ++ pr
    val f1 = pr.map { case (r, p) => 2.0 * (p * r) / (p + r)}
    val f2 = pr.map { case (r, p) => 5.0 * (p * r) / (4.0 * p + r)}
    validateMetrics(metrics, thresholds, rocCurve, prCurve, f1, f2, precisions, recalls)
  }
  test("binary evaluation metrics with weights") {
    val w1 = 1.5
    val w2 = 0.7
    val w3 = 0.4
    val scoreAndLabelsWithWeights = sc.parallelize(
      Seq((0.1, 0.0, w1), (0.1, 1.0, w2), (0.4, 0.0, w1), (0.6, 0.0, w3),
        (0.6, 1.0, w2), (0.6, 1.0, w2), (0.8, 1.0, w1)), 2)
    val metrics = new BinaryClassificationMetrics(scoreAndLabelsWithWeights, 0)
    val thresholds = Seq(0.8, 0.6, 0.4, 0.1)
    // True/false positive *weights* (not counts) per threshold.
    val numTruePositives =
      Seq(1 * w1, 1 * w1 + 2 * w2, 1 * w1 + 2 * w2, 3 * w2 + 1 * w1)
    val numFalsePositives = Seq(0.0, 1.0 * w3, 1.0 * w1 + 1.0 * w3, 1.0 * w3 + 2.0 * w1)
    val numPositives = 3 * w2 + 1 * w1
    val numNegatives = 2 * w1 + w3
    val precisions = numTruePositives.zip(numFalsePositives).map { case (t, f) =>
      t.toDouble / (t + f)
    }
    val recalls = numTruePositives.map(_ / numPositives)
    val fpr = numFalsePositives.map(_ / numNegatives)
    val rocCurve = Seq((0.0, 0.0)) ++ fpr.zip(recalls) ++ Seq((1.0, 1.0))
    val pr = recalls.zip(precisions)
    val prCurve = Seq((0.0, 1.0)) ++ pr
    val f1 = pr.map { case (r, p) => 2.0 * (p * r) / (p + r)}
    val f2 = pr.map { case (r, p) => 5.0 * (p * r) / (4.0 * p + r)}
    validateMetrics(metrics, thresholds, rocCurve, prCurve, f1, f2, precisions, recalls)
  }
  test("binary evaluation metrics for RDD where all examples have positive label") {
    val scoreAndLabels = sc.parallelize(Seq((0.5, 1.0), (0.5, 1.0)), 2)
    val metrics = new BinaryClassificationMetrics(scoreAndLabels)
    val thresholds = Seq(0.5)
    val precisions = Seq(1.0)
    val recalls = Seq(1.0)
    val fpr = Seq(0.0)
    val rocCurve = Seq((0.0, 0.0)) ++ fpr.zip(recalls) ++ Seq((1.0, 1.0))
    val pr = recalls.zip(precisions)
    val prCurve = Seq((0.0, 1.0)) ++ pr
    val f1 = pr.map { case (r, p) => 2.0 * (p * r) / (p + r)}
    val f2 = pr.map { case (r, p) => 5.0 * (p * r) / (4.0 * p + r)}
    validateMetrics(metrics, thresholds, rocCurve, prCurve, f1, f2, precisions, recalls)
  }
  test("binary evaluation metrics for RDD where all examples have negative label") {
    val scoreAndLabels = sc.parallelize(Seq((0.5, 0.0), (0.5, 0.0)), 2)
    val metrics = new BinaryClassificationMetrics(scoreAndLabels)
    val thresholds = Seq(0.5)
    val precisions = Seq(0.0)
    val recalls = Seq(0.0)
    val fpr = Seq(1.0)
    val rocCurve = Seq((0.0, 0.0)) ++ fpr.zip(recalls) ++ Seq((1.0, 1.0))
    val pr = recalls.zip(precisions)
    val prCurve = Seq((0.0, 0.0)) ++ pr
    // F-measure is defined as 0 when both precision and recall are 0.
    val f1 = pr.map {
      case (0, 0) => 0.0
      case (r, p) => 2.0 * (p * r) / (p + r)
    }
    val f2 = pr.map {
      case (0, 0) => 0.0
      case (r, p) => 5.0 * (p * r) / (4.0 * p + r)
    }
    validateMetrics(metrics, thresholds, rocCurve, prCurve, f1, f2, precisions, recalls)
  }
  test("binary evaluation metrics with downsampling") {
    val scoreAndLabels = Seq(
      (0.1, 0.0), (0.2, 0.0), (0.3, 1.0), (0.4, 0.0), (0.5, 0.0),
      (0.6, 1.0), (0.7, 1.0), (0.8, 0.0), (0.9, 1.0))
    val scoreAndLabelsRDD = sc.parallelize(scoreAndLabels, 1)
    val original = new BinaryClassificationMetrics(scoreAndLabelsRDD)
    val originalROC = original.roc().collect().sorted.toList
    // Add 2 for (0,0) and (1,1) appended at either end
    assert(2 + scoreAndLabels.size == originalROC.size)
    assert(
      List(
        (0.0, 0.0), (0.0, 0.25), (0.2, 0.25), (0.2, 0.5), (0.2, 0.75),
        (0.4, 0.75), (0.6, 0.75), (0.6, 1.0), (0.8, 1.0), (1.0, 1.0),
        (1.0, 1.0)
      ) ==
      originalROC)
    val numBins = 4
    val downsampled = new BinaryClassificationMetrics(scoreAndLabelsRDD, numBins)
    val downsampledROC = downsampled.roc().collect().sorted.toList
    assert(
      // May have to add 1 if the sample factor didn't divide evenly
      2 + (numBins + (if (scoreAndLabels.size % numBins == 0) 0 else 1)) ==
      downsampledROC.size)
    assert(
      List(
        (0.0, 0.0), (0.2, 0.25), (0.2, 0.75), (0.6, 0.75), (0.8, 1.0),
        (1.0, 1.0), (1.0, 1.0)
      ) ==
      downsampledROC)
    val downsampledRecall = downsampled.recallByThreshold().collect().sorted.toList
    assert(
      // May have to add 1 if the sample factor didn't divide evenly
      numBins + (if (scoreAndLabels.size % numBins == 0) 0 else 1) ==
      downsampledRecall.size)
    assert(
      List(
        (0.1, 1.0), (0.2, 1.0), (0.4, 0.75), (0.6, 0.75), (0.8, 0.25)
      ) ==
      downsampledRecall)
  }
}
| maropu/spark | mllib/src/test/scala/org/apache/spark/mllib/evaluation/BinaryClassificationMetricsSuite.scala | Scala | apache-2.0 | 8,240 |
package com.imaginea.activegrid.core.models
/**
* Created by nagulmeeras on 04/01/17.
*/
/**
 * Snapshot of a workflow's execution: the workflow itself, the step currently
 * being executed (if any), per-step execution contexts, and the overall
 * execution status.
 */
case class WorkflowContext(workflow: Workflow,
                           currentStep: Option[Step],
                           stepContextMap: Option[Map[Step, StepExecutonContext]],
                           workFlowExecutionStatus: Option[WorkFlowExecutionStatus]
                          )
object WorkflowContext {
  /**
   * Obtains the current workflow context.
   *
   * Still unimplemented (TODO by Shiva). The previous placeholder body,
   * `AnyRef.asInstanceOf[WorkflowContext]`, was an invalid cast that could
   * never produce a usable instance; throwing NotImplementedError instead
   * makes the missing implementation fail fast and explicit at the call site.
   */
  def get() : WorkflowContext =
    throw new NotImplementedError("WorkflowContext.get is not implemented yet") // TODO by Shiva
}
| eklavya/activeGrid | src/main/scala/com/imaginea/activegrid/core/models/WorkflowContext.scala | Scala | apache-2.0 | 514 |
//macros
import scala.quoted._
object Macros {
  //a specialization of the `findOwner` function from `sourcecode` for our purposes
  /** Walks up the owner chain from `s`, skipping compiler-synthesized symbols. */
  private def firstNonSyntheticOwner(using Quotes)(s: quotes.reflect.Symbol): quotes.reflect.Symbol = {
    import quotes.reflect._
    if (s.flags.is(Flags.Synthetic)) firstNonSyntheticOwner(s.owner)
    else s
  }
  /** Macro implementation: the name of the first non-synthetic owner of the splice point. */
  def genOwnerImpl()(using Quotes): Expr[String] = {
    import quotes.reflect._
    Expr(firstNonSyntheticOwner(Symbol.spliceOwner).name)
  }
}
object Foo {
  /** Expands, at compile time, to the name of the enclosing non-synthetic definition. */
  inline def genOwner: String = ${ Macros.genOwnerImpl() }
}
| dotty-staging/dotty | tests/run-macros/i8877/Macros_1.scala | Scala | apache-2.0 | 569 |
package fpinscala.testing
import fpinscala.laziness.Stream
import fpinscala.state._
import fpinscala.parallelism._
import fpinscala.parallelism.Par.Par
import fpinscala.monads.Functor
import fpinscala.monads.Monad
import Gen._
import Prop._
import fpinscala.laziness.Stream
import java.util.concurrent.{Executors,ExecutorService}
import scala.collection.immutable.{Stream => _}
//case class Prop(run: ((PropTypes.TestCases, RNG) => PropTypes.Result))
/**
 * A testable property: a function from (max size, number of test cases, RNG)
 * to a test [[Prop.Result]].
 */
case class Prop (
  run: (
    Prop.MaxSize, // Int
    Prop.TestCases, // Int
    RNG
  ) => Prop.Result
){
  /**
   * Conjunction of two properties. Both properties are always evaluated (no
   * short-circuiting); a single failure is returned as-is, and two failures
   * are merged (messages joined, success counts summed).
   */
  def &&(otherProp: Prop): Prop = Prop {
    (max, n, rng) =>
      (this.run(max, n, rng), otherProp.run(max, n, rng)) match {
        case (Passed, Passed)       => Passed
        case (f: Falsified, Passed) => f
        case (Passed, f: Falsified) => f
        case (Falsified(msg1, count1), Falsified(msg2, count2)) =>
          Falsified(msg1 + ", " + msg2, count1 + count2)
      }
  }
  /**
   * Disjunction of two properties. Both properties are always evaluated; the
   * result is Passed unless both fail, in which case the failures are merged
   * (messages joined, success counts summed).
   */
  def ||(otherProp: Prop): Prop = Prop {
    (max, n, rng) =>
      (this.run(max, n, rng), otherProp.run(max, n, rng)) match {
        case (Falsified(msg1, count1), Falsified(msg2, count2)) =>
          Falsified(msg1 + ", " + msg2, count1 + count2)
        case _ => Passed
      }
  }
}
object Prop {
  // this method and case class Prop's run method both have the same name,
  // but are differentiated by their arguments, I think.
  /** Runs a property with the given (or default) parameters and prints the outcome to stdout. */
  def run(p: Prop,
          maxSize: Int = 100,
          testCases: Int = 100,
          rng: RNG = RNG.Simple(System.currentTimeMillis().toLong)
  ): Unit =
    p.run(maxSize, testCases, rng) match {
      case Prop.Falsified(msg, n) =>
        println(s"Falsified after $n passed tests: \n $msg")
      case Prop.Passed => println(s"Passed $testCases tests")
    }
  // forAll for Gen[A] from answers
  /**
   * Builds a property that checks predicate `f` against up to `n` values
   * produced by generator `as`; stops at the first falsifying case. The
   * max-size parameter is ignored by this overload.
   */
  def forAll[A](as: Gen[A])(f: A => Boolean): Prop = {
    val g: (MaxSize, TestCases, RNG) => Result =
      (max: Int, n: Int, rng: RNG) => {
        // Infinite lazy stream of generated values...
        val streamA: Stream[A] =
          randomStream(as)(rng)
        // println("Stream[A]")
        //streamA.feedback
        // ...paired with their test-case index.
        val streamAInt: Stream[(A, Int)] =
          streamA.zip(Stream.from(0))
        // println("Stream[(A, Int)]")
        //streamAInt.feedback
        // 'take' is, I think, makes a lazy stream strict, up to 'n' nodes
        val taken: Stream[(A, Int)] =
          streamAInt.take(n)
        // println("taken")
        //taken.feedback
        // Evaluate the predicate per case; exceptions become Falsified results.
        val streamResult: Stream[Result] =
          taken.map {(tpl: Tuple2[A,Int]) => {
            val a: A = tpl._1
            val i: Int = tpl._2
            // println(i)
            val result: Result = try {
              if (f(a)) Passed else Falsified(a.toString, i)
            } catch { case e: Exception => Falsified(buildMsg(a, e), i) }
            result
          }
        }: Stream[Result]
        // println("Stream[Result]")
        //streamResult.feedback
        // First falsified result wins; laziness means later cases are not evaluated.
        val optionAggregatedResult: Option[Result] =
          streamResult.find((r: Result) => r match {
            case fpinscala.testing.Prop.Passed => false
            case fpinscala.testing.Prop.Falsified(
              failure: String, successes: Int
            ) => {
              // println(failure)
              // println("successes: "+successes)
              true
            }
          }
          )
        val aggregatedResult: Result =
          optionAggregatedResult.getOrElse(Passed)
        // println("Result: "+aggregatedResult)
        aggregatedResult
        // Prop.Passed
      }
    Prop(g)
  }
  /** forAll for sized generators: delegates to the Int => Gen[A] overload. */
  def forAll[A](sGen: SGen[A])(f: A => Boolean): Prop =
    forAll(sGen(_))(f)
  /**
   * forAll for size-indexed generators: builds one property per size from 0 to
   * (n min max), gives each a share of the test cases, and conjoins them all.
   */
  def forAll[A](g: Int => Gen[A])(f: A => Boolean): Prop = Prop (
    (max: MaxSize, n: TestCases, rng: RNG) => {
      val casesPerSize = (n + (max - 1)) / max
      val props: Stream[Prop] =
        Stream.from(0).take((n min max) + 1).map {
          (i: Int) => this.forAll(g(i))(f)
        }
      val propResults: Stream[Prop] = props.map {
        (p0: Prop) => Prop { (max, _, rng) =>
          p0.run(max, casesPerSize, rng)
        }}
      val reducedProp: Prop = propResults.toList.reduce {
        (p1: Prop, p2: Prop) => p1.&&(p2)
      }
      reducedProp.run(max, n, rng)
    }
  )
  type SuccessCount = Int
  type FailedCase = String
  type TestCases = Int
  type MaxSize = Int
  /** Outcome of running a property: either Passed or Falsified. */
  sealed trait Result {
    def isFalsified: Boolean
  }
  case object Passed extends Result {
    def isFalsified = false
  }
  /** A failing outcome: the failure description plus how many cases passed before it. */
  case class Falsified(
    failure: FailedCase,
    successes: SuccessCount
  ) extends Result {
    def isFalsified = true
  }
  /** Infinite lazy stream of generated values, produced by unfolding the RNG. */
  def randomStream[A](g: Gen[A])(rng0: RNG): Stream[A] =
    Stream.unfold(rng0){
      (rng1: RNG) => {
        val (a, rng2): Tuple2[A, RNG] = g.sample.run(rng1)
        Some((a, rng2))
      }
    }
  /** Formats a failing test case together with the exception it raised. */
  def buildMsg[A](a: A, e: Exception): String =
    s"test case: $a \n" +
    s"generated an exception: $e \n" +
    s"stack trace: \n ${e.getStackTrace().mkString("\n")}"
}
// object PropTypes {
// type SuccessCount = Int
// type FailedCase = String
// type TestCases = Int
// sealed trait Result {
// def isFalsified: Boolean
// }
// case object Passed extends Result {
// def isFalsified = false
// }
// case object Failed extends Result {
// def isFalsified = true
// }
// }
/*
case class State[S,A](run: S => (A, S))
*/
// case class Gen[A](sample: State[RNG, A]){
/**
 * A generator of values of type A: a wrapper around a state action that
 * threads an RNG through each sampling step.
 */
case class Gen[A](sample: State.Rand[A]){
  /** Transforms every generated value with `f`. */
  def map[B](f: A => B): Gen[B] = {
    val stateB: State.Rand[B] = this.sample.map(f)
    Gen[B](stateB)
  }
  /** Combines two generators with a binary function, expressed via flatMap/map. */
  def map2[B,C](gb: Gen[B])(f: (A, B) => C): Gen[C] = {
    this.flatMap({(a: A) => {
      gb.map((b: B) => f(a, b)): Gen[C]}
    }: A => Gen[C]
    ): Gen[C]
  }: Gen[C]
  /**
   * Sequences two generation steps: generates an A, then runs the generator
   * produced by `f` with the RNG state left over from the first step.
   */
  def flatMap[B](f: A => Gen[B]): Gen[B] = {
    // How can this be simplified?
    // Is this equivalent to the meticulous, error-prone "wiring"
    // between states, mentioned here?
    // https://www.youtube.com/watch?v=Jg3Uv_YWJqI
    Gen {
      State {
        (rng0: RNG) => {
          val (a1, rng1): Tuple2[A, RNG] = sample.run(rng0)
          val fOut: Gen[B] = f(a1)
          val (b0, rng2): Tuple2[B, RNG] = fOut.sample.run(rng1)
          (b0, rng2)
        }
      }
    }
  }
  // def flatMap[B](f: A => Gen[B]): Gen[B] = {
  //   val runA: Function1[RNG,Tuple2[A,RNG]] = this.sample.run
  //   val runB: Function1[RNG,Tuple2[B,RNG]] =
  //     (rng: RNG) => runA(rng)
  // redundant use of method name, but arguments are different
  /** Generates a list whose length is itself generated by `size`. */
  def listOfN(size: Gen[Int]): Gen[List[A]] = {
    // Int => Gen[List[A]]
    size.flatMap {
      (i: Int) => {
        Gen.listOfN(i, this)
      }
    }
  }
  // 'def **' is the apply counterpart to 'object **.unapply'
  /** Pairs values from this generator with values from `g`. */
  def **[B](g: Gen[B]): Gen[(A, B)] =
    this.map2(g)({(a: A, b: B) => {
      (a, b): Tuple2[A, B]
    }}: (A, B) => Tuple2[A, B]
    ): Gen[Tuple2[A, B]]
  object ** {
    def unapply[A, B](p: (A, B)) = Some(p)
  }
  /** Same as `**`, under an alphabetic name. */
  def product[B](g: Gen[B]): Gen[(A, B)] =
    this.map2(g)({(a: A, b: B) => {
      (a, b): Tuple2[A, B]
    }}: (A, B) => Tuple2[A, B]
    ): Gen[Tuple2[A, B]]
  object product {
    def unapply[A, B](p: (A, B)) = Some(p)
  }
  /** Lifts this generator to a sized generator that ignores the size argument. */
  def unsized: SGen[A] = SGen((i: Int) => this) // i ignored?
}
object Gen {
  val genMonad = Monad.genMonad

  /** A generator that always produces `a` (evaluated lazily). */
  def unit[A](a: => A): Gen[A] = new Gen(State.unit(a))

  /** A list of exactly `n` values drawn from `g`, sequenced via the Gen monad. */
  def listOfN[A](n: Int, g: Gen[A]): Gen[List[A]] = {
    val gens = List.fill(n)(g)
    genMonad.sequence(gens)
  }

  /** Same as `listOfN`, but sequenced at the State level instead. */
  def _listOfN[A](n: Int, g: Gen[A]): Gen[List[A]] = {
    val states = List.fill(n)(g.sample)
    Gen(State.sequence(states))
  }

  /**
   * A string of exactly `n` printable-ASCII characters (codes 0x21 to 0x79).
   *
   * Fixed: the previous version mapped `li.toString()`, which rendered the
   * literal "List(33, 34, ...)" representation instead of an n-character
   * string built from the generated character codes.
   */
  def stringOfLength(n: Int): Gen[String] = {
    val genAsciiInt = choose(0x21, 0x7a)
    listOfN(n, genAsciiInt).map(codes => codes.map(_.toChar).mkString)
  }

  /** A string of random length in [10, 100). */
  def string: Gen[String] =
    choose(10, 100).flatMap(length => stringOfLength(length))

  /** An integer uniformly drawn from [start, stopExclusive). */
  def choose(start: Int, stopExclusive: Int): Gen[Int] = {
    // State.Rand[Int] == State[RNG, Int]; run: RNG => (Int, RNG)
    val stateInt: State.Rand[Int] = State {
      (rng: RNG) => RNG.chooseInt(rng)(start, stopExclusive)
    }
    Gen(stateInt)
  }

  /** A double uniformly drawn from [start, stopExclusive). */
  def chooseDouble(start: Double, stopExclusive: Double): Gen[Double] = {
    val stateDouble: State.Rand[Double] = State {
      (rng: RNG) => RNG.chooseDouble(rng)(start, stopExclusive)
    }
    Gen(stateDouble)
  }

  /** Pick between two generators with equal probability. */
  def union[A](g1: Gen[A], g2: Gen[A]): Gen[A] =
    choose(0, 2).flatMap(i => if (i == 0) g1 else g2)

  /**
   * Pick between two generators with probability proportional to the
   * absolute values of the supplied weights (weights need not sum to 1.0).
   */
  def weighted[A](g1: (Gen[A], Double), g2: (Gen[A], Double)): Gen[A] = {
    val w1: Double = scala.math.abs(g1._2)
    val w2: Double = scala.math.abs(g2._2)
    chooseDouble(0, w1 + w2).flatMap(d => if (d < w1) g1._1 else g2._1)
  }

  /** A sized list generator: size n yields a list of exactly n elements. */
  def listOf[A](g: Gen[A]): SGen[List[A]] =
    SGen(size => Gen.listOfN(size, g))
}
/**
 * Ad-hoc demo / smoke tests for the Prop and Gen machinery.
 * Each section in `main` prints a label and runs a property with Prop.run.
 */
object PropTests {
  // A property that always passes, regardless of run parameters.
  val simpleProp = Prop(
    (maxSize: Int, testCases: Int, rng: RNG) => Prop.Passed
  )
  val chooseGenInt: Gen[Int] = Gen.choose(5, 30)
  val simpleRNG: RNG = RNG.Simple(123)
  val randomIntStream: Stream[Int] =
    Prop.randomStream(chooseGenInt)(simpleRNG)
  // choose(5, 30) is inclusive at 5 and exclusive at 30.
  val randomIntStreamProp: Prop =
    Prop.forAll(chooseGenInt)((i: Int) => (i>=5 && i<30))

  def main(args: Array[String]): Unit = {
    val ES: ExecutorService = Executors.newCachedThreadPool
    /*
    These don't exist *until* main is run!
    One of the differences between a def and val.
    scala> PropTests.main(Array[String]()).chooseGenInt
    <console>:9: error: value chooseGenInt is not a member of Unit
                  PropTests.main(Array[String]()).chooseGenInt
                                                  ^
     */
    println("random int stream prop")
    println(randomIntStream.toListFinite(15))
    Prop.run(randomIntStreamProp)

    println("random Ints above zero prop")
    val randomsAboveZeroProp = Prop.forAll(Gen.choose(4, 40)){
      (i: Int) => i > 0
    }
    Prop.run(randomsAboveZeroProp)

    // Deliberately falsifiable: an empty list makes la.max throw.
    //   test case: List()
    //   generated an exception:
    //   java.lang.UnsupportedOperationException: empty.max
    println("list maximum prop")
    val smallInt: Gen[Int] = Gen.choose(-10,10)
    val genListInt: Gen[List[Int]] = smallInt.listOfN(Gen.choose(5,15))
    //val sgenListInt: SGen[List[Int]] = Gen.listOf(smallInt)
    // NOTE(review): unqualified forAll — presumably Prop's members are
    // imported near the top of the file; confirm.
    val smallIntProp: Prop = forAll(genListInt) {
      (la: List[Int]) => {
        val max = la.max
        !la.exists((i: Int) => i>max)
      }
    }
    Prop.run(smallIntProp)

    /*
    In this example, Prop.forAll takes in a Gen that will only ever
    generate one value -- Par.unit(1).
    How will Prop.forAll handle a generator that generates many Pars?
     */
    println("parallel addition prop, with Par")
    val parAdditionProp: Prop = Prop.forAll(Gen.unit(Par.unit(1))){
      (pi: Par[Int]) => {
        Par.map(pi)(_ + 1)(ES).get == Par.unit(2)(ES).get
      }
    }
    Prop.run(parAdditionProp)

    ES.shutdown()
  }
}
// sized generator
/*
covariant type A occurs in invariant position in type
=> Int => fpinscala.testing.Gen[A] of value g
case class SGen[+A](g: Int => Gen[A]){...
^
Given A >: B (Number >: Double)
SGen[A] >: SGen[B] (SGen[Number] >: SGen[Double])
but in function g
Gen[A] and Gen[B] have no relation
or
(Int => Gen[A]) and (Int => Gen[B]) have no relation
*/
// case class SGen[+A](g: Int => Gen[A]){
/**
 * A sized generator: a function from a size to a generator of that size.
 * (Kept invariant in A — see the comment above about covariance.)
 */
case class SGen[A](g: Int => Gen[A]) {

  /** Produce the generator for size `n`. */
  def apply(n: Int): Gen[A] = g(n)

  def map[B](f: A => B): SGen[B] =
    SGen { n => g(n).map(f) }

  def flatMap[B](f: A => Gen[B]): SGen[B] =
    SGen { n => g(n).flatMap(f) }

  /** Pointwise pairing of two sized generators at the same size. */
  def **[B](s2: SGen[B]): SGen[(A,B)] =
    SGen { n => g(n) ** s2(n) }
}
| peterbecich/fpinscala | exercises/src/main/scala/fpinscala/testing/Gen.scala | Scala | mit | 14,667 |
/*
* Copyright (c) 2017 Magomed Abdurakhmanov, Hypertino
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
*/
package com.hypertino.facade.workers
import akka.actor.ActorRefFactory
import com.typesafe.scalalogging.StrictLogging
import spray.http.ContentTypes._
import spray.http.{HttpEntity, HttpResponse}
import spray.routing._
import spray.routing.directives.{HeaderDirectives, RouteDirectives}
/**
 * Fallback HTTP worker used once the server has hit `maxConnectionCount`
 * concurrent input connections: every request is logged and answered with 503.
 *
 * @param maxConnectionCount the configured connection ceiling (used for logging only)
 */
class NoMoreConnectionsWorker(maxConnectionCount: Int) extends HttpServiceActor with StrictLogging {

  def receive: Receive = {
    implicit val refFactory: ActorRefFactory = context
    runRoute {
      // X-Forwarded-For lets us log the original client address when the
      // service sits behind a proxy / load balancer.
      HeaderDirectives.optionalHeaderValueByName("X-Forwarded-For") { forwardedFor ⇒
        RouteDirectives.complete {
          // Fixed log message grammar: "is exceed" -> "is exceeded".
          logger.warn(s"Maximum ($maxConnectionCount) active input connection count is exceeded for ${forwardedFor.getOrElse("'address is unknown'")}.")
          HttpResponse(503, HttpEntity(`text/plain`, "Connection/worker limit is exceeded"))
        }
      }
    }
  }
}
| hypertino/hyperfacade | src/main/scala/com/hypertino/facade/workers/NoMoreConnectionsWorker.scala | Scala | mpl-2.0 | 1,151 |
package org.hashids
import scala.annotation.tailrec
/**
 * Scala port of the Hashids algorithm (http://hashids.org): encodes one or
 * more non-negative Longs into a short, obfuscated, decodable string.
 *
 * @param salt          secret used to shuffle the alphabet; the same salt is
 *                      required to decode
 * @param minHashLength pad generated hashes up to at least this length
 * @param alphabet      output characters; at least 16 distinct non-space chars
 */
class Hashids(
  salt: String = "",
  minHashLength: Int = 0,
  alphabet: String = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"
) {

  require(alphabet.length == alphabet.toSet.size, "check your alphabet for duplicates")
  require(alphabet.length >= 16, "alphabet must contain at least 16 characters")
  require(alphabet.indexOf(" ") < 0, "alphabet cannot contains spaces")

  // Ratios from the reference implementation: what fraction of the alphabet
  // is set aside for separator and guard characters.
  private val sepDiv = 3.5
  private val guardDiv = 12

  // Partition the alphabet into (separators, guards, effective alphabet),
  // shuffled by the salt, mirroring the reference algorithm step by step.
  private val (seps, guards, effectiveAlphabet) = {
    val filteredSeps = "cfhistuCFHISTU".filter(x => alphabet.contains(x))
    val filteredAlphabet = alphabet.filterNot(x => filteredSeps.contains(x))
    val shuffledSeps = consistentShuffle(filteredSeps, salt)
    val (tmpSeps, tmpAlpha) =
      if (shuffledSeps.isEmpty || ((filteredAlphabet.length / shuffledSeps.length) > sepDiv)) {
        val sepsTmpLen = Math.ceil(filteredAlphabet.length / sepDiv).toInt
        val sepsLen = if(sepsTmpLen == 1) 2 else sepsTmpLen
        if(sepsLen > shuffledSeps.length) {
          // Not enough separators: borrow the missing ones from the alphabet.
          val diff = sepsLen - shuffledSeps.length
          val seps = shuffledSeps + filteredAlphabet.substring(0, diff)
          val alpha = filteredAlphabet.substring(diff)
          (seps, alpha)
        } else {
          val seps = shuffledSeps.substring(0, sepsLen)
          val alpha = filteredAlphabet
          (seps, alpha)
        }
      } else (shuffledSeps, filteredAlphabet)
    val guardCount = Math.ceil(tmpAlpha.length.toDouble / guardDiv).toInt
    val shuffledAlpha = consistentShuffle(tmpAlpha, salt)
    if(shuffledAlpha.length < 3) {
      val guards = tmpSeps.substring(0, guardCount)
      val seps = tmpSeps.substring(guardCount)
      (seps, guards, shuffledAlpha)
    } else {
      val guards = shuffledAlpha.substring(0, guardCount)
      val alpha = shuffledAlpha.substring(guardCount)
      (tmpSeps, guards, alpha)
    }
  }

  /** Encodes the given numbers; returns "" when no numbers are supplied. */
  def encode(numbers: Long*): String =
    if(numbers.isEmpty) "" else _encode(numbers:_*)

  /**
   * Encodes a hexadecimal string. The input is split into groups of up to 12
   * hex digits; each group is prefixed with a "1" nibble (so leading zeros
   * survive the Long round-trip) and the resulting Longs are encoded.
   */
  def encodeHex(in: String): String = {
    require(in.matches("^[0-9a-fA-F]+$"), "Not a HEX string")
    val matcher = "[\\w\\W]{1,12}".r.pattern.matcher(in)
    @tailrec
    def doSplit(result: List[Long]): List[Long] = {
      if (matcher.find()) doSplit(java.lang.Long.parseLong("1" + matcher.group, 16) :: result)
      else result
    }
    // Fixed: doSplit prepends, so the accumulated groups come out in reverse
    // order; restore the left-to-right order before encoding. Without this,
    // decodeHex(encodeHex(s)) != s for inputs longer than 12 hex digits, and
    // the output disagrees with the reference hashids implementations.
    _encode(doSplit(Nil).reverse:_*)
  }

  private def _encode(numbers: Long*): String = {
    val indexedNumbers = numbers.zipWithIndex
    // Seed derived from all inputs; selects the leading "lottery" character.
    val numberHash = indexedNumbers
      .foldLeft[Int](0){ case (acc, (x, i)) =>
        acc + (x % (i+100)).toInt
      }
    val lottery = effectiveAlphabet.charAt(numberHash % effectiveAlphabet.length).toString
    val (tmpResult, tmpAlpha) =
      indexedNumbers.foldLeft[(String, String)]((lottery, effectiveAlphabet)) {
        case ((result, alpha), (x, i)) =>
          val buffer = lottery + salt + alpha
          val newAlpha = consistentShuffle(alpha, buffer.substring(0, alpha.length))
          val last = hash(x, newAlpha)
          val newResult = result + last
          if (i + 1 < numbers.size) {
            // Between numbers, append a separator picked from the input.
            val num = x % (last.codePointAt(0) + i)
            val sepsIndex = (num % seps.length).toInt
            (newResult + seps.charAt(sepsIndex), newAlpha)
          } else {
            (newResult, newAlpha)
          }
      }
    // Pad with guard characters when the hash is shorter than minHashLength.
    val provisionalResult = if(tmpResult.length < minHashLength) {
      val guardIndex = (numberHash + tmpResult.codePointAt(0)) % guards.length
      val guard = guards.charAt(guardIndex)
      val provResult = s"$guard$tmpResult"
      if(provResult.length < minHashLength) {
        val guardIndex = (numberHash + provResult.codePointAt(2)) % guards.length
        val guard = guards.charAt(guardIndex)
        provResult + guard
      } else {
        provResult
      }
    } else tmpResult
    val halfLen = tmpAlpha.length / 2
    // Keep wrapping the hash in alphabet halves (trimming the excess) until
    // the minimum length is reached.
    @tailrec
    def respectMinHashLength(alpha: String, res: String): String = {
      if (res.length >= minHashLength) res
      else {
        val newAlpha = consistentShuffle(alpha, alpha)
        val tmpRes = newAlpha.substring(halfLen) + res + newAlpha.substring(0, halfLen)
        val excess = tmpRes.length - minHashLength
        val newRes = if(excess > 0) {
          val startPos = excess / 2
          tmpRes.substring(startPos, startPos + minHashLength)
        } else tmpRes
        respectMinHashLength(newAlpha, newRes)
      }
    }
    respectMinHashLength(tmpAlpha, provisionalResult)
  }

  /**
   * Decodes a hash back into the numbers it was built from.
   *
   * @throws IllegalStateException when re-encoding the result does not
   *         reproduce `hash` (i.e. a different salt/alphabet was used)
   */
  def decode(hash: String): List[Long] = hash match {
    case "" => Nil
    case x =>
      val res = _decode(x, effectiveAlphabet)
      if (encode(res:_*) == hash) res
      else throw new IllegalStateException(s"Hash $hash was generated with different Hashids salt/alphabet")
  }

  /** Inverse of [[encodeHex]]; note the result is upper-cased hex. */
  def decodeHex(hash: String): String =
    decode(hash).map(x => x.toHexString.substring(1).toUpperCase).mkString

  private def _decode(hash: String, alphabet: String): List[Long] = {
    // Strip guard characters, keep the payload segment.
    val hashArray = hash.split(s"[$guards]")
    val i = if(hashArray.length == 3 || hashArray.length == 2) 1 else 0
    val lottery = hashArray(i).charAt(0)
    val hashBreakdown = hashArray(i).substring(1).split(s"[$seps]")
    @tailrec
    def doDecode(in: List[String], buff: String,
                 alpha: String, result: List[Long]): List[Long] = in match {
      case Nil => result.reverse
      case x :: tail =>
        val newAlpha = consistentShuffle(alpha, buff.substring(0, alpha.length))
        val newBuf = s"$lottery$salt$newAlpha"
        doDecode(tail, newBuf, newAlpha, unhash(x, newAlpha) :: result)
    }
    doDecode(hashBreakdown.toList, s"$lottery$salt$effectiveAlphabet", effectiveAlphabet, Nil)
  }

  /**
   * Salt-driven deterministic shuffle from the reference implementation.
   * Public because the partitioning above and external tests rely on it.
   */
  def consistentShuffle(alphabet: String, salt: String): String = {
    @tailrec
    def doShuffle(i: Int, v: Int, p: Int, result: String): String = {
      if (i <= 0) {
        result
      } else {
        val newV = v % salt.length;
        val ascii = salt.codePointAt(newV)
        val newP = p + ascii
        val j = (ascii + newV + newP) % i
        // Swap positions i and j.
        val tmp = result.charAt(j)
        val alphaSuff = result.substring(0, j) + result.charAt(i) + result.substring(j + 1)
        val res = alphaSuff.substring(0, i) + tmp + alphaSuff.substring(i + 1)
        doShuffle(i - 1, newV + 1, newP, res)
      }
    }
    if(salt.length <= 0) alphabet
    else doShuffle(alphabet.length - 1, 0, 0, alphabet)
  }

  // Base-conversion of a number into the given alphabet.
  private def hash(input: Long, alphabet: String): String = {
    val alphaSize = alphabet.length.toLong
    @tailrec
    def doHash(in: Long, hash: String): String = {
      if (in <= 0) hash
      else {
        val newIn = in / alphaSize
        val newChar = alphabet.charAt((in % alphaSize).toInt)
        doHash(newIn, s"$newChar$hash")
      }
    }
    doHash(input / alphaSize, alphabet.charAt((input % alphaSize).toInt).toString)
  }

  // Inverse of `hash`: interpret the string as a number in the given alphabet.
  private def unhash(input: String, alphabet: String): Long =
    input.zipWithIndex.foldLeft[Long](0L){case (acc, (in, i)) =>
      acc + (alphabet.indexOf(in) *
        Math.pow(alphabet.length, input.length - i - 1)).toLong
    }
}
/** Implicit syntax and convenience constructors for [[Hashids]]. */
object Hashids {

  /** Adds `.hashid` to Long, given an implicit Hashids codec. */
  implicit class HashidsLongOps(x: Long) {
    def hashid(implicit hashids: Hashids): String = hashids.encode(x)
  }

  /** Adds decode / hex helpers to String, given an implicit Hashids codec. */
  implicit class HashidsStringOps(x: String) {
    def unhashid(implicit hashids: Hashids): Seq[Long] = hashids.decode(x)
    def hashidHex(implicit hashids: Hashids): String = hashids.encodeHex(x)
    def unhashidHex(implicit hashids: Hashids): String = hashids.decodeHex(x)
  }

  def apply(salt: String): Hashids =
    new Hashids(salt)

  def apply(salt: String, minHashLength: Int): Hashids =
    new Hashids(salt, minHashLength)

  def apply(salt: String, minHashLength: Int, alphabet: String): Hashids =
    new Hashids(salt, minHashLength, alphabet)
}
| ancane/hashids.scala | src/main/scala/org/hashids/Hashids.scala | Scala | mit | 7,783 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.fuberlin.wiwiss.silk.server.model
import de.fuberlin.wiwiss.silk.datasource.DataSource
import de.fuberlin.wiwiss.silk.runtime.plugin.Plugin
import de.fuberlin.wiwiss.silk.entity.{SparqlRestriction, Path, Entity, EntityDescription}
/**
* ${DESCRIPTION}
*
* <p><b>Company:</b>
* SAT, Research Studios Austria</p>
*
* <p><b>Copyright:</b>
* (c) 2011</p>
*
* <p><b>last modified:</b><br/>
* $Author: $<br/>
* $Date: $<br/>
* $Revision: $</p>
*
* @author fkleedorfer
*/
/**
* DataSource which doesn't retrieve any entities at all
*/
@Plugin(id = "nop", label = "inactive datasource", description = "DataSource which doesn't retrieve any entities at all")
class NopDataSource extends DataSource {
override def retrieve(entityDesc: EntityDescription, entities: Seq[String]) = {
Traversable.empty[Entity]
}
override def retrievePaths(restrictions: SparqlRestriction, depth: Int, limit: Option[Int]): Traversable[(Path, Double)] = {
Traversable.empty[(Path, Double)]
}
}
| fusepoolP3/p3-silk | silk-server/src/main/scala/de/fuberlin/wiwiss/silk/server/model/NopDataSource.scala | Scala | apache-2.0 | 1,594 |
package play.boilerplate.parser.model
/** HTTP verbs supported by the generated routes, as a Scala Enumeration. */
object HttpMethod extends Enumeration {
  // NOTE: declaration order fixes the numeric ids of the values — do not reorder.
  val Get: Value = Value("GET")
  val Put: Value = Value("PUT")
  val Post: Value = Value("POST")
  val Head: Value = Value("HEAD")
  val Delete: Value = Value("DELETE")
  val Patch: Value = Value("PATCH")
  val Options: Value = Value("OPTIONS")
}
| Romastyi/sbt-play-boilerplate | sbt-plugin/lib/src/main/scala/play/boilerplate/parser/model/HttpMethod.scala | Scala | apache-2.0 | 278 |
/**
* Copyright (C) 2016 Hurence (support@hurence.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hurence.botsearch.analytics
import java.text.SimpleDateFormat
import java.util.Date
import com.hurence.logisland.botsearch.{HttpFlow, Trace}
import com.hurence.logisland.util.spark.SparkUtils
import com.typesafe.scalalogging.slf4j.LazyLogging
import kafka.serializer.StringDecoder
import org.apache.commons.cli.{GnuParser, Options}
import org.apache.spark.mllib.clustering.KMeans
import org.apache.spark.mllib.feature.StandardScaler
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.sql.SQLContext
//import org.apache.spark.streaming.kafka.{KafkaUtils, OffsetRange}
import scala.collection.JavaConversions._
import scala.collection.mutable.ArrayBuffer
/**
* A demo program that index a few items
*
* https://cwiki.apache.org/confluence/display/KAFKA/0.8.0+SimpleConsumer+Example
*
*
*/
/**
 * Batch job skeleton: parses CLI options, initialises a Spark context and
 * looks up the latest Kafka offset. The flow -> trace -> KMeans-clustering ->
 * indexation pipeline below is currently disabled (kept as a block comment),
 * so only the offset lookup actually runs.
 */
object BatchTracesIndexer extends LazyLogging {

    def main(args: Array[String]) {

        //////////////////////////////////////////
        // Command line management
        val parser = new GnuParser()
        val options = new Options()


        options.addOption("o", "output", true, "es, solr, debug")
        options.addOption("w", "time-window", true, "window time for micro batch")
        options.addOption("b", "broker-list", true, "kafka broker list :localhost:9092,anotherhost:9092")
        options.addOption("t", "topic-list", true, "kafka topic list logisland1,logisland2")
        options.addOption("e", "es-host", true, "elasticsearch host : sandbox")
        options.addOption("h", "help", false, "print usage")
        options.addOption("f", "folder-path", true, "parquet folder path")
        options.addOption("p", "parquet", false, "store to parquet ?")
        options.addOption("i", "index", false, "indexation ?")

        // parse the command line arguments (defaults target the sandbox VM)
        val line = parser.parse(options, args)
        val output = line.getOptionValue("o", "debug")
        val windowTime = line.getOptionValue("w", "2").toLong
        val brokerList = line.getOptionValue("b", "sandbox:9092")
        val topicList = line.getOptionValue("t", "logisland")
        val esHosts = line.getOptionValue("e", "sandbox")
        val doSaveAsParquet = line.hasOption("p")
        val doIndexation = line.hasOption("i")
        val source = "file://" + line.getOptionValue("f", "/usr/local/logisland/data/out")

        // set up context
        val sc = SparkUtils.initContext(this.getClass.getName)
        val sqlContext = new SQLContext(sc)
        import sqlContext.implicits._

        // Define the Kafka parameters, broker list must be specified
        val kafkaParams = Map("metadata.broker.list" -> brokerList, "group.id" -> "logisland-demo")

        // Define which topics to read from
        val topics = topicList.split(",").toSet

        // Latest offset of partition 0 of the "logisland" topic; the disabled
        // pipeline below used it to bound the RDD read from Kafka.
        val lastOffest = KafkaOffsetUtils.getLastOffset(
            brokerList,
            "logisland",
            0,
            kafka.api.OffsetRequest.LatestTime)


        /* val offsetRanges = Array(
             OffsetRange("logisland", 0, 0, lastOffest)
         )

         logger.info(s"last offset for kafka topic is $lastOffest")
         if (lastOffest != 0) {

             ////////////////////////////////////////
             // flows loading
             logger.info("Create the direct stream with the Kafka parameters and topics")
             val kafkaRdd = KafkaUtils.createRDD[String, String, StringDecoder, StringDecoder](
                 sc, kafkaParams, offsetRanges)

             logger.info("convert raw lines to networkflow objects")
             val flows = kafkaRdd.map(kv => NetworkFlow.parse(kv._2))


             ////////////////////////////////////////
             // trace computation
             logger.info("compute traces from flows")
             val traces = flows.map(r => (r.ipSource + "_" + r.ipTarget, r))
                 .groupByKey()
                 .map(t => {
                 val flows = t._2
                 val tokens = t._1.split("_")
                 val trace = new Trace()
                 try {
                     trace.setIpSource(tokens(0))
                     trace.setIpTarget(tokens(1))

                     // we need at least 5 flows to compute one trace
                     if (flows.size > 5) {

                         // set up the flows buffer
                         val buffer = new ArrayBuffer[HttpFlow]()
                         flows.foreach(f => {
                             val flow = new HttpFlow()
                             flow.setDate(new java.util.Date(f.timestamp))
                             flow.setRequestSize(f.requestSize)
                             flow.setResponseSize(f.responseSize)
                             flow.setTags(f.tags.split(",").toList)
                             buffer += flow
                         })

                         // flows need to be sorted on timestamp
                         val sortedFlows = buffer.toList.sortWith(_.getDate().getTime() < _.getDate().getTime())
                         sortedFlows.foreach(f => trace.add(f))

                         // compute trace frequencies and stats
                         trace.compute()
                     }
                 } catch {
                     case ex: Throwable => logger.error(ex.getMessage)
                 }

                 trace
             }).map(trace => (trace.getIpSource + "_" + trace.getIpTarget, new NetworkTrace(
                 trace.getIpSource,
                 trace.getIpTarget,
                 trace.getAvgUploadedBytes.toFloat,
                 trace.getAvgDownloadedBytes.toFloat,
                 trace.getAvgTimeBetweenTwoFLows.toFloat,
                 trace.getMostSignificantFrequency.toFloat,
                 trace.getFlows.size(),
                 trace.getTags.toArray.mkString(","))))


             ////////////////////////////////////////
             // traces clustering
             logger.info("convert traces into a Dense vector")
             val tracesVector = traces.map(t => (t._1,
                 Vectors.dense(Array[Double](
                     t._2.avgUploadedBytes,
                     t._2.avgDownloadedBytes,
                     t._2.avgTimeBetweenTwoFLows,
                     t._2.mostSignificantFrequency))))
                 .cache()

             logger.info("scale the trace to get mean = 0 and std = 1")
             val scaler = new StandardScaler(withMean = true, withStd = true)
                 .fit(tracesVector.map(_._2))
             val scaledTraces = tracesVector.map(x => (x._1, scaler.transform(x._2)))

             // TODO add an automated job which compute best parameters
             // Cluster the data into two classes using KMeans
             val numClusters = 8
             val numIterations = 20
             logger.info(s"Cluster the data into two classes using KMeans k:$numClusters, numIterations:$numIterations")
             val clusters = KMeans.train(scaledTraces.map(_._2), numClusters, numIterations)

             // Evaluate clustering by computing Within Set Sum of Squared Errors
             val WSSSE = clusters.computeCost(scaledTraces.map(_._2))
             logger.info(s"Within Set Sum of Squared Errors = $WSSSE")

             logger.info(s"assign traces to clusters")
             val centroids = scaledTraces.map(t => (t._1, clusters.predict(t._2))).toDF("id", "centroid")

             logger.info(s"save traces to parquet")
             val tmp = traces.map(r => (r._1, r._2.ipSource, r._2.ipTarget,
                 r._2.avgUploadedBytes,
                 r._2.avgDownloadedBytes,
                 r._2.avgTimeBetweenTwoFLows,
                 r._2.mostSignificantFrequency,
                 r._2.flowsCount,
                 r._2.tags)).toDF("id",
                     "ipSource",
                     "ipTarget",
                     "avgUploadedBytes",
                     "avgDownloadedBytes",
                     "avgTimeBetweenTwoFLows",
                     "mostSignificantFrequency",
                     "flowsCount",
                     "tags")
                 .join(centroids, "id")
                 .select("ipSource",
                     "ipTarget",
                     "avgUploadedBytes",
                     "avgDownloadedBytes",
                     "avgTimeBetweenTwoFLows",
                     "mostSignificantFrequency",
                     "flowsCount",
                     "tags",
                     "centroid")
                 .filter("flowsCount != 0")
                 .repartition(8)

             tmp.printSchema()
             tmp.show()

             ////////////////////////////////////////
             // traces indexation
             val dateSuffix = new SimpleDateFormat("yyyy.MM.dd").format(new Date())
             if (doIndexation) {
                 logger.info("create es index")
                 //   EventConverter.createIndex(esHosts, s"logisland-$dateSuffix")
                 logger.info("launch traces indexation to es")
                 //   EventConverter.index(tmp.toDF.rdd, esHosts, s"logisland-$dateSuffix", "trace")
             }

             if (doSaveAsParquet) {
                 logger.info("save traces to parquet")
                 tmp.write.save(s"$source/$dateSuffix/traces.parquet")
             }
         }*/

        sc.stop()
    }
}
| Hurence/log-island | logisland-components/logisland-processors/logisland-processor-botsearch/src/main/scala/com/hurence/botsearch/analytics/BatchTracesIndexer.scala | Scala | apache-2.0 | 9,976 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.python
import org.apache.spark.sql.execution.{FileSourceScanExec, SparkPlan, SparkPlanTest}
import org.apache.spark.sql.functions.col
import org.apache.spark.sql.test.SharedSQLContext
/**
 * Planner tests for how Python UDF evaluation nodes (BatchEvalPythonExec for
 * pickled UDFs, ArrowEvalPythonExec for scalar pandas UDFs) are extracted,
 * fused, and interact with column pruning and filter pushdown.
 */
class ExtractPythonUDFsSuite extends SparkPlanTest with SharedSQLContext {
  import testImplicits._

  val batchedPythonUDF = new MyDummyPythonUDF
  val scalarPandasUDF = new MyDummyScalarPandasUDF

  // All batch-eval (pickle-based) Python UDF nodes in a physical plan.
  private def collectBatchExec(plan: SparkPlan): Seq[BatchEvalPythonExec] =
    plan.collect { case node: BatchEvalPythonExec => node }

  // All Arrow-eval (pandas) Python UDF nodes in a physical plan.
  private def collectArrowExec(plan: SparkPlan): Seq[ArrowEvalPythonExec] =
    plan.collect { case node: ArrowEvalPythonExec => node }

  test("Chained Batched Python UDFs should be combined to a single physical node") {
    val base = Seq(("Hello", 4)).toDF("a", "b")
    val chained = base
      .withColumn("c", batchedPythonUDF(col("a")))
      .withColumn("d", batchedPythonUDF(col("c")))
    assert(collectBatchExec(chained.queryExecution.executedPlan).size == 1)
  }

  test("Chained Scalar Pandas UDFs should be combined to a single physical node") {
    val base = Seq(("Hello", 4)).toDF("a", "b")
    val chained = base
      .withColumn("c", scalarPandasUDF(col("a")))
      .withColumn("d", scalarPandasUDF(col("c")))
    assert(collectArrowExec(chained.queryExecution.executedPlan).size == 1)
  }

  test("Mixed Batched Python UDFs and Pandas UDF should be separate physical node") {
    val base = Seq(("Hello", 4)).toDF("a", "b")
    val mixed = base
      .withColumn("c", batchedPythonUDF(col("a")))
      .withColumn("d", scalarPandasUDF(col("b")))
    val plan = mixed.queryExecution.executedPlan
    assert(collectBatchExec(plan).size == 1)
    assert(collectArrowExec(plan).size == 1)
  }

  test("Independent Batched Python UDFs and Scalar Pandas UDFs should be combined separately") {
    val base = Seq(("Hello", 4)).toDF("a", "b")
    val combined = base
      .withColumn("c1", batchedPythonUDF(col("a")))
      .withColumn("c2", batchedPythonUDF(col("c1")))
      .withColumn("d1", scalarPandasUDF(col("a")))
      .withColumn("d2", scalarPandasUDF(col("d1")))
    val plan = combined.queryExecution.executedPlan
    assert(collectBatchExec(plan).size == 1)
    assert(collectArrowExec(plan).size == 1)
  }

  test("Dependent Batched Python UDFs and Scalar Pandas UDFs should not be combined") {
    val base = Seq(("Hello", 4)).toDF("a", "b")
    // Alternating dependencies force the two UDF kinds apart at every step.
    val interleaved = base
      .withColumn("c1", batchedPythonUDF(col("a")))
      .withColumn("d1", scalarPandasUDF(col("c1")))
      .withColumn("c2", batchedPythonUDF(col("d1")))
      .withColumn("d2", scalarPandasUDF(col("c2")))
    val plan = interleaved.queryExecution.executedPlan
    assert(collectBatchExec(plan).size == 2)
    assert(collectArrowExec(plan).size == 2)
  }

  test("Python UDF should not break column pruning/filter pushdown") {
    withTempPath { dir =>
      spark.range(10).select($"id".as("a"), $"id".as("b"))
        .write.parquet(dir.getCanonicalPath)
      val parquetDF = spark.read.parquet(dir.getCanonicalPath)

      withClue("column pruning") {
        val pruned = parquetDF.filter(batchedPythonUDF($"a")).select($"a")
        assert(collectBatchExec(pruned.queryExecution.executedPlan).length == 1)
        val scans = pruned.queryExecution.executedPlan.collect {
          case scan: FileSourceScanExec => scan
        }
        assert(scans.length == 1)
        assert(scans.head.output.map(_.name) == Seq("a"))
      }

      withClue("filter pushdown") {
        val pushed = parquetDF.filter($"a" > 1 && batchedPythonUDF($"a"))
        assert(collectBatchExec(pushed.queryExecution.executedPlan).length == 1)
        val scans = pushed.queryExecution.executedPlan.collect {
          case scan: FileSourceScanExec => scan
        }
        assert(scans.length == 1)
        // The pushed-down data filters are: 'a is not null and 'a > 1.
        assert(scans.head.dataFilters.length == 2)
        assert(scans.head.dataFilters.flatMap(_.references.map(_.name)).distinct == Seq("a"))
      }
    }
  }
}
| icexelloss/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/python/ExtractPythonUDFsSuite.scala | Scala | apache-2.0 | 5,185 |
package cassandra.cql
sealed trait CqlDataType {
def name:String
}
abstract class BaseDataType(val name: String) extends CqlDataType
case class ListDt(subType: CqlDataType) extends BaseDataType(s"list<${subType.name}>")
case class SetDt(subType: CqlDataType) extends BaseDataType(s"set<${subType.name}>")
case class TupleDt(subTypes: CqlDataType*) extends BaseDataType(s"frozen <tuple<${subTypes.map(_.name).mkString(",")}>>")
case class UserDefineDt(userDefinedName: String, ids:Traversable[String], types: (String, CqlDataType)*) extends BaseDataType(s"frozen <$userDefinedName>")
case object UuidDt extends BaseDataType("uuid")
case object TextDt extends BaseDataType("text")
case object BlobDt extends BaseDataType("blob")
case object IntDt extends BaseDataType("int")
case object LongDt extends BaseDataType("bigint")
case object BooleanDt extends BaseDataType("boolean")
case object DoubleDt extends BaseDataType("double")
case object FloatDt extends BaseDataType("float")
case object DecimalDt extends BaseDataType("decimal")
case object TimestampDt extends BaseDataType("timestamp")
| fabianmurariu/cassandra-scala-nuveau-driver | cql/lib/src/main/scala/cassandra/cql/CqlDataType.scala | Scala | apache-2.0 | 1,097 |
// Value class over an Option. Presumably a compiler regression fixture for
// lazy vals inside value-class methods (the neighbouring path mentions
// t6358) — the lazy val is the point of the test; do not "simplify" it away.
class Y[T](val i: Option[T]) extends AnyVal {
  def q: List[T] = {
    // lazy val inside a value-class method
    lazy val e: List[T] = i.toList
    e
  }
}
| AlexSikia/dotty | tests/untried/pos/t6358_2.scala | Scala | bsd-3-clause | 120 |
package com.chrisrebert.lmvtfy
import java.nio.charset.Charset
import scala.collection.mutable
import scala.util.Try
import akka.util.ByteString
import spray.http.{Uri, ContentType, MediaTypes, HttpCharsets, HttpEntity, HttpResponse}
package object util {
  // Shared UTF-8 charset instance used by every helper in this package object.
  private val utf8name = "UTF-8"
  private val utf8Charset = Charset.forName(utf8name)

  /** UTF-8 encoding views on String. */
  implicit class Utf8String(str: String) {
    def utf8Bytes: Array[Byte] = str.getBytes(utf8Charset)
    def utf8ByteString: ByteString = ByteString(this.utf8Bytes)
  }

  /** Small string utilities. */
  implicit class ConvenientString(str: String) {
    // Index of the last occurrence of `substr`, or None when absent.
    def rindex(substr: String): Option[Int] = {
      str.lastIndexOf(substr) match {
        case -1 => None
        case validIndex => Some(validIndex)
      }
    }
    // Prefix of `str` up to and including the last occurrence of `substr`;
    // the whole string when `substr` does not occur.
    def thruFinal(substr: String): String = {
      str.rindex(substr) match {
        case None => str
        case Some(index) => str.substring(0, index + 1)
      }
    }
    // True iff the *entire* string matches `regex`
    // (the extractor form `case regex(_*)` anchors the pattern).
    def matches(regex: scala.util.matching.Regex): Boolean = {
      str match {
        case regex(_*) => true
        case _ => false
      }
    }
  }

  /** Wraps a String as a SAX InputSource for XML/HTML parsing. */
  implicit class InputSourceString(str: String) {
    import java.io.StringReader
    import org.xml.sax.InputSource
    def asInputSource: InputSource = new InputSource(new StringReader(str))
  }

  /** Decodes a byte array as UTF-8, capturing decode failures in Try. */
  implicit class Utf8ByteArray(bytes: Array[Byte]) {
    def utf8String: Try[String] = Try { new String(bytes, utf8Charset) }
  }

  /** UTF-8 decoding and HTML-entity helpers for akka ByteString. */
  implicit class Utf8ByteString(byteStr: ByteString) {
    def utf8String: String = byteStr.decodeString(utf8name)
    def asUtf8HtmlHttpEntity: HttpEntity = HttpEntity(ContentType(MediaTypes.`text/html`, HttpCharsets.`UTF-8`), byteStr)
  }

  /** Convenience accessors for spray HttpResponse bodies. */
  implicit class RichResponse(response: HttpResponse) {
    def entityByteString: ByteString = response.entity.data.toByteString
    def entityUtf8String: String = response.entity.data.asString(utf8Charset)
  }

  /** Non-throwing variants of mutable Stack operations. */
  implicit class RichStack[T](stack: mutable.Stack[T]) {
    def popOption(): Option[T] = Try{ stack.pop() }.toOption
    def topOption: Option[T] = Try{ stack.top }.toOption
  }

  /** Predicates and normalisation helpers for spray Uris. */
  implicit class RichUri(uri: Uri) {
    import spray.http.Uri.NamedHost
    import spray.http.Uri.Query.{Empty=>EmptyQuery}
    def isHttp = uri.scheme == "http" || uri.scheme == "https"
    def lacksUserInfo = uri.authority.userinfo.isEmpty
    // Port <= 0 means "no explicit port", i.e. the scheme default.
    def lacksNonDefaultPort = uri.authority.port <= 0
    def hasNamedHost = uri.authority.host.isInstanceOf[NamedHost]
    // "Safe" = absolute http(s) URL with a named host, no userinfo, and the
    // default port — presumably a guard before fetching user-supplied URLs;
    // confirm intent with callers.
    def isSafe = uri.isHttp && uri.lacksUserInfo && uri.hasNamedHost && uri.lacksNonDefaultPort && uri.isAbsolute
    def withoutQuery = uri.withQuery(EmptyQuery)
  }

  /** Extractor that appends ".html" to a filename unless already present; always matches. */
  object HtmlSuffixed {
    private val extension = ".html"
    def unapply(filename: String): Option[String] = Some(if (filename.endsWith(extension)) filename else (filename + extension))
  }
}
| cvrebert/lmvtfy | src/main/scala/com/chrisrebert/lmvtfy/util/package.scala | Scala | mit | 2,756 |
package info.cmlubinski.newslearning.web
// Based off https://github.com/unfiltered/unfiltered-scalate.g8/blob/master/src/main/g8/src/main/scala/Scalate.scala
import scala.collection.JavaConversions._
import de.neuland.jade4j.Jade4J
import unfiltered.request.HttpRequest
import unfiltered.response.{HtmlContent, ResponseString}
/** Renders a Jade template (looked up under src/main/resources/templates/)
  * with the given attributes and wraps it as an HTML response. */
object Jade {
  // NOTE(review): `request` and type parameter B are unused here — presumably
  // kept for call-site symmetry with unfiltered's intent signatures; confirm.
  def apply[A, B](request: HttpRequest[A],
                  template: String,
                  attributes:(String, AnyRef)*) = {
    val rendered = Jade4J.render("src/main/resources/templates/" + template, Map(attributes: _*))
    HtmlContent ~> ResponseString(rendered)
  }
}
| cmc333333/news-learning | src/main/scala/web/Jade.scala | Scala | mit | 614 |
package scala.collection.scalameter.mutable.HashBag
import org.scalameter.api._
/** Benchmarks `forall` with a constant-true predicate, i.e. pure traversal
  * cost, for HashBag versus List. */
object HashBag_forall extends HashBagBenchmark {
  // Collection sizes: 20k through 200k elements in 20k steps.
  def sizes = Gen.range("size")(20000, 200000, 20000)
  def funName: String = "forall{result:=true}"
  // Predicate never short-circuits, so the whole bag is always traversed.
  def fun(bag: Bag[BigInt]): Unit = bag.forall(elem => true)
  def listFun(list: List[BigInt]): Unit = list.forall(elem => true)
  runBenchmark()
}
| nicolasstucki/multisets | src/test/scala/scala/collection/scalameter/mutable/HashBag/HashBag_forall.scala | Scala | bsd-3-clause | 379 |
package test
import org.specs2.mutable.Specification
import org.specs2.specification.Scope
import solicitor.backend.HTTP
import solicitor.Client
// Integration-style spec for the HTTP backend. Examples hit real sockets
// (localhost:8000 via httpBin, a non-resolvable host via httpNope), so they
// are forced to run one at a time.
class HTTPSpec extends Specification {
  sequential
  "HTTP Backend" should {
    // Successful response: body is returned and contains the fixture payload.
    "handle 200" in new httpBin {
      val res = client.getString("foo/bar")
      res must beSome
      res.get must contain("123")
    }
    // A 404 from the server is surfaced as None, not an exception.
    "handle 404" in new httpBin {
      client.getString("getXXX") must beNone
    }
    // No server at all (unresolvable host) also yields None.
    "handle no answer" in new httpNope {
      client.getString("getXXX") must beNone
    }
  }
}
/** Scope whose Client targets a local HTTP fixture on port 8000. */
trait httpBin extends Scope {
  val client = new Client(backend = new HTTP(hosts = Seq("localhost" -> 8000)))
}
// Scope whose Client targets a host that should never resolve, exercising
// the "no answer" failure path.
trait httpNope extends Scope {
  val client = new Client(backend = new HTTP(
    hosts = Seq(("thisdomainprobablywonteverexist.com", 80))
  ))
} | gphat/solicitor | http/src/test/scala/HTTPSpec.scala | Scala | mit | 822 |
import sbt._
// Legacy sbt 0.7 plugin definition: each val declares a plugin dependency or
// resolver picked up reflectively by sbt. NOTE(review): sbt 0.7 is long
// obsolete; leave as-is unless the whole build is migrated.
class Plugins(info: ProjectInfo) extends PluginDefinition(info) {
  // scripted-test plugin matching the sbt 0.7.4 toolchain.
  val scriptedDep = "org.scala-tools.sbt" % "scripted" % "0.7.4"
  // Ivy-style resolver for the Databinder repository.
  val databinder_repo = Resolver.url("Databinder Repository", new java.net.URL("http://databinder.net/repo"))(Resolver.ivyStylePatterns)
}
| rubbish/cuke4duke-sbt-plugin | project/plugins/Plugins.scala | Scala | mit | 284 |
/*
* Copyright 2001-2008 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.tools
import org.scalatest._
import java.io.BufferedOutputStream
import java.io.File
import java.io.FileOutputStream
import java.io.IOException
import java.io.OutputStream
import java.io.OutputStreamWriter
import java.io.PrintWriter
import java.util.Iterator
import java.util.Set
import java.io.StringWriter
import org.scalatest.events._
import PrintReporter._
import org.scalatest.junit.JUnitTestFailedError
import StringReporter.colorizeLinesIndividually
/**
* A <code>Reporter</code> that prints test status information to
* a <code>Writer</code>, <code>OutputStream</code>, or file.
*
* @author Bill Venners
*/
private[scalatest] abstract class PrintReporter(pw: PrintWriter, presentAllDurations: Boolean,
        presentInColor: Boolean, presentShortStackTraces: Boolean, presentFullStackTraces: Boolean,
        presentUnformatted: Boolean) extends StringReporter(
  presentAllDurations, presentInColor, presentShortStackTraces, presentFullStackTraces,
  presentUnformatted) {

  /**
   * Construct a <code>PrintReporter</code> with passed
   * <code>OutputStream</code>. Information about events reported to instances of this
   * class will be written to the <code>OutputStream</code> using the
   * default character encoding.
   *
   * @param os the <code>OutputStream</code> to which to print reported info
   * @throws NullPointerException if passed <code>os</code> reference is <code>null</code>
   */
  def this(
    os: OutputStream,
    presentAllDurations: Boolean,
    presentInColor: Boolean,
    presentShortStackTraces: Boolean,
    presentFullStackTraces: Boolean,
    presentUnformatted: Boolean
  ) =
    this(
      // Buffer the stream so per-event writes are cheap; flushed in apply().
      new PrintWriter(
        new OutputStreamWriter(
          new BufferedOutputStream(os, BufferSize)
        )
      ),
      presentAllDurations,
      presentInColor,
      presentShortStackTraces,
      presentFullStackTraces,
      presentUnformatted
    )

  /**
   * Construct a <code>PrintReporter</code> with passed
   * <code>String</code> file name. Information about events reported to instances of this
   * class will be written to the specified file using the
   * default character encoding.
   *
   * @param filename the <code>String</code> name of the file to which to print reported info
   * @throws NullPointerException if passed <code>filename</code> reference is <code>null</code>
   * @throws IOException if unable to open the specified file for writing
   */
  def this(
    filename: String,
    presentAllDurations: Boolean,
    presentInColor: Boolean,
    presentShortStackTraces: Boolean,
    presentFullStackTraces: Boolean,
    presentUnformatted: Boolean
  ) =
    this(
      new PrintWriter(new BufferedOutputStream(new FileOutputStream(new File(filename)), BufferSize)),
      presentAllDurations,
      presentInColor,
      presentShortStackTraces,
      presentFullStackTraces,
      presentUnformatted
    )

  // Prints a line, wrapping it in ANSI color codes only when color output
  // was requested at construction time.
  protected def printPossiblyInColor(text: String, ansiColor: String) {
    pw.println(if (presentInColor) colorizeLinesIndividually(text, ansiColor) else text)
  }

  // Delegates event formatting to StringReporter, then flushes so output is
  // visible immediately after every event.
  override def apply(event: Event) {
    super.apply(event)
    pw.flush()
  }

  // Closes the print writer. Subclasses StandardOutReporter and StandardErrReporter override dispose to do nothing
  // so that those aren't closed.
  override def dispose() {
    pw.close()
  }

  // Emits the run summary (via StringReporter) and flushes it to the sink.
  override protected def makeFinalReport(resourceName: String, duration: Option[Long], summaryOption: Option[Summary]) {
    super.makeFinalReport(resourceName, duration, summaryOption)
    pw.flush()
  }

  // We subtract one from test reports because we add "- " in front, so if one is actually zero, it will come here as -1
  // private def indent(s: String, times: Int) = if (times <= 0) s else ("  " * times) + s

  // Historical note: the properties file strips leading spaces (" {0}" comes
  // out as "{0}"), so indenting could not be done in a localizable way; output
  // is therefore left flush-left. Dead code kept for reference:
  // if (times <= 0) s
  //  else Resources("indentOnce", indent(s, times - 1))
}
private[tools] object PrintReporter {

  // Buffer size used when this reporter wraps a raw stream or file.
  final val BufferSize = 4096

  // ANSI terminal escape sequences used for colored output.
  final val ansiReset = "\\033[0m"
  final val ansiGreen = "\\033[32m"
  final val ansiCyan = "\\033[36m"
  final val ansiYellow = "\\033[33m"
  final val ansiRed = "\\033[31m"

  // Renders a duration in milliseconds as a localized, human-friendly string
  // ("1 second, 45 milliseconds", "2 minutes", ...). The branch ladder is
  // order-sensitive: each range is carved out by the conditions above it.
  // Sub-unit remainders are truncated, not rounded (e.g. 60999 ms -> "one minute").
  def makeDurationString(duration: Long) = {

    val milliseconds = duration % 1000
    val seconds = ((duration - milliseconds) / 1000) % 60
    val minutes = ((duration - milliseconds) / 60000) % 60
    val hours = (duration - milliseconds) / 3600000
    val hoursInSeconds = hours * 3600
    // NOTE(review): hoursInMinutes is never read below — candidate for removal.
    val hoursInMinutes = hours * 60

    val durationInSeconds = duration / 1000
    val durationInMinutes = durationInSeconds / 60

    // Millisecond-scale durations (under 2 seconds, plus exact-second cases).
    if (duration == 1)
      Resources("oneMillisecond")
    else if (duration < 1000)
      Resources("milliseconds", duration.toString)
    else if (duration == 1000)
      Resources("oneSecond")
    else if (duration == 1001)
      Resources("oneSecondOneMillisecond")
    else if (duration % 1000 == 0 && duration < 60000) // 2 seconds, 10 seconds, etc.
      Resources("seconds", seconds.toString)
    else if (duration > 1001 && duration < 2000)// 1 second, 45 milliseconds, etc.
      Resources("oneSecondMilliseconds", milliseconds.toString)
    else if (durationInSeconds < 60)// 3 seconds, 45 milliseconds, etc.
      Resources("secondsMilliseconds", seconds.toString, milliseconds.toString)
    // Minute-scale durations (one minute up to one hour).
    else if (durationInSeconds < 61)
      Resources("oneMinute")
    else if (durationInSeconds < 62)
      Resources("oneMinuteOneSecond")
    else if (durationInSeconds < 120)
      Resources("oneMinuteSeconds", seconds.toString)
    else if (durationInSeconds < 121)
      Resources("minutes", minutes.toString) // exactly two minutes (120s range)
    else if (durationInSeconds < 3600 && (durationInSeconds % 60) == 1)
      Resources("minutesOneSecond", minutes.toString)
    else if (durationInSeconds < 3600)
      Resources("minutesSeconds", minutes.toString, seconds.toString)
    // Hour-scale durations: special-cases for the first seconds/minutes past
    // each whole hour, then the general hours/minutes/seconds forms.
    else if (durationInSeconds < hoursInSeconds + 1) {
      if (hours == 1)
        Resources("oneHour")
      else
        Resources("hours", hours.toString)
    }
    else if (durationInSeconds < hoursInSeconds + 2) {
      if (hours == 1)
        Resources("oneHourOneSecond")
      else
        Resources("hoursOneSecond", hours.toString)
    }
    else if (durationInSeconds < hoursInSeconds + 60) {
      if (hours == 1)
        Resources("oneHourSeconds", seconds.toString)
      else
        Resources("hoursSeconds", hours.toString, seconds.toString)
    }
    else if (durationInSeconds == hoursInSeconds + 60) {
      if (hours == 1)
        Resources("oneHourOneMinute")
      else
        Resources("hoursOneMinute", hours.toString)
    }
    else if (durationInSeconds == hoursInSeconds + 61) {
      if (hours == 1)
        Resources("oneHourOneMinuteOneSecond")
      else
        Resources("hoursOneMinuteOneSecond", hours.toString)
    }
    else if (durationInSeconds < hoursInSeconds + 120) {
      if (hours == 1)
        Resources("oneHourOneMinuteSeconds", seconds.toString)
      else
        Resources("hoursOneMinuteSeconds", hours.toString, seconds.toString)
    }
    else if (durationInSeconds % 60 == 0) {
      if (hours == 1)
        Resources("oneHourMinutes", minutes.toString)
      else
        Resources("hoursMinutes", hours.toString, minutes.toString)
    }
    else if (durationInMinutes % 60 != 1 && durationInSeconds % 60 == 1) {
      if (hours == 1)
        Resources("oneHourMinutesOneSecond", minutes.toString)
      else
        Resources("hoursMinutesOneSecond", hours.toString, minutes.toString)
    }
    else {
      if (hours == 1)
        Resources("oneHourMinutesSeconds", minutes.toString, seconds.toString)
      else
        Resources("hoursMinutesSeconds", hours.toString, minutes.toString, seconds.toString)
    }
  }
}
| hubertp/scalatest | src/main/scala/org/scalatest/tools/PrintReporter.scala | Scala | apache-2.0 | 8,698 |
/*
* Copyright (c) 2021 Couchbase, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.couchbase.spark.query
import com.couchbase.client.core.service.ServiceType
import com.couchbase.client.scala.codec.JsonDeserializer
import com.couchbase.spark.config.{CouchbaseConfig, CouchbaseConnection}
import org.apache.spark.internal.Logging
import org.apache.spark.{Partition, SparkContext, TaskContext}
import org.apache.spark.rdd.RDD
import com.couchbase.client.scala.query.{QueryOptions => CouchbaseQueryOptions}
import com.couchbase.spark.{DefaultConstants, Keyspace}
import collection.JavaConverters._
import scala.reflect.ClassTag
/** Spark partition for a N1QL query, carrying the preferred query-node
  * host names for locality-aware scheduling. */
class QueryPartition(id: Int, loc: Seq[String]) extends Partition {
  /** Index of this partition within the RDD. */
  override def index: Int = id
  /** Host names of query-service nodes preferred for this partition. */
  def location: Seq[String] = loc
  override def toString: String = "QueryPartition(" + id + ", " + loc + ")"
}
/** RDD that executes a single N1QL statement against Couchbase and exposes
  * the result rows, deserialized to T, as one logical partition.
  *
  * NOTE(review): `queryOptions` and `keyspace` default to null rather than
  * using Option — kept for source compatibility; confirm before changing.
  */
class QueryRDD[T: ClassTag](
  @transient private val sc: SparkContext,
  val statement: String,
  val queryOptions: CouchbaseQueryOptions = null,
  val keyspace: Keyspace = null,
)(implicit deserializer: JsonDeserializer[T]) extends RDD[T](sc, Nil) with Logging {

  private val globalConfig = CouchbaseConfig(sparkContext.getConf)

  // Runs the statement on the executor. Metrics are always enabled so they
  // can be logged; the query is scoped to bucket/scope when a keyspace is set.
  override def compute(split: Partition, context: TaskContext): Iterator[T] = {
    val connection = CouchbaseConnection()
    val cluster = connection.cluster(globalConfig)
    var options = if (this.queryOptions == null) {
      CouchbaseQueryOptions()
    } else {
      this.queryOptions
    }
    options = options.metrics(true)
    val result = if (keyspace == null || keyspace.isEmpty) {
      // Cluster-level query: the statement must be fully qualified itself.
      cluster.query(statement, options).get
    } else {
      if (keyspace.collection.isDefined) {
        throw new IllegalArgumentException("A Collection must not be provided on a N1QL Query inside the Keyspace, " +
          "only Bucket and/or Scope are allowed. The collection itself is provided as part of the statement itself!")
      }
      // Resolve bucket/scope from the keyspace, falling back to implicit
      // names from the global config and the default scope.
      val bucketName = globalConfig.
        implicitBucketNameOr(this.keyspace.bucket.orNull)
      val scopeName = globalConfig
        .implicitScopeNameOr(this.keyspace.scope.orNull).
        getOrElse(DefaultConstants.DefaultScopeName)
      cluster.bucket(bucketName).scope(scopeName).query(statement, options).get
    }
    if (result.metaData.metrics.isDefined) {
      logDebug(s"Metrics for query $statement: " + result.metaData.metrics.get)
    }
    result.rowsAs[T].get.iterator
  }

  // Always a single partition; its preferred locations are the hostnames of
  // nodes running the query service (honoring alternate addresses when the
  // client is configured to use them).
  override protected def getPartitions: Array[Partition] = {
    val core = CouchbaseConnection().cluster(globalConfig).async.core
    val config = core.clusterConfig()
    val partitions = if (config.globalConfig() != null) {
      Array(new QueryPartition(0, config
        .globalConfig()
        .portInfos()
        .asScala
        .filter(p => p.ports().containsKey(ServiceType.QUERY))
        .map(p => {
          val aa = core.context().alternateAddress()
          if (aa != null && aa.isPresent) {
            p.alternateAddresses().get(aa.get()).hostname()
          } else {
            p.hostname()
          }
        })))
    } else {
      // No global config yet (e.g. pre-6.5 cluster): no locality hints.
      Array(new QueryPartition(0, Seq())
      )
    }
    logDebug(s"Calculated QueryPartitions operation ${partitions.mkString("Array(", ", ", ")")}")
    partitions.asInstanceOf[Array[Partition]]
  }

  override protected def getPreferredLocations(split: Partition): Seq[String] = {
    split.asInstanceOf[QueryPartition].location
  }
} | couchbaselabs/couchbase-spark-connector | src/main/scala/com/couchbase/spark/query/QueryRDD.scala | Scala | apache-2.0 | 3923 |
package shred.man.cuda.cumath.tensor.matrix
import jcuda.jcublas.JCublas2
import shred.man.cuda.cumath.CuValue
import shred.man.cuda.cumath.tensor.vector.CuVector
import shred.man.cuda.driver.CuContext
import shred.man.cuda.utils.cuBlas._
//trait CublasMatrix
//{
// def copy(x: CuMatrix, res: CuMatrix)(implicit ctx: CuContext)
//
// def gemm(a: CuValue[Float], x: CuMatrix, y: CuMatrix, b: CuValue[Float], res: CuMatrix)(implicit ctx: CuContext)
//
// def gemv(a: CuValue[Float], x: CuMatrix, y: CuVector, b: CuValue[Float], res: CuVector)(implicit ctx: CuContext)
//
// def geam(a: CuValue[Float], x: CuMatrix, b: CuValue[Float], y: CuMatrix, res: CuMatrix)(implicit ctx: CuContext)
//
// def scal(c: CuValue[Float], x: CuMatrix)(implicit ctx: CuContext)
//
// def asum(x: CuMatrix, res: CuValue[Float])(implicit ctx: CuContext)
//
// def nrm2(x: CuMatrix, res: CuValue[Float])(implicit ctx: CuContext)
//}
// Thin, shape-checked wrappers over single-precision cuBLAS level-2/3 calls.
// All methods enqueue work on the cuBLAS handle of the implicit CuContext.
object CublasMatrix //extends CublasMatrix
{
  /**
   * res = x (element copy via cublasScopy).
   */
  def copy(x: CuMatrix, res: CuMatrix)(implicit ctx: CuContext) = {
    // NOTE(review): unlike every other method here, this passes `ctx` itself
    // as the first argument instead of only member contexts — confirm intended.
    ctx.requireComply(ctx, x.ctx, res.ctx)
    require(x.cols == res.cols, "x.cols != res.cols")
    require(x.rows == res.rows, "x.rows != res.rows")
    JCublas2.cublasScopy(ctx.cublas.handle, x.size, x.ptr(), 1, res.ptr(), 1)
  }

  /**
   * res = a * x * y + b * res (cublasSgemm; origRows is the leading dimension,
   * transposition handled via the op() flags).
   */
  def gemm(a: CuValue[Float], x: CuMatrix, y: CuMatrix, b: CuValue[Float], res: CuMatrix)(implicit ctx: CuContext) = {
    ctx.requireComply(a.ctx, x.ctx, y.ctx, b.ctx, res.ctx)
    require(x.cols == y.rows, "x.cols != y.rows")
    require(x.rows == res.rows, "x.rows != res.rows")
    require(y.cols == res.cols, "y.cols != res.cols")
    JCublas2.cublasSgemm(
      ctx.cublas.handle,
      op(x.transposed), op(y.transposed),
      x.rows, y.cols, x.cols,
      a.ptr(), x.ptr(), x.origRows,
      y.ptr(), y.origRows,
      b.ptr(), res.ptr(), res.origRows
    )
  }

  /**
   * res = a * x * y + b * res (matrix-vector product via cublasSgemv).
   */
  def gemv(a: CuValue[Float], x: CuMatrix, y: CuVector, b: CuValue[Float], res: CuVector)(implicit ctx: CuContext) = {
    ctx.requireComply(a.ctx, x.ctx, y.ctx, b.ctx, res.ctx)
    require(x.cols == y.size, "x.cols != y.size")
    require(x.rows == res.size, "x.rows != res.size")
    JCublas2.cublasSgemv(
      ctx.cublas.handle,
      op(x.transposed),
      x.origRows, x.origCols,
      a.ptr(), x.ptr(), x.origRows,
      y.ptr(), 1,
      b.ptr(), res.ptr(), 1
    )
  }

  /**
   * res = a * x + b * y (scaled matrix addition via cublasSgeam).
   */
  def geam(a: CuValue[Float], x: CuMatrix, b: CuValue[Float], y: CuMatrix, res: CuMatrix)(implicit ctx: CuContext) {
    ctx.requireComply(a.ctx, x.ctx, b.ctx, y.ctx, res.ctx)
    require(x.rows == y.rows, "x.rows != y.rows")
    require(x.cols == y.cols, "x.cols != y.cols")
    require(x.rows == res.rows, "x.rows != res.rows")
    require(x.cols == res.cols, "x.cols != res.cols")
    JCublas2.cublasSgeam(
      ctx.cublas.handle,
      op(x.transposed), op(y.transposed),
      x.rows, x.cols,
      a.ptr(), x.ptr(), x.origRows,
      b.ptr(), y.ptr(), y.origRows,
      res.ptr(), res.origRows
    )
  }

  /**
   * x = c * x (in-place scaling via cublasSscal).
   */
  def scal(c: CuValue[Float], x: CuMatrix)(implicit ctx: CuContext) {
    ctx.requireComply(x.ctx, c.ctx)
    JCublas2.cublasSscal(ctx.cublas.handle, x.size, c.ptr(), x.ptr(), 1)
  }

  /**
   * res = absolute sum of x elements (cublasSasum).
   */
  def asum(x: CuMatrix, res: CuValue[Float])(implicit ctx: CuContext) {
    ctx.requireComply(x.ctx, res.ctx)
    JCublas2.cublasSasum(ctx.cublas.handle, x.size, x.ptr(), 1, res.ptr())
  }

  /**
   * res = euclidean norm of x elements (cublasSnrm2).
   */
  def nrm2(x: CuMatrix, res: CuValue[Float])(implicit ctx: CuContext) {
    ctx.requireComply(x.ctx, res.ctx)
    JCublas2.cublasSnrm2(ctx.cublas.handle, x.size, x.ptr(), 1, res.ptr())
  }
} | shredzzz/shredman | cuda/src/main/scala/shred/man/cuda/cumath/tensor/matrix/CublasMatrix.scala | Scala | apache-2.0 | 3777 |
package notebook.util
import play.api.libs.json._
/**
* Pluggable interface for completing Strings.
*/
/** One completion candidate plus free-form string metadata. */
case class Match(matchedValue: String, metadata: Map[String, String]) {
  /** Bare JSON form: just the matched value as a JSON string. */
  def toJson = JsString(matchedValue)

  /** Rich JSON form: the value together with its display text, falling back
    * to the value itself when no "display_text" metadata is present. */
  def toJsonWithDescription = {
    val displayText = metadata.getOrElse("display_text", matchedValue)
    JsObject(Seq(
      "value" -> JsString(matchedValue),
      "display_text" -> JsString(displayText)
    ))
  }
}
object Match {
  /** Convenience constructor for a match carrying no metadata. */
  def apply(matchedValue: String): Match = Match(matchedValue, Map.empty)
}
/** Pluggable interface for completing strings.
  *
  * NOTE(review): the meaning of the first element of the returned pair is not
  * evident from this file — presumably the (possibly normalized) input that
  * the matches apply to; confirm against implementations.
  */
trait StringCompletor {
  def complete(stringToComplete: String): (String, Seq[Match])
}
| radek1st/spark-notebook | modules/common/src/main/scala/notebook/util/StringCompletor.scala | Scala | apache-2.0 | 597 |
/*
* Copyright 2020 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.redis
import com.spotify.scio.redis.types._
import redis.clients.jedis.{Pipeline, Response}
import java.lang.{Long => JLong}
/** Type class: applies a mutation of type T to a Redis pipeline and returns
  * the queued responses (as the instances below show, the command itself plus
  * an optional PEXPIRE when the mutation carries a TTL). Sealed: all
  * instances live in the companion object in this file. */
sealed abstract class RedisMutator[T] extends Serializable {
  def mutate(client: Pipeline, mutation: T): List[Response[_]]
}
// Instances for every supported (command, key-type) pair. Each instance
// issues the command on the pipeline and, when the mutation has a TTL,
// additionally queues a PEXPIRE for the key — hence the List of Responses.
object RedisMutator {
  implicit val stringAppend: RedisMutator[Append[String]] =
    new RedisMutator[Append[String]] {
      override def mutate(client: Pipeline, mutation: Append[String]): List[Response[JLong]] = {
        client.append(mutation.key, mutation.value) ::
          mutation.ttl
            .map(expireTime => client.pexpire(mutation.key, expireTime.getMillis))
            .toList
      }
    }
  implicit val byteArrayAppend: RedisMutator[Append[Array[Byte]]] =
    new RedisMutator[Append[Array[Byte]]] {
      override def mutate(client: Pipeline, mutation: Append[Array[Byte]]): List[Response[JLong]] =
        client.append(mutation.key, mutation.value) ::
          mutation.ttl
            .map(expireTime => client.pexpire(mutation.key, expireTime.getMillis))
            .toList
    }
  implicit val stringSet: RedisMutator[Set[String]] =
    new RedisMutator[Set[String]] {
      override def mutate(client: Pipeline, mutation: Set[String]): List[Response[_]] =
        client.set(mutation.key, mutation.value) ::
          mutation.ttl.map(expireTime => client.pexpire(mutation.key, expireTime.getMillis)).toList
    }
  implicit val byteArraySet: RedisMutator[Set[Array[Byte]]] =
    new RedisMutator[Set[Array[Byte]]] {
      override def mutate(client: Pipeline, mutation: Set[Array[Byte]]): List[Response[_]] =
        client.set(mutation.key, mutation.value) ::
          mutation.ttl.map(expireTime => client.pexpire(mutation.key, expireTime.getMillis)).toList
    }
  implicit val stringIncrBy: RedisMutator[IncrBy[String]] =
    new RedisMutator[IncrBy[String]] {
      override def mutate(client: Pipeline, mutation: IncrBy[String]): List[Response[JLong]] =
        client.incrBy(mutation.key, mutation.value) ::
          mutation.ttl.map(expireTime => client.pexpire(mutation.key, expireTime.getMillis)).toList
    }
  implicit val byteArrayIncrBy: RedisMutator[IncrBy[Array[Byte]]] =
    new RedisMutator[IncrBy[Array[Byte]]] {
      override def mutate(client: Pipeline, mutation: IncrBy[Array[Byte]]): List[Response[JLong]] =
        client.incrBy(mutation.key, mutation.value) ::
          mutation.ttl.map(expireTime => client.pexpire(mutation.key, expireTime.getMillis)).toList
    }
  implicit val stringDecrBy: RedisMutator[DecrBy[String]] =
    new RedisMutator[DecrBy[String]] {
      override def mutate(client: Pipeline, mutation: DecrBy[String]): List[Response[JLong]] =
        client.decrBy(mutation.key, mutation.value) ::
          mutation.ttl.map(expireTime => client.pexpire(mutation.key, expireTime.getMillis)).toList
    }
  implicit val byteArrayDecrBy: RedisMutator[DecrBy[Array[Byte]]] =
    new RedisMutator[DecrBy[Array[Byte]]] {
      override def mutate(client: Pipeline, mutation: DecrBy[Array[Byte]]): List[Response[JLong]] =
        client.decrBy(mutation.key, mutation.value) ::
          mutation.ttl.map(expireTime => client.pexpire(mutation.key, expireTime.getMillis)).toList
    }
  implicit val stringSAdd: RedisMutator[SAdd[String]] =
    new RedisMutator[SAdd[String]] {
      override def mutate(client: Pipeline, mutation: SAdd[String]): List[Response[JLong]] =
        client.sadd(mutation.key, mutation.value: _*) ::
          mutation.ttl.map(expireTime => client.pexpire(mutation.key, expireTime.getMillis)).toList
    }
  implicit val byteArraySAdd: RedisMutator[SAdd[Array[Byte]]] =
    new RedisMutator[SAdd[Array[Byte]]] {
      override def mutate(client: Pipeline, mutation: SAdd[Array[Byte]]): List[Response[JLong]] =
        client.sadd(mutation.key, mutation.value: _*) ::
          mutation.ttl.map(expireTime => client.pexpire(mutation.key, expireTime.getMillis)).toList
    }
  implicit val stringLPush: RedisMutator[LPush[String]] =
    new RedisMutator[LPush[String]] {
      override def mutate(client: Pipeline, mutation: LPush[String]): List[Response[JLong]] =
        client.lpush(mutation.key, mutation.value: _*) ::
          mutation.ttl.map(expireTime => client.pexpire(mutation.key, expireTime.getMillis)).toList
    }
  implicit val byteArrayLPush: RedisMutator[LPush[Array[Byte]]] =
    new RedisMutator[LPush[Array[Byte]]] {
      override def mutate(client: Pipeline, mutation: LPush[Array[Byte]]): List[Response[JLong]] =
        client.lpush(mutation.key, mutation.value: _*) ::
          mutation.ttl.map(expireTime => client.pexpire(mutation.key, expireTime.getMillis)).toList
    }
  implicit val stringRPush: RedisMutator[RPush[String]] =
    new RedisMutator[RPush[String]] {
      override def mutate(client: Pipeline, mutation: RPush[String]): List[Response[JLong]] =
        client.rpush(mutation.key, mutation.value: _*) ::
          mutation.ttl.map(expireTime => client.pexpire(mutation.key, expireTime.getMillis)).toList
    }
  implicit val byteArrayRPush: RedisMutator[RPush[Array[Byte]]] =
    new RedisMutator[RPush[Array[Byte]]] {
      override def mutate(client: Pipeline, mutation: RPush[Array[Byte]]): List[Response[JLong]] =
        client.rpush(mutation.key, mutation.value: _*) ::
          mutation.ttl.map(expireTime => client.pexpire(mutation.key, expireTime.getMillis)).toList
    }
  implicit val stringPFAdd: RedisMutator[PFAdd[String]] =
    new RedisMutator[PFAdd[String]] {
      override def mutate(client: Pipeline, mutation: PFAdd[String]): List[Response[JLong]] =
        client.pfadd(mutation.key, mutation.value: _*) ::
          mutation.ttl.map(expireTime => client.pexpire(mutation.key, expireTime.getMillis)).toList
    }
  implicit val byteArrayPFAdd: RedisMutator[PFAdd[Array[Byte]]] =
    new RedisMutator[PFAdd[Array[Byte]]] {
      override def mutate(client: Pipeline, mutation: PFAdd[Array[Byte]]): List[Response[JLong]] =
        client.pfadd(mutation.key, mutation.value: _*) ::
          mutation.ttl.map(expireTime => client.pexpire(mutation.key, expireTime.getMillis)).toList
    }
  // Generic dispatcher: routes any RedisMutation to the instance matching its
  // concrete mutation class and key-type tag.
  // NOTE(review): the match has no default case, so an unsupported
  // (mutation, type) combination fails with a MatchError at runtime.
  implicit def redisMutator[T <: RedisMutation]: RedisMutator[T] =
    new RedisMutator[T] {
      override def mutate(client: Pipeline, mutation: T): List[Response[_]] = {
        mutation match {
          case RedisMutation(mt: Append[Array[Byte] @unchecked], RedisType.ByteArrayRedisType) =>
            RedisMutator.mutate(client)(mt)
          case RedisMutation(mt: Append[String @unchecked], RedisType.StringRedisType) =>
            RedisMutator.mutate(client)(mt)
          case RedisMutation(mt: Set[Array[Byte] @unchecked], RedisType.ByteArrayRedisType) =>
            RedisMutator.mutate(client)(mt)
          case RedisMutation(mt: Set[String @unchecked], RedisType.StringRedisType) =>
            RedisMutator.mutate(client)(mt)
          case RedisMutation(mt: IncrBy[Array[Byte] @unchecked], RedisType.ByteArrayRedisType) =>
            RedisMutator.mutate(client)(mt)
          case RedisMutation(mt: IncrBy[String @unchecked], RedisType.StringRedisType) =>
            RedisMutator.mutate(client)(mt)
          case RedisMutation(mt: DecrBy[Array[Byte] @unchecked], RedisType.ByteArrayRedisType) =>
            RedisMutator.mutate(client)(mt)
          case RedisMutation(mt: DecrBy[String @unchecked], RedisType.StringRedisType) =>
            RedisMutator.mutate(client)(mt)
          case RedisMutation(mt: SAdd[Array[Byte] @unchecked], RedisType.ByteArrayRedisType) =>
            RedisMutator.mutate(client)(mt)
          case RedisMutation(mt: SAdd[String @unchecked], RedisType.StringRedisType) =>
            RedisMutator.mutate(client)(mt)
          case RedisMutation(mt: RPush[Array[Byte] @unchecked], RedisType.ByteArrayRedisType) =>
            RedisMutator.mutate(client)(mt)
          case RedisMutation(mt: RPush[String @unchecked], RedisType.StringRedisType) =>
            RedisMutator.mutate(client)(mt)
          case RedisMutation(mt: LPush[Array[Byte] @unchecked], RedisType.ByteArrayRedisType) =>
            RedisMutator.mutate(client)(mt)
          case RedisMutation(mt: LPush[String @unchecked], RedisType.StringRedisType) =>
            RedisMutator.mutate(client)(mt)
          case RedisMutation(mt: PFAdd[Array[Byte] @unchecked], RedisType.ByteArrayRedisType) =>
            RedisMutator.mutate(client)(mt)
          case RedisMutation(mt: PFAdd[String @unchecked], RedisType.StringRedisType) =>
            RedisMutator.mutate(client)(mt)
        }
      }
    }
  // Entry point: applies the mutation using whichever instance is in scope.
  def mutate[T <: RedisMutation: RedisMutator](client: Pipeline)(value: T): List[Response[_]] =
    implicitly[RedisMutator[T]].mutate(client, value)
}
| regadas/scio | scio-redis/src/main/scala/com/spotify/scio/redis/RedisMutator.scala | Scala | apache-2.0 | 9,317 |
package nest.sparkle.util
import java.io.Closeable
/** A simple managed resource. Use it safely like this:
* for {
* resource <- GetResource()
* } {
* useResource(resource)
* }
*
* The resource will be close()d after it's used.
*/
/** Minimal single-use managed-resource helper. The wrapped Closeable is
  * handed to the body of a for-comprehension and always closed afterwards,
  * even when the body throws. */
object Managed { // LATER which SCALA ARM library to use?
  object implicits {
    /** Wraps a Closeable so it can be consumed with `for (r <- managed(x))`. */
    def managed[T <: Closeable](resource: T): Resource[T] = new Resource[T](resource)
  }

  /** One-shot wrapper: runs the supplied action once, then closes. */
  class Resource[T <: Closeable](resource: T) {
    def foreach(fn: T => Unit): Unit =
      try fn(resource)
      finally resource.close()
  }
}
| mighdoll/sparkle | util/src/main/scala/nest/sparkle/util/Managed.scala | Scala | apache-2.0 | 621 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.tensor
/**
* This package is used to provide concrete implementations of the conversions
* between numeric primitives. The idea here is that the Numeric trait can
* extend these traits to inherit the conversions.
*
* We can also use these implementations to provide a way to convert from
* A -> B, where both A and B are generic Numeric types. Without a separate
* trait, we'd have circular type definitions when compiling Numeric.
*/
import scala.language.implicitConversions
import scala.{specialized => spec}
/**
* Conversions to type.
*
* An object implementing ConvertableTo[A] provides methods to go
* from number types to A.
*/
// Conversions INTO type A from the numeric primitives. @spec avoids boxing
// for the specialized primitive instantiations.
trait ConvertableTo[@spec A] {
  implicit def fromFloat(a: Float): A
  implicit def fromDouble(a: Double): A
  implicit def fromInt(a: Int): A
}

// Conversions into Float. Double -> Float may lose precision by design.
trait ConvertableToFloat extends ConvertableTo[Float] {
  implicit def fromFloat(a: Float): Float = a
  implicit def fromDouble(a: Double): Float = a.toFloat
  implicit def fromInt(a: Int): Float = a.toFloat
}

// Conversions into Double (all widening, hence lossless).
trait ConvertableToDouble extends ConvertableTo[Double] {
  implicit def fromFloat(a: Float): Double = a.toDouble
  implicit def fromDouble(a: Double): Double = a
  implicit def fromInt(a: Int): Double = a.toDouble
}

// Conversions into Int. Float/Double -> Int truncate toward zero.
trait ConvertableToInt extends ConvertableTo[Int] {
  implicit def fromFloat(a: Float): Int = a.toInt
  implicit def fromDouble(a: Double): Int = a.toInt
  implicit def fromInt(a: Int): Int = a
}

// Implicit instances, one per supported target type.
object ConvertableTo {
  implicit object ConvertableToFloat extends ConvertableToFloat
  implicit object ConvertableToDouble extends ConvertableToDouble
  implicit object ConvertableToInt extends ConvertableToInt
}
/**
* Conversions from type.
*
* An object implementing ConvertableFrom[A] provides methods to go
* from A to number types (and String).
*/
// Conversions FROM type A into the numeric primitives (mirror of
// ConvertableTo). @spec avoids boxing for primitive instantiations.
trait ConvertableFrom[@spec A] {
  implicit def toFloat(a: A): Float
  implicit def toDouble(a: A): Double
  implicit def toInt(a: A): Int
}

// Conversions out of Float. Float -> Int truncates toward zero.
trait ConvertableFromFloat extends ConvertableFrom[Float] {
  implicit def toFloat(a: Float): Float = a
  implicit def toDouble(a: Float): Double = a.toDouble
  implicit def toInt(a: Float): Int = a.toInt
}

// Conversions out of Double. Double -> Float may lose precision;
// Double -> Int truncates toward zero.
trait ConvertableFromDouble extends ConvertableFrom[Double] {
  implicit def toFloat(a: Double): Float = a.toFloat
  implicit def toDouble(a: Double): Double = a
  implicit def toInt(a: Double): Int = a.toInt
}

// Conversions out of Int (widening to Float may round for large values).
trait ConvertableFromInt extends ConvertableFrom[Int] {
  implicit def toFloat(a: Int): Float = a.toFloat
  implicit def toDouble(a: Int): Double = a.toDouble
  implicit def toInt(a: Int): Int = a
}

// Implicit instances, one per supported source type.
object ConvertableFrom {
  implicit object ConvertableFromFloat extends ConvertableFromFloat
  implicit object ConvertableFromDouble extends ConvertableFromDouble
  implicit object ConvertableFromInt extends ConvertableFromInt
}
| psyyz10/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/tensor/Convertable.scala | Scala | apache-2.0 | 3,438 |
package io.circe.generic
import cats.data.Xor
import io.circe.{ Decoder, Encoder, Json }
import io.circe.generic.semiauto._
import io.circe.test.{ CodecTests, CirceSuite }
import org.scalacheck.Prop.forAll
import shapeless.CNil
/** Exercises circe's semi-automatic derivation: codecs exist only where
 *  explicitly materialized with deriveFor, and must round-trip correctly.
 */
class SemiautoCodecTests extends CirceSuite with Examples {
  // Generic Qux codecs require a codec for the element type A.
  implicit def decodeQux[A: Decoder]: Decoder[Qux[A]] = deriveFor[Qux[A]].decoder
  implicit def encodeQux[A: Encoder]: Encoder[Qux[A]] = deriveFor[Qux[A]].encoder
  implicit val decodeWub: Decoder[Wub] = deriveFor[Wub].decoder
  implicit val encodeWub: Encoder[Wub] = deriveFor[Wub].encoder
  implicit val decodeFoo: Decoder[Foo] = deriveFor[Foo].decoder
  implicit val encodeFoo: Encoder[Foo] = deriveFor[Foo].encoder
  // Law-checked round-trip properties for the derived codecs.
  checkAll("Codec[Tuple1[Int]]", CodecTests[Tuple1[Int]].codec)
  checkAll("Codec[(Int, Int, Foo)]", CodecTests[(Int, Int, Foo)].codec)
  checkAll("Codec[Qux[Int]]", CodecTests[Qux[Int]].codec)
  checkAll("Codec[Foo]", CodecTests[Foo].codec)
  test("Generic instances should not interfere with base instances") {
    check {
      forAll { (is: List[Int]) =>
        // List[Int] must still use the standard JSON-array encoding, not a
        // generically derived one, and must decode back to the same list.
        val json = Encoder[List[Int]].apply(is)
        json === Json.fromValues(is.map(Json.int)) && json.as[List[Int]] === Xor.right(is)
      }
    }
  }
}
| groz/circe | generic/shared/src/test/scala/io/circe/generic/SemiautoCodecTests.scala | Scala | apache-2.0 | 1,231 |
package glasskey.resource
import glasskey.config.OAuthConfig
import glasskey.model.fetchers.Cookie
import glasskey.model.{ValidatedToken, OAuthAccessToken}
/**
 * Validates an incoming request (or an already-extracted token pair) and
 * resolves it to the validated data it grants access to.
 */
trait ProtectedResource {
  import glasskey.model.fetchers.{AuthHeader, RequestParameter}
  import glasskey.model.{InvalidRequest, InvalidToken, ValidatedData, ProtectedResourceRequest}
  import scala.concurrent.{ExecutionContext, Future}
  // Token extraction strategies, tried in order: Authorization header,
  // request parameter (checked against the provider's JWKS URI), auth cookie.
  val fetchers = Seq(new AuthHeader.Default,
    new RequestParameter.Default(OAuthConfig.providerConfig.jwksUri),
    new Cookie.Default(OAuthConfig.providerConfig.authCookieName))
  // NOTE(review): the tuple case below is unchecked — the type arguments of
  // (OAuthAccessToken, Option[OIDCTokenData]) are erased at runtime, so this
  // pattern only verifies the value is a Tuple2. The match is also
  // non-exhaustive: any other R raises a MatchError instead of a domain error.
  // Confirm callers only ever pass these two shapes.
  def handleRequest[R](request: R, handler: ProtectedResourceHandler[ValidatedData, ValidatedToken])(implicit ec: ExecutionContext): Future[ValidatedData] =
    request match {
      case r: ProtectedResourceRequest => handleProtectedResourceRequest(r, handler)
      case t: (OAuthAccessToken, Option[OIDCTokenData]) => handleToken(t, handler)
    }
  // Validates the raw token pair, then loads the data it authorizes; fails the
  // Future with InvalidToken when validation or lookup produces nothing.
  private def handleToken(token: (OAuthAccessToken, Option[OIDCTokenData]),
                          handler: ProtectedResourceHandler[ValidatedData, ValidatedToken])(implicit ec: ExecutionContext): Future[ValidatedData] =
    handler.validateToken(token).flatMap { maybeToken =>
      handler
        .findValidatedData(maybeToken)
        .map(_.getOrElse(throw new InvalidToken("The access token is invalid")))
    }
  // Extracts the token with the first fetcher that recognizes the request;
  // throws InvalidRequest (synchronously) when no fetcher matches.
  private def handleProtectedResourceRequest(request: ProtectedResourceRequest,
                                             handler: ProtectedResourceHandler[ValidatedData, ValidatedToken])(implicit ec: ExecutionContext): Future[ValidatedData] =
    fetchers
      .find(fetcher => fetcher.matches(request))
      .map(fetcher => handleToken(fetcher.fetch(request), handler))
      .getOrElse(throw new InvalidRequest("Access token is not found"))
}
/** Factory for a default ProtectedResource instance. */
object ProtectedResource {
  def apply: ProtectedResource = new ProtectedResource {}
}
| MonsantoCo/glass-key | glass-key-common/src/main/scala/glasskey/resource/ProtectedResource.scala | Scala | bsd-3-clause | 1,896 |
package au.com.dius.pact.model
import au.com.dius.pact.model.Fixtures._
import org.junit.runner.RunWith
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class MatchingSpec extends Specification {
"Matching" should {
import au.com.dius.pact.model.Matching._
implicit val autoParse = JsonDiff.autoParse _
"Body Matching" should {
val config = DiffConfig()
"Handle both None" in {
matchBody(Request("", "", None, Some(Map("Content-Type" -> "a")), None, None),
Request("", "", None, Some(Map("Content-Type" -> "a")), None, None), config) must beEmpty
}
"Handle left None" in {
val expected = List(BodyMismatch(request.body, None))
matchBody(Request("", "", None, Some(Map("Content-Type" -> "a")), request.body, None),
Request("", "", None, Some(Map("Content-Type" -> "a")), None, None), config) must beEqualTo(expected)
}
"Handle right None" in {
matchBody(Request("", "", None, Some(Map("Content-Type" -> "a")), None, None),
Request("", "", None, Some(Map("Content-Type" -> "a")), request.body, None), config) must beEmpty
}
"Handle different mime types" in {
val expected = List(BodyTypeMismatch("a", "b"))
matchBody(Request("", "", None, Some(Map("Content-Type" -> "a")), request.body, None),
Request("", "", None, Some(Map("Content-Type" -> "b")), request.body, None), config) must beEqualTo(expected)
}
"match different mimetypes by regexp" in {
matchBody(Request("", "", None, Some(Map("Content-Type" -> "application/x+json")), Some("{ \\"name\\": \\"bob\\" }"), None),
Request("", "", None, Some(Map("Content-Type" -> "application/x+json")), Some("{\\"name\\":\\"bob\\"}"), None), config) must beEmpty
}
}
"Method Matching" should {
"match same" in {
matchMethod("a", "a") must beNone
}
"match ignore case" in {
matchMethod("a", "A") must beNone
}
"mismatch different" in {
matchMethod("a", "b") must beSome(MethodMismatch("a", "b"))
}
}
"Query Matching" should {
"match same" in {
matchQuery(Some("a=b"), Some("a=b")) must beNone
}
"match none" in {
matchQuery(None, None) must beNone
}
"mismatch none to something" in {
matchQuery(None, Some("a=b")) must beSome(QueryMismatch("", "a=b"))
}
"mismatch something to none" in {
matchQuery(Some("a=b"), None) must beSome(QueryMismatch("a=b", ""))
}
"match keys in different order" in {
matchQuery(Some("status=RESPONSE_RECEIVED&insurerCode=ABC"), Some("insurerCode=ABC&status=RESPONSE_RECEIVED")) must beNone
}
"mismatch if the same key is repeated with values in different order" in {
matchQuery(Some("a=1&a=2&b=3"), Some("a=2&a=1&b=3")) must beSome(QueryMismatch("a=1&a=2&b=3", "a=2&a=1&b=3"))
}
}
"Header Matching" should {
"match empty" in {
matchHeaders(Request("", "", None, None, None, None),
Request("", "", None, None, None, None)) must beEmpty
}
"match same headers" in {
matchHeaders(Request("", "", None, Some(Map("A" -> "B")), None, None),
Request("", "", None, Some(Map("A" -> "B")), None, None)) must beEmpty
}
"ignore additional headers" in {
matchHeaders(Request("", "", None, Some(Map("A" -> "B")), None, None),
Request("", "", None, Some(Map("A" -> "B", "C" -> "D")), None, None)) must beEmpty
}
"complain about missing headers" in {
matchHeaders(Request("", "", None, Some(Map("A" -> "B", "C" -> "D")), None, None),
Request("", "", None, Some(Map("A" -> "B")), None, None)) must beEqualTo(List(
HeaderMismatch("C", "D", "", Some("Expected a header 'C' but was missing"))))
}
"complain about incorrect headers" in {
matchHeaders(Request("", "", None, Some(Map("A" -> "B")), None, None),
Request("", "", None, Some(Map("A" -> "C")), None, None)) must beEqualTo(List(
HeaderMismatch("A", "B", "C", Some("Expected header 'A' to have value 'B' but was 'C'"))))
}
}
}
}
| sangohan/pact-jvm | pact-jvm-matchers/src/test/scala/au/com/dius/pact/model/MatchingSpec.scala | Scala | apache-2.0 | 4,281 |
package org.flowpaint.pixelprocessors
import org.flowpaint.pixelprocessor.SingleFunctionPixelProcessor
/**
*
*
* @author Hans Haggstrom
*/
class OneOver extends SingleFunctionPixelProcessor( "1f / " ) | zzorn/flowpaint | src/main/scala/org/flowpaint/pixelprocessors/OneOver.scala | Scala | gpl-2.0 | 207 |
package net.scalytica.symbiotic.core.http
import org.scalajs.dom
import scala.scalajs.js.{Date, URIUtils}
/** Helpers for storing a Map[String, String] in a single browser cookie whose
 *  value is encoded as "key1=value1&key2=value2".
 */
object Cookies {

  /** Writes `args` as a far-future (effectively permanent) cookie named `cookieName`. */
  def set(cookieName: String, args: Map[String, String]): Unit = {
    val expiresAt = new Date(year = 9999, month = 12).toUTCString()
    val argStr = args.toList.map(kv => s"${kv._1}=${kv._2}").mkString("&")
    val cookieValue = s"$argStr; expires=$expiresAt; path=/"
    // NOTE(review): encodeURI is applied to the attribute string too; it leaves
    // ';' and '=' intact so the attributes survive, but it %20-encodes the
    // spaces inside the expires date — confirm target browsers tolerate this.
    dom.document.cookie = s"$cookieName=${URIUtils.encodeURI(cookieValue)}"
  }

  /** Deletes the cookie by rewriting it with an already-passed expiry date. */
  def remove(cookieName: String): Unit = {
    val expiresAt = new Date(Date.now()).toUTCString()
    dom.document.cookie = s"$cookieName= ; expires=$expiresAt; path=/"
  }

  /** Returns the raw "name=value" segment for `cookieName`, if present.
   *
   *  document.cookie separates entries with "; ", so every segment except the
   *  first carries a leading space; segments are trimmed before matching
   *  (a plain startsWith on the untrimmed split could only ever match the
   *  first cookie). Matching against "name=" also prevents a cookie whose
   *  name merely starts with `cookieName` from being returned instead.
   */
  def get(cookieName: String): Option[String] =
    Option(dom.document.cookie)
      .flatMap(_.split(';').map(_.trim).find(_.startsWith(s"$cookieName=")))

  /** Decodes the cookie payload back into a key/value map; empty when the
   *  cookie is missing.
   */
  def toMap(cookieName: String): Map[String, String] =
    Cookies
      .get(cookieName)
      .map { mc =>
        mc.stripPrefix(s"$cookieName=")
          .split("&")
          .toSeq
          .map { e =>
            val kvp = e.split("=")
            // There should only ever be a key and a value; note a value that
            // itself contains '=' would lose its middle part here.
            kvp.head -> kvp.last.stripPrefix("=")
          }
          .toMap
      }
      .getOrElse(Map.empty[String, String])

  /** Convenience lookup of a single key inside the cookie payload. */
  def valueOf(cookieName: String, key: String): Option[String] = toMap(cookieName).get(key)
}
| kpmeen/symbiotic | examples/symbiotic-client/src/main/scala/net/scalytica/symbiotic/core/http/Cookies.scala | Scala | apache-2.0 | 1,316 |
package scalaz.contrib
package validator
import java.text.SimpleDateFormat
import java.util.UUID
import scalaz._
import Scalaz._
import org.specs2.mutable.Specification
class StringValidationSpec extends Specification {
import string._
val errorMessage = "Generic Error Message"
"match pattern" should {
val digitOnly = matchRegex("""^\\d*$""".r, errorMessage)
"succeed when the pattern is matched" in {
digitOnly("123456") must beNone
}
"fail when the pattern is not matched" in {
digitOnly("123a") must beSome(errorMessage)
}
}
"not blank validator" should {
val failNotBlank = notBlank(errorMessage)
"fail when the string is blank" in {
failNotBlank("") must beSome(errorMessage)
failNotBlank(" ") must beSome(errorMessage)
}
"succeed when the string is not blank" in {
List("1", " 1 ").foreach(s => failNotBlank(s) must beNone)
}
}
"luhn check" should {
val check = luhn(errorMessage)
"success when string checks out" in {
List(
"5105105105105100", "5454545454545454", "5555555555554444", "4222222222222", "4111111111111111",
"4012888888881881", "378282246310005", "371449635398431", "378734493671000", "38520000023237", "30569309025904",
"6011111111111117", "6011000990139424", "3530111333300000", "3566002020360505"
) foreach { num =>
check(num) must beNone
}
}
"fail for invalid strings" in {
List(
"4105105105105100", "5554545454545454", "5545555555554444", "4222322222222", "4111116111111111",
"4012888878881881", "378282246300005", "371449635398432", "378734493671030", "38520000023231", "30569309125904",
"6011111111114117", "6011000990132424", "3530111333303000", "3566002020260505"
) foreach { num =>
check(num) must beSome(errorMessage)
}
}
}
"strLength validation" should {
val maxThree = maxStrLength(3, errorMessage)
val minThree = minStrLength(3, errorMessage)
val eqThree = strLength(3, errorMessage)
"succeed when correct" in {
List("", "1", "12", "123") foreach { x => maxThree(x) must beNone }
List("123", "1234", "12345") foreach { x => minThree(x) must beNone }
eqThree("123") must beNone
}
"fail when invalid" in {
List("1234", "12345") foreach { x => maxThree(x) must beSome(errorMessage) }
List("", "1", "12") foreach { x => minThree(x) must beSome(errorMessage) }
List("", "1", "12", "1234", "12345") foreach { num => eqThree(num) must beSome(errorMessage) }
}
}
}
// vim: expandtab:ts=2:sw=2
| non/scalaz-contrib | validation-ext/test/scala/validator/StringValidationSpec.scala | Scala | mit | 2,635 |
//: ----------------------------------------------------------------------------
//: Copyright (C) 2015 Verizon. All Rights Reserved.
//:
//: Licensed under the Apache License, Version 2.0 (the "License");
//: you may not use this file except in compliance with the License.
//: You may obtain a copy of the License at
//:
//: http://www.apache.org/licenses/LICENSE-2.0
//:
//: Unless required by applicable law or agreed to in writing, software
//: distributed under the License is distributed on an "AS IS" BASIS,
//: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//: See the License for the specific language governing permissions and
//: limitations under the License.
//:
//: ----------------------------------------------------------------------------
package knobs
import com.typesafe.config.{Config => TC, _}
import scala.collection.JavaConversions._
import scala.reflect.ClassTag
import scalaz.concurrent.Task
/** Bridges a Typesafe `Config` into a knobs [[Config]]. */
object Typesafe {
  // Loads the default Typesafe configuration and converts every leaf entry
  // into the corresponding knobs CfgValue, all deferred inside a Task.
  def config: Task[Config] = Task {
    // Converts a Typesafe list value into a CfgList; NULL elements are dropped.
    def convertList(list: ConfigList): CfgList = {
      // Downcasts the unwrapped Java value; the ClassTag makes the type test
      // checked at runtime and mismatches abort via sys.error.
      def unwrap[T](c: ConfigValue)(implicit ev: ClassTag[T]): T =
        c.unwrapped match {
          case t: T => t
          case _ =>
            sys.error(s"Can't convert $c to underlying type ${ev.runtimeClass.getName}")
        }
      val items: List[CfgValue] =
        list.toList.flatMap { v =>
          v.valueType match {
            case ConfigValueType.NULL => None
            case ConfigValueType.BOOLEAN =>
              Some(CfgBool(unwrap[Boolean](v)))
            case ConfigValueType.LIST =>
              Some(convertList(unwrap[ConfigList](v)))
            case ConfigValueType.NUMBER =>
              Some(CfgNumber(unwrap[Number](v).doubleValue))
            case ConfigValueType.STRING =>
              Some(CfgText(unwrap[String](v)))
            // NOTE(review): nested OBJECT values inside a list are not handled
            // and abort the whole conversion here.
            case x =>
              sys.error(s"Can't convert $v to a CfgValue")
          }
        }
      CfgList(items)
    }
    // Folds every (flattened) entry of the Typesafe config into the knobs
    // environment map; OBJECT and NULL entries are skipped.
    def convertTypesafeConfig(cfg: TC) = {
      cfg.entrySet.foldLeft(Config.empty.env) {
        case (m, entry) => {
          val (k, v) = (entry.getKey, entry.getValue)
          v.valueType match {
            case ConfigValueType.OBJECT => m
            case ConfigValueType.NULL => m
            case _ =>
              // Re-reads the value through the typed accessors so Typesafe's
              // own coercions (e.g. string numbers) apply.
              def convert(v: ConfigValue): CfgValue = v.valueType match {
                case ConfigValueType.BOOLEAN =>
                  CfgBool(cfg.getBoolean(k))
                case ConfigValueType.LIST =>
                  convertList(cfg.getList(k))
                case ConfigValueType.NUMBER =>
                  CfgNumber(cfg.getNumber(k).doubleValue)
                case ConfigValueType.STRING =>
                  CfgText(cfg.getString(k))
                case x => sys.error(s"Can't convert $v to a CfgValue")
              }
              m + (k -> convert(v))
          }
        }
      }
    }
    val cfg = ConfigFactory.load
    Config(convertTypesafeConfig(cfg))
  }
}
| runarorama/knobs | typesafe/src/main/scala/knobs/Typesafe.scala | Scala | apache-2.0 | 2,981 |
package demo
import javax.servlet.http.HttpServletRequest
import javax.ws.rs.core.Response
import javax.ws.rs.{GET, Path, DefaultValue, QueryParam}
import javax.ws.rs.core.{Response, Context}
// import core geotrellis types
import geotrellis._
// import some useful operations
import geotrellis.raster.op._
import geotrellis.statistics.op._
import geotrellis.rest.op._
// import syntax implicits like "raster + raster"
import geotrellis.Implicits._
object response {
def apply(mime:String)(data:Any) = Response.ok(data).`type`(mime).build()
}
/**
 * Operation to perform the basic weighted overlay calculation:
 * (raster1 * weight1 + raster2 * weight2) / (weight1 + weight2),
 * i.e. the weighted average of the two rasters.
 */
object WeightedOverlayBasic {
  def apply(raster1:Op[Raster], weight1:Op[Int],
            raster2:Op[Raster], weight2:Op[Int]) = {
    val x:Op[Raster] = raster1 * weight1
    val y:Op[Raster] = raster2 * weight2
    val z:Op[Raster] = x + y
    val weightSum:Op[Int] = weight1 + weight2
    z / weightSum
  }
}
/**
 * Weighted overlay generalized to any number of rasters: multiplies each
 * raster by its corresponding weight, sums the products, and divides by the
 * total weight. Assumes `rasters` and `weights` have the same length.
 */
object WeightedOverlayArray {
  def apply(rasters:Op[Array[Raster]], weights:Op[Array[Int]]) = {
    // Pairwise raster * weight.
    val rs:Op[Array[Raster]] = logic.ForEach(rasters, weights)(_ * _)
    val weightSum:Op[Int] = logic.Do(weights)(_.sum)
    local.AddArray(rs) / weightSum
  }
}
object Demo {
val server = process.Server("demo", "src/test/resources/demo-catalog.json")
def errorPage(msg:String, traceback:String) = """
<html>
<p>%s</p>
<tt>%s</tt>
</html>
""" format (msg, traceback)
def infoPage(cols:Int, rows:Int, ms:Long, url:String, tree:String) = """
<html>
<head>
<script type="text/javascript">
</script>
</head>
<body>
<h2>raster time!</h2>
<h3>rendered %dx%d image (%d pixels) in %d ms</h3>
<table>
<tr>
<td style="vertical-align:top"><img style="vertical-align:top" src="%s" /></td>
<td><pre>%s</pre></td>
</tr>
</table>
</body>
</html>
""" format(cols, rows, cols * rows, ms, url, tree)
}
@Path("/demo1")
class DemoService1 {
final val defaultBox = "-8379782.57151,4846436.32082,-8360582.57151,4865636.32082"
final val defaultColors = "ff0000,ffff00,00ff00,0000ff"
@GET
def get(
@DefaultValue(defaultBox) @QueryParam("bbox") bbox:String,
@DefaultValue("256") @QueryParam("cols") cols:String,
@DefaultValue("256") @QueryParam("rows") rows:String,
@DefaultValue("SBN_inc_percap") @QueryParam("layers") layers:String,
@DefaultValue("1") @QueryParam("weights") weights:String,
@DefaultValue("") @QueryParam("mask") mask:String,
@DefaultValue(defaultColors) @QueryParam("palette") palette:String,
@DefaultValue("4") @QueryParam("colors") numColors:String,
@DefaultValue("info") @QueryParam("format") format:String,
@Context req:HttpServletRequest
) = {
// First let's figure out what geographical area we're interestd in, as
// well as the resolution we want to use.
val colsOp = string.ParseInt(cols)
val rowsOp = string.ParseInt(rows)
val extentOp = string.ParseExtent(bbox)
val reOp = extent.GetRasterExtent(extentOp, colsOp, rowsOp)
// Figure out which rasters and weights the user wants to use.
val layerOps = logic.ForEach(string.SplitOnComma(layers))(io.LoadRaster(_, reOp))
val weightOps = logic.ForEach(string.SplitOnComma(weights))(string.ParseInt(_))
// Do the actual weighted overlay operation
val overlayOp = WeightedOverlayArray(layerOps, weightOps)
// Cache and (optionally) mask the result.
val outputOp = if (mask.isEmpty) {
overlayOp
} else {
local.Mask(overlayOp, io.LoadRaster(mask, reOp), NODATA, NODATA)
}
// Build a histogram of the output raster values.
val histogramOp = stat.GetHistogram(outputOp)
// Parse the user's color palette and allocate colors.
val paletteColorsOp = logic.ForEach(string.SplitOnComma(palette))(s => string.ParseHexInt(s))
val numColorsOp = string.ParseInt(numColors)
val colorsOp = stat.GetColorsFromPalette(paletteColorsOp, numColorsOp)
// Determine some good quantile breaks to use for coloring output.
val breaksOp = stat.GetColorBreaks(histogramOp, colorsOp)
// Render the actual PNG image.
val pngOp = io.RenderPNG(outputOp, breaksOp, 0, true)
format match {
case "hello" => response("text/plain")("hello world")
case "info" => Demo.server.getResult(pngOp) match {
case process.Complete(img, h) => {
val ms = h.elapsedTime
val query = req.getQueryString + "&format=png"
val url = "/demo1?" + query + "&format=png"
println(url)
val html = Demo.infoPage(cols.toInt, rows.toInt, ms, url, h.toPretty)
response("text/html")(html)
}
case process.Error(msg, trace) => {
response("text/plain")("failed: %s\ntrace:\n%s".format(msg, trace))
}
}
case _ => Demo.server.getResult(pngOp) match {
case process.Complete(img, _) => response("image/png")(img)
case process.Error(msg, trace) => {
response("text/plain")("failed: %s\ntrace:\n%s".format(msg, trace))
}
}
}
}
}
| Tjoene/thesis | Case_Programs/geotrellis-0.7.0/demo/src/main/scala/demo/Demo.scala | Scala | gpl-2.0 | 5,024 |
package scalaprops
abstract class Choose[A] {
def withBoundaries(from: A, to: A): Gen[A]
def choose(from: A, to: A): Gen[A]
}
object Choose {
def apply[A](implicit A: Choose[A]): Choose[A] = A
implicit val intChoose: Choose[Int] =
new Choose[Int] {
override def withBoundaries(from: Int, to: Int) = {
if (from == to) {
Gen.value(from)
} else {
val min = math.min(from, to)
val max = math.max(from, to)
(max - min) match {
case 1 =>
Gen.elements(min, max)
case 2 =>
Gen.elements(min, min + 1, max)
case _ =>
Gen.frequency(
1 -> Gen.value(min),
1 -> Gen.value(min + 1),
1 -> Gen.value(max - 1),
1 -> Gen.value(max),
90 -> Gen.choose(from, to)
)
}
}
}
override def choose(from: Int, to: Int) =
Gen.choose(from, to)
}
implicit val byteChoose: Choose[Byte] =
new Choose[Byte] {
override def withBoundaries(from: Byte, to: Byte) =
Choose[Int].withBoundaries(from, to).map(Gen.Int2Byte)
override def choose(from: Byte, to: Byte) =
Choose[Int].choose(from, to).map(Gen.Int2Byte)
}
implicit val shortChoose: Choose[Short] =
new Choose[Short] {
override def withBoundaries(from: Short, to: Short) =
Choose[Int].withBoundaries(from, to).map(Gen.Int2Short)
override def choose(from: Short, to: Short) =
Choose[Int].choose(from, to).map(Gen.Int2Short)
}
implicit val longChoose: Choose[Long] =
new Choose[Long] {
override def withBoundaries(from: Long, to: Long) = {
if (from == to) {
Gen.value(from)
} else {
val min = math.min(from, to)
val max = math.max(from, to)
(max - min) match {
case 1 =>
Gen.elements(min, max)
case 2 =>
Gen.elements(min, min + 1L, max)
case _ =>
Gen.frequency(
1 -> Gen.value(min),
1 -> Gen.value(min + 1L),
1 -> Gen.value(max - 1L),
1 -> Gen.value(max),
90 -> Gen.chooseLong(from, to)
)
}
}
}
override def choose(from: Long, to: Long) =
Gen.chooseLong(from, to)
}
}
| scalaprops/scalaprops | gen/src/main/scala/scalaprops/Choose.scala | Scala | mit | 2,438 |
package com.github.xubo245.gcdss.disease
import java.text.SimpleDateFormat
import java.util.Date
import com.github.xubo245.gcdss.utils.Constants
import org.apache.spark.{SparkConf, SparkContext}
import org.bdgenomics.adam.rdd.ADAMContext
import org.bdgenomics.adam.rdd.ADAMContext._
/**
* Created by xubo on 2017/4/9.
*/
object CallDiseaseFromGenotype {
def main(args: Array[String]) {
val startTime = System.currentTimeMillis()
var genotypeFile = args(0)
val vcf2omimSimpleFile = args(1)
var out = args(2)
var appArgs = "genotypeFile:" + genotypeFile + "\\tout:" + out
val conf = new SparkConf().setAppName("CallDiseaseFromGenotype:" + appArgs)
.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
.set("spark.kryo.registrator", "org.bdgenomics.adam.serialization.ADAMKryoRegistrator")
.set("spark.kryo.referenceTracking", "true")
if (System.getProperties.getProperty("os.name").contains("Windows")) {
conf.setMaster("local[16]")
}
val sc = new SparkContext(conf)
val ac = new ADAMContext(sc)
Constants.debug = false
// var vcfFile = "file/callDisease/input/vcf/small.vcf"
// val vcfFile = "file\\\\callVariant\\\\output\\\\DiscoverVariant20170409211412241.adam"
// var output = "file/callDisease/output/vcf2omim/test1"
// val vcf2omimSimpleFile = "file/callDisease/input/vcf2omimAll.txt"
// val rdd = sc.loadVcf(vcfFile, sd)
// rdd.map(_.variant.variant).foreach(println)
val rdd = sc.loadGenotypes(genotypeFile)
// val vcfRDD = sc.loadGenotypes(vcfFile).toVariantContext.collect.sortBy(_.position)
val vcf2Omim = new CallDisease(rdd.toVariantContextRDD)
val returnRDD = vcf2Omim.runComplex(sc, vcf2omimSimpleFile)
val iString = new SimpleDateFormat("yyyyMMddHHmmssSSS").format(new Date())
val output1 = out + "/ComplexT" + iString
val saveRDD = returnRDD.map { each =>
val str1 = each._1.split(Array(',', '(', ')'))
val str = str1(1) + '|' + str1(2) + '|' + str1(3) + '|' + str1(4) + '|' + each._2 + '|' +
each._3 + '|' + each._4 + '|' + each._5 + '|' + each._6 + '|' + each._7 + '|' +
each._8 + '|' + each._9 + '|' + each._10 + '|' + each._11 + '|' + each._12 + '|' +
each._13 + '|' + each._14 + '|' + each._15
str
}
println("*************count:"+saveRDD.count())
// saveRDD.foreach(println)
saveRDD.repartition(1).saveAsTextFile(output1)
sc.stop
val stopTime = System.currentTimeMillis()
println(appArgs + "\\ttime:\\t" + (stopTime - startTime) / 1000.0 + "\\t")
}
}
| xubo245/GCDSS | src/main/scala/com/github/xubo245/gcdss/disease/CallDiseaseFromGenotype.scala | Scala | gpl-2.0 | 2,612 |
/*
* Copyright 2013-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package laika.io
import java.io._
/** Collection of I/O utilities.
 *
 *  @author Jens Halm
 */
object IO {

  /** Calls the specified function, closes the IO resource if the resource
   *  mixes in `java.io.Closeable` and returns the result of the function call.
   *
   *  @param resource the IO resource to manage
   *  @param f the function to invoke, with the managed resource getting passed into it
   *  @return the result of the function call
   */
  def apply [R, T] (resource: R)(f: R => T): T = resource match {
    case c: Closeable => try f(resource) finally c.close
    case _ => f(resource)
  }

  /** Copies all bytes from the specified InputStream to the
   *  OutputStream. Rethrows all Exceptions and does not
   *  close the streams afterwards.
   */
  def copy (input: InputStream, output: OutputStream): Unit = (input, output) match {
    case (in: FileInputStream, out: FileOutputStream) =>
      // A single transferTo call is not guaranteed to move the whole file
      // (it may transfer fewer bytes than requested), and the previous
      // one-shot call capped at Integer.MAX_VALUE silently truncated files
      // larger than 2 GB. Loop until the complete channel content is copied.
      val inChannel = in.getChannel
      val outChannel = out.getChannel
      val size = inChannel.size
      var position = 0L
      while (position < size) {
        position += inChannel.transferTo(position, size - position, outChannel)
      }
    case _ =>
      val buffer = new Array[Byte](8192)
      Iterator.continually(input.read(buffer))
        .takeWhile(_ != -1)
        .foreach { output.write(buffer, 0 , _) }
  }

  /** Copies all characters from the specified Reader to the
   *  Writer. Rethrows all Exceptions and does not
   *  close the Reader or Writer afterwards.
   */
  def copy (input: Reader, output: Writer): Unit = {
    val buffer = new Array[Char](8192)
    Iterator.continually(input.read(buffer))
      .takeWhile(_ != -1)
      .foreach { output.write(buffer, 0 , _) }
  }

  /** Copies all bytes or characters (depending on Input type)
   *  from the specified Input to the
   *  Output. Rethrows all Exceptions and does not
   *  close the Input or Output afterwards.
   */
  def copy (input: Input, output: Output): Unit = {
    (input, output) match {
      case (in: Input.Binary, out: Output.Binary) =>
        val binaryIn = in.asBinaryInput
        val binaryOut = out.asBinaryOutput
        // Manage both endpoints so they are closed even if the copy fails.
        apply(binaryIn) { in => apply(binaryOut) { out => copy(in.asStream, out.asStream) } }
      case _ =>
        apply(input) { in => apply(output) { out => copy(in.asReader, out.asWriter) } }
    }
  }
}
} | amuramatsu/Laika | core/src/main/scala/laika/io/IO.scala | Scala | apache-2.0 | 2,818 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.utils.geotools
import java.util.{Date, UUID}
import org.junit.runner.RunWith
import org.locationtech.geomesa.utils.geotools.SftBuilder.Opts
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes.AttributeOptions._
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
@RunWith(classOf[JUnitRunner])
class SftBuilderTest extends Specification {
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
sequential
"SpecBuilder" >> {
"build simple types" >> {
val spec = new SftBuilder().intType("i").longType("l").floatType("f").doubleType("d").stringType("s").getSpec
spec mustEqual "i:Integer,l:Long,f:Float,d:Double,s:String"
}
"handle date and uuid types" >> {
val spec = new SftBuilder().date("d").uuid("u").getSpec
spec mustEqual "d:Date,u:UUID"
}
"provide index when set to true" >> {
val spec = new SftBuilder()
.intType("i", index = true)
.longType("l", index = true)
.floatType("f", index = true)
.doubleType("d", index = true)
.stringType("s", index = true)
.date("dt", Opts(index = true))
.uuid("u", index = true)
.getSpec
val expected = "i:Integer,l:Long,f:Float,d:Double,s:String,dt:Date,u:UUID".split(",").map(_+":index=true").mkString(",")
spec mustEqual expected
}
// Example of fold...also can do more complex things like zipping to automatically build SFTs
"work with foldLeft" >> {
val spec = ('a' to 'z').foldLeft(new SftBuilder()) { case (builder, name) =>
builder.stringType(name.toString)
}
val expected = ('a' to 'z').map{ c => c.toString + ":" + "String" }.mkString(",")
spec.getSpec mustEqual expected
val sft = spec.build("foobar")
sft.getAttributeCount mustEqual 26
sft.getAttributeDescriptors.map(_.getLocalName).toList mustEqual ('a' to 'z').map(_.toString).toList
}
"set default dtg correctly" >> {
new SftBuilder()
.date("foobar", default = true)
.build("foobar").getDtgField must beSome("foobar")
new SftBuilder()
.date("foobar")
.withDefaultDtg("foobar")
.build("foobar").getDtgField must beSome("foobar")
new SftBuilder()
.date("foobar")
.date("dtg")
.withDefaultDtg("foobar")
.build("foobar").getDtgField must beSome("foobar")
new SftBuilder()
.date("dtg")
.date("foobar")
.withDefaultDtg("foobar")
.build("foobar").getDtgField must beSome("foobar")
new SftBuilder()
.date("dtg")
.date("foobar", default = true)
.build("foobar").getDtgField must beSome("foobar")
}
"build lists" >> {
val builder = new SftBuilder()
.listType[Int]("i")
.listType[Long]("l")
.listType[Float]("f")
.listType[Double]("d")
.listType[String]("s")
.listType[Date]("dt")
.listType[UUID]("u")
val expected =
List(
"i" -> "Int",
"l" -> "Long",
"f" -> "Float",
"d" -> "Double",
"s" -> "String",
"dt" -> "Date",
"u" -> "UUID"
).map { case (k,v) => s"$k:List[$v]" }.mkString(",")
builder.getSpec mustEqual expected
val sft = builder.build("foobar")
sft.getAttributeCount mustEqual 7
sft.getAttributeDescriptors.map(_.getType.getBinding).forall (_ must beAssignableFrom[java.util.List[_]])
}
"build lists with Java Types" >> {
val builder = new SftBuilder()
.listType[java.lang.Integer]("i")
.listType[java.lang.Long]("l")
.listType[java.lang.Float]("f")
.listType[java.lang.Double]("d")
.listType[java.lang.String]("s")
.listType[java.util.Date]("dt")
.listType[java.util.UUID]("u")
val expected =
List(
"i" -> "Integer", //for java use Integer instead of Int
"l" -> "Long",
"f" -> "Float",
"d" -> "Double",
"s" -> "String",
"dt" -> "Date",
"u" -> "UUID"
).map { case (k,v) => s"$k:List[$v]" }.mkString(",")
builder.getSpec mustEqual expected
val sft = builder.build("foobar")
sft.getAttributeCount mustEqual 7
sft.getAttributeDescriptors.map(_.getType.getBinding).forall (_ must beAssignableFrom[java.util.List[_]])
}
"build maps" >> {
val builder = new SftBuilder()
.mapType[Int,Int]("i")
.mapType[Long,Long]("l")
.mapType[Float,Float]("f")
.mapType[Double,Double]("d")
.mapType[String,String]("s")
.mapType[Date,Date]("dt")
.mapType[UUID,UUID]("u")
val expected =
List(
"i" -> "Int",
"l" -> "Long",
"f" -> "Float",
"d" -> "Double",
"s" -> "String",
"dt" -> "Date",
"u" -> "UUID"
).map { case (k,v) => s"$k:Map[$v,$v]" }.mkString(",")
builder.getSpec mustEqual expected
val sft = builder.build("foobar")
sft.getAttributeCount mustEqual 7
sft.getAttributeDescriptors.map(_.getType.getBinding).forall (_ must beAssignableFrom[java.util.Map[_,_]])
}
"build maps of diff types" >> {
val builder = new SftBuilder()
.mapType[Int,String]("a")
.mapType[Long,UUID]("b")
.mapType[Date,Float]("c")
builder.getSpec mustEqual "a:Map[Int,String],b:Map[Long,UUID],c:Map[Date,Float]"
val sft = builder.build("foobar")
sft.getAttributeCount mustEqual 3
sft.getAttributeDescriptors.map(_.getType.getBinding).forall (_ must beAssignableFrom[java.util.Map[_,_]])
}
"handle multiple geoms" >> {
val builder = new SftBuilder()
.geometry("geom")
.point("foobar", default = true)
.multiLineString("mls")
builder.getSpec mustEqual s"geom:Geometry:srid=4326,*foobar:Point:srid=4326:index=true:$OPT_INDEX_VALUE=true,mls:MultiLineString:srid=4326"
val sft = builder.build("foobar")
sft.getAttributeCount mustEqual 3
sft.getGeometryDescriptor.getLocalName mustEqual "foobar"
}
"handle Bytes type" >> {
val spec = new SftBuilder().stringType("a").bytes("b").getSpec
spec mustEqual "a:String,b:Bytes"
val lSpec = new SftBuilder().listType[Array[Byte]]("lst").getSpec
lSpec mustEqual "lst:List[Bytes]"
val mSpec = new SftBuilder().mapType[String,Array[Byte]]("m").getSpec
mSpec mustEqual "m:Map[String,Bytes]"
val m2Spec = new SftBuilder().mapType[Array[Byte],Array[Byte]]("m2").getSpec
m2Spec mustEqual "m2:Map[Bytes,Bytes]"
}
}
}
| nagavallia/geomesa | geomesa-utils/src/test/scala/org/locationtech/geomesa/utils/geotools/SftBuilderTest.scala | Scala | apache-2.0 | 7,272 |
package dk.bayes.math.discretise
/**
* This class represents histogram. http://en.wikipedia.org/wiki/Histogram
*
* @author korzekwad
*/
case class Histogram(startValue: Double, endValue: Double, binsNum: Int) {

  // Width of a single bin; the last bin's value lands on endValue.
  private val interval = (endValue - startValue) / (binsNum - 1)

  /** Returns the representative value of every bin, in ascending order. */
  def toValues(): Seq[Double] = mapValues(identity)

  /**
   * Transforms the value of every bin with `f` and collects the results.
   *
   * @param f mapping from a bin value to the produced element
   */
  def mapValues[T](f: Double => T): Seq[T] =
    (startValue to endValue by interval).map(f)

  /** Returns the value associated with the given zero-based bin index. */
  def valueOf(binIndex: Int): Double = {
    require(0 <= binIndex && binIndex < binsNum, "Bin index out of range")
    binIndex * interval + startValue
  }

  /** Returns the zero-based index of the first bin whose value is >= `value`. */
  def binIndexOf(value: Double): Int = {
    require(value >= startValue && value <= endValue, "Value out of range")
    val firstMatch = (0 until binsNum).indexWhere(valueOf(_) >= value)
    if (firstMatch < 0) binsNum else firstMatch
  }
}
import scala.language.existentials
// Regression test for Manifests of case classes / case objects defined inside
// method bodies: the result types of f/g/h are existential, and `m` prints the
// Manifest inferred at each call site. Do not restructure — the printed output
// is the test's expected behavior.
object Test {
  def f() = { case class Bar(x: Int); Bar }   // returns the companion object of a method-local case class
  def g() = { case class Bar(x: Int); Bar(5) } // returns an instance of a method-local case class
  def h() = { case object Bar ; Bar }          // returns a method-local case object

  // Results captured in vals: each val's type is the (existential) result type above.
  val f1 = f()
  val g1 = g()
  val h1 = h()

  // Prints the Manifest inferred for T at the call site.
  def m[T: Manifest](x: T) = println(manifest[T])

  def main(args: Array[String]): Unit = {
    m(f)
    m(g)
    m(h)
    m(f1)
    m(g1)
    m(h1)
  }
}
// Nested variant: D is a case class local to a method of an inner class,
// and additionally extends the outer generic class A1[String].
class A1[T] {
  class B1[U] {
    def f = { case class D(x: Int) extends A1[String] ; new D(5) }
  }
}
| dotty-staging/dotty | tests/pending/run/t1195-old.scala | Scala | apache-2.0 | 487 |
package org.usagram.clarify.error
import org.usagram.clarify.{ Indefinite, Tags }
import org.scalatest._
/** Specs for [[OutOfRange]] message rendering under different label/inclusivity settings. */
class OutOfRangeSpec extends FunSpec {
  import Matchers._

  describe("#message") {
    val inclusiveError = OutOfRange(10, 100, isInclusive = true)

    describe("when Tags#label is Some(string)") {
      it("returns a message include the label") {
        val labelledTags = Tags(Indefinite(Some("a label")))
        inclusiveError.message(labelledTags) should be("a label is out of range [10, 100]")
      }
    }

    describe("when Tags#label is None") {
      it("returns a message include the default label") {
        val unlabelledTags = Tags(Indefinite(None))
        inclusiveError.message(unlabelledTags) should be("(no label) is out of range [10, 100]")
      }
    }

    describe("when isInclusive = false") {
      it("returns a message with [lower, upper)") {
        val unlabelledTags = Tags(Indefinite(None))
        val exclusiveError = OutOfRange(10, 100, isInclusive = false)
        exclusiveError.message(unlabelledTags) should be("(no label) is out of range [10, 100)")
      }
    }
  }
}
| takkkun/clarify | core/src/test/scala/org/usagram/clarify/error/OutOfRangeSpec.scala | Scala | mit | 1,020 |
package scalaoauth2.provider
import java.net.URLDecoder
/** Result of token extraction: the access token plus any remaining single-valued request parameters. */
case class FetchResult(token: String, params: Map[String, String])

/** Strategy for locating an OAuth2 access token within a protected-resource request. */
trait AccessTokenFetcher {

  /** Returns true if this fetcher can extract a token from the given request. */
  def matches(request: ProtectedResourceRequest): Boolean

  /** Extracts the token and associated parameters; callers should check [[matches]] first. */
  def fetch(request: ProtectedResourceRequest): FetchResult
}
/** Fetches the access token from request parameters (`oauth_token` or `access_token`). */
object RequestParameter extends AccessTokenFetcher {

  override def matches(request: ProtectedResourceRequest): Boolean =
    request.oauthToken.isDefined || request.accessToken.isDefined

  override def fetch(request: ProtectedResourceRequest): FetchResult = {
    // Prefer the "oauth_token" parameter; fall back to (required) "access_token".
    val token = request.oauthToken.getOrElse(request.requireAccessToken)
    // Collapse multi-valued parameters to their first value, dropping empties.
    val singleValued = request.params.collect {
      case (name, values) if values.nonEmpty => name -> values.head
    }
    // The token-bearing parameters themselves are not part of the result params.
    FetchResult(token, singleValued - "oauth_token" - "access_token")
  }
}
/**
 * Fetches the access token from the "Authorization" HTTP header, supporting
 * both "OAuth" and "Bearer" schemes, plus any comma-separated key="value"
 * parameters that follow the token.
 */
object AuthHeader extends AccessTokenFetcher {
  // Group 1: the auth scheme ("OAuth" or "Bearer"); group 2: the token itself.
  val REGEXP_AUTHORIZATION = """^\\s*(OAuth|Bearer)\\s+([^\\s\\,]*)""".r
  // Strips a leading comma (and surrounding whitespace) before the parameter list.
  val REGEXP_TRIM = """^\\s*,\\s*""".r
  // Splits the remaining header text into individual key="value" parameter pairs.
  val REGEXP_DIV_COMMA = """,\\s*""".r

  override def matches(request: ProtectedResourceRequest): Boolean = {
    request.header("Authorization").exists { header =>
      REGEXP_AUTHORIZATION.findFirstMatchIn(header).isDefined
    }
  }

  override def fetch(request: ProtectedResourceRequest): FetchResult = {
    val header = request.requireHeader("Authorization")
    val matcher = REGEXP_AUTHORIZATION.findFirstMatchIn(header).getOrElse {
      throw new InvalidRequest("parse() method was called when match() result was false.")
    }
    val token = matcher.group(2)
    val end = matcher.end
    // Anything after the token is an optional, comma-separated parameter list.
    val params = if (header.length != end) {
      val trimmedHeader = REGEXP_TRIM.replaceFirstIn(header.substring(end), "")
      val pairs = REGEXP_DIV_COMMA.split(trimmedHeader).map { exp =>
        // Each entry is key=value; a bare key maps to the empty string.
        val (key, value) = exp.split("=", 2) match {
          case Array(k, v) => (k, v.replaceFirst("^\\"", ""))
          case Array(k) => (k, "")
        }
        // Strip the closing quote, then URL-decode the value.
        (key, URLDecoder.decode(value.replaceFirst("\\"$", ""), "UTF-8"))
      }
      Map(pairs: _*)
    } else {
      Map.empty[String, String]
    }
    FetchResult(token, params)
  }
}
| centraldesktop/scala-oauth2-provider | scala-oauth2-core/src/main/scala/scalaoauth2/provider/AccessTokenFetcher.scala | Scala | mit | 2,086 |
package org.apache.flink.contrib.tensorflow.models
import java.io.Serializable
/**
* Represents a TensorFlow model.
*
* A model is a self-contained, hermetic graph with associated assets
* and well-defined run methods.
*
* A model encapsulates state (a graph) and the means to persist it (checkpointing).
*
* @tparam Self the Model type.
*/
trait Model[Self] extends Serializable {
that: Self =>
}
/**
 * A base interface for all rich user-defined models. This class defines methods for
 * the life cycle of the models, as well as methods to access the context in which the models
 * are executed.
 *
 * @tparam Self the concrete RichModel type (F-bounded: `Self` must extend `RichModel[Self]`).
 */
trait RichModel[Self <: RichModel[Self]] extends Model[Self] {
  // Self-type: the implementing class must be Self (see Model).
  that: Self =>

  /**
   * Initialization method for the model. It is called before the run method
   * and thus suitable for one-time initialization work, such as loading a graph
   * and opening a TensorFlow session.
   *
   * @throws Exception Implementations may forward exceptions, which are caught by the runtime.
   */
  @throws(classOf[Exception])
  def open()

  /**
   * Tear-down method for the model. It is called after the last call to the run method.
   *
   * @throws Exception Implementations may forward exceptions, which are caught by the runtime.
   */
  @throws(classOf[Exception])
  def close()
}
| cookieai/flink-tensorflow | flink-tensorflow/src/main/scala/org/apache/flink/contrib/tensorflow/models/Model.scala | Scala | apache-2.0 | 1,324 |
/*
* Copyright (c) 2013 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless
package syntax
package std
/**
 * Conversions between ordinary functions and `HList` functions.
 *
 * The implicits defined by this object enhance ordinary functions (resp. HList functions) with a `toProduct` (resp.
 * `fromProduct`) method which creates an equivalently typed `HList` function (resp. ordinary function).
 *
 * @author Miles Sabin
 */
object function {
  import ops.function._

  /**
   * Enhances an ordinary function `t` with a `toProduct` method returning the
   * equivalent `HList`-taking function.
   *
   * The result type is stated explicitly (dependent on `fnHLister`): implicit
   * definitions should always carry explicit result types to keep implicit
   * search predictable and the public API stable.
   */
  implicit def fnHListOps[F](t : F)(implicit fnHLister : FnToProduct[F]): FnHListOps[fnHLister.Out] =
    new FnHListOps[fnHLister.Out] {
      def toProduct = fnHLister(t)
    }

  /**
   * Enhances an `HList`-taking function `t` with a `fromProduct` method
   * returning the equivalent ordinary function.
   */
  implicit def fnUnHListOps[F](t : F)(implicit fnUnHLister : FnFromProduct[F]): FnUnHListOps[fnUnHLister.Out] =
    new FnUnHListOps[fnUnHLister.Out] {
      def fromProduct = fnUnHLister(t)
    }
}
/** Syntax carrier: exposes an ordinary function as its `HList`-taking equivalent. */
trait FnHListOps[HLFn] {
  /** The `HList`-taking function equivalent to the wrapped ordinary function. */
  def toProduct : HLFn
}
/** Syntax carrier: exposes an `HList`-taking function as its ordinary equivalent. */
trait FnUnHListOps[F] {
  /** The ordinary function equivalent to the wrapped `HList`-taking function. */
  def fromProduct : F
}
| mandubian/shapeless | core/src/main/scala/shapeless/syntax/std/functions.scala | Scala | apache-2.0 | 1,422 |
package com.twitter.util.jackson
import com.fasterxml.jackson.annotation.JsonInclude
import com.fasterxml.jackson.annotation.JsonInclude.Include
import com.fasterxml.jackson.core.json.JsonWriteFeature
import com.fasterxml.jackson.core.util.DefaultIndenter
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter
import com.fasterxml.jackson.core.JsonFactory
import com.fasterxml.jackson.core.JsonFactoryBuilder
import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.core.TSFBuilder
import com.fasterxml.jackson.databind.util.ByteBufferBackedInputStream
import com.fasterxml.jackson.databind.{ObjectMapper => JacksonObjectMapper, _}
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory
import com.fasterxml.jackson.dataformat.yaml.YAMLFactoryBuilder
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.{ScalaObjectMapper => JacksonScalaObjectMapper}
import com.twitter.io.Buf
import com.twitter.util.jackson.caseclass.CaseClassJacksonModule
import com.twitter.util.jackson.serde.DefaultSerdeModule
import com.twitter.util.jackson.serde.LongKeyDeserializers
import com.twitter.util.validation.ScalaValidator
import java.io.ByteArrayOutputStream
import java.io.InputStream
import java.io.OutputStream
import java.nio.ByteBuffer
object ScalaObjectMapper {

  /** The default [[ScalaValidator]] for a [[ScalaObjectMapper]] */
  private[twitter] val DefaultValidator: ScalaValidator = ScalaValidator()

  /** The default [[JsonWriteFeature.WRITE_NUMBERS_AS_STRINGS]] setting */
  private[twitter] val DefaultNumbersAsStrings: Boolean = false

  /** Framework modules need to be added 'last' so they can override existing ser/des */
  private[twitter] val DefaultJacksonModules: Seq[Module] =
    Seq(DefaultScalaModule, new JavaTimeModule, LongKeyDeserializers, DefaultSerdeModule)

  /** The default [[PropertyNamingStrategy]] for a [[ScalaObjectMapper]] */
  private[twitter] val DefaultPropertyNamingStrategy: PropertyNamingStrategy =
    PropertyNamingStrategies.SNAKE_CASE

  /** The default [[JsonInclude.Include]] for serialization for a [[ScalaObjectMapper]] */
  private[twitter] val DefaultSerializationInclude: JsonInclude.Include =
    JsonInclude.Include.NON_ABSENT

  /** The default configuration for serialization as a `Map[SerializationFeature, Boolean]` */
  private[twitter] val DefaultSerializationConfig: Map[SerializationFeature, Boolean] =
    Map(
      SerializationFeature.WRITE_DATES_AS_TIMESTAMPS -> false,
      SerializationFeature.WRITE_ENUMS_USING_TO_STRING -> true)

  /** The default configuration for deserialization as a `Map[DeserializationFeature, Boolean]` */
  private[twitter] val DefaultDeserializationConfig: Map[DeserializationFeature, Boolean] =
    Map(
      DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES -> true,
      DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES -> false,
      DeserializationFeature.READ_ENUMS_USING_TO_STRING -> true,
      DeserializationFeature.FAIL_ON_READING_DUP_TREE_KEY -> true,
      DeserializationFeature.USE_JAVA_ARRAY_FOR_JSON_ARRAY -> true /* see jackson-module-scala/issues/148 */
    )

  /** The default for configuring additional modules on the underlying [[JacksonScalaObjectMapperType]] */
  private[twitter] val DefaultAdditionalJacksonModules: Seq[Module] = Seq.empty[Module]

  /** The default setting to enable case class validation during case class deserialization */
  private[twitter] val DefaultValidation: Boolean = true

  /**
   * Returns a new [[ScalaObjectMapper]] configured with [[Builder]] defaults.
   *
   * @return a new [[ScalaObjectMapper]] instance.
   *
   * @see [[com.fasterxml.jackson.databind.InjectableValues]]
   */
  def apply(): ScalaObjectMapper =
    ScalaObjectMapper.builder.objectMapper

  /**
   * Creates a new [[ScalaObjectMapper]] from an underlying [[JacksonScalaObjectMapperType]].
   *
   * @note this mutates the underlying mapper to configure it with the [[ScalaObjectMapper]] defaults.
   *
   * @param underlying the [[JacksonScalaObjectMapperType]] to wrap.
   *
   * @return a new [[ScalaObjectMapper]]
   */
  def apply(underlying: JacksonScalaObjectMapperType): ScalaObjectMapper = {
    new ScalaObjectMapper(
      ScalaObjectMapper.builder
        .configureJacksonScalaObjectMapper(underlying)
    )
  }

  /**
   * Utility to create a new [[ScalaObjectMapper]] which simply wraps the given
   * [[JacksonScalaObjectMapperType]].
   *
   * @note the `underlying` mapper is not mutated to produce the new [[ScalaObjectMapper]]
   */
  def objectMapper(underlying: JacksonScalaObjectMapperType): ScalaObjectMapper = {
    val objectMapperCopy = ObjectMapperCopier.copy(underlying)
    new ScalaObjectMapper(objectMapperCopy)
  }

  /**
   * Utility to create a new [[ScalaObjectMapper]] explicitly configured to serialize and deserialize
   * YAML using the given [[JacksonScalaObjectMapperType]]. The resultant mapper [[PropertyNamingStrategy]]
   * will be that configured on the `underlying` mapper.
   *
   * @note the `underlying` mapper is copied (not mutated) to produce the new [[ScalaObjectMapper]]
   *       to negotiate YAML serialization and deserialization.
   * @throws IllegalArgumentException if the underlying mapper is not backed by a `YAMLFactory`.
   */
  def yamlObjectMapper(underlying: JacksonScalaObjectMapperType): ScalaObjectMapper =
    underlying.getFactory match {
      case _: YAMLFactory => // correct
        val objectMapperCopy = ObjectMapperCopier.copy(underlying)
        new ScalaObjectMapper(objectMapperCopy)
      case _ => // incorrect
        throw new IllegalArgumentException(
          s"The underlying mapper is not properly configured with a YAMLFactory")
    }

  /**
   * Utility to create a new [[ScalaObjectMapper]] explicitly configured with
   * [[PropertyNamingStrategies.LOWER_CAMEL_CASE]] as a `PropertyNamingStrategy` wrapping the
   * given [[JacksonScalaObjectMapperType]].
   *
   * @note the `underlying` mapper is copied (not mutated) to produce the new [[ScalaObjectMapper]]
   *       with a [[PropertyNamingStrategies.LOWER_CAMEL_CASE]] PropertyNamingStrategy.
   */
  def camelCaseObjectMapper(underlying: JacksonScalaObjectMapperType): ScalaObjectMapper = {
    val objectMapperCopy = ObjectMapperCopier.copy(underlying)
    objectMapperCopy.setPropertyNamingStrategy(PropertyNamingStrategies.LOWER_CAMEL_CASE)
    new ScalaObjectMapper(objectMapperCopy)
  }

  /**
   * Utility to create a new [[ScalaObjectMapper]] explicitly configured with
   * [[PropertyNamingStrategies.SNAKE_CASE]] as a `PropertyNamingStrategy` wrapping the
   * given [[JacksonScalaObjectMapperType]].
   *
   * @note the `underlying` mapper is copied (not mutated) to produce the new [[ScalaObjectMapper]]
   *       with a [[PropertyNamingStrategies.SNAKE_CASE]] PropertyNamingStrategy.
   */
  def snakeCaseObjectMapper(underlying: JacksonScalaObjectMapperType): ScalaObjectMapper = {
    val objectMapperCopy = ObjectMapperCopier.copy(underlying)
    objectMapperCopy.setPropertyNamingStrategy(PropertyNamingStrategies.SNAKE_CASE)
    new ScalaObjectMapper(objectMapperCopy)
  }

  /**
   *
   * Build a new instance of a [[ScalaObjectMapper]].
   *
   * For example,
   * {{{
   *   ScalaObjectMapper.builder
   *     .withPropertyNamingStrategy(new PropertyNamingStrategies.UpperCamelCaseStrategy)
   *     .withNumbersAsStrings(true)
   *     .withAdditionalJacksonModules(...)
   *     .objectMapper
   * }}}
   *
   * or
   *
   * {{{
   *   val builder =
   *     ScalaObjectMapper.builder
   *       .withPropertyNamingStrategy(new PropertyNamingStrategies.UpperCamelCaseStrategy)
   *       .withNumbersAsStrings(true)
   *       .withAdditionalJacksonModules(...)
   *
   *   val mapper = builder.objectMapper
   *   val camelCaseMapper = builder.camelCaseObjectMapper
   * }}}
   *
   */
  def builder: ScalaObjectMapper.Builder = Builder()

  /**
   * A Builder for creating a new [[ScalaObjectMapper]]. E.g., to build a new instance of
   * a [[ScalaObjectMapper]].
   *
   * For example,
   * {{{
   *   ScalaObjectMapper.builder
   *     .withPropertyNamingStrategy(new PropertyNamingStrategies.UpperCamelCaseStrategy)
   *     .withNumbersAsStrings(true)
   *     .withAdditionalJacksonModules(...)
   *     .objectMapper
   * }}}
   *
   * or
   *
   * {{{
   *   val builder =
   *     ScalaObjectMapper.builder
   *       .withPropertyNamingStrategy(new PropertyNamingStrategies.UpperCamelCaseStrategy)
   *       .withNumbersAsStrings(true)
   *       .withAdditionalJacksonModules(...)
   *
   *   val mapper = builder.objectMapper
   *   val camelCaseMapper = builder.camelCaseObjectMapper
   * }}}
   */
  case class Builder private[jackson] (
    propertyNamingStrategy: PropertyNamingStrategy = DefaultPropertyNamingStrategy,
    numbersAsStrings: Boolean = DefaultNumbersAsStrings,
    serializationInclude: Include = DefaultSerializationInclude,
    serializationConfig: Map[SerializationFeature, Boolean] = DefaultSerializationConfig,
    deserializationConfig: Map[DeserializationFeature, Boolean] = DefaultDeserializationConfig,
    defaultJacksonModules: Seq[Module] = DefaultJacksonModules,
    validator: Option[ScalaValidator] = Some(DefaultValidator),
    additionalJacksonModules: Seq[Module] = DefaultAdditionalJacksonModules,
    additionalMapperConfigurationFns: Seq[JacksonObjectMapper => Unit] = Seq.empty,
    validation: Boolean = DefaultValidation) {

    /* Public */

    /** Create a new [[ScalaObjectMapper]] from this [[Builder]]. */
    final def objectMapper: ScalaObjectMapper =
      new ScalaObjectMapper(jacksonScalaObjectMapper)

    /** Create a new [[ScalaObjectMapper]] from this [[Builder]] using the given [[JsonFactory]]. */
    final def objectMapper[F <: JsonFactory](factory: F): ScalaObjectMapper =
      new ScalaObjectMapper(jacksonScalaObjectMapper(factory))

    /**
     * Create a new [[ScalaObjectMapper]] explicitly configured to serialize and deserialize
     * YAML from this [[Builder]].
     *
     * @note the used [[PropertyNamingStrategy]] is defined by the current [[Builder]] configuration.
     */
    final def yamlObjectMapper: ScalaObjectMapper =
      new ScalaObjectMapper(
        configureJacksonScalaObjectMapper(new YAMLFactoryBuilder(new YAMLFactory())))

    /**
     * Creates a new [[ScalaObjectMapper]] explicitly configured with
     * [[PropertyNamingStrategies.LOWER_CAMEL_CASE]] as a `PropertyNamingStrategy`.
     */
    final def camelCaseObjectMapper: ScalaObjectMapper =
      ScalaObjectMapper.camelCaseObjectMapper(jacksonScalaObjectMapper)

    /**
     * Creates a new [[ScalaObjectMapper]] explicitly configured with
     * [[PropertyNamingStrategies.SNAKE_CASE]] as a `PropertyNamingStrategy`.
     */
    final def snakeCaseObjectMapper: ScalaObjectMapper =
      ScalaObjectMapper.snakeCaseObjectMapper(jacksonScalaObjectMapper)

    /* Builder Methods */

    /**
     * Configure a [[PropertyNamingStrategy]] for this [[Builder]].
     * @note the default is [[PropertyNamingStrategies.SNAKE_CASE]]
     * @see [[ScalaObjectMapper.DefaultPropertyNamingStrategy]]
     */
    final def withPropertyNamingStrategy(propertyNamingStrategy: PropertyNamingStrategy): Builder =
      this.copy(propertyNamingStrategy = propertyNamingStrategy)

    /**
     * Enable the [[JsonWriteFeature.WRITE_NUMBERS_AS_STRINGS]] for this [[Builder]].
     * @note the default is false.
     */
    final def withNumbersAsStrings(numbersAsStrings: Boolean): Builder =
      this.copy(numbersAsStrings = numbersAsStrings)

    /**
     * Configure a [[JsonInclude.Include]] for serialization for this [[Builder]].
     * @note the default is [[JsonInclude.Include.NON_ABSENT]]
     * @see [[ScalaObjectMapper.DefaultSerializationInclude]]
     */
    final def withSerializationInclude(serializationInclude: Include): Builder =
      this.copy(serializationInclude = serializationInclude)

    /**
     * Set the serialization configuration for this [[Builder]] as a `Map` of `SerializationFeature`
     * to `Boolean` (enabled).
     * @note this overwrites (does not merge with) the default serialization configuration of this [[Builder]].
     * @note the default is described by [[ScalaObjectMapper.DefaultSerializationConfig]].
     * @see [[ScalaObjectMapper.DefaultSerializationConfig]]
     */
    final def withSerializationConfig(
      serializationConfig: Map[SerializationFeature, Boolean]
    ): Builder =
      this.copy(serializationConfig = serializationConfig)

    /**
     * Set the deserialization configuration for this [[Builder]] as a `Map` of `DeserializationFeature`
     * to `Boolean` (enabled).
     * @note this overwrites the default deserialization configuration of this [[Builder]].
     * @note the default is described by [[ScalaObjectMapper.DefaultDeserializationConfig]].
     * @see [[ScalaObjectMapper.DefaultDeserializationConfig]]
     */
    final def withDeserializationConfig(
      deserializationConfig: Map[DeserializationFeature, Boolean]
    ): Builder =
      this.copy(deserializationConfig = deserializationConfig)

    /**
     * Configure a [[ScalaValidator]] for this [[Builder]]
     * @see [[ScalaObjectMapper.DefaultValidator]]
     *
     * @note If you pass `withNoValidation` to the builder all case class validations will be
     *       bypassed, regardless of the `withValidator` configuration.
     */
    final def withValidator(validator: ScalaValidator): Builder =
      this.copy(validator = Some(validator))

    /**
     * Configure the list of additional Jackson [[Module]]s for this [[Builder]].
     * @note this will overwrite (not append) the list additional Jackson [[Module]]s of this [[Builder]].
     */
    final def withAdditionalJacksonModules(additionalJacksonModules: Seq[Module]): Builder =
      this.copy(additionalJacksonModules = additionalJacksonModules)

    /**
     * Configure additional [[JacksonObjectMapper]] functionality for the underlying mapper of this [[Builder]].
     * @note this appends the given function to any previously configured functions; it does not
     *       overwrite them (the functions are applied in configuration order).
     */
    final def withAdditionalMapperConfigurationFn(mapperFn: JacksonObjectMapper => Unit): Builder =
      this
        .copy(additionalMapperConfigurationFns = this.additionalMapperConfigurationFns :+ mapperFn)

    /** Method to allow changing of the default Jackson Modules for use from the `ScalaObjectMapperModule` */
    private[twitter] final def withDefaultJacksonModules(
      defaultJacksonModules: Seq[Module]
    ): Builder =
      this.copy(defaultJacksonModules = defaultJacksonModules)

    /**
     * Disable case class validation during case class deserialization
     *
     * @see [[ScalaObjectMapper.DefaultValidation]]
     * @note If you pass `withNoValidation` to the builder all case class validations will be
     *       bypassed, regardless of the `withValidator` configuration.
     */
    final def withNoValidation: Builder =
      this.copy(validation = false)

    /* Private */

    // Applies this Builder's serialization/deserialization settings to the given mapper (mutating it).
    private[this] def defaultMapperConfiguration(mapper: JacksonObjectMapper): Unit = {
      /* Serialization Config */
      mapper.setDefaultPropertyInclusion(
        JsonInclude.Value.construct(serializationInclude, serializationInclude))
      mapper
        .configOverride(classOf[Option[_]])
        .setIncludeAsProperty(JsonInclude.Value.construct(serializationInclude, Include.ALWAYS))
      for ((feature, state) <- serializationConfig) {
        mapper.configure(feature, state)
      }

      /* Deserialization Config */
      for ((feature, state) <- deserializationConfig) {
        mapper.configure(feature, state)
      }
    }

    /** Order is important: default + case class module + any additional */
    private[this] def jacksonModules: Seq[Module] = {
      this.defaultJacksonModules ++
        Seq(new CaseClassJacksonModule(if (this.validation) this.validator else None)) ++
        this.additionalJacksonModules
    }

    private[this] final def jacksonScalaObjectMapper: JacksonScalaObjectMapperType =
      configureJacksonScalaObjectMapper(new JsonFactoryBuilder)

    private[this] final def jacksonScalaObjectMapper[F <: JsonFactory](
      jsonFactory: F
    ): JacksonScalaObjectMapperType = configureJacksonScalaObjectMapper(jsonFactory)

    private[this] final def configureJacksonScalaObjectMapper[
      F <: JsonFactory,
      B <: TSFBuilder[F, B]
    ](
      builder: TSFBuilder[F, B]
    ): JacksonScalaObjectMapperType = configureJacksonScalaObjectMapper(builder.build())

    private[jackson] final def configureJacksonScalaObjectMapper(
      factory: JsonFactory
    ): JacksonScalaObjectMapperType =
      configureJacksonScalaObjectMapper(
        new JacksonObjectMapper(factory) with JacksonScalaObjectMapper)

    // Central configuration routine: mutates and returns the given mapper with all
    // Builder settings, the property naming strategy, and the module list applied.
    private[jackson] final def configureJacksonScalaObjectMapper(
      underlying: JacksonScalaObjectMapperType
    ): JacksonScalaObjectMapperType = {
      if (this.numbersAsStrings) {
        underlying.enable(JsonWriteFeature.WRITE_NUMBERS_AS_STRINGS.mappedFeature())
      }

      this.defaultMapperConfiguration(underlying)
      this.additionalMapperConfigurationFns.foreach(_(underlying))

      underlying.setPropertyNamingStrategy(this.propertyNamingStrategy)
      // Block use of a set of "unsafe" base types such as java.lang.Object
      // to prevent exploitation of Remote Code Execution (RCE) vulnerability
      // This line can be removed when this feature is enabled by default in Jackson 3
      underlying.enable(MapperFeature.BLOCK_UNSAFE_POLYMORPHIC_BASE_TYPES)
      this.jacksonModules.foreach(underlying.registerModule)
      underlying
    }
  }
}
/**
 * A [[DefaultPrettyPrinter]] that places each array element on its own line
 * (via the system linefeed indenter).
 *
 * NOTE(review): `createInstance` returns `this` rather than a fresh copy — the
 * DefaultPrettyPrinter contract normally expects a new instance per serialization;
 * presumably this singleton is considered safe to share because its configuration
 * is never mutated after construction — confirm before reusing elsewhere.
 */
private[jackson] object ArrayElementsOnNewLinesPrettyPrinter extends DefaultPrettyPrinter {
  _arrayIndenter = DefaultIndenter.SYSTEM_LINEFEED_INSTANCE
  override def createInstance(): DefaultPrettyPrinter = this
}
/**
* A thin wrapper over a [[https://github.com/FasterXML/jackson-module-scala jackson-module-scala]]
* [[com.fasterxml.jackson.module.scala.ScalaObjectMapper]]
*
* @note this API is inspired by the [[https://github.com/codahale/jerkson Jerkson]]
* [[https://github.com/codahale/jerkson/blob/master/src/main/scala/com/codahale/jerkson/Parser.scala Parser]]
*
* @param underlying a configured [[JacksonScalaObjectMapperType]]
*/
class ScalaObjectMapper(val underlying: JacksonScalaObjectMapperType) {
assert(underlying != null, "Underlying ScalaObjectMapper cannot be null.")
/**
* Constructed [[ObjectWriter]] that will serialize objects using specified pretty printer
* for indentation (or if null, no pretty printer).
*/
lazy val prettyObjectMapper: ObjectWriter =
underlying.writer(ArrayElementsOnNewLinesPrettyPrinter)
/** Returns the currently configured [[PropertyNamingStrategy]] */
def propertyNamingStrategy: PropertyNamingStrategy =
underlying.getPropertyNamingStrategy
/**
* Factory method for constructing a [[com.fasterxml.jackson.databind.ObjectReader]] that will
* read or update instances of specified type, `T`.
* @tparam T the type for which to create a [[com.fasterxml.jackson.databind.ObjectReader]]
* @return the created [[com.fasterxml.jackson.databind.ObjectReader]].
*/
def reader[T: Manifest]: ObjectReader =
underlying.readerFor[T]
/** Read a value from a [[Buf]] into a type `T`. */
def parse[T: Manifest](buf: Buf): T =
parse[T](Buf.ByteBuffer.Shared.extract(buf))
/** Read a value from a [[ByteBuffer]] into a type `T`. */
def parse[T: Manifest](byteBuffer: ByteBuffer): T = {
val is = new ByteBufferBackedInputStream(byteBuffer)
underlying.readValue[T](is)
}
/** Convert from a [[JsonNode]] into a type `T`. */
def parse[T: Manifest](jsonNode: JsonNode): T =
convert[T](jsonNode)
/** Read a value from an [[InputStream]] (caller is responsible for closing the stream) into a type `T`. */
def parse[T: Manifest](inputStream: InputStream): T =
underlying.readValue[T](inputStream)
/** Read a value from an Array[Byte] into a type `T`. */
def parse[T: Manifest](bytes: Array[Byte]): T =
underlying.readValue[T](bytes)
/** Read a value from a String into a type `T`. */
def parse[T: Manifest](string: String): T =
underlying.readValue[T](string)
/** Read a value from a [[JsonParser]] into a type `T`. */
def parse[T: Manifest](jsonParser: JsonParser): T =
underlying.readValue[T](jsonParser)
/**
* Convenience method for doing two-step conversion from given value, into an instance of given
* value type, [[JavaType]] if (but only if!) conversion is needed. If given value is already of
* requested type, the value is returned as is.
*
* This method is functionally similar to first serializing a given value into JSON, and then
* binding JSON data into a value of the given type, but should be more efficient since full
* serialization does not (need to) occur. However, the same converters (serializers,
* deserializers) will be used for data binding, meaning the same object mapper configuration
* works.
*
* Note: it is possible that in some cases behavior does differ from full
* serialize-then-deserialize cycle. It is not guaranteed, however, that the behavior is 100%
* the same -- the goal is just to allow efficient value conversions for structurally compatible
* Objects, according to standard Jackson configuration.
*
* Further note that this functionality is not designed to support "advanced" use cases, such as
* conversion of polymorphic values, or cases where Object Identity is used.
*
* @param from value from which to convert.
* @param toValueType type to be converted into.
* @return a new instance of type [[JavaType]] converted from the given [[Any]] type.
*/
def convert(from: Any, toValueType: JavaType): AnyRef = {
try {
underlying.convertValue(from, toValueType)
} catch {
case e: IllegalArgumentException if e.getCause != null =>
throw e.getCause
}
}
/**
* Convenience method for doing two-step conversion from a given value, into an instance of a given
* type, `T`. This is functionality equivalent to first serializing the given value into JSON,
* then binding JSON data into a value of the given type, but may be executed without fully
* serializing into JSON. The same converters (serializers, deserializers) will be used for
* data binding, meaning the same object mapper configuration works.
*
* Note: when a [[com.twitter.util.jackson.caseclass.exceptions.CaseClassMappingException]]
* is thrown inside of the the `ObjectMapper#convertValue` method, Jackson wraps the exception
* inside of an [[IllegalArgumentException]]. As such we unwrap to restore the original
* exception here. The wrapping occurs because the [[com.twitter.util.jackson.caseclass.exceptions.CaseClassMappingException]]
* is a sub-type of [[java.io.IOException]] (through extension of [[com.fasterxml.jackson.databind.JsonMappingException]]
* --> [[com.fasterxml.jackson.core.JsonProcessingException]] --> [[java.io.IOException]].
*
* @param any the value to be converted.
* @tparam T the type to which to be converted.
* @return a new instance of type `T` converted from the given [[Any]] type.
*
* @see [[https://github.com/FasterXML/jackson-databind/blob/d70b9e65c5e089094ec7583fa6a38b2f484a96da/src/main/java/com/fasterxml/jackson/databind/ObjectMapper.java#L2167]]
*/
def convert[T: Manifest](any: Any): T = {
try {
underlying.convertValue[T](any)
} catch {
case e: IllegalArgumentException if e.getCause != null =>
throw e.getCause
}
}
/**
* Method that can be used to serialize any value as JSON output, using the output stream
* provided (using an encoding of [[com.fasterxml.jackson.core.JsonEncoding#UTF8]].
*
* Note: this method does not close the underlying stream explicitly here; however, the
* [[com.fasterxml.jackson.core.JsonFactory]] this mapper uses may choose to close the stream
* depending on its settings (by default, it will try to close it when
* [[com.fasterxml.jackson.core.JsonGenerator]] constructed is closed).
*
* @param any the value to serialize.
* @param outputStream the [[OutputStream]] to which to serialize.
*/
def writeValue(any: Any, outputStream: OutputStream): Unit =
underlying.writeValue(outputStream, any)
/**
* Method that can be used to serialize any value as a `Array[Byte]`. Functionally equivalent
* to calling [[JacksonObjectMapper#writeValue(Writer,Object)]] with a [[java.io.ByteArrayOutputStream]] and
* getting bytes, but more efficient. Encoding used will be UTF-8.
*
* @param any the value to serialize.
* @return the `Array[Byte]` representing the serialized value.
*/
def writeValueAsBytes(any: Any): Array[Byte] =
underlying.writeValueAsBytes(any)
/**
* Method that can be used to serialize any value as a String. Functionally equivalent to calling
* [[JacksonObjectMapper#writeValue(Writer,Object)]] with a [[java.io.StringWriter]]
* and constructing String, but more efficient.
*
* @param any the value to serialize.
* @return the String representing the serialized value.
*/
def writeValueAsString(any: Any): String =
underlying.writeValueAsString(any)
/**
* Method that can be used to serialize any value as a pretty printed String. Uses the
* [[prettyObjectMapper]] and calls [[writeValueAsString(any: Any)]] on the given value.
*
* @param any the value to serialize.
* @return the pretty printed String representing the serialized value.
*
* @see [[prettyObjectMapper]]
* @see [[writeValueAsString(any: Any)]]
*/
def writePrettyString(any: Any): String = any match {
case str: String =>
val jsonNode = underlying.readValue[JsonNode](str)
prettyObjectMapper.writeValueAsString(jsonNode)
case _ =>
prettyObjectMapper.writeValueAsString(any)
}
/**
* Method that can be used to serialize any value as a [[Buf]]. Functionally equivalent
* to calling [[writeValueAsBytes(any: Any)]] and then wrapping the results in an
* "owned" [[Buf.ByteArray]].
*
* @param any the value to serialize.
* @return the [[Buf.ByteArray.Owned]] representing the serialized value.
*
* @see [[writeValueAsBytes(any: Any)]]
* @see [[Buf.ByteArray.Owned]]
*/
def writeValueAsBuf(any: Any): Buf =
Buf.ByteArray.Owned(underlying.writeValueAsBytes(any))
/**
* Convenience method for doing the multi-step process of serializing a `Map[String, String]`
* to a [[Buf]].
*
* @param stringMap the `Map[String, String]` to convert.
* @return the [[Buf.ByteArray.Owned]] representing the serialized value.
*/
// optimized
def writeStringMapAsBuf(stringMap: Map[String, String]): Buf = {
val os = new ByteArrayOutputStream()
val jsonGenerator = underlying.getFactory.createGenerator(os)
try {
jsonGenerator.writeStartObject()
for ((key, value) <- stringMap) {
jsonGenerator.writeStringField(key, value)
}
jsonGenerator.writeEndObject()
jsonGenerator.flush()
Buf.ByteArray.Owned(os.toByteArray)
} finally {
jsonGenerator.close()
}
}
  /**
   * Method for registering a module that can extend functionality provided by this mapper; for
   * example, by adding providers for custom serializers and deserializers.
   *
   * @note this mutates the [[underlying]] [[com.fasterxml.jackson.databind.ObjectMapper]] of
   *       this [[ScalaObjectMapper]].
   *
   * @note (review) `ObjectMapper#registerModule` returns the underlying Jackson mapper;
   *       the result is presumably adapted back to a `JacksonObjectMapper` by an implicit
   *       conversion in scope — TODO confirm.
   *
   * @param module [[com.fasterxml.jackson.databind.Module]] to register.
   * @return this mapper, allowing call chaining.
   */
  def registerModule(module: Module): JacksonObjectMapper =
    underlying.registerModule(module)
}
| twitter/util | util-jackson/src/main/scala/com/twitter/util/jackson/ScalaObjectMapper.scala | Scala | apache-2.0 | 27,658 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.clustering
import org.apache.spark.SparkException
import org.apache.spark.ml.linalg.{Vector, Vectors}
import org.apache.spark.ml.param.ParamMap
import org.apache.spark.ml.util.{DefaultReadWriteTest, MLTest, MLTestingUtils}
import org.apache.spark.ml.util.TestingUtils._
import org.apache.spark.mllib.clustering.DistanceMeasure
import org.apache.spark.sql.DataFrame
/**
 * Test suite for `BisectingKMeans` and `BisectingKMeansModel`: parameter
 * defaults and validation, fitting/transform behavior, the training summary,
 * ML persistence, the cosine distance measure, and instance weighting.
 */
class BisectingKMeansSuite extends MLTest with DefaultReadWriteTest {
  import testImplicits._
  final val k = 5
  @transient var dataset: DataFrame = _
  @transient var sparseDataset: DataFrame = _
  override def beforeAll(): Unit = {
    super.beforeAll()
    // Dense data: 50 rows of 3 features generated around k cluster centers.
    dataset = KMeansSuite.generateKMeansData(spark, 50, 3, k)
    // Very sparse data used to exercise the empty-cluster edge case below.
    sparseDataset = KMeansSuite.generateSparseData(spark, 10, 1000, 42)
  }
  test("default parameters") {
    val bkm = new BisectingKMeans()
    assert(bkm.getK === 4)
    assert(bkm.getFeaturesCol === "features")
    assert(bkm.getPredictionCol === "prediction")
    assert(bkm.getMaxIter === 20)
    assert(bkm.getMinDivisibleClusterSize === 1.0)
    val model = bkm.setMaxIter(1).fit(dataset)
    MLTestingUtils.checkCopyAndUids(bkm, model)
    assert(model.hasSummary)
    val copiedModel = model.copy(ParamMap.empty)
    assert(copiedModel.hasSummary)
  }
  test("SPARK-16473: Verify Bisecting K-Means does not fail in edge case where" +
    "one cluster is empty after split") {
    val bkm = new BisectingKMeans()
      .setK(k)
      .setMinDivisibleClusterSize(4)
      .setMaxIter(4)
      .setSeed(123)
    // Verify fit does not fail on very sparse data
    val model = bkm.fit(sparseDataset)
    testTransformerByGlobalCheckFunc[Tuple1[Vector]](sparseDataset.toDF(), model, "prediction") {
      rows =>
        val numClusters = rows.distinct.length
        // Verify we hit the edge case
        assert(numClusters > 1)
    }
  }
  test("setter/getter") {
    val bkm = new BisectingKMeans()
      .setK(9)
      .setMinDivisibleClusterSize(2.0)
      .setFeaturesCol("test_feature")
      .setPredictionCol("test_prediction")
      .setMaxIter(33)
      .setSeed(123)
    assert(bkm.getK === 9)
    assert(bkm.getFeaturesCol === "test_feature")
    assert(bkm.getPredictionCol === "test_prediction")
    assert(bkm.getMaxIter === 33)
    assert(bkm.getMinDivisibleClusterSize === 2.0)
    assert(bkm.getSeed === 123)
    // Invalid parameter values must be rejected at set-time.
    intercept[IllegalArgumentException] {
      new BisectingKMeans().setK(1)
    }
    intercept[IllegalArgumentException] {
      new BisectingKMeans().setMinDivisibleClusterSize(0)
    }
  }
  test("fit, transform and summary") {
    val predictionColName = "bisecting_kmeans_prediction"
    val bkm = new BisectingKMeans().setK(k).setPredictionCol(predictionColName).setSeed(1)
    val model = bkm.fit(dataset)
    assert(model.clusterCenters.length === k)
    assert(model.summary.trainingCost < 0.1)
    assert(model.hasParent)
    testTransformerByGlobalCheckFunc[Tuple1[Vector]](dataset.toDF(), model,
      "features", predictionColName) { rows =>
      val clusters = rows.map(_.getAs[Int](predictionColName)).toSet
      assert(clusters.size === k)
      assert(clusters === Set(0, 1, 2, 3, 4))
    }
    // Check validity of model summary
    val numRows = dataset.count()
    assert(model.hasSummary)
    val summary: BisectingKMeansSummary = model.summary
    assert(summary.predictionCol === predictionColName)
    assert(summary.featuresCol === "features")
    assert(summary.predictions.count() === numRows)
    for (c <- Array(predictionColName, "features")) {
      assert(summary.predictions.columns.contains(c))
    }
    assert(summary.cluster.columns === Array(predictionColName))
    val clusterSizes = summary.clusterSizes
    assert(clusterSizes.length === k)
    assert(clusterSizes.sum === numRows)
    assert(clusterSizes.forall(_ >= 0))
    assert(summary.numIter == 20)
    assert(summary.trainingCost < 0.1)
    assert(model.summary.trainingCost == summary.trainingCost)
    // Once the summary is cleared it must no longer be reported as present.
    model.setSummary(None)
    assert(!model.hasSummary)
  }
  test("read/write") {
    def checkModelData(model: BisectingKMeansModel, model2: BisectingKMeansModel): Unit = {
      assert(model.clusterCenters === model2.clusterCenters)
    }
    val bisectingKMeans = new BisectingKMeans()
    testEstimatorAndModelReadWrite(bisectingKMeans, dataset, BisectingKMeansSuite.allParamSettings,
      BisectingKMeansSuite.allParamSettings, checkModelData)
  }
  test("BisectingKMeans with cosine distance is not supported for 0-length vectors") {
    val model = new BisectingKMeans().setK(2).setDistanceMeasure(DistanceMeasure.COSINE).setSeed(1)
    val df = spark.createDataFrame(spark.sparkContext.parallelize(Seq(
      Vectors.dense(0.0, 0.0),
      Vectors.dense(10.0, 10.0),
      Vectors.dense(1.0, 0.5)
    )).map(v => TestRow(v)))
    val e = intercept[SparkException](model.fit(df))
    assert(e.getCause.isInstanceOf[AssertionError])
    assert(e.getCause.getMessage.contains("Cosine distance is not defined"))
  }
  test("BisectingKMeans with cosine distance") {
    val df = spark.createDataFrame(spark.sparkContext.parallelize(Seq(
      Vectors.dense(1.0, 1.0),
      Vectors.dense(10.0, 10.0),
      Vectors.dense(1.0, 0.5),
      Vectors.dense(10.0, 4.4),
      Vectors.dense(-1.0, 1.0),
      Vectors.dense(-100.0, 90.0)
    )).map(v => TestRow(v)))
    val model = new BisectingKMeans()
      .setK(3)
      .setDistanceMeasure(DistanceMeasure.COSINE)
      .setSeed(1)
      .fit(df)
    val predictionDf = model.transform(df)
    checkNominalOnDF(predictionDf, "prediction", model.getK)
    assert(predictionDf.select("prediction").distinct().count() == 3)
    // Vectors pointing in the same direction must land in the same cluster.
    val predictionsMap = predictionDf.collect().map(row =>
      row.getAs[Vector]("features") -> row.getAs[Int]("prediction")).toMap
    assert(predictionsMap(Vectors.dense(1.0, 1.0)) ==
      predictionsMap(Vectors.dense(10.0, 10.0)))
    assert(predictionsMap(Vectors.dense(1.0, 0.5)) ==
      predictionsMap(Vectors.dense(10.0, 4.4)))
    assert(predictionsMap(Vectors.dense(-1.0, 1.0)) ==
      predictionsMap(Vectors.dense(-100.0, 90.0)))
    assert(model.clusterCenters.forall(Vectors.norm(_, 2) ~== 1.0 absTol 1e-6))
  }
  test("Comparing with and without weightCol with cosine distance") {
    val df1 = spark.createDataFrame(spark.sparkContext.parallelize(Seq(
      Vectors.dense(1.0, 1.0),
      Vectors.dense(10.0, 10.0),
      Vectors.dense(1.0, 0.5),
      Vectors.dense(10.0, 4.4),
      Vectors.dense(-1.0, 1.0),
      Vectors.dense(-100.0, 90.0)
    )).map(v => TestRow(v)))
    val model1 = new BisectingKMeans()
      .setK(3)
      .setDistanceMeasure(DistanceMeasure.COSINE)
      .setSeed(1)
      .fit(df1)
    val predictionDf1 = model1.transform(df1)
    checkNominalOnDF(predictionDf1, "prediction", model1.getK)
    assert(predictionDf1.select("prediction").distinct().count() == 3)
    val predictionsMap1 = predictionDf1.collect().map(row =>
      row.getAs[Vector]("features") -> row.getAs[Int]("prediction")).toMap
    assert(predictionsMap1(Vectors.dense(1.0, 1.0)) ==
      predictionsMap1(Vectors.dense(10.0, 10.0)))
    assert(predictionsMap1(Vectors.dense(1.0, 0.5)) ==
      predictionsMap1(Vectors.dense(10.0, 4.4)))
    assert(predictionsMap1(Vectors.dense(-1.0, 1.0)) ==
      predictionsMap1(Vectors.dense(-100.0, 90.0)))
    assert(model1.clusterCenters.forall(Vectors.norm(_, 2) ~== 1.0 absTol 1e-6))
    // Same data with every row given the same weight: results must match unweighted.
    val df2 = spark.createDataFrame(spark.sparkContext.parallelize(Seq(
      (Vectors.dense(1.0, 1.0), 2.0), (Vectors.dense(10.0, 10.0), 2.0),
      (Vectors.dense(1.0, 0.5), 2.0), (Vectors.dense(10.0, 4.4), 2.0),
      (Vectors.dense(-1.0, 1.0), 2.0), (Vectors.dense(-100.0, 90.0), 2.0))))
      .toDF("features", "weightCol")
    val model2 = new BisectingKMeans()
      .setK(3)
      .setDistanceMeasure(DistanceMeasure.COSINE)
      .setSeed(1)
      .setWeightCol("weightCol")
      .fit(df2)
    val predictionDf2 = model2.transform(df2)
    checkNominalOnDF(predictionDf2, "prediction", model2.getK)
    assert(predictionDf2.select("prediction").distinct().count() == 3)
    val predictionsMap2 = predictionDf2.collect().map(row =>
      row.getAs[Vector]("features") -> row.getAs[Int]("prediction")).toMap
    assert(predictionsMap2(Vectors.dense(1.0, 1.0)) ==
      predictionsMap2(Vectors.dense(10.0, 10.0)))
    assert(predictionsMap2(Vectors.dense(1.0, 0.5)) ==
      predictionsMap2(Vectors.dense(10.0, 4.4)))
    assert(predictionsMap2(Vectors.dense(-1.0, 1.0)) ==
      predictionsMap2(Vectors.dense(-100.0, 90.0)))
    assert(model2.clusterCenters.forall(Vectors.norm(_, 2) ~== 1.0 absTol 1e-6))
    assert(model1.clusterCenters === model2.clusterCenters)
  }
  test("Comparing with and without weightCol") {
    // Unweighted data with duplicated rows standing in for integer weights.
    val df1 = spark.createDataFrame(spark.sparkContext.parallelize(Seq(
      Vectors.dense(1.0, 1.0),
      Vectors.dense(10.0, 10.0),
      Vectors.dense(10.0, 10.0),
      Vectors.dense(1.0, 0.5),
      Vectors.dense(1.0, 0.5),
      Vectors.dense(10.0, 4.4),
      Vectors.dense(10.0, 4.4),
      Vectors.dense(10.0, 4.4),
      Vectors.dense(-1.0, 1.0),
      Vectors.dense(-1.0, 1.0),
      Vectors.dense(-1.0, 1.0),
      Vectors.dense(-100.0, 90.0),
      Vectors.dense(-100.0, 90.0),
      Vectors.dense(-100.0, 90.0),
      Vectors.dense(-100.0, 90.0)
    )).map(v => TestRow(v)))
    val model1 = new BisectingKMeans()
      .setK(3)
      .setSeed(1)
      .fit(df1)
    val predictionDf1 = model1.transform(df1)
    checkNominalOnDF(predictionDf1, "prediction", model1.getK)
    assert(predictionDf1.select("prediction").distinct().count() == 3)
    val predictionsMap1 = predictionDf1.collect().map(row =>
      row.getAs[Vector]("features") -> row.getAs[Int]("prediction")).toMap
    assert(predictionsMap1(Vectors.dense(1.0, 1.0)) ==
      predictionsMap1(Vectors.dense(1.0, 0.5)))
    assert(predictionsMap1(Vectors.dense(1.0, 1.0)) ==
      predictionsMap1(Vectors.dense(-1.0, 1.0)))
    assert(predictionsMap1(Vectors.dense(10.0, 10.0)) ==
      predictionsMap1(Vectors.dense(10.0, 4.4)))
    // The same distribution expressed with weightCol instead of duplicated rows.
    val df2 = spark.createDataFrame(spark.sparkContext.parallelize(Seq(
      (Vectors.dense(1.0, 1.0), 1.0), (Vectors.dense(10.0, 10.0), 2.0),
      (Vectors.dense(1.0, 0.5), 2.0), (Vectors.dense(10.0, 4.4), 3.0),
      (Vectors.dense(-1.0, 1.0), 3.0), (Vectors.dense(-100.0, 90.0), 4.0))))
      .toDF("features", "weightCol")
    val model2 = new BisectingKMeans()
      .setK(3)
      .setSeed(1)
      .setWeightCol("weightCol")
      .fit(df2)
    val predictionDf2 = model2.transform(df2)
    checkNominalOnDF(predictionDf2, "prediction", model2.getK)
    assert(predictionDf2.select("prediction").distinct().count() == 3)
    val predictionsMap2 = predictionDf2.collect().map(row =>
      row.getAs[Vector]("features") -> row.getAs[Int]("prediction")).toMap
    assert(predictionsMap2(Vectors.dense(1.0, 1.0)) ==
      predictionsMap2(Vectors.dense(1.0, 0.5)))
    assert(predictionsMap2(Vectors.dense(1.0, 1.0)) ==
      predictionsMap2(Vectors.dense(-1.0, 1.0)))
    assert(predictionsMap2(Vectors.dense(10.0, 10.0)) ==
      predictionsMap2(Vectors.dense(10.0, 4.4)))
    assert(model1.clusterCenters(0) === model2.clusterCenters(0))
    assert(model1.clusterCenters(1) === model2.clusterCenters(1))
    assert(model1.clusterCenters(2) ~== model2.clusterCenters(2) absTol 1e-6)
  }
  test("BisectingKMeans with Array input") {
    def trainAndComputeCost(dataset: DataFrame): Double = {
      val model = new BisectingKMeans().setK(k).setMaxIter(1).setSeed(1).fit(dataset)
      model.summary.trainingCost
    }
    val (newDataset, newDatasetD, newDatasetF) = MLTestingUtils.generateArrayFeatureDataset(dataset)
    val trueCost = trainAndComputeCost(newDataset)
    val doubleArrayCost = trainAndComputeCost(newDatasetD)
    val floatArrayCost = trainAndComputeCost(newDatasetF)
    // checking the cost is fine enough as a sanity check
    assert(trueCost ~== doubleArrayCost absTol 1e-6)
    assert(trueCost ~== floatArrayCost absTol 1e-6)
  }
  test("prediction on single instance") {
    val bikm = new BisectingKMeans().setSeed(123L)
    val model = bikm.fit(dataset)
    testClusteringModelSinglePrediction(model, model.predict, dataset,
      model.getFeaturesCol, model.getPredictionCol)
  }
}
object BisectingKMeansSuite {
  // Non-default settings for every user-settable param; used by the
  // read/write test to verify params survive a persistence round trip.
  val allParamSettings: Map[String, Any] = Map(
    "k" -> 3,
    "maxIter" -> 2,
    "seed" -> -1L,
    "minDivisibleClusterSize" -> 2.0
  )
}
| shaneknapp/spark | mllib/src/test/scala/org/apache/spark/ml/clustering/BisectingKMeansSuite.scala | Scala | apache-2.0 | 13,244 |
package br.unb.cic.poo.gol
/** Originator half of the Memento pattern: it snapshots the current game state
  * into a [[Memento]]; restoration is handled by the CareTaker.
  */
object Originator {
  /** Create a memento capturing the current generation together with the
    * revived/killed cell counters.
    *
    * @param currentGen   the board state at the moment of the snapshot
    * @param revivedCells number of cells revived so far
    * @param killedCells  number of cells killed so far
    * @return a fresh [[Memento]] holding the captured state
    */
  def createMemento(currentGen: Array[Array[Cell]], revivedCells: Int, killedCells: Int): Memento = {
    val generation = new Memento
    generation.setGeneration(currentGen, revivedCells, killedCells)
    generation // last expression is the result; no explicit `return` needed
  }
}
}
| PeterTowers/TP1-022017 | GoLScala/GoLScala_UN/src/br/unb/cic/poo/gol/Originator.scala | Scala | mit | 431 |
package roc
package postgresql
package transport
import java.nio.ByteOrder
import java.nio.charset.Charset
import org.jboss.netty.buffer.{ChannelBuffer, ChannelBuffers}
/** Companion factories and constants for [[Buffer]]. */
object Buffer {
  // Sentinel length that marks a SQL NULL in a length coded binary.
  val NullLength = -1
  val EmptyString = new String
  /**
   * Calculates the size required to store a length
   * according to the MySQL protocol for length coded
   * binary.
   */
  def sizeOfLen(l: Long) =
    if (l < 251) 1
    else if (l < 65536) 3
    else if (l < 16777216) 4
    else 9
  /** Wrap a byte array in a big-endian [[Buffer]]. */
  def apply(bytes: Array[Byte]): Buffer = {
    val wrapped = ChannelBuffers.wrappedBuffer(bytes)
    new Buffer { val underlying = wrapped }
  }
  /** Wrap an existing [[ChannelBuffer]]; it must be non-null and big-endian. */
  def fromChannelBuffer(cb: ChannelBuffer): Buffer = {
    require(cb != null)
    require(cb.order == ByteOrder.BIG_ENDIAN, "Invalid ChannelBuffer ByteOrder")
    new Buffer { val underlying = cb }
  }
}
/** Thin wrapper around a Netty3 `ChannelBuffer`; sealed so the only
  * implementations are the anonymous ones built by the companion object
  * and the reader/writer classes in this file. */
sealed trait Buffer {
  // Backing Netty buffer; readers and writers operate directly on it.
  val underlying: ChannelBuffer
  def capacity: Int = underlying.capacity
}
trait BufferReader extends Buffer {
  /** Current reader offset in the buffer. */
  def offset: Int
  /**
   * Denotes if the buffer is readable upto the given width
   * based on the current offset.
   */
  def readable(width: Int): Boolean
  // Fixed-width primitive reads; each advances the offset by the width read.
  def readByte: Byte
  def readUnsignedByte: Short
  def readShort: Short
  def readUnsignedShort: Int
  def readInt24: Int
  def readUnsignedInt24: Int
  def readInt: Int
  def readUnsignedInt: Long
  def readLong: Long
  def readFloat: Float
  def readDouble: Double
  /**
   * Increases offset by n.
   */
  def skip(n: Int): Unit
  /**
   * Consumes the rest of the buffer and returns
   * it in a new Array[Byte].
   * @return Array[Byte] containing the rest of the buffer.
   */
  def takeRest(): Array[Byte] = take(capacity - offset)
  /**
   * Consumes n bytes in the buffer and
   * returns them in a new Array.
   * @return An Array[Byte] containing bytes from offset to offset+n
   */
  def take(n: Int): Array[Byte]
  /**
   * Reads a MySQL data field. A variable-length numeric value.
   * Depending on the first byte, reads a different width from
   * the buffer. For more info, refer to MySQL Client/Server protocol
   * documentation.
   * @return a numeric value representing the number of
   * bytes expected to follow.
   */
  def readLengthCodedBinary: Long = readUnsignedByte match {
    case byte if byte < 251 => byte
    case byte if byte == 251 => Buffer.NullLength // SQL NULL marker
    case byte if byte == 252 => readUnsignedShort
    case byte if byte == 253 => readUnsignedInt24
    case byte if byte == 254 => readLong
    case _ => throw new IllegalStateException("Invalid length byte")
  }
  /**
   * Reads a null-terminated string where
   * null is denoted by '\\0'. Uses Charset.defaultCharset by default
   * to decode strings.
   * @return a null-terminated String starting at offset.
   */
  def readNullTerminatedString(charset: Charset = Charset.defaultCharset): String = {
    val start = offset
    var length = 0
    // Consume bytes (including the terminator) while counting the payload length.
    while (readByte != 0x00)
      length += 1
    this.toString(start, length, charset)
  }
  /**
   * Reads a length encoded string according to the MySQL
   * Client/Server protocol. Uses Charset.defaultCharset by default
   * to decode strings. For more details refer to MySQL
   * documentation.
   * @return a MySQL length coded String starting at
   * offset.
   */
  def readLengthCodedString(charset: Charset = Charset.defaultCharset): String = {
    val length = readLengthCodedBinary.toInt
    if (length == Buffer.NullLength)
      null
    else if (length == 0)
      Buffer.EmptyString
    else {
      val start = offset
      skip(length)
      this.toString(start, length, charset)
    }
  }
  /**
   * Returns the bytes from start to start+length
   * into a string using the given java.nio.charset.Charset.
   */
  def toString(start: Int, length: Int, charset: Charset): String
}
object BufferReader {
  /** Create a reader over `buf` positioned at `offset` (must be >= 0). */
  def apply(buf: Buffer, offset: Int = 0): BufferReader = {
    require(offset >= 0, "Invalid reader offset")
    buf.underlying.readerIndex(offset)
    new Netty3BufferReader(buf.underlying)
  }
  /** Create a reader over a fresh buffer wrapping `bytes`, starting at 0. */
  def apply(bytes: Array[Byte]): BufferReader =
    apply(Buffer(bytes), 0)
  /**
   * BufferReader implementation backed by a Netty3 ChannelBuffer.
   */
  private[this] final class Netty3BufferReader(val underlying: ChannelBuffer)
    extends BufferReader with Buffer {
    def offset: Int = underlying.readerIndex
    def readable(width: Int) = underlying.readableBytes >= width
    def readByte: Byte = underlying.readByte()
    def readUnsignedByte: Short = underlying.readUnsignedByte()
    def readShort: Short = underlying.readShort()
    def readUnsignedShort: Int = underlying.readUnsignedShort()
    def readInt24: Int = underlying.readMedium()
    def readUnsignedInt24: Int = underlying.readUnsignedMedium()
    def readInt: Int = underlying.readInt()
    def readUnsignedInt: Long = underlying.readUnsignedInt()
    def readLong: Long = underlying.readLong()
    def readFloat: Float = underlying.readFloat()
    def readDouble: Double = underlying.readDouble()
    def skip(n: Int) = underlying.skipBytes(n)
    def take(n: Int) = {
      val res = new Array[Byte](n)
      underlying.readBytes(res)
      res
    }
    def toString(start: Int, length: Int, charset: Charset) =
      underlying.toString(start, length, charset)
  }
}
/**
 * Provides convenient methods for writing the
 * data in a postgresql packet body. All data is encoded
 * in big endian byte order in accordance with
 * the mysql protocol. Operations are side-effecting,
 * that is, all operations increase the offset
 * into the underlying buffer.
 */
trait BufferWriter extends Buffer {
  /**
   * Current writer offset.
   */
  def offset: Int
  /**
   * Denotes if the buffer is writable upto the given width
   * based on the current offset.
   */
  def writable(width: Int): Boolean
  // Fixed-width primitive writes; each advances the offset by the width written.
  def writeBoolean(b: Boolean): BufferWriter
  def writeByte(n: Int): BufferWriter
  def writeShort(n: Int): BufferWriter
  def writeInt24(n: Int): BufferWriter
  def writeInt(n: Int): BufferWriter
  def writeLong(n: Long): BufferWriter
  def writeFloat(f: Float): BufferWriter
  def writeDouble(d: Double): BufferWriter
  def writeNull: BufferWriter
  def skip(n: Int): BufferWriter
  def toBytes: Array[Byte]
  /**
   * Fills the rest of the buffer with the given byte.
   * @param b Byte used to fill.
   */
  def fillRest(b: Byte) = fill(capacity - offset, b)
  /**
   * Fills the buffer from current offset to offset+n with b.
   * @param n width to fill
   * @param b Byte used to fill.
   */
  def fill(n: Int, b: Byte) = {
    (offset until offset + n) foreach { j => writeByte(b) }
    this
  }
  /**
   * Writes bytes onto the buffer.
   * @param bytes Array[Byte] to copy onto the buffer.
   */
  def writeBytes(bytes: Array[Byte]): BufferWriter
  /**
   * Writes a length coded binary according the the MySQL
   * Client/Server protocol. Refer to MySQL documentation for
   * more information.
   */
  def writeLengthCodedBinary(length: Long): BufferWriter = {
    if (length < 251) {
      writeByte(length.toInt)
    } else if (length < 65536) {
      writeByte(252)
      writeShort(length.toInt)
    } else if (length < 16777216) {
      writeByte(253)
      writeInt24(length.toInt)
    } else {
      writeByte(254)
      writeLong(length)
    }
  }
  /**
   * Writes a null terminated string onto the buffer where
   * '\\0' denotes null. Uses Charset.defaultCharset by default
   * to encode the given String.
   * @param s String to write.
   */
  def writeNullTerminatedString(
    s: String,
    charset: Charset = Charset.defaultCharset
  ): BufferWriter = {
    writeBytes(s.getBytes(charset))
    writeByte('\\u0000')
    this
  }
  /**
   * Writes a length coded string using the MySQL Client/Server
   * protocol. Uses Charset.defaultCharset by default to encode
   * the given String.
   * @param s String to write to buffer.
   */
  def writeLengthCodedString(s: String,
    charset: Charset = Charset.defaultCharset
  ): BufferWriter = writeLengthCodedBytes(s.getBytes(charset))
  /**
   * Writes a length coded set of bytes according to the MySQL
   * client/server protocol.
   */
  def writeLengthCodedBytes(bytes: Array[Byte]): BufferWriter = {
    writeLengthCodedBinary(bytes.length)
    writeBytes(bytes)
    this
  }
}
private[roc] object BufferWriter {
  /** Create a writer over `buf` positioned at `offset`.
   *
   * @throws IllegalArgumentException if `offset` is negative.
   */
  def apply(buf: Buffer, offset: Int = 0): BufferWriter = {
    // Fixed typo in the error message ("Inavlid") so it matches BufferReader's wording.
    require(offset >= 0, "Invalid writer offset.")
    buf.underlying.writerIndex(offset)
    new Netty3BufferWriter(buf.underlying)
  }
  /** Create a writer over a fresh buffer wrapping `bytes`, starting at 0. */
  def apply(bytes: Array[Byte]): BufferWriter =
    apply(Buffer(bytes), 0)
  /**
   * BufferWriter implementation backed by a Netty ChannelBuffer.
   */
  private[this] class Netty3BufferWriter(val underlying: ChannelBuffer)
    extends BufferWriter with Buffer {
    def offset = underlying.writerIndex
    def writable(width: Int = 1): Boolean = underlying.writableBytes >= width
    def writeBoolean(b: Boolean): BufferWriter = if(b) writeByte(1) else writeByte(0)
    def writeByte(n: Int): BufferWriter = {
      underlying.writeByte(n)
      this
    }
    def writeShort(n: Int): BufferWriter = {
      underlying.writeShort(n)
      this
    }
    def writeInt24(n: Int): BufferWriter = {
      underlying.writeMedium(n)
      this
    }
    def writeInt(n: Int): BufferWriter = {
      underlying.writeInt(n)
      this
    }
    def writeLong(n: Long): BufferWriter = {
      underlying.writeLong(n)
      this
    }
    def writeFloat(f: Float): BufferWriter = {
      underlying.writeFloat(f)
      this
    }
    def writeDouble(d: Double): BufferWriter = {
      underlying.writeDouble(d)
      this
    }
    def skip(n: Int) = {
      underlying.writerIndex(offset + n)
      this
    }
    def writeBytes(bytes: Array[Byte]) = {
      underlying.writeBytes(bytes)
      this
    }
    def writeNull: BufferWriter = {
      underlying.writeZero(1)
      this
    }
    // Snapshot of everything written so far, from index 0 to the writer index.
    def toBytes: Array[Byte] = {
      val bytes = new Array[Byte](underlying.writerIndex)
      underlying.getBytes(0, bytes)
      bytes
    }
  }
}
| penland365/roc | core/src/main/scala/roc/postgresql/transport/Buffer.scala | Scala | bsd-3-clause | 10,253 |
import sbt._
// sbt 0.7-style plugin definition: declares the repository and artifact for the sbt-idea plugin.
class Plugins(info: ProjectInfo) extends PluginDefinition(info) {
  // Maven repository hosting the sbt-idea plugin artifacts.
  val sbtIdeaRepo = "sbt-idea-repo" at "http://mpeltonen.github.com/maven/"
  val sbtIdea = "com.github.mpeltonen" % "sbt-idea-plugin" % "0.4.0"
}
// vim: set ts=4 sw=4 et:
| masayukig/sb | project/plugins/Plugins.scala | Scala | apache-2.0 | 263 |
package org.jetbrains.plugins.scala
package codeInsight
package intention
package controlFlow
import com.intellij.testFramework.EditorTestUtil
/**
* @author Ksenia.Sautina
* @since 6/6/12
*/
class InvertIfConditionIntentionTest extends intentions.ScalaIntentionTestBase {
  import EditorTestUtil.{CARET_TAG => CARET}
  // Each fixture below marks the caret position with ${CARET}; `doTest` applies
  // the "invert if condition" intention at the caret and compares with resultText.
  override def familyName = ScalaCodeInsightBundle.message("family.name.invert.if.condition")
  def testInvertIf1(): Unit = {
    val text =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    ${CARET}if (a) b = false
         |  }
         |}""".stripMargin
    val resultText =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    ${CARET}if (!a) {
         |
         |    } else {
         |      b = false
         |    }
         |  }
         |}""".stripMargin
    doTest(text, resultText)
  }
  def testInvertIf2(): Unit = {
    val text =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    i${CARET}f (a) {
         |      b = false
         |    }
         |    System.out.println()
         |  }
         |}""".stripMargin
    val resultText =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    i${CARET}f (!a) {
         |
         |    } else {
         |      b = false
         |    }
         |    System.out.println()
         |  }
         |}""".stripMargin
    doTest(text, resultText)
  }
  def testInvertIf3(): Unit = {
    val text =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    i${CARET}f (a == b) {
         |      val c = false
         |    }
         |    println()
         |  }
         |}""".stripMargin
    val resultText =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    i${CARET}f (a != b) {
         |
         |    } else {
         |      val c = false
         |    }
         |    println()
         |  }
         |}""".stripMargin
    doTest(text, resultText)
  }
  def testInvertIf4(): Unit = {
    val text =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    i${CARET}f (!a) b = false
         |  }
         |}""".stripMargin
    val resultText =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    i${CARET}f (a) {
         |
         |    } else {
         |      b = false
         |    }
         |  }
         |}""".stripMargin
    doTest(text, resultText)
  }
  def testInvertIf5(): Unit = {
    val text =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    i${CARET}f (true) b = false
         |  }
         |}""".stripMargin
    val resultText =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    i${CARET}f (false) {
         |
         |    } else {
         |      b = false
         |    }
         |  }
         |}""".stripMargin
    doTest(text, resultText)
  }
  def testInvertIf6(): Unit = {
    val text =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    i${CARET}f (!(a == true)) b = false
         |  }
         |}""".stripMargin
    val resultText =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    i${CARET}f (a == true) {
         |
         |    } else {
         |      b = false
         |    }
         |  }
         |}""".stripMargin
    doTest(text, resultText)
  }
  def testInvertIf7(): Unit = {
    val text =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    if$CARET (false) {
         |
         |    } else {
         |      System.out.print("else")
         |    }
         |  }
         |}""".stripMargin
    val resultText =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    if$CARET (true) {
         |      System.out.print("else")
         |    } else {
         |
         |    }
         |  }
         |}""".stripMargin
    doTest(text, resultText)
  }
  def testInvertIf8(): Unit = {
    val text =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    i${CARET}f (false) {
         |      System.out.print("if")
         |    } else {
         |      System.out.print("else")
         |    }
         |  }
         |}""".stripMargin
    val resultText =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    i${CARET}f (true) {
         |      System.out.print("else")
         |    } else {
         |      System.out.print("if")
         |    }
         |  }
         |}""".stripMargin
    doTest(text, resultText)
  }
  def testInvertIf_NoBraces(): Unit = {
    val text =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    i${CARET}f (false)
         |      System.out.print("if")
         |    else
         |      System.out.print("else")
         |  }
         |}""".stripMargin
    val resultText =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    i${CARET}f (true) {
         |      System.out.print("else")
         |    } else {
         |      System.out.print("if")
         |    }
         |  }
         |}""".stripMargin
    doTest(text, resultText)
  }
  def testInvertIf_NoBraces_SameLine(): Unit = {
    val text =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    i${CARET}f (false) System.out.print("if")
         |    else System.out.print("else")
         |  }
         |}""".stripMargin
    val resultText =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    i${CARET}f (true) {
         |      System.out.print("else")
         |    } else {
         |      System.out.print("if")
         |    }
         |  }
         |}""".stripMargin
    doTest(text, resultText)
  }
  def testInvertIf_NoIfBraces(): Unit = {
    val text =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    i${CARET}f (false) System.out.print("if")
         |    else {
         |      System.out.print("else1")
         |      System.out.print("else2")
         |    }
         |  }
         |}""".stripMargin
    val resultText =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    i${CARET}f (true) {
         |      System.out.print("else1")
         |      System.out.print("else2")
         |    } else {
         |      System.out.print("if")
         |    }
         |  }
         |}""".stripMargin
    doTest(text, resultText)
  }
  def testInvertIf_NoElseBraces(): Unit = {
    val text =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    i${CARET}f (false) {
         |      System.out.print("if1")
         |      System.out.print("if2")
         |    }
         |    else
         |      System.out.print("else")
         |  }
         |}""".stripMargin
    val resultText =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    i${CARET}f (true) {
         |      System.out.print("else")
         |    } else {
         |      System.out.print("if1")
         |      System.out.print("if2")
         |    }
         |  }
         |}""".stripMargin
    doTest(text, resultText)
  }
  def testInvertIf_CaretAtElse(): Unit = {
    val text =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    if (false) {
         |      System.out.print("if1")
         |      System.out.print("if2")
         |    } ${CARET}else {
         |      System.out.print("else")
         |    }
         |  }
         |}""".stripMargin
    val resultText =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    if (true) {
         |      System.out.print("else")
         |    }$CARET else {
         |      System.out.print("if1")
         |      System.out.print("if2")
         |    }
         |  }
         |}""".stripMargin
    doTest(text, resultText)
  }
  def testInvertIf_CaretInsideElse(): Unit = {
    val text =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    if (false) {
         |      System.out.print("if1")
         |      System.out.print("if2")
         |    } el${CARET}se {
         |      System.out.print("else")
         |    }
         |  }
         |}""".stripMargin
    val resultText =
      s"""
         |class X {
         |  def f(a: Boolean, b: Boolean) {
         |    if (true) {
         |      System.out.print("else")
         |    }$CARET else {
         |      System.out.print("if1")
         |      System.out.print("if2")
         |    }
         |  }
         |}""".stripMargin
    doTest(text, resultText)
  }
}
| JetBrains/intellij-scala | scala/codeInsight/test/org/jetbrains/plugins/scala/codeInsight/intention/controlFlow/InvertIfConditionIntentionTest.scala | Scala | apache-2.0 | 8,919 |
import scala.language.experimental.macros
object Test extends App {
  // Macro reference without an explicit WeakTypeTag for U: exercises that the
  // type-tag parameter of the macro implementation is optional.
  def foo[U]: Unit = macro Impls.foo[U]
  foo[Int]
}
| scala/scala | test/files/run/macro-impl-tparam-typetag-is-optional/Macros_Test_2.scala | Scala | apache-2.0 | 121 |
/** A package whose purpose is used to
* 1. Compare several trivial Fibonacci sequence implementations.
* 2. Practice writing package level code off the top of my head.
* 3. Reacquaint myself with Scala after doing Haskell for a while.
* 4. Learn how to do generate nice docs from docstrings.
*/
package fibcompare
/** An imperative implementation.
  *
  * @note Use case would be if you just needed one value very fast.
  * @note Use fibPair directly if you desire to bootstrap a
  *       subsequent calculation.
  */
object FibImperitive {
  /** Compute the nth Fibonacci number afresh each time in a tight loop */
  def fibN(n: Int, n0: BigInt = 0, n1: BigInt = 1 ): BigInt =
    fibPair(n, n0, n1)._1
  /** Return a pair with the nth and (n+1)th Fibonacci numbers. */
  def fibPair(n: Int, n0: BigInt = 0, n1: BigInt = 1 ): (BigInt, BigInt) = {
    var prev: BigInt = n0
    var curr: BigInt = n1
    var step = 0
    while (step < n) {
      val next = prev + curr
      prev = curr
      curr = next
      step += 1
    }
    (prev, curr)
  }
}
/** Hold onto a LazyList of BigInt Fibonacci numbers.
  * @note Use case is to cache intermediate results in an object.
  */
case class FibCache(f0: BigInt = 0, f1: BigInt = 1) {
  // Self-referential lazy definition: every element past the two seeds is the
  // sum of the two elements preceding it; evaluated elements are memoized.
  val cachedFibs: LazyList[BigInt] =
    f0 #:: f1 #:: cachedFibs.zip(cachedFibs.tail).map(p => p._1 + p._2)
  /** The nth cached Fibonacci number. */
  def apply(n: Int): BigInt = cachedFibs(n)
}
/** Namespace for a method which returns a stream of Fibonacci BigInts.
  *
  * @note Clients are simply handed the cached LazyList and use the
  *       LazyList API directly.
  * @note `fibs` shares the sequence with the canonical seeds 0, 1.
  */
object FibLazyList {
  /** Recursively, and lazily, build a stream of Fibonacci numbers. */
  def fibLazyList(a: BigInt = 0, b: BigInt = 1): LazyList[BigInt] =
    LazyList.cons(a, fibLazyList(b, a + b))
  lazy val fibs: LazyList[BigInt] = fibLazyList()
}
| grscheller/scheller-linux-archive | grok/Scala2/learnScala/fib/fibcompare.scala | Scala | bsd-3-clause | 1,853 |
package edu.uw.at.iroberts.wirefugue.pcap
import java.nio.ByteOrder
import java.nio.ByteOrder.{BIG_ENDIAN, LITTLE_ENDIAN}
import scala.collection.generic.CanBuildFrom
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
/** Operations to extract from any IndexedSeq[Byte] (which includes
* akka.util.ByteString), 32- and 16-bit integers, both
* signed and unsigned, into Scala/Java's (always-signed) integer
* types. Endianness may be specified implicitly.
*
* Overloaded object method toBytesBE() is provided to convert
* Int and Short values to Array[Byte] in network order.
*
* Also, an IndexedSeq[Byte] can be formatted into various
* printable strings, notably a multiline display with offsets
* similar to that produced by `tcpdump -x`.
*
* Created by Ian Robertson <iroberts@uw.edu> on 4/3/17.
*/
object ByteSeqOps {
  /** Enrich any `IndexedSeq[Byte]` with the extraction methods of [[ByteSeqOps]]. */
  implicit def toByteSeqOps[A <: IndexedSeq[Byte]](bytes: A): ByteSeqOps[A] =
    new ByteSeqOps[A](bytes)
  /** Reinterpret a 32-bit value as unsigned, widening to a non-negative Long.
    *
    * Bug fix: the mask must be the Long literal `0xffffffffL`. The Int literal
    * `0xffffffff` is -1, which widens to -1L and makes the mask a no-op,
    * leaving the sign extension from `toLong` in place (e.g. 0xFFFFFFFF would
    * come back as -1 instead of 4294967295).
    */
  def unsignedIntToSignedLong(u32: Int): Long = u32.toLong & 0xffffffffL
  /** Reinterpret a 16-bit value as unsigned, widening to a non-negative Int. */
  def unsignedShortToSignedInt(u16: Short): Int = u16.toInt & 0xffff
  /** Reinterpret an 8-bit value as unsigned, widening to a non-negative Short. */
  def unsignedByteToSignedShort(u8: Byte): Short = (u8.toInt & 0xff).toShort
  /** Encode a 32-bit integer as 4 bytes in network (big-endian) order. */
  def toBytesBE(i32: Int): Array[Byte] =
    Array((i32 >>> 24).toByte, (i32 >>> 16).toByte, (i32 >>> 8).toByte, i32.toByte)
  /** Encode a 16-bit integer as 2 bytes in network (big-endian) order. */
  def toBytesBE(i16: Short): Array[Byte] =
    Array((i16 >>> 8).toByte, i16.toByte)
}
/** Enrichment over an IndexedSeq[Byte] providing byte-order swaps,
  * 16/32-bit signed and unsigned integer extraction, and hex formatting.
  * Instances are normally obtained through the implicit conversion in the
  * companion object. The swab methods use the Scala 2.12-era CanBuildFrom
  * to rebuild the caller's collection type.
  */
class ByteSeqOps[A <: IndexedSeq[Byte]](bytes: A) {
  import ByteSeqOps._
  /** The first 4 bytes reversed (endianness swap), built as collection type B. */
  def swab32[B](implicit cbf: CanBuildFrom[A, Byte, B]): B = {
    require(bytes.length >= 4)
    val builder = cbf()
    builder ++= bytes.take(4).reverse
    builder.result()
  }
  /** The first 2 bytes reversed (endianness swap), built as collection type B. */
  def swab16[B](implicit cbf: CanBuildFrom[A, Byte, B]): B = {
    require(bytes.length >= 2)
    val builder = cbf()
    builder ++= bytes.take(2).reverse
    builder.result()
  }
  /** Unsigned 32-bit value at offset 0, using the implicit byte order. */
  def getUInt32(implicit byteOrder: ByteOrder): Long = {
    byteOrder match {
      case BIG_ENDIAN => this.getUInt32BE
      case LITTLE_ENDIAN => this.getUInt32LE
    }
  }
  def getUInt32LE: Long = {
    require(bytes.length >= 4)
    unsignedIntToSignedLong(getInt32LE)
  }
  def getUInt32BE: Long = {
    require(bytes.length >= 4)
    unsignedIntToSignedLong(getInt32BE)
  }
  /** Signed 32-bit value at offset 0, using the implicit byte order. */
  def getInt32(implicit byteOrder: ByteOrder): Int = {
    byteOrder match {
      case LITTLE_ENDIAN => getInt32LE
      case BIG_ENDIAN => getInt32BE
    }
  }
  // Each byte except the topmost is masked with 0xff to undo the sign
  // extension of Byte -> Int before the shifted bytes are OR-ed together.
  def getInt32LE: Int = {
    (bytes(3).toInt << 24) |
      ((bytes(2).toInt & 0xff) << 16) |
      ((bytes(1).toInt & 0xff) << 8) |
      (bytes(0).toInt & 0xff)
  }
  def getInt32BE: Int = {
    (bytes(0).toInt << 24) |
      ((bytes(1).toInt & 0xff) << 16) |
      ((bytes(2).toInt & 0xff) << 8) |
      (bytes(3).toInt & 0xff)
  }
  /** Unsigned 16-bit value at offset 0, using the implicit byte order. */
  def getUInt16(implicit byteOrder: ByteOrder): Int = {
    byteOrder match {
      case LITTLE_ENDIAN => getUInt16LE
      case BIG_ENDIAN => getUInt16BE
    }
  }
  def getUInt16LE: Int = ((bytes(1).toInt & 0xff) << 8) | (bytes(0).toInt & 0xff)
  def getUInt16BE: Int = ((bytes(0).toInt & 0xff) << 8) | (bytes(1).toInt & 0xff)
  /** Signed 16-bit value at offset 0, using the implicit byte order. */
  def getInt16(implicit byteOrder: ByteOrder): Short = {
    byteOrder match {
      case LITTLE_ENDIAN => getInt16LE
      case BIG_ENDIAN => getInt16BE
    }
  }
  def getInt16LE: Short = ((bytes(1) & 0xff) << 8 | (bytes(0) & 0xff)).toShort
  def getInt16BE: Short = ((bytes(0) & 0xff) << 8 | (bytes(1) & 0xff)).toShort
  /** All bytes as lowercase hex, two bytes (4 hex digits) per space-separated group. */
  def mkHexString: String = bytes.map(b => f"$b%02x").grouped(2).map(_.mkString("")).mkString(" ")
  /** mkHexString split into lines of 16 bytes each. */
  def mkHexLines: Iterator[String] = {
    bytes.grouped(16).map(_.mkHexString)
  }
  /** Multiline hex dump with per-line byte offsets, similar to `tcpdump -x`. */
  def mkHexBlock(indent: Int = 8): String = {
    mkHexLines.zip(Iterator.from(0, 16))
      .map { case (s, i) => (" " * indent) + f"0x$i%04x: " + s }
      .mkString("\n")
  }
}
| robertson-tech/wirefugue | sensor/src/main/scala/edu/uw/at/iroberts/wirefugue/pcap/ByteSeqOps.scala | Scala | gpl-3.0 | 3,958 |
package com.brkyvz.spark.linalg
import java.lang.reflect.InvocationTargetException
import java.lang.{Double => JavaDouble}
import org.apache.spark.mllib.linalg._
/** Util methods that use reflection to call into MLlib's private BLAS methods. */
object BLASUtils {
  // Handle to the private singleton org.apache.spark.mllib.linalg.BLAS object.
  @transient private lazy val clazz: Class[_] = Class.forName("org.apache.spark.mllib.linalg.BLAS$")
  @transient private lazy val _blas: Any = {
    val constructor = clazz.getDeclaredConstructors.head
    constructor.setAccessible(true)
    constructor.newInstance()
  }
  /** Unwrap a MatrixLike into an MLlib Matrix. Lazy matrices are forced with
    * compute(); sparse ones are densified when `toDense` is set.
    */
  private def castMatrix(mat: MatrixLike, toDense: Boolean = false): Matrix = mat match {
    case dn: DenseMatrixWrapper => dn.asInstanceOf[DenseMatrix]
    case sp: SparseMatrixWrapper =>
      if (toDense) sp.toDense else sp.asInstanceOf[SparseMatrix]
    case lzy: LazyMatrix => lzy.compute().asInstanceOf[DenseMatrix]
    case _ => throw new UnsupportedOperationException(s"${mat.getClass} can't be cast to Matrix.")
  }
  /** Unwrap a VectorLike into an MLlib Vector (see castMatrix). */
  private def castVector(mat: VectorLike, toDense: Boolean = false): Vector = mat match {
    case dn: DenseVectorWrapper => dn.asInstanceOf[DenseVector]
    case sp: SparseVectorWrapper =>
      if (toDense) sp.toDense else sp.asInstanceOf[SparseVector]
    case lzy: LazyVector => lzy.compute().asInstanceOf[DenseVector]
    case _ => throw new UnsupportedOperationException(s"${mat.getClass} can't be cast to Vector.")
  }
  /** Look up and invoke a private BLAS method by name and argument types. */
  private def invokeMethod(methodName: String, args: (Class[_], AnyRef)*): Any = {
    val (types, values) = args.unzip
    val method = clazz.getDeclaredMethod(methodName, types: _*)
    method.setAccessible(true)
    try {
      method.invoke(_blas, values.toSeq: _*)
    } catch {
      case ex: InvocationTargetException =>
        // Keep the reflective failure as the cause instead of discarding it.
        throw new IllegalArgumentException(s"$methodName is not supported for arguments: $values", ex)
    }
  }
  /**
   * y += a * x
   */
  def axpy(a: Double, x: VectorLike, y: VectorLike): Unit = {
    val args: Seq[(Class[_], AnyRef)] = Seq((classOf[Double], new JavaDouble(a)),
      (classOf[Vector], castVector(x)), (classOf[Vector], castVector(y, toDense = true)))
    invokeMethod("axpy", args: _*)
  }
  /**
   * x^T^y
   */
  def dot(x: VectorLike, y: VectorLike): Double = {
    val args: Seq[(Class[_], AnyRef)] = Seq(
      (classOf[Vector], castVector(x)), (classOf[Vector], castVector(y)))
    invokeMethod("dot", args: _*).asInstanceOf[Double]
  }
  /**
   * x = a * x
   */
  def scal(a: Double, x: VectorLike): Unit = {
    val cx = castVector(x)
    val args: Seq[(Class[_], AnyRef)] = Seq(
      (classOf[Double], new JavaDouble(a)), (classOf[Vector], cx))
    invokeMethod("scal", args: _*)
  }
  /**
   * A := alpha * x * x^T^ + A
   * @param alpha a real scalar that will be multiplied to x * x^T^.
   * @param x the vector x that contains the n elements.
   * @param A the symmetric matrix A. Size of n x n.
   */
  def syr(alpha: Double, x: Vector, A: MatrixLike): Unit = {
    val args: Seq[(Class[_], AnyRef)] = Seq((classOf[Double], new JavaDouble(alpha)),
      (classOf[Vector], castVector(x)), (classOf[DenseMatrix], castMatrix(A, toDense = true)))
    invokeMethod("syr", args: _*)
  }
  /**
   * C := alpha * A * B + beta * C
   * @param alpha a scalar to scale the multiplication A * B.
   * @param A the matrix A that will be left multiplied to B. Size of m x k.
   * @param B the matrix B that will be left multiplied by A. Size of k x n.
   * @param beta a scalar that can be used to scale matrix C.
   * @param C the resulting matrix C. Size of m x n. C.isTransposed must be false.
   */
  def gemm(alpha: Double, A: MatrixLike, B: MatrixLike, beta: Double, C: DenseMatrix): Unit = {
    B match {
      case dnB: DenseMatrixWrapper => mllibGemm(alpha, castMatrix(A), dnB, beta, C)
      case spB: SparseMatrixWrapper =>
        // MLlib's gemm has no dense*sparse path, so dense A times sparse B is
        // handled locally by dsgemm; sparse*sparse falls back to densifying B.
        A match {
          case dnA: DenseMatrixWrapper => dsgemm(alpha, dnA, spB, beta, C)
          case spA: SparseMatrixWrapper => mllibGemm(alpha, spA, spB.toDense, beta, C)
          case lzy: LazyMatrix =>
            dsgemm(alpha, lzy.compute().asInstanceOf[DenseMatrixWrapper], spB, beta, C)
        }
      case lzy: LazyMatrix =>
        mllibGemm(alpha, castMatrix(A), lzy.compute().asInstanceOf[DenseMatrix], beta, C)
    }
  }
  /** Delegate C := alpha * A * B + beta * C to MLlib's private gemm. */
  private def mllibGemm(
      alpha: Double,
      A: Matrix,
      B: DenseMatrix,
      beta: Double,
      C: DenseMatrix): Unit = {
    val args: Seq[(Class[_], AnyRef)] = Seq(
      (classOf[Double], new JavaDouble(alpha)), (classOf[Matrix], A), (classOf[DenseMatrix], B),
      (classOf[Double], new JavaDouble(beta)), (classOf[DenseMatrix], C))
    invokeMethod("gemm", args: _*)
  }
  /** C := alpha * A * B + beta * C for dense A and sparse B, computed locally
    * (no MLlib call). Iteration strategy depends on whether A and B are
    * stored transposed.
    */
  private def dsgemm(
      alpha: Double,
      A: DenseMatrixWrapper,
      B: SparseMatrixWrapper,
      beta: Double,
      C: DenseMatrix): Unit = {
    val mA: Int = A.numRows
    val nB: Int = B.numCols
    val kA: Int = A.numCols
    val kB: Int = B.numRows
    require(kA == kB, s"The columns of A don't match the rows of B. A: $kA, B: $kB")
    require(mA == C.numRows, s"The rows of C don't match the rows of A. C: ${C.numRows}, A: $mA")
    require(nB == C.numCols,
      s"The columns of C don't match the columns of B. C: ${C.numCols}, A: $nB")
    val Avals = A.values
    val Bvals = B.values
    val Cvals = C.values
    val BrowIndices = B.rowIndices
    val BcolPtrs = B.colPtrs
    // Slicing is easy in this case. This is the optimal multiplication setting for sparse matrices
    if (!B.isTransposed) {
      var colCounterForB = 0
      if (A.isTransposed) { // Expensive to put the check inside the loop
        while (colCounterForB < nB) {
          var rowCounterForA = 0
          val Cstart = colCounterForB * mA
          val Bstart = BcolPtrs(colCounterForB)
          while (rowCounterForA < mA) {
            var i = Bstart
            val indEnd = BcolPtrs(colCounterForB + 1)
            val Astart = rowCounterForA * kA
            var sum = 0.0
            while (i < indEnd) {
              sum += Avals(Astart + BrowIndices(i)) * Bvals(i)
              i += 1
            }
            val Cindex = Cstart + rowCounterForA
            Cvals(Cindex) = beta * Cvals(Cindex) + sum * alpha
            rowCounterForA += 1
          }
          colCounterForB += 1
        }
      } else {
        while (colCounterForB < nB) {
          var rowCounterForA = 0
          val Cstart = colCounterForB * mA
          while (rowCounterForA < mA) {
            var i = BcolPtrs(colCounterForB)
            val indEnd = BcolPtrs(colCounterForB + 1)
            var sum = 0.0
            while (i < indEnd) {
              sum += A(rowCounterForA, BrowIndices(i)) * Bvals(i)
              i += 1
            }
            val Cindex = Cstart + rowCounterForA
            Cvals(Cindex) = beta * Cvals(Cindex) + sum * alpha
            rowCounterForA += 1
          }
          colCounterForB += 1
        }
      }
    } else {
      // Scale matrix first if `beta` is not equal to 1.0 (the additive terms
      // below assume C already carries its beta factor).
      if (beta != 1.0) {
        scal(beta, new DenseVectorWrapper(C.values))
      }
      // Perform matrix multiplication and add to C. The rows of A are multiplied by the columns of
      // B, and added to C.
      var rowCounterForB = 0 // the column to be updated in C
      if (!A.isTransposed) { // Expensive to put the check inside the loop
        while (rowCounterForB < kB) {
          var i = BcolPtrs(rowCounterForB)
          val indEnd = BcolPtrs(rowCounterForB + 1)
          while (i < indEnd) {
            var rowCounterForA = 0 // The column of A to multiply with the row of B
            val Bval = Bvals(i) * alpha
            val Cstart = BrowIndices(i) * mA
            val Astart = rowCounterForB * mA
            while (rowCounterForA < mA) {
              Cvals(Cstart + rowCounterForA) += Avals(Astart + rowCounterForA) * Bval
              rowCounterForA += 1
            }
            i += 1
          }
          rowCounterForB += 1
        }
      } else {
        while (rowCounterForB < kB) {
          var i = BcolPtrs(rowCounterForB)
          val indEnd = BcolPtrs(rowCounterForB + 1)
          while (i < indEnd) {
            var rowCounterForA = 0 // The column of A to multiply with the row of B
            val Bval = Bvals(i) * alpha
            val Bcol = BrowIndices(i)
            val Cstart = Bcol * mA
            while (rowCounterForA < mA) {
              Cvals(Cstart + rowCounterForA) += A(rowCounterForA, rowCounterForB) * Bval
              rowCounterForA += 1
            }
            i += 1
          }
          rowCounterForB += 1
        }
      }
    }
  }
  /**
   * y := alpha * A * x + beta * y
   * @param alpha a scalar to scale the multiplication A * x.
   * @param A the matrix A that will be left multiplied to x. Size of m x n.
   * @param x the vector x that will be left multiplied by A. Size of n x 1.
   * @param beta a scalar that can be used to scale vector y.
   * @param y the resulting vector y. Size of m x 1.
   */
  def gemv(
      alpha: Double,
      A: MatrixLike,
      x: VectorLike,
      beta: Double,
      y: VectorLike): Unit = {
    val a: Matrix = castMatrix(A)
    // BUGFIX: the casted values must be the ones handed to reflection. The
    // original passed the raw MatrixLike/VectorLike wrappers `x` and `y`,
    // which fails for lazy operands — those only become MLlib types after
    // castVector forces them with compute().
    val _x: Vector = castVector(x)
    val _y: Vector = castVector(y)
    // NOTE(review): MLlib's gemv expects a DenseVector for y; a sparse `y`
    // would still be rejected here — confirm whether `toDense = true` (and
    // write-back) is wanted, as in axpy.
    val args: Seq[(Class[_], AnyRef)] = Seq((classOf[Double], new JavaDouble(alpha)),
      (classOf[Matrix], a), (classOf[Vector], _x),
      (classOf[Double], new JavaDouble(beta)), (classOf[DenseVector], _y))
    invokeMethod("gemv", args: _*)
  }
}
| brkyvz/lazy-linalg | src/main/scala/com/brkyvz/spark/linalg/BLASUtils.scala | Scala | apache-2.0 | 9,418 |
/*
* The MIT License
*
* Copyright (c) 2021 Fulcrum Genomics
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
package com.fulcrumgenomics.commons.collection
import com.fulcrumgenomics.commons.util.UnitSpec
import org.scalatest.OptionValues
/** Behavioral tests for LeastRecentlyUsedCache.
  *
  * NOTE(review): the assertions are order-sensitive — every put/get updates
  * the cache's recency ordering, so statements must not be reordered.
  */
class LeastRecentlyUsedCacheTest extends UnitSpec with OptionValues{
  "LeastRecentlyUsedCache" should "put and get a single element" in {
    val cache = new LeastRecentlyUsedCache[Int, Int](1)
    // A fresh cache exposes no entries.
    cache.iterator.isEmpty shouldBe true
    cache.put(1, 2) shouldBe None
    cache.get(1).value shouldBe 2
    cache.get(2) shouldBe None
    cache.iterator.toSeq should contain theSameElementsInOrderAs Seq((1, 2))
    cache.size shouldBe 1
  }
  it should "keep only the least recently used when adding more than the max entries" in {
    val cache = new LeastRecentlyUsedCache[Int, Int](3)
    // Up to the limit
    cache.put(1, 2) shouldBe None
    cache.put(2, 3) shouldBe None
    cache.put(3, 4) shouldBe None
    cache.iterator.toSeq should contain theSameElementsInOrderAs Seq((1, 2), (2, 3), (3, 4))
    // Should evict (1, 2)
    cache.put(4, 5) shouldBe None
    cache.iterator.toSeq should contain theSameElementsInOrderAs Seq((2, 3), (3, 4), (4, 5))
    // Should do nothing, as (4, 5) was already the most recently used
    cache.put(4, 5).value shouldBe 5
    cache.iterator.toSeq should contain theSameElementsInOrderAs Seq((2, 3), (3, 4), (4, 5))
    // Snapshot the iteration order before key 2 is touched again below.
    val keyValues = cache.iterator.toSeq
    // Should make (2, 3) the most recently used
    cache.put(2, 3).value shouldBe 3
    cache.iterator.toSeq should contain theSameElementsInOrderAs Seq((3, 4), (4, 5), (2, 3))
    // make sure that keyValues is the order from before the cache.put(2, 3)
    keyValues should contain theSameElementsInOrderAs Seq((2, 3), (3, 4), (4, 5))
  }
  it should "remove elements from the cache" in {
    val cache = new LeastRecentlyUsedCache[Int, Int](3)
    // Up to the limit
    cache.put(1, 2) shouldBe None
    cache.put(2, 3) shouldBe None
    cache.put(3, 4) shouldBe None
    cache.iterator.toSeq should contain theSameElementsInOrderAs Seq((1, 2), (2, 3), (3, 4))
    cache.size shouldBe 3
    // Remove non-existent item
    cache.remove(4) shouldBe None
    // Remove the rest of the items
    cache.remove(3).value shouldBe 4
    cache.remove(1).value shouldBe 2
    cache.remove(2).value shouldBe 3
    // Check that there are no elements
    cache.iterator.isEmpty shouldBe true
    cache.size shouldBe 0
    Seq(1, 2, 3).foreach { key => cache.get(key).isEmpty shouldBe true }
  }
}
| fulcrumgenomics/commons | src/test/scala/com/fulcrumgenomics/commons/collection/LeastRecentlyUsedCacheTest.scala | Scala | mit | 3,583 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.fs
import slamdata.Predef._
import quasar.Data
import quasar.common.PhaseResult
import quasar.effect.{KeyValueStore, MonotonicSeq}
import quasar.fs.QueryFile._
import quasar.frontend.logicalplan.{Constant, LogicalPlan}
import matryoshka.data.Fix
import matryoshka.implicits._
import scalaz._, Scalaz._
object constantPlans {
  /** Identify plans which reduce to a (set of) constant value(s). */
  def asConstant(lp: Fix[LogicalPlan]): Option[List[Data]] =
    lp.project match {
      case Constant(value) => List(value).some
      case _ => none
    }
  // Algebra of the in-memory result store, keyed by result handle.
  type State[A] = KeyValueStore[QueryFile.ResultHandle, Vector[Data], A]
  // Phase result reported whenever a constant plan is intercepted.
  val constantPhase =
    PhaseResult.detail("Intercept Constant", "This plan is constant and can be evaluated in memory")
  /** A QueryFile interpreter that short-circuits constant plans — serving or
    * writing their values directly — and delegates everything else to the
    * underlying QueryFile algebra.
    */
  def queryFile[S[_]](
    implicit
    S0: QueryFile :<: S,
    S1: ManageFile :<: S,
    seq: MonotonicSeq.Ops[S],
    write: WriteFile.Ops[S],
    state: KeyValueStore.Ops[QueryFile.ResultHandle, Vector[Data], S]
  ): QueryFile ~> Free[S, ?] = {
    val query = QueryFile.Ops[S]
    val queryUnsafe = QueryFile.Unsafe[S]
    // Allocate a fresh handle and stash the constant data under it.
    def dataHandle(data: List[Data]): Free[S, ResultHandle] =
      for {
        h <- seq.next.map(ResultHandle(_))
        _ <- state.put(h, data.toVector)
      } yield h
    λ[QueryFile ~> Free[S, ?]] {
      case ExecutePlan(lp, out) =>
        // Constant plan: write the values straight to `out`; otherwise delegate.
        asConstant(lp).fold(
          query.execute(lp, out).run.run)(
          data => write.saveThese(out, data.toVector).run.strengthL(Vector(constantPhase)))
      case EvaluatePlan(lp) =>
        // Constant plan: serve the results from a fresh in-memory handle.
        asConstant(lp).fold(
          queryUnsafe.eval(lp).run.run)(
          data => dataHandle(data).map(h => (Vector(constantPhase), h.right)))
      case More(handle) =>
        // In-memory handles yield their data once; the slot is emptied so
        // subsequent More calls return an empty chunk.
        state.get(handle).run.flatMap {
          case Some(data) => state.put(handle, Vector.empty).as(data.right)
          case None => queryUnsafe.more(handle).run
        }
      case Close(handle) =>
        // Only delete handles we own; unknown handles belong to the delegate.
        state.contains(handle).ifM(state.delete(handle), queryUnsafe.close(handle))
      case Explain(lp) =>
        val constantExecutionPlan =
          ExecutionPlan(FileSystemType("constant"), "none", ISet.empty)
        asConstant(lp).fold(
          query.explain(lp).run.run)(
          data => (Vector(constantPhase), constantExecutionPlan.right[FileSystemError]).point[Free[S, ?]])
      case ListContents(dir) =>
        query.listContents(dir).run
      case FileExists(file) =>
        query.fileExists(file)
    }
  }
} | jedesah/Quasar | core/src/main/scala/quasar/fs/constantPlans.scala | Scala | apache-2.0 | 3,106 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ly.stealth.mesos.exhibitor
import ly.stealth.mesos.exhibitor.Cli.sendRequest
import ly.stealth.mesos.exhibitor.Util.parseMap
import org.junit.Assert._
import org.junit.{After, Before, Test}
import scala.collection.mutable
/** End-to-end tests for the scheduler's HTTP API: each test drives a REST
  * endpoint via the CLI helper and then inspects the global
  * Scheduler.cluster state. The embedded HTTP server is (re)started around
  * every test; cluster state is presumably reset by MesosTestCase — confirm.
  */
class HttpServerTest extends MesosTestCase {
  @Before
  override def before() {
    super.before()
    // Point the CLI request helper at the embedded server started below.
    Config.api = "http://localhost:8000"
    HttpServer.start(resolveDeps = false)
  }
  @After
  override def after() {
    HttpServer.stop()
    super.after()
  }
  @Test
  def addServer() {
    // Adding a server with explicit resources registers it in the cluster.
    val response = sendRequest("/add", parseMap("id=0,cpu=0.6,mem=128,port=3000..8000")).as[ApiResponse]
    assertEquals(1, Scheduler.cluster.servers.size)
    val server = Scheduler.cluster.servers.head
    assertEquals("0", server.id)
    assertEquals(0.6, server.config.cpus, 0.001)
    assertEquals(128, server.config.mem, 0.001)
    assertEquals(1, server.config.ports.size)
    assertEquals(3000, server.config.ports.head.start)
    assertEquals(8000, server.config.ports.head.end)
    // The response echoes the newly added server back to the caller.
    assertTrue(response.message.contains("Added servers"))
    assert(response.success)
    assertNotEquals(None, response.value)
    ExhibitorServerTest.assertServerEquals(server, response.value.get.servers.head)
  }
  @Test
  def configServer() {
    sendRequest("/add", parseMap("id=0"))
    // Exhibitor settings and shared-config overrides are stored separately.
    val response = sendRequest("/config", parseMap("id=0,zkconfigconnect=192.168.3.1:2181,zookeeper-install-directory=/tmp/zookeeper")).as[ApiResponse]
    val serverOpt = Scheduler.cluster.getServer("0")
    assertNotEquals(None, serverOpt)
    val server = serverOpt.get
    assertEquals("0", server.id)
    assertEquals(mutable.Map("zkconfigconnect" -> "192.168.3.1:2181"), server.config.exhibitorConfig)
    assertEquals(mutable.Map("zookeeper-install-directory" -> "/tmp/zookeeper"), server.config.sharedConfigOverride)
    assertTrue(response.success)
    assertTrue(response.message.contains("Updated configuration"))
    assertNotEquals(None, response.value)
    ExhibitorServerTest.assertServerEquals(server, response.value.get.servers.head)
  }
  @Test
  def clusterStatus() {
    sendRequest("/add", parseMap("id=0"))
    sendRequest("/add", parseMap("id=1"))
    sendRequest("/add", parseMap("id=2"))
    // Status reports every added server, each with a distinct id.
    val response = sendRequest("/status", Map()).as[ApiResponse]
    assertTrue(response.success)
    assertNotEquals(None, response.value)
    assertEquals(3, response.value.get.servers.size)
    assertEquals(3, response.value.get.servers.map(_.id).distinct.size)
  }
  @Test
  def removeServer() {
    sendRequest("/add", parseMap("id=0"))
    sendRequest("/add", parseMap("id=1"))
    sendRequest("/add", parseMap("id=2"))
    // Removing one of three servers leaves the other two in the cluster.
    sendRequest("/remove", parseMap("id=1"))
    assertEquals(2, Scheduler.cluster.servers.size)
  }
  @Test
  def startStopServer() {
    sendRequest("/add", parseMap("id=0"))
    // Start moves the server to Stopped state (awaiting a Mesos offer).
    val startResponse = sendRequest("/start", parseMap("id=0")).as[ApiResponse]
    assertTrue(startResponse.success)
    assertTrue(startResponse.message.contains("Started servers"))
    assertNotEquals(None, startResponse.value)
    assertEquals(ExhibitorServer.Stopped, startResponse.value.get.servers.head.state)
    // Stop returns the server to the Added state.
    val stopResponse = sendRequest("/stop", parseMap("id=0")).as[ApiResponse]
    assertTrue(stopResponse.success)
    assertTrue(stopResponse.message.contains("Stopped servers"))
    assertNotEquals(None, stopResponse.value)
    assertEquals(ExhibitorServer.Added, stopResponse.value.get.servers.head.state)
  }
}
| CiscoCloud/exhibitor-mesos-framework | src/main/test/ly/stealth/mesos/exhibitor/HttpServerTest.scala | Scala | apache-2.0 | 4,273 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk
import java.nio.charset.StandardCharsets
import org.apache.openwhisk.extension.whisk.OpenWhiskProtocolBuilder
import org.apache.openwhisk.extension.whisk.Predef._
import io.gatling.core.Predef._
import io.gatling.core.structure.ScenarioBuilder
import io.gatling.core.util.Resource
import org.apache.commons.io.FileUtils
import scala.concurrent.duration._
/** Gatling load test: all users repeatedly invoke one OpenWhisk action with
  * blocking invocations, then throughput and error-rate thresholds are
  * asserted. Every knob is read from environment variables.
  */
class BlockingInvokeOneActionSimulation extends Simulation {
  // Specify parameters for the run
  val host = sys.env("OPENWHISK_HOST")
  // Specify authentication (API_KEY has the form "uuid:key")
  val Array(uuid, key) = sys.env("API_KEY").split(":")
  val connections: Int = sys.env("CONNECTIONS").toInt
  val seconds: FiniteDuration = sys.env.getOrElse("SECONDS", "10").toInt.seconds
  // Specify thresholds
  val requestsPerSec: Int = sys.env("REQUESTS_PER_SEC").toInt
  val minimalRequestsPerSec: Int = sys.env.getOrElse("MIN_REQUESTS_PER_SEC", requestsPerSec.toString).toInt
  val maxErrorsAllowed: Int = sys.env.getOrElse("MAX_ERRORS_ALLOWED", "0").toInt
  val maxErrorsAllowedPercentage: Double = sys.env.getOrElse("MAX_ERRORS_ALLOWED_PERCENTAGE", "0.1").toDouble
  // Generate the OpenWhiskProtocol
  val openWhiskProtocol: OpenWhiskProtocolBuilder = openWhisk.apiHost(host)
  // Specify async
  val async = sys.env.getOrElse("ASYNC", "false").toBoolean
  val actionName = "testActionForBlockingInvokeOneAction"
  val actionfile = if (async) "nodeJSAsyncAction.js" else "nodeJSAction.js"
  // Define scenario
  val test: ScenarioBuilder = scenario(s"Invoke one ${if (async) "async" else "sync"} action blocking")
    // Only the first virtual user creates the action; the rendezVous
    // barriers keep the other users waiting until it exists.
    .doIf(_.userId == 1) {
      exec(
        openWhisk("Create action")
          .authenticate(uuid, key)
          .action(actionName)
          .create(FileUtils
            .readFileToString(Resource.body(actionfile).get.file, StandardCharsets.UTF_8)))
    }
    .rendezVous(connections)
    // 5-second warm-up window so action containers are hot before measuring.
    .during(5.seconds) {
      exec(openWhisk("Warm containers up").authenticate(uuid, key).action(actionName).invoke())
    }
    .rendezVous(connections)
    // The measured phase: only "Invoke action" requests count in assertions.
    .during(seconds) {
      exec(openWhisk("Invoke action").authenticate(uuid, key).action(actionName).invoke())
    }
    .rendezVous(connections)
    // Cleanup: again only performed by the first virtual user.
    .doIf(_.userId == 1) {
      exec(openWhisk("Delete action").authenticate(uuid, key).action(actionName).delete())
    }
  setUp(test.inject(atOnceUsers(connections)))
    .protocols(openWhiskProtocol)
    // One failure will make the build yellow
    .assertions(details("Invoke action").requestsPerSec.gt(minimalRequestsPerSec))
    .assertions(details("Invoke action").requestsPerSec.gt(requestsPerSec))
    // Mark the build yellow, if there are failed requests. And red if both conditions fail.
    .assertions(details("Invoke action").failedRequests.count.lte(maxErrorsAllowed))
    .assertions(details("Invoke action").failedRequests.percent.lte(maxErrorsAllowedPercentage))
}
| cbickel/openwhisk | tests/performance/gatling_tests/src/gatling/scala/org/apache/openwhisk/BlockingInvokeOneActionSimulation.scala | Scala | apache-2.0 | 3,674 |
package com.seanshubin.templater.domain
/** Copies `command.origin` to `command.destination`, applying every
  * configured literal text replacement to the contents on the way.
  * Directories are skipped entirely.
  */
class CommandExecutorImpl(fileSystem: FileSystem, textReplacements: Map[String, String]) extends CommandExecutor {
  override def execute(command: CopyFileCommand): Unit =
    if (!fileSystem.isDirectory(command.origin)) {
      val original = fileSystem.loadFileIntoString(command.origin)
      // Apply each (target -> replacement) pair in turn, as literal text.
      val rewritten = textReplacements.foldLeft(original) {
        case (text, (target, replacement)) => text.replaceAllLiterally(target, replacement)
      }
      fileSystem.storeStringIntoFile(rewritten, command.destination)
    }
}
| SeanShubin/generate-from-template | domain/src/main/scala/com/seanshubin/templater/domain/CommandExecutorImpl.scala | Scala | unlicense | 669 |
package chess
/** A chess piece: the pairing of a [[Color]] with a [[Role]]. */
case class Piece(color: Color, role: Role) {
  def is(c: Color) = c == color
  def is(r: Role) = r == role
  def isNot(r: Role) = r != role
  def oneOf(rs: Set[Role]) = rs(role)
  /** Knights and bishops are the minor pieces. */
  def isMinor = role == Knight || role == Bishop
  /** Queens and rooks are the major pieces. */
  def isMajor = role == Queen || role == Rook
  /** FEN/Forsyth letter: uppercase for white, lowercase for black. */
  def forsyth: Char = if (color == White) role.forsythUpper else role.forsyth
  // attackable positions assuming empty board
  def eyes(from: Pos, to: Pos): Boolean = attacks(from, PosSet.empty).has(to)
  // movable positions assuming empty board (includes non-capturing pawn
  // pushes: a single step, or a double step from the starting rank)
  def eyesMovable(from: Pos, to: Pos): Boolean =
    role match {
      case Pawn =>
        Piece.pawnEyes(color, from, to) || {
          (from ?| to) && {
            val dy = to.rank - from.rank
            if (color.white) dy == 1 || (dy == 2 && from.rank <= Rank.Second)
            else dy == -1 || (dy == -2 && from.rank >= Rank.Seventh)
          }
        }
      case _ => eyes(from, to)
    }
  /** Squares this piece attacks from `from`, given the occupied squares. */
  def attacks(from: Pos, occupied: => PosSet): PosSet =
    role match {
      case King => PosSet.kingAttacks(from)
      case Queen => PosSet.queenAttacks(from, occupied)
      case Rook => PosSet.rookAttacks(from, occupied)
      case Bishop => PosSet.bishopAttacks(from, occupied)
      case Knight => PosSet.knightAttacks(from)
      case Pawn => PosSet.pawnAttacks(color, from)
    }
  override def toString = s"$color-$role".toLowerCase
}
object Piece {
  /** Build a piece from its FEN letter: the case gives the color, the
    * (uppercased) letter gives the role; None for unknown letters.
    */
  def fromChar(c: Char): Option[Piece] =
    Role.allByPgn.get(c.toUpper).map(role => Piece(Color.fromWhite(c.isUpper), role))
  // One-square diagonal step in the pawn's forward direction (a capture move).
  private def pawnEyes(color: Color, from: Pos, to: Pos) =
    (from xDist to) == 1 && (to.rank - from.rank) == (if (color.white) 1 else -1)
}
| niklasf/scalachess | src/main/scala/Piece.scala | Scala | mit | 1,640 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.config.factories
import java.io.FileInputStream
import java.net.URI
import java.util.Properties
import scala.collection.JavaConverters._
import org.apache.samza.config.Config
import org.apache.samza.config.ConfigFactory
import org.apache.samza.config.MapConfig
import org.apache.samza.util.Logging
import org.apache.samza.SamzaException
/** Loads a Samza Config from a java.util.Properties file addressed by a
  * file:// URI (or a scheme-less path).
  */
class PropertiesConfigFactory extends ConfigFactory with Logging {
  /** Read the properties file at `configUri` into an immutable MapConfig.
    *
    * Throws SamzaException if the URI carries a scheme other than file://.
    */
  def getConfig(configUri: URI): Config = {
    val scheme = configUri.getScheme
    if (scheme != null && !scheme.equals("file")) {
      throw new SamzaException("only the file:// scheme is supported for properties files")
    }
    val configPath = configUri.getPath
    val props = new Properties()
    val in = new FileInputStream(configPath)
    // Close the stream even if load() throws; the original leaked the
    // file descriptor on a failed read.
    try {
      props.load(in)
    } finally {
      in.close()
    }
    debug("got config %s from config %s" format (props, configPath))
    new MapConfig(props.asScala.asJava)
  }
}
| prateekm/samza | samza-core/src/main/scala/org/apache/samza/config/factories/PropertiesConfigFactory.scala | Scala | apache-2.0 | 1,740 |
package debop4s.core.utils
import java.util
import debop4s.core.{AbstractCoreFunSuite, ValueObject}
import scala.collection.immutable.IndexedSeq
import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.util.{Failure, Success}
/**
 * debop4s.core.tests.tools.MappersFunSuite
 *
 * Exercises the reflection-based `Mappers` utilities and contrasts them
 * with a plain implicit-conversion approach for case classes.
 *
 * @author 배성혁 sunghyouk.bae@gmail.com
 * @since 2013. 12. 14. 10:24 AM
 */
class MappersFunSuite extends AbstractCoreFunSuite {
  //
  // NOTE: The Java-style mappers do not work with Scala-specific types or case classes.
  // NOTE: Prefer an implicit conversion method over a reflection-based mapper.
  //
  case class Order(customer: Customer, items: List[OrderLineItem] = List()) {
    def addItem(product: Product, quantity: Int) =
      copy(items = OrderLineItem(product, quantity) :: items)
    def total = items.foldLeft(0.0) {
      _ + _.total
    }
  }
  case class Product(name: String, price: Double)
  case class OrderLineItem(product: Product, quantity: Int) {
    def total = quantity * product.price
  }
  case class Customer(name: String)
  case class OrderDTO(customerName: String, total: Double)
  // Implicit conversions should always declare an explicit result type.
  implicit def order2OrderDTO(order: Order): OrderDTO =
    OrderDTO(order.customer.name, order.total)
  test("Scala sytle - Implicit conversion") {
    val customer = Customer("배성혁")
    val bosco = Product("Bosco", 4.99)
    val order = Order(customer).addItem(bosco, 15)
    val dto: OrderDTO = order
    assert(dto.customerName == customer.name)
    assert(dto.total == order.total)
  }
  test("map") {
    val a = new A(100)
    val b = Mappers.map[B](a)
    assert(b.x == a.x)
  }
  test("map list") {
    val as: IndexedSeq[A] = Range(0, 100).map(x => new A(x)) // for (x <- 0 until 100) yield new A(x)
    val bs: Seq[B] = Mappers.mapAll[B](as)
    assert(bs.size == as.size)
    val bsf = Mappers.mapAllAsync[B](as)
    val result = Await.result(bsf, 100 milli)
    result.size shouldEqual as.size
  }
  test("map array") {
    val bs = Mappers.mapAll[B](List(new A(0), new A(1), new A(2)))
    assert(bs.size == 3)
    val bsf = Mappers.mapAllAsync[B](List(new A(0), new A(1), new A(2)))
    // Await the future instead of asserting inside onComplete: an onComplete
    // callback may run after the test method has already returned, so its
    // assertions (and failures) could be silently lost.
    Await.result(bsf, 100 milli).size shouldEqual 3
  }
  test("Nested mapping") {
    val parent = createParent()
    val parentDTO = Mappers.map[ParentDTO](parent)
    assert(parentDTO != null)
    assert(parentDTO.children.size == parent.children.size)
    assert(parentDTO.name == parent.name)
    val sz = parent.children.size
    for (i <- 0 until sz) {
      assert(parentDTO.children.get(i).id == parent.children.get(i).id)
      assert(parentDTO.children.get(i).age == parent.children.get(i).age)
      assert(parentDTO.children.get(i).name == parent.children.get(i).name)
      assert(parentDTO.children.get(i).description == parent.children.get(i).description)
    }
  }
  // Builds a Parent with ten Child entities for the nested-mapping test.
  private def createParent() = {
    val parent = new Parent()
    parent.id = 1L
    parent.age = 45L
    parent.name = "배성혁"
    parent.description = "부모 객체입니다."
    for (i <- 0 until 10) {
      val child = new Child()
      child.id = i.toLong
      child.age = (i + 1).toLong
      child.name = "자식-" + i
      child.description = "자식입니다."
      child.parent = parent
      parent.children.add(child)
    }
    parent
  }
}
/** Mutable source bean used by the mapper tests.
 *  @param x numeric property copied by the mapper
 */
class A(var x: Int) {
  def this() = this(0) // no-arg constructor required by bean-style mappers
  var y: String = _    // optional string property; null unless explicitly set
}
/** Mutable target bean used by the mapper tests.
 *  @param x numeric property populated by the mapper
 */
class B(var x: Int) {
  def this() = this(0) // no-arg constructor required by bean-style mappers
}
// Mutable parent entity with a one-to-many collection of Child objects.
// Extends the project's ValueObject base class.
class Parent extends ValueObject {
var id: java.lang.Long = _
var age: java.lang.Long = _
var name: String = _
var description: String = _
// Children owned by this parent; populated by createParent() in the tests above.
val children = new util.ArrayList[Child]()
}
// Mutable child entity; holds a back-reference to its owning Parent.
class Child extends ValueObject {
var id: java.lang.Long = _
var age: java.lang.Long = _
var name: String = _
var description: String = _
// Back-reference to the owning parent (bidirectional association).
var parent: Parent = _
}
// DTO counterpart of Parent; the mapper copies matching fields by name.
class ParentDTO extends ValueObject {
var id: java.lang.Long = _
var age: java.lang.Long = _
var name: String = _
var description: String = _
// Mapped child DTOs, in the same order as Parent.children.
val children = new util.ArrayList[ChildDTO]()
}
// DTO counterpart of Child; holds a back-reference to its owning ParentDTO.
class ChildDTO extends ValueObject {
var id: java.lang.Long = _
var age: java.lang.Long = _
var name: String = _
var description: String = _
// Back-reference to the owning parent DTO.
var parent: ParentDTO = _
}
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.3
* @date Wed Aug 26 18:41:26 EDT 2009
* @see LICENSE (MIT style license file).
*/
package scalation.linalgebra.gen
import scala.Numeric._
import scala.collection.Traversable
import scala.math.{BigDecimal, ceil, sqrt}
import scala.reflect.ClassTag
import scala.util.Sorting.quickSort
import scalation.linalgebra.VectorD
import scalation.math.Primes.prime
import scalation.util.Error
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Vectors` object contains convenience definitions for commonly used types of
 *  vectors.  For efficiency, non-generic versions of `VectorD`, `VectorC` and `VectorR`
 *  are provided in the `linalgebra` package.
 */
object Vectors
{
    type VectorI = VectorN [Int]           // Vector of Integers
    type VectorL = VectorN [Long]          // Vector of Long Integers
    type VectorF = VectorN [Float]         // Vector of Floating Point Numbers
    type VectorB = VectorN [BigDecimal]    // Vector of Arbitrary-precision Decimal Numbers
//  type VectorD = VectorN [Double]        // Vector of Double Precision Float (see non-generic VectorD)
//  type VectorC = VectorN [Complex]       // Vector of Complex Numbers (see non-generic VectorC)
//  type VectorR = VectorN [Rational]      // Vector of Rational Numbers (see non-generic VectorR)

} // Vectors object
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `VectorN` class stores and operates on Numeric Vectors of various sizes
* and types. The element type may be any subtype of Numeric. Some methods
* only work for Fractional types. When/if Scala adds 'sqrt' and 'pow' to
* `Fractional` types the following methods will be implemented: ~^, ~^=, 'normalizeU'.
* @param dim the dimension/size of the vector
* @param v the 1D array used to store vector elements
*/
class VectorN [T <% Ordered [T]: ClassTag: Numeric] (val dim: Int,
                                           private var v: Array [T] = null)
      extends Traversable [T] with PartiallyOrdered [VectorN [T]] with Error with Serializable
{
    import Vectors._

    {
        if (v == null) {
            v = new Array [T] (dim)
        } else if (dim != v.length) {
            flaw ("constructor", "dimension is wrong")
        } // if
    } // primary constructor

    /** Range for the storage array
     */
    private val range = 0 until dim

    /** Create and import Numeric evidence
     */
    private val nu = implicitly [Numeric [T]]
    import nu._

    /** Numeric zero (0)
     */
    val _0 = nu.zero

    /** Numeric one (1)
     */
    val _1 = nu.one

    /** Numeric minus one (-1)
     */
    val _1n = -_1

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Construct a vector from an array of values.
     *  @param u  the array of values
     */
    def this (u: Array [T]) { this (u.length, u) }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Construct a vector and assign values from vector 'u'.
     *  @param u  the other vector
     */
    def this (u: VectorN [T])
    {
        this (u.dim)                               // invoke primary constructor
        for (i <- range) v(i) = u(i)
    } // constructor

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Expand the size 'dim' of this vector by 'more' elements.
     *  @param more  the expansion factor (number of zero elements to append)
     */
    def expand (more: Int = dim): VectorN [T] =
    {
        if (more < 1) this                         // no change
        else new VectorN [T] (dim + more, Array.concat (v, new Array [T] (more)))
    } // expand

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a vector of the form (0, ... 1, ... 0) where the 1 is at position 'j'.
     *  @param j     the position to place the 1
     *  @param size  the size of the vector (upper bound = size - 1)
     */
    def oneAt (j: Int, size: Int = dim): VectorN [T] =
    {
        val c = new VectorN [T] (size)
        for (i <- 0 until size) c.v(i) = if (i == j) _1 else _0
        c
    } // oneAt

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a vector of the form (0, ... -1, ... 0) where the -1 is at position 'j'.
     *  @param j     the position to place the -1
     *  @param size  the size of the vector (upper bound = size - 1)
     */
    def _oneAt (j: Int, size: Int = dim): VectorN [T] =
    {
        val c = new VectorN [T] (size)
        for (i <- 0 until size) c.v(i) = if (i == j) _1n else _0
        c
    } // _oneAt

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a ramp-up vector of increasing values: 0, 1, 2, ..., size - 1.
     *  @param size  the size of the vector (upper bound = size - 1)
     */
//  def ramp (size: Int = dim): VectorN [T] =
//  {
//      val c = new VectorN [T] (size)
//      for (i <- 0 until size) c.v(i) = nu.fromInt (i)
//      c
//  } // ramp

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Convert this `VectorN [T]` into a `VectorN [Int]`, i.e., `VectorI`.
     */
    def toInt: VectorI =
    {
        val c = new VectorI (dim)
        for (i <- range) c.v(i) = nu.toInt (v(i))
        c
    } // toInt

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Convert this `VectorN [T]` into a (non-generic) `VectorD`.
     */
    def toDouble: VectorD =
    {
        val c = new VectorD (dim)
        for (i <- range) c(i) = nu.toDouble (v(i))
        c
    } // toDouble

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Get this vector's element at the 'i'th index position.
     *  @param i  the given index
     */
    def apply (i: Int): T = v(i)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Get this vector's elements within the given range (vector slicing).
     *  @param r  the given range
     */
    def apply (r: Range): VectorN [T] = slice (r.start, r.end)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Get this vector's entire array.
     */
    def apply (): Array [T] = v

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Set this vector's element at the 'i'th index position.
     *  @param i  the given index
     *  @param x  the value to assign
     */
    def update (i: Int, x: T) { v(i) = x }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Set this vector's elements over the given range (vector slicing).
     *  @param r  the given range
     *  @param x  the value to assign
     */
    def update (r: Range, x: T) { for (i <- r) v(i) = x }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Set this vector's elements over the given range (vector slicing).
     *  @param r  the given range
     *  @param u  the vector to assign
     */
    def update (r: Range, u: VectorN [T]) { for (i <- r) v(i) = u(i) }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Set each value in this vector to 'x'.
     *  @param x  the value to be assigned
     */
    def set (x: T) { for (i <- range) v(i) = x }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Set the values in this vector to the values in array 'u'.
     *  @param u  the array of values to be assigned
     */
    def setAll (u: Array [T]) { for (i <- range) v(i) = u(i) }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Iterate over the vector element by element.
     *  @param f  the function to apply
     */
    def foreach [U] (f: T => U)
    {
        var i = 0
        while (i < dim) { f (v(i)); i += 1 }
    } // foreach

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Slice this vector 'from' to 'till'.
     *  @param from  the start of the slice (included)
     *  @param till  the end of the slice (excluded)
     */
    override def slice (from: Int, till: Int): VectorN [T] = new VectorN [T] (till - from, v.slice (from, till))

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Select a subset of elements of this vector corresponding to an index/basis.
     *  @param index  the set of index positions (e.g., 0, 2, 5)
     */
    def select (index: Array [Int]): VectorN [T] =
    {
        val c = new VectorN [T] (index.length)
        for (i <- c.range) c.v(i) = v(index(i))
        c
    } // select

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Concatenate this vector and scalar 'b'.
     *  @param b  the scalar to be concatenated
     */
    def ++ (b: T): VectorN [T] =
    {
        val c = new VectorN [T] (dim + 1)
        for (i <- c.range) c.v(i) = if (i < dim) v(i) else b
        c
    } // ++

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Concatenate this vector and vector 'b'.
     *  @param b  the vector to be concatenated
     */
    def ++ (b: VectorN [T]): VectorN [T] =
    {
        val c = new VectorN [T] (dim + b.dim)
        for (i <- c.range) c.v(i) = if (i < dim) v(i) else b.v(i - dim)
        c
    } // ++

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Add this vector and vector 'b'.
     *  @param b  the vector to add
     */
    def + (b: VectorN [T]): VectorN [T] =
    {
        val c = new VectorN [T] (dim)
        for (i <- range) c.v(i) = v(i) + b.v(i)
        c
    } // +

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Add this vector and scalar 's'.
     *  @param s  the scalar to add
     */
    def + (s: T): VectorN [T] =
    {
        val c = new VectorN [T] (dim)
        for (i <- range) c.v(i) = v(i) + s
        c
    } // +

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Add this vector and scalar 's._1' only at position 's._2'.
     *  @param s  the (scalar, position) to add
     */
    def + (s: Tuple2 [T, Int]): VectorN [T] =
    {
        val c = new VectorN [T] (dim)
        for (i <- range) c.v(i) = if (i == s._2) v(i) + s._1 else v(i)
        c
    } // +

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Add in-place this vector and vector 'b'.
     *  @param b  the vector to add
     */
    def += (b: VectorN [T]): VectorN [T] = { for (i <- range) v(i) += b.v(i); this }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Add in-place this vector and scalar 's'.
     *  @param s  the scalar to add
     */
    def += (s: T): VectorN [T] = { for (i <- range) v(i) += s; this }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the negative of this vector (unary minus).
     */
    def unary_-(): VectorN [T] =
    {
        val c = new VectorN [T] (dim)
        for (i <- range) c.v(i) = -v(i)
        c
    } // unary_-

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** From this vector subtract vector 'b'.
     *  @param b  the vector to subtract
     */
    def - (b: VectorN [T]): VectorN [T] =
    {
        val c = new VectorN [T] (dim)
        for (i <- range) c.v(i) = v(i) - b.v(i)
        c
    } // -

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** From this vector subtract scalar 's'.
     *  @param s  the scalar to subtract
     */
    def - (s: T): VectorN [T] =
    {
        val c = new VectorN [T] (dim)
        for (i <- range) c.v(i) = v(i) - s
        c
    } // -

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** From this vector subtract scalar 's._1' only at position 's._2'.
     *  @param s  the (scalar, position) to subtract
     */
    def - (s: Tuple2 [T, Int]): VectorN [T] =
    {
        val c = new VectorN [T] (dim)
        for (i <- range) c.v(i) = if (i == s._2) v(i) - s._1 else v(i)
        c
    } // -

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** From this vector subtract in-place vector 'b'.
     *  @param b  the vector to subtract
     */
    def -= (b: VectorN [T]): VectorN [T] = { for (i <- range) v(i) -= b.v(i); this }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** From this vector subtract in-place scalar 's'.
     *  @param s  the scalar to subtract
     */
    def -= (s: T): VectorN [T] = { for (i <- range) v(i) -= s; this }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Multiply this vector by scalar 's'.
     *  @param s  the scalar to multiply by
     */
    def * (s: T): VectorN [T] =
    {
        val c = new VectorN [T] (dim)
        for (i <- range) c.v(i) = v(i) * s
        c
    } // *

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Multiply this vector by vector 'b' (element-by-element).
     *  @param b  the vector to multiply by
     */
    def * (b: VectorN [T]): VectorN [T] =
    {
        val c = new VectorN [T] (dim)
        for (i <- range) c.v(i) = v(i) * b.v(i)
        c
    } // *

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Multiply this 'row' vector by matrix 'm'.
     *  @param m  the matrix to multiply by
     */
    def * (m: Matrix [T]): VectorN [T] = m.t * this

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Multiply in-place this vector and vector 'b'.
     *  @param b  the vector to multiply by
     */
    def *= (b: VectorN [T]): VectorN [T] = { for (i <- range) v(i) *= b.v(i); this }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Multiply in-place this vector and scalar 's'.
     *  @param s  the scalar to multiply by
     */
    def *= (s: T): VectorN [T] = { for (i <- range) v(i) *= s; this }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Divide this vector by vector 'b' (element-by-element).
     *  @param b  the vector to divide by
     */
    def / (b: VectorN [T]) (implicit fr: Fractional [T]): VectorN [T] =
    {
        import fr._
        val c = new VectorN [T] (dim)
        for (i <- range) c.v(i) = v(i) / b.v(i)
        c
    } // /

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Divide this vector by scalar 's'.
     *  @param s  the scalar to divide by
     */
    def / (s: T) (implicit fr: Fractional [T]): VectorN [T] =
    {
        import fr._
        val c = new VectorN [T] (dim)
        for (i <- range) c.v(i) = v(i) / s
        c
    } // /

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Divide in-place this vector and vector 'b'.
     *  @param b  the vector to divide by
     */
    def /= (b: VectorN [T]) (implicit fr: Fractional [T]): VectorN [T] =
    {
        import fr._
        for (i <- range) v(i) /= b.v(i)
        this
    } // /=

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Divide in-place this vector and scalar 's'.
     *  @param s  the scalar to divide by
     */
    def /= (s: T) (implicit fr: Fractional [T]): VectorN [T] =
    {
        import fr._
        for (i <- range) v(i) /= s
        this
    } // /=

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the vector containing each element of this vector raised to the
     *  's'th power.
     *  @param s  the scalar exponent
     */
//  def ~^ (s: T) (implicit fr: Fractional [T]): VectorN [T] =
//  {
//      import fr._
//      val c = new VectorN [T] (dim)
//      for (i <- range) c.v(i) = math.pow (v(i), s)
//      c
//  } // ~^

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Raise each element of this vector to the 's'th power.
     *  @param s  the scalar exponent
     */
//  def ~^= (s: T) (implicit fr: Fractional [T])
//  {
//      import fr._
//      for (i <- range) v(i) = math.pow (v(i), s)
//  } // ~^

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Square each element of this vector.
     */
    def sq: VectorN [T] = this * this

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the vector that is the element-wise absolute value of this vector.
     */
    def abs: VectorN [T] =
    {
        val c = new VectorN [T] (dim)
        for (i <- range) c.v(i) = v(i).abs
        c
    } // abs

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Sum the elements of this vector.
     */
    def sum: T = v.sum

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Sum the elements of this vector skipping the 'i'th element.
     *  @param i  the index of the element to skip
     */
    def sum_ne (i: Int): T = sum - v(i)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Sum the positive (> 0) elements of this vector.
     */
    def sum_pos: T =
    {
        var sum = _0
        for (i <- range if v(i) > _0) sum += v(i)
        sum
    } // sum_pos

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Cumulate the values of this vector from left to right (e.g., create a
     *  cdf from a pmf).  Example: (4, 2, 3, 1) --> (4, 6, 9, 10)
     */
    def cumulate: VectorN [T] =
    {
        var sum = _0
        val c = new VectorN [T] (dim)
        for (i <- range) { sum += v(i); c.v(i) = sum }
        c
    } // cumulate

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Normalize this vector so that it sums to one (like a probability vector).
     */
    def normalize (implicit fr: Fractional [T]): VectorN [T] =
    {
        import fr._
        this * (one / sum)
    } // normalize

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Normalize this vector so its length is one (unit vector).
     */
//  def normalizeU (implicit fr: Fractional [T]): VectorN [T] =
//  {
//      import fr._
//      this * (one / norm)
//  } // normalizeU

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Normalize this vector to have a maximum of one.
     */
    def normalize1 (implicit fr: Fractional [T]): VectorN [T] =
    {
        import fr._
        this * (one / this.max ())
    } // normalize1

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the dot product (or inner product) of this vector with vector 'b'.
     *  @param b  the other vector
     */
    def dot (b: VectorN [T]): T =
    {
        var s = _0
        for (i <- range) s += v(i) * b.v(i)
        s
    } // dot

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the Euclidean norm (2-norm) squared of this vector.
     */
    def normSq: T = this dot this

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the Euclidean norm (2-norm) of this vector (requires `Fractional` type).
     */
    def norm (implicit fr: Fractional [T]): Double = sqrt (normSq.toDouble)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the Manhattan norm (1-norm) of this vector.
     */
    def norm1: T =
    {
        var sum = _0
        for (i <- range) sum += nu.abs (v(i))
        sum
    } // norm1

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Find the maximum element in this vector.
     *  @param e  the ending index (exclusive) for the search
     */
    def max (e: Int = dim): T = v.slice (0, e).max

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Take the maximum of this vector with vector 'b' (element-by-element).
     *  @param b  the other vector
     */
    def max (b: VectorN [T]): VectorN [T] =
    {
        val c = new VectorN [T] (dim)
        for (i <- range) c.v(i) = if (b.v(i) > v(i)) b.v(i) else v(i)
        c
    } // max

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Find the minimum element in this vector.
     *  @param e  the ending index (exclusive) for the search
     */
    def min (e: Int = dim): T = v.slice (0, e).min

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Take the minimum of this vector with vector 'b' (element-by-element).
     *  @param b  the other vector
     */
    def min (b: VectorN [T]): VectorN [T] =
    {
        val c = new VectorN [T] (dim)
        for (i <- range) c.v(i) = if (b.v(i) < v(i)) b.v(i) else v(i)
        c
    } // min

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Find the element with the greatest magnitude in this vector.
     */
    def mag: T = nu.abs (max ()) max nu.abs (min ())

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Find the argument maximum of this vector (index of maximum element).
     *  @param e  the ending index (exclusive) for the search
     */
    def argmax (e: Int = dim): Int =
    {
        var j = 0
        for (i <- 1 until e if v(i) > v(j)) j = i
        j
    } // argmax

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Find the argument minimum of this vector (index of minimum element).
     *  @param e  the ending index (exclusive) for the search
     */
    def argmin (e: Int = dim): Int =
    {
        var j = 0
        for (i <- 1 until e if v(i) < v(j)) j = i
        j
    } // argmin

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the argument minimum of this vector (-1 if its not negative).
     *  @param e  the ending index (exclusive) for the search
     */
    def argminNeg (e: Int = dim): Int =
    {
        val j = argmin (e); if (v(j) < _0) j else -1
    } // argminNeg

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the argument maximum of this vector (-1 if its not positive).
     *  @param e  the ending index (exclusive) for the search
     */
    def argmaxPos (e: Int = dim): Int =
    {
        val j = argmax (e); if (v(j) > _0) j else -1
    } // argmaxPos

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the index of the first negative element in this vector (-1 otherwise).
     *  @param e  the ending index (exclusive) for the search
     */
    def firstNeg (e: Int = dim): Int =
    {
        for (i <- 0 until e if v(i) < _0) return i; -1
    } // firstNeg

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the index of the first positive element in this vector (-1 otherwise).
     *  @param e  the ending index (exclusive) for the search
     */
    def firstPos (e: Int = dim): Int =
    {
        for (i <- 0 until e if v(i) > _0) return i; -1
    } // firstPos

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Count the number of strictly negative entries in this vector.
     */
    def countNeg: Int =
    {
        var count = 0
        for (i <- 0 until dim if v(i) < _0) count += 1
        count
    } // countNeg

    //:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Count the number of strictly positive entries in this vector.
     */
    def countPos: Int =
    {
        var count = 0
        for (i <- 0 until dim if v(i) > _0) count += 1
        count
    } // countPos

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Determine whether the predicate 'pred' holds for some element in this vector.
     *  @param pred  the predicate to test (e.g., "_ == 5.")
     */
//  def exists (pred: (T) => Boolean): Boolean = v.exists (pred)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Determine whether 'x' is contained in this vector.
     *  @param x  the element to be checked
     */
    def contains (x: T): Boolean = v.contains (x)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Sort this vector in-place in non-decreasing order.
     */
    def sort () { quickSort (v) }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Check whether the other vector is at least as long as this vector.
     *  @param b  the other vector
     */
    def sameDimensions (b: VectorN [T]): Boolean = dim <= b.dim

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Check whether this vector is nonnegative (has no negative elements).
     */
    def isNonnegative: Boolean =
    {
        for (i <- range if v(i) < _0) return false
        true
    } // isNonnegative

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compare this vector with vector 'b'.
     *  @param b  the other vector
     */
    def tryCompareTo [B >: VectorN [T]] (b: B)
        (implicit view$1: (B) => PartiallyOrdered [B]): Option [Int] =
    {
        var le = true
        var ge = true
        for (i <- range) {
            val b_i = b.asInstanceOf [VectorN [T]] (i)
            if (ge && (v(i) compare b_i) < 0) ge = false
            else if (le && (v(i) compare b_i) > 0) le = false
        } // for
        if (ge && le) Some (0) else if (le) Some (-1) else if (ge) Some (1) else None
    } // tryCompareTo

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Override equals to determine whether this vector equals vector 'b'.
     *  Bug fix: the previous pattern 'case VectorN =>' matched the companion
     *  object (a stable identifier), never an instance, so equals was always
     *  false for actual vectors.  Use a typed pattern instead.
     *  @param b  the vector to compare with this
     */
    override def equals (b: Any): Boolean =
    {
        b match {
        case _: VectorN [_] => (v.deep equals b.asInstanceOf [VectorN [T]].v.deep)
        case _              => false
        } // match
    } // equals

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Must also override hashCode to be compatible with equals.
     */
    override def hashCode: Int = v.deep.hashCode

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Hash a vector into an integer.  Serves as the default hash function for
     *  vectors.  Warning, collisions may be unavoidable.
     *  @param x  the vector of type T to hash
     */
//  override def hashCode (): Int =
//  {
//      if (dim > prime.length) flaw ("hash", "not enough primes for computing hash function")
//      var accum = 0
//      for (i <- range) accum ^= (ceil (v(i).toDouble * prime(i))).toInt
//      accum
//  } // hashCode

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Convert this vector to a string.
     */
    override def toString: String =
    {
        val sb = new StringBuilder ("VectorN(")
        for (i <- range) { sb.append (v(i)); sb.append(",\t") }
        sb.replace (sb.length-2, sb.length, ")").mkString
    } // toString

} // VectorN class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `VectorN` object is the companion object for `VectorN` class.
*/
object VectorN extends Error
{
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Convert the string 'xx' to type 'T', where 'skip' is a dummy value whose
     *  runtime class selects the target numeric type.  Factors out the match
     *  that was previously duplicated in three methods; also adds 'Double'
     *  support, which was missing.
     *  @param skip  dummy value used only to determine the element type
     *  @param xx    the string to convert
     */
    private def parse [T] (skip: T, xx: String): T =
    {
        skip match {
        case _: Int        => xx.toInt.asInstanceOf [T]
        case _: Long       => xx.toLong.asInstanceOf [T]
        case _: Float      => xx.toFloat.asInstanceOf [T]
        case _: Double     => xx.toDouble.asInstanceOf [T]
        case _: BigDecimal => BigDecimal (xx).asInstanceOf [T]
        case _             => { flaw ("parse", "type " + skip.getClass + " not supported"); skip }
        } // match
    } // parse

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a `VectorN [T]` from one or more values (repeated values T*).
     *  @param x   the first value
     *  @param xs  the rest of the values (zero or more additional values)
     */
    def apply [T <% Ordered [T]: ClassTag: Numeric] (x: T, xs: T*): VectorN [T] =
    {
        val c = new VectorN [T] (1 + xs.length)
        c(0)  = x
        for (i <- 1 until c.dim) c.v(i) = xs(i-1)
        c
    } // apply

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a `VectorN [T]` from one or more values (repeated values String*).
     *  @param skip  dummy value to set the element type
     *  @param x     the first String
     *  @param xs    the rest of the Strings
     */
    def apply [T <% Ordered [T]: ClassTag: Numeric] (skip: T, x: String, xs: String*): VectorN [T] =
    {
        val c = new VectorN [T] (1 + xs.length)
        for (i <- c.range) c.v(i) = parse (skip, if (i == 0) x else xs(i-1))
        c
    } // apply

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Create a `VectorN [T]` from an array of strings.
     *  @param skip  dummy value to set the element type
     *  @param xa    the array of Strings
     */
    def apply [T <% Ordered [T]: ClassTag: Numeric] (skip: T, xa: Array [String]): VectorN [T] =
    {
        val c = new VectorN [T] (xa.length)
        for (i <- c.range) c.v(i) = parse (skip, xa(i))
        c
    } // apply

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return a `VectorN` containing a sequence of increasing integers in a range.
     *  @param skip   dummy value to set the element type
     *  @param start  the start value of the vector, inclusive
     *  @param end    the end value of the vector, exclusive (i.e., the first value not returned)
     */
    def range [T <% Ordered [T]: ClassTag: Numeric] (skip: T, start: Int, end: Int): VectorN [T] =
    {
        val c = new VectorN [T] (end - start)
        for (i <- c.range) c.v(i) = parse (skip, (start + i).toString)
        c
    } // range

} // VectorN object
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `VectorNTest` object tests the operations provided by `VectorN` class.
*/
object VectorNTest extends App
{
    import Vectors._

    var a: VectorI = null
    var b: VectorI = null
    var c: VectorI = null
    var x: VectorF = null
    var y: VectorF = null

    for (k <- 1 to 4) {
        println (s"\n\tTest VectorN on integer vectors of dim $k")
        a = new VectorI (k)
        b = new VectorI (k)
        a.set (2)
        b.set (3)
        println (s"a + b    = ${a + b}")
        println (s"a - b    = ${a - b}")
        println (s"a * b    = ${a * b}")
        println (s"a * 4    = ${a * 4}")
        println (s"a.max    = ${a.max ()}")
        println (s"a.min    = ${a.min ()}")
        println (s"a.sum    = ${a.sum}")
        println (s"a.sum_ne = ${a.sum_ne (0)}")
        println (s"a dot b  = ${a dot b}")
        println (s"a.normSq = ${a.normSq}")
        println (s"a < b    = ${a < b}")
        a.foreach (e => print (" " + e))
        println

        println (s"\n\tTest VectorN on real vectors of dim $k")
        x = new VectorF (k)
        y = new VectorF (k)
        x.set (2)
        y.set (3)
        println (s"x + y    = ${x + y}")
        println (s"x - y    = ${x - y}")
        println (s"x * y    = ${x * y}")
        println (s"x * 4.0  = ${x * 4.0f}")
        println (s"x.min    = ${x.min ()}")
        println (s"x.max    = ${x.max ()}")
        println (s"x.sum    = ${x.sum}")
        println (s"x.sum_ne = ${x.sum_ne (0)}")
        println (s"x dot y  = ${x dot y}")
        println (s"x.normSq = ${x.normSq}")
        println (s"x.norm   = ${x.norm}")
        println (s"x < y    = ${x < y}")
    } // for

    c = VectorN (4, 2, 3, 1)
    println (s"c            = $c")
    println (s"c.cumulate   = ${c.cumulate}")
    println (s"range (1, 4) = ${VectorN.range (0, 1, 4)}")

    println (s"hashCode ($a) = ${a.hashCode ()}")
    println (s"hashCode ($b) = ${b.hashCode ()}")
    println (s"hashCode ($c) = ${c.hashCode ()}")
    println (s"hashCode ($x) = ${x.hashCode ()}")
    println (s"hashCode ($y) = ${y.hashCode ()}")

    val z = VectorN (0, "1", "2", "3", "4")
    println (s"z = $z")

} // VectorNTest object
| NBKlepp/fda | scalation_1.3/scalation_mathstat/src/main/scala/scalation/linalgebra/gen/VectorN.scala | Scala | mit | 33,968 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.ml.params
import ai.h2o.sparkling.ml.models.H2OFeatureEstimatorBase
import org.apache.spark.ml.param.{BooleanParam, Param}
import org.apache.spark.sql.types.{DoubleType, StructField, StructType}
trait H2OAutoEncoderExtraParams extends H2OFeatureEstimatorBase with HasOutputCol with HasInputColsOnMOJO {
// Spark ML Param declarations controlling which extra output columns the
// auto encoder produces, plus their default values.
// Original (input) column name parameter.
private val originalCol: Param[String] = new Param[String](
parent = this,
name = "originalCol",
doc = "Original column name. This column contains input values to the neural network of auto encoder.")
// Flag: whether the original column is produced at all.
private val withOriginalCol: Param[Boolean] = new BooleanParam(
parent = this,
name = "withOriginalCol",
doc = "A flag identifying whether a column with input values to the neural network will be produced or not.")
// Mean-square-error column name parameter.
private val mseCol: Param[String] = new Param[String](
parent = this,
name = "mseCol",
doc = "MSE column name. This column contains mean square error calculated from original and output values.")
// Flag: whether the MSE column is produced at all.
private val withMSECol: Param[Boolean] = new BooleanParam(
parent = this,
name = "withMSECol",
doc = "A flag identifying whether a column with mean square error will be produced or not.")
// Defaults: column names are derived from the stage uid (unique per stage);
// both optional columns are disabled unless explicitly requested.
setDefault(
originalCol -> (uid + "__original"),
withOriginalCol -> false,
mseCol -> (uid + "__mse"),
withMSECol -> false)
//
// Getters
//
def getOriginalCol(): String = $(originalCol)
def getWithOriginalCol(): Boolean = $(withOriginalCol)
def getMSECol(): String = $(mseCol)
def getWithMSECol(): Boolean = $(withMSECol)
//
// Setters
//
def setOriginalCol(name: String): this.type = set(originalCol -> name)
def setWithOriginalCol(flag: Boolean): this.type = set(withOriginalCol -> flag)
def setMSECol(name: String): this.type = set(mseCol -> name)
def setWithMSECol(flag: Boolean): this.type = set(withMSECol -> flag)
protected override def outputSchema: Seq[StructField] = {
val outputType = org.apache.spark.ml.linalg.SQLDataTypes.VectorType
val nil = Nil
val withReconstructionErrorField = if (getWithMSECol()) {
val reconstructionErrorField = StructField(getMSECol(), DoubleType, nullable = false)
reconstructionErrorField :: nil
} else {
nil
}
val withOriginalField = if (getWithOriginalCol()) {
val originalField = StructField(getOriginalCol(), outputType, nullable = false)
originalField :: withReconstructionErrorField
} else {
withReconstructionErrorField
}
val outputField = StructField(getOutputCol(), outputType, nullable = false)
outputField :: withOriginalField
}
protected override def validate(schema: StructType): Unit = {
require(getInputCols() != null && getInputCols().nonEmpty, "The list of input columns can't be null or empty!")
require(getOutputCol() != null, "The output column can't be null!")
require(getOriginalCol() != null || !getWithOriginalCol(), "The original column can't be null!")
require(getMSECol() != null || !getWithMSECol(), "The original column can't be null!")
val fieldNames = schema.fieldNames
getInputCols().foreach { inputCol =>
require(
fieldNames.contains(inputCol),
s"The specified input column '$inputCol' was not found in the input dataset!")
}
require(
!fieldNames.contains(getOutputCol()),
s"The output column '${getOutputCol()}' is already present in the dataset!")
require(
!fieldNames.contains(getOriginalCol()) || !getWithOriginalCol(),
s"The original column '${getOriginalCol()}' is already present in the dataset!")
require(
!fieldNames.contains(getMSECol()) || !getWithMSECol(),
s"The mean square error column '${getMSECol()}' is already present in the dataset!")
}
protected def copyExtraParams(to: H2OAutoEncoderExtraParams): Unit = {
to.set(to.inputCols -> getInputCols())
to.setOutputCol(getOutputCol())
to.setOriginalCol(getOriginalCol())
to.setWithOriginalCol(getWithOriginalCol())
to.setMSECol(getMSECol())
to.setWithMSECol(getWithMSECol())
}
}
| h2oai/sparkling-water | scoring/src/main/scala/ai/h2o/sparkling/ml/params/H2OAutoEncoderExtraParams.scala | Scala | apache-2.0 | 4,879 |
/*
* Copyright (c) 2013-14 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless
import scala.language.existentials
import scala.language.experimental.macros
import scala.reflect.macros.whitebox
import tag.@@
/**
 * A materialised witness for a singleton type: `T` is the singleton type itself
 * and `value` is its unique runtime inhabitant.
 */
trait Witness {
  type T
  // `T {}` is an empty refinement of T — presumably used to keep the compiler from
  // widening the singleton type of `value`; NOTE(review): confirm intent before changing.
  val value: T {}
}
object Witness {
  /** Witness whose singleton type is exactly `T0`. */
  type Aux[T0] = Witness { type T = T0 }

  /** Witness whose singleton type is some subtype of `Lub`. */
  type Lt[Lub] = Witness { type T <: Lub }

  // Materialises a Witness when the singleton type `T` is statically known.
  implicit def apply[T]: Witness.Aux[T] = macro SingletonTypeMacros.materializeImpl[T]

  // Converts a literal or stable value into a Witness of its singleton type.
  implicit def apply[T](t: T): Witness.Lt[T] = macro SingletonTypeMacros.convertImpl[T]

  // Hand-written witnesses for type-level naturals (these are not singleton types,
  // so the macros above cannot produce them).
  implicit val witness0: Witness.Aux[_0] =
    new Witness {
      type T = _0
      val value = Nat._0
    }

  implicit def witnessN[P <: Nat]: Witness.Aux[Succ[P]] =
    new Witness {
      type T = Succ[P]
      val value = new Succ[P]()
    }
}
/** A [[Witness]] bundled with a type-class instance `TC[T]` resolved for its singleton type. */
trait WitnessWith[TC[_]] extends Witness {
  val instance: TC[T]
}
/**
 * Lower-priority conversion for binary type constructors `TC2` partially applied at `S`,
 * tried only after [[WitnessWith.apply1]] fails to apply.
 */
trait LowPriorityWitnessWith {
  implicit def apply2[H, TC2[_ <: H, _], S <: H, T](t: T): WitnessWith.Lt[({ type λ[X] = TC2[S, X] })#λ, T] =
    macro SingletonTypeMacros.convertInstanceImpl2[H, TC2, S, T]
}
object WitnessWith extends LowPriorityWitnessWith {
  /** WitnessWith whose singleton type is exactly `T0`. */
  type Aux[TC[_], T0] = WitnessWith[TC] { type T = T0 }

  /** WitnessWith whose singleton type is some subtype of `Lub`. */
  type Lt[TC[_], Lub] = WitnessWith[TC] { type T <: Lub }

  // Converts a stable value into a WitnessWith, resolving a `TC` instance for its singleton type.
  implicit def apply1[TC[_], T](t: T): WitnessWith.Lt[TC, T] = macro SingletonTypeMacros.convertInstanceImpl1[TC, T]
}
/**
 * Whitebox macro bundle backing [[Witness]], [[WitnessWith]] and `SingletonOps`.
 *
 * All entry points reduce an expression or type to a singleton type plus a tree that
 * evaluates to its unique value, then synthesise an anonymous final class instance
 * exposing both. Supported singleton forms (see `extractResult`): literal constants,
 * stable paths, and `Symbol` literals (encoded as `Symbol @@ <constant>` tagged types).
 */
class SingletonTypeMacros(val c: whitebox.Context) {
  import syntax.SingletonOps

  type SingletonOpsLt[Lub] = SingletonOps { type T <: Lub }

  import c.universe._
  import internal._
  import decorators._

  // Cached type of scala.Symbol, matched against in `extractResult`/`SingletonSymbolType`.
  val SymTpe = typeOf[scala.Symbol]

  // Synthesises `{ final class $name extends Witness { type T = sTpe; val value = s }; new $name }`.
  def mkWitness(sTpe: Type, s: Tree): Tree = {
    val name = TypeName(c.freshName())
    q"""
      {
        final class $name extends Witness {
          type T = $sTpe
          val value: $sTpe = $s
        }
        new $name
      }
    """
  }

  // As `mkWitness`, but extending the given `parent` (a WitnessWith[...] applied type)
  // and carrying the resolved type-class instance tree `i`.
  def mkWitnessWith(parent: Type, sTpe: Type, s: Tree, i: Tree): Tree = {
    val name = TypeName(c.freshName())
    val iTpe = i.tpe.finalResultType
    q"""
      {
        final class $name extends $parent {
          val instance: $iTpe = $i
          type T = $sTpe
          val value: $sTpe = $s
        }
        new $name
      }
    """
  }

  // Synthesises a SingletonOps wrapper holding a witness `w` for singleton type `sTpe`.
  def mkOps(sTpe: Type, w: Tree): Tree = {
    val name = TypeName(c.freshName())
    q"""
      {
        final class $name extends _root_.shapeless.syntax.SingletonOps {
          type T = $sTpe
          val witness = $w
        }
        new $name
      }
    """
  }

  // Extractor matching the tree shape of a Symbol literal, e.g. `Symbol("foo")` / `'foo`.
  object LiteralSymbol {
    def unapply(t: Tree): Option[Constant] = t match {
      // Note: the bound `c: Constant` here shadows the macro context `c` within this case.
      case q""" scala.Symbol.apply(${Literal(c: Constant)}) """ => Some(c)
      case _ => None
    }
  }

  // Encodes/decodes the singleton type of a Symbol literal as `Symbol @@ <constant>`
  // (a tagged type), since Symbols themselves have no singleton types.
  object SingletonSymbolType {
    val atatTpe = typeOf[@@[_,_]].typeConstructor
    val TaggedSym = typeOf[tag.Tagged[_]].typeConstructor.typeSymbol
    def apply(c: Constant): Type = appliedType(atatTpe, List(SymTpe, constantType(c)))
    def unapply(t: Type): Option[Constant] =
      t match {
        case RefinedType(List(SymTpe, TypeRef(_, TaggedSym, List(ConstantType(c)))), _) => Some(c)
        case _ => None
      }
  }

  // Builds a Symbol value cast to its tagged singleton type.
  def mkSingletonSymbol(c: Constant): Tree = {
    val sTpe = SingletonSymbolType(c)
    q"""_root_.scala.Symbol($c).asInstanceOf[$sTpe]"""
  }

  // Implements `Witness.apply[T]`: derive the unique value from a statically known
  // singleton type, aborting for non-singleton type arguments.
  def materializeImpl[T: WeakTypeTag]: Tree = {
    val tpe = weakTypeOf[T].dealias
    val value =
      tpe match {
        case ConstantType(c: Constant) => Literal(c)
        case SingleType(p, v) if !v.isParameter => q"""$v.asInstanceOf[$tpe]"""
        case SingletonSymbolType(c) => mkSingletonSymbol(c)
        case _ =>
          c.abort(c.enclosingPosition, s"Type argument $tpe is not a singleton type")
      }
    mkWitness(tpe, value)
  }

  // Shared driver: reduce the expression `t` to (singleton type, value tree) and feed
  // the pair to `mkResult`; aborts when `t` is neither a constant nor a stable value.
  def extractResult[T](t: Expr[T])(mkResult: (Type, Tree) => Tree): Tree =
    (t.actualType, t.tree) match {
      case (tpe @ ConstantType(c: Constant), _) =>
        mkResult(tpe, Literal(c))
      case (tpe @ SingleType(p, v), tree) if !v.isParameter =>
        mkResult(tpe, tree)
      case (SymTpe, LiteralSymbol(c)) =>
        mkResult(SingletonSymbolType(c), mkSingletonSymbol(c))
      case _ =>
        c.abort(c.enclosingPosition, s"Expression ${t.tree} does not evaluate to a constant or a stable value")
    }

  // Implements `Witness.apply(t)`.
  def convertImpl[T](t: Expr[T]): Tree = extractResult(t)(mkWitness)

  // Implements `WitnessWith.apply1`: additionally resolves an implicit `TC[sTpe]`.
  def convertInstanceImpl1[TC[_], T](t: Expr[T])
    (implicit tcTag: WeakTypeTag[TC[_]]): Tree =
    extractResult(t) { (sTpe, value) =>
      val tc = tcTag.tpe.typeConstructor
      val wwTC = typeOf[WitnessWith[Nothing]].typeConstructor
      val parent = appliedType(wwTC, List(tc))
      val tci = appliedType(tc, List(sTpe))
      val i = c.inferImplicitValue(tci, silent = false)
      mkWitnessWith(parent, sTpe, value, i)
    }

  // Implements `LowPriorityWitnessWith.apply2` for binary type constructors partially
  // applied at `S`; rewrites the free type in the WitnessWith parent to `TC2`.
  def convertInstanceImpl2[H, TC2[_ <: H, _], S <: H, T](t: Expr[T])
    (implicit tc2Tag: WeakTypeTag[TC2[_, _]], sTag: WeakTypeTag[S]): Tree =
    extractResult(t) { (sTpe, value) =>
      val tc2 = tc2Tag.tpe.typeConstructor
      val s = sTag.tpe
      val parent = weakTypeOf[WitnessWith[({ type λ[X] = TC2[S, X] })#λ]].map {
        case TypeRef(prefix, sym, args) if sym.isFreeType =>
          typeRef(NoPrefix, tc2.typeSymbol, args)
        case tpe => tpe
      }
      val tci = appliedType(tc2, List(s, sTpe))
      val i = c.inferImplicitValue(tci, silent = false)
      mkWitnessWith(parent, sTpe, value, i)
    }

  // Implements the `narrow`/SingletonOps conversion for arbitrary expressions.
  def mkSingletonOps(t: Expr[Any]): Tree =
    extractResult(t) { (tpe, tree) => mkOps(tpe, mkWitness(tpe, tree)) }

  // Narrows a Symbol literal to its tagged singleton type `Symbol @@ S`,
  // verifying the literal's name matches the expected constant `S`.
  def narrowSymbol[S <: String : WeakTypeTag](t: Expr[scala.Symbol]): Tree = {
    (weakTypeOf[S], t.tree) match {
      case (ConstantType(Constant(s1)), LiteralSymbol(Constant(s2))) if s1 == s2 =>
        mkSingletonSymbol(Constant(s1))
      case _ =>
        c.abort(c.enclosingPosition, s"Expression ${t.tree} is not an appropriate Symbol literal")
    }
  }
}
| mandubian/shapeless | core/src/main/scala/shapeless/singletons.scala | Scala | apache-2.0 | 6,439 |
// Copyright 2016 zakski.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.szadowsz.maeve.core.browser
import java.util
import org.openqa.selenium.chrome.ChromeOptions
import org.openqa.selenium.remote.DesiredCapabilities
import org.openqa.selenium.{Capabilities, Platform, Proxy}
import collection.JavaConverters._
/**
 * Capability keys understood by [[MaeveConf]]. Each constant names an entry in the
 * configuration's capability map; defaults are applied by the corresponding getters.
 */
object MaeveConf {
  // Selenium proxy configuration (stored as an org.openqa.selenium.Proxy value).
  val PROXY = "proxy"
  // Browser feature toggles.
  val JAVASCRIPT = "enableJS"
  val INSECURE_SSL = "useInsecureSSL"
  val ENABLE_REDIRECT = "enableRedirect"
  val ENABLE_CSS = "enableCSS"
  val ENABLE_APPLET = "enableApplet"
  val ENABLE_POPUP_BLOCK = "enablePopupBlock"
  val ENABLE_GEO_TRACK = "enableGeoTrack"
  val ENABLE_DONT_TRACK = "enableDoNotTrack"
  val ENABLE_NATIVE_ACTIVEX = "enableActiveXNative"
  // Error-reporting behaviour on failing HTTP status codes / script errors.
  val THROW_FAIL_STATUS = "throwOnFailStatCode"
  val THROW_SCRIPT_ERROR = "throwOnScriptError"
  val PRINT_FAIL_STATUS = "printOnFailStatCode"
  // Chrome-specific preference keys used by buildChromeProfile.
  val CHROME_DONT_TRACK = "enable_do_not_track"
  val CHROME_PREFS = "prefs"
  // When set, skip verifying the configured proxy before use.
  val SKIP_TEST_PROXY = "skipProxyTest"
}
/**
 * Immutable browser configuration backed by a capability map.
 *
 * Implements Selenium's [[Capabilities]]; every setter returns a new `MaeveConf` with
 * the updated capability (the underlying map is never mutated). Getters fall back to
 * sensible defaults when a capability is absent.
 *
 * Created on 13/05/2016.
 *
 * @param caps capability name -> value; see [[MaeveConf]] for the recognised keys
 */
case class MaeveConf(protected val caps: Map[String, AnyRef] = Map()) extends Capabilities {

  /** True iff the capability is present and set to boolean `true`. */
  override def is(capabilityName: String): Boolean = {
    val cap = caps.get(capabilityName)
    cap.isDefined && cap.contains(true: java.lang.Boolean)
  }

  /** Raw capability lookup; returns null when absent (Selenium contract). */
  override def getCapability(capabilityName: String): AnyRef = caps.get(capabilityName).orNull

  //
  // Getters (with defaults applied when the capability is absent)
  //
  override def isJavascriptEnabled: Boolean = caps.getOrElse(MaeveConf.JAVASCRIPT, true).asInstanceOf[Boolean]

  def shouldSkipProxyTest: java.lang.Boolean = caps.getOrElse(MaeveConf.SKIP_TEST_PROXY, false).asInstanceOf[Boolean]

  def isUseInsecureSSL: java.lang.Boolean = caps.getOrElse(MaeveConf.INSECURE_SSL, false).asInstanceOf[Boolean]

  def isRedirectEnabled: java.lang.Boolean = caps.getOrElse(MaeveConf.ENABLE_REDIRECT, true).asInstanceOf[Boolean]

  def isCssEnabled: java.lang.Boolean = caps.getOrElse(MaeveConf.ENABLE_CSS, true).asInstanceOf[Boolean]

  def isAppletEnabled: java.lang.Boolean = caps.getOrElse(MaeveConf.ENABLE_APPLET, false).asInstanceOf[Boolean]

  def isPopupBlockerEnabled: java.lang.Boolean = caps.getOrElse(MaeveConf.ENABLE_POPUP_BLOCK, true).asInstanceOf[Boolean]

  def isGeolocationEnabled: java.lang.Boolean = caps.getOrElse(MaeveConf.ENABLE_GEO_TRACK, false).asInstanceOf[Boolean]

  def isDoNotTrackEnabled: java.lang.Boolean = caps.getOrElse(MaeveConf.ENABLE_DONT_TRACK, true).asInstanceOf[Boolean]

  def isThrowExceptionOnFailingStatusCode: java.lang.Boolean = caps.getOrElse(MaeveConf.THROW_FAIL_STATUS, false).asInstanceOf[Boolean]

  def isPrintContentOnFailingStatusCode: java.lang.Boolean = caps.getOrElse(MaeveConf.PRINT_FAIL_STATUS, true).asInstanceOf[Boolean]

  def isThrowExceptionOnScriptError: java.lang.Boolean = caps.getOrElse(MaeveConf.THROW_SCRIPT_ERROR, false).asInstanceOf[Boolean]

  def isActiveXNative: java.lang.Boolean = caps.getOrElse(MaeveConf.ENABLE_NATIVE_ACTIVEX, false).asInstanceOf[Boolean]

  // Fixed browser identity reported to Selenium.
  override def getVersion: String = "38"

  override def getPlatform: Platform = Platform.getCurrent

  override def getBrowserName: String = "firefox"

  /** The configured proxy, or an empty (direct-connection) Proxy when none was set. */
  def getProxy: Proxy = caps.getOrElse(MaeveConf.PROXY, new Proxy()).asInstanceOf[Proxy]

  //
  // Setters — each returns a new configuration with the capability updated.
  //
  /** Clears any proxy configuration by installing an empty Proxy. */
  def setNoProxy(): MaeveConf = {
    copy(caps = caps + (MaeveConf.PROXY -> new Proxy()))
  }

  /**
   * Configures an HTTP proxy.
   *
   * @param host         proxy host name
   * @param port         proxy port
   * @param noProxyHosts hosts that should bypass the proxy
   */
  def setHTTPProxy(host: String, port: Int, noProxyHosts: List[String]): MaeveConf = {
    val proxy = new Proxy()
    proxy.setHttpProxy(host + ":" + port)
    // Bug fix: noProxyHosts was previously ignored; Selenium expects a comma-separated list.
    if (noProxyHosts.nonEmpty) {
      proxy.setNoProxy(noProxyHosts.mkString(","))
    }
    copy(caps = caps + (MaeveConf.PROXY -> proxy))
  }

  def setCapability(key: String, value: AnyRef): MaeveConf = copy(caps = caps + (key -> value))

  def setJavaScriptEnabled(enableJS: java.lang.Boolean): MaeveConf = copy(caps = caps + (MaeveConf.JAVASCRIPT -> enableJS))

  def setUseInsecureSSL(useInsecureSSL: java.lang.Boolean): MaeveConf = copy(caps + (MaeveConf.INSECURE_SSL -> useInsecureSSL))

  def setRedirectEnabled(enableRedirect: java.lang.Boolean): MaeveConf = copy(caps + (MaeveConf.ENABLE_REDIRECT -> enableRedirect))

  def setSkipProxyTestEnabled(enable: java.lang.Boolean): MaeveConf = copy(caps + (MaeveConf.SKIP_TEST_PROXY -> enable))

  def setCssEnabled(enableCSS: java.lang.Boolean): MaeveConf = copy(caps + (MaeveConf.ENABLE_CSS -> enableCSS))

  def setAppletEnabled(enableApplet: java.lang.Boolean): MaeveConf = copy(caps + (MaeveConf.ENABLE_APPLET -> enableApplet))

  def setPopupBlockerEnabled(enablePopupBlock: java.lang.Boolean): MaeveConf = copy(caps + (MaeveConf.ENABLE_POPUP_BLOCK -> enablePopupBlock))

  def setGeolocationEnabled(enableGeoTrack: java.lang.Boolean): MaeveConf = copy(caps + (MaeveConf.ENABLE_GEO_TRACK -> enableGeoTrack))

  def setDoNotTrackEnabled(enableDoNotTrack: java.lang.Boolean): MaeveConf = copy(caps + (MaeveConf.ENABLE_DONT_TRACK -> enableDoNotTrack))

  def setThrowExceptionOnFailingStatusCode(throwOnFailStatCode: java.lang.Boolean): MaeveConf = copy(caps + (MaeveConf.THROW_FAIL_STATUS -> throwOnFailStatCode))

  def setPrintContentOnFailingStatusCode(printOnFailStatCode: java.lang.Boolean): MaeveConf = copy(caps + (MaeveConf.PRINT_FAIL_STATUS -> printOnFailStatCode))

  def setThrowExceptionOnScriptError(throwOnScriptError: java.lang.Boolean): MaeveConf = copy(caps + (MaeveConf.THROW_SCRIPT_ERROR -> throwOnScriptError))

  def setActiveXNative(enableActiveXNative: java.lang.Boolean): MaeveConf = copy(caps + (MaeveConf.ENABLE_NATIVE_ACTIVEX -> enableActiveXNative))

  /** Java view of the capability map (Selenium contract). */
  override def asMap(): util.Map[String, _] = caps.asJava

  /** Overlays this configuration's capabilities on top of `defaultConf`. */
  def overrideConf(defaultConf: MaeveConf): MaeveConf = {
    caps.foldLeft(defaultConf) { case (conf, (key, value)) => conf.setCapability(key, value) }
  }

  /** Translates this configuration into Chrome desired capabilities. */
  def buildChromeProfile: Capabilities = {
    val profile = DesiredCapabilities.chrome()
    val opts = new ChromeOptions()
    if (getProxy.getHttpProxy != null) {
      profile.setCapability(MaeveConf.PROXY, getProxy)
      opts.addArguments("--proxy-server=" + getProxy.getHttpProxy)
    }
    val preferences = new util.HashMap[String, Object]()
    // TODO translate more preferences
    preferences.put(MaeveConf.CHROME_DONT_TRACK, isDoNotTrackEnabled)
    opts.setExperimentalOption(MaeveConf.CHROME_PREFS, preferences)
    profile.setCapability(ChromeOptions.CAPABILITY, opts)
    profile
  }
}
| zakski/project-maeve | src/main/scala/com/szadowsz/maeve/core/browser/MaeveConf.scala | Scala | apache-2.0 | 7,721 |
package org.jetbrains.plugins.dotty.lang.psi.types
import com.intellij.psi._
import org.jetbrains.plugins.scala.extensions.PsiClassExt
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScTypeAliasDefinition
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.psi.types.api.designator.{ScDesignatorType, ScProjectionType}
import org.jetbrains.plugins.scala.lang.psi.types.api.{Any, StdType, arrayType}
import scala.collection.JavaConverters._
/**
* @author adkozlov
*/
/**
 * Dotty-specific bridge between IntelliJ PSI types and Scala plugin `ScType`s,
 * extending the generic bridge with Dotty union types and refined Array handling.
 */
trait DottyPsiTypeBridge extends api.PsiTypeBridge {
  typeSystem: api.TypeSystem =>

  /**
   * PSI -> ScType. Class and wildcard types collapse to `Any`; Java disjunction
   * types (multi-catch) become Dotty union types; everything else defers to the
   * generic bridge.
   */
  override def toScType(`type`: PsiType,
                        treatJavaObjectAsAny: Boolean)
                       (implicit visitedRawTypes: Set[PsiClass],
                        paramTopLevel: Boolean): ScType = `type` match {
    case _: PsiClassType => Any
    case _: PsiWildcardType => Any
    case disjunctionType: PsiDisjunctionType =>
      DottyOrType(disjunctionType.getDisjunctions.asScala.map {
        toScType(_, treatJavaObjectAsAny)
      })
    case _ => super.toScType(`type`, treatJavaObjectAsAny)
  }

  /**
   * ScType -> PSI. Designators and projections map onto PSI class types
   * (dereferencing type aliases); `scala.Array` refinements map onto PSI array
   * types (single argument) or a substituted class type; std types are handled
   * by `stdToPsiType`; anything unrecognised becomes java.lang.Object.
   */
  override def toPsiType(`type`: ScType, noPrimitives: Boolean): PsiType = {
    // Recursive conversion preserving the noPrimitives flag.
    def createComponent: ScType => PsiType =
      toPsiType(_, noPrimitives)
    `type` match {
      case ScDesignatorType(clazz: PsiClass) => createType(clazz)
      case projectionType: ScProjectionType =>
        projectionType.actualElement match {
          case clazz: PsiClass => createType(clazz, raw = true)
          case definition: ScTypeAliasDefinition => definition.aliasedType match {
            // Right = alias resolved successfully; otherwise fall back to Object.
            case Right(result) => createComponent(result)
            case _ => createJavaObject
          }
          case _ => createJavaObject
        }
      case refinedType@DottyRefinedType(ScDesignatorType(clazz: PsiClass), _, _) if clazz.qualifiedName == "scala.Array" =>
        refinedType.typeArguments match {
          // Array[T] with a single argument becomes a PSI array type.
          case Seq(designator) => new PsiArrayType(createComponent(designator))
          // Otherwise build a substitutor pairing each argument with the class's type parameters.
          case seq => createType(clazz,
            seq.zip(clazz.getTypeParameters)
              .foldLeft(PsiSubstitutor.EMPTY) {
                case (substitutor, (scType, typeParameter)) => substitutor.put(typeParameter,
                  toPsiType(scType, noPrimitives = true))
              })
        }
      case arrayType(arg) => new PsiArrayType(toPsiType(arg))
      case std: StdType => stdToPsiType(std, noPrimitives)
      case _ => createJavaObject
    }
  }
}
| gtache/intellij-lsp | intellij-lsp-dotty/src/org/jetbrains/plugins/dotty/lang/psi/types/DottyPsiTypeBridge.scala | Scala | apache-2.0 | 2,499 |
package org.phenoscape.scowl.ofn
import org.semanticweb.owlapi.apibinding.OWLManager
import org.semanticweb.owlapi.model._
import scala.jdk.CollectionConverters._
/**
 * Scala-friendly constructors/extractors ("OWL functional syntax" style) for OWL API
 * class axioms. Each inner object mirrors an OWL axiom type, offering `apply` overloads
 * (with and without annotations, varargs and Set forms) and an `unapply` for pattern
 * matching against existing axioms.
 */
trait ClassAxioms {

  // Shared OWL API data factory used by all constructors.
  private val factory = OWLManager.getOWLDataFactory

  object SubClassOf {
    def apply(annotations: Set[OWLAnnotation], subClass: OWLClassExpression, superClass: OWLClassExpression): OWLSubClassOfAxiom =
      factory.getOWLSubClassOfAxiom(subClass, superClass, annotations.asJava)

    def apply(annotations: OWLAnnotation*)(subClass: OWLClassExpression, superClass: OWLClassExpression): OWLSubClassOfAxiom =
      SubClassOf(annotations.toSet, subClass, superClass)

    def apply(subClass: OWLClassExpression, superClass: OWLClassExpression): OWLSubClassOfAxiom =
      SubClassOf(Set.empty, subClass, superClass)

    def unapply(axiom: OWLSubClassOfAxiom): Option[(Set[OWLAnnotation], OWLClassExpression, OWLClassExpression)] =
      Option((axiom.getAnnotations.asScala.toSet, axiom.getSubClass, axiom.getSuperClass))
  }

  // The common overloads/extractor for the n-ary axioms come from NaryClassAxiom;
  // only the primary Set-based constructor differs per axiom type.
  object EquivalentClasses extends NaryClassAxiom[OWLEquivalentClassesAxiom] {
    def apply(annotations: Set[OWLAnnotation], classExpressions: Set[_ <: OWLClassExpression]): OWLEquivalentClassesAxiom =
      factory.getOWLEquivalentClassesAxiom(classExpressions.asJava, annotations.asJava)
  }

  object DisjointClasses extends NaryClassAxiom[OWLDisjointClassesAxiom] {
    def apply(annotations: Set[OWLAnnotation], classExpressions: Set[_ <: OWLClassExpression]): OWLDisjointClassesAxiom =
      factory.getOWLDisjointClassesAxiom(classExpressions.asJava, annotations.asJava)
  }

  object DisjointUnion {
    def apply(annotations: Set[OWLAnnotation], aClass: OWLClass, classExpressions: Set[_ <: OWLClassExpression]): OWLDisjointUnionAxiom =
      factory.getOWLDisjointUnionAxiom(aClass, classExpressions.asJava, annotations.asJava)

    def apply(aClass: OWLClass, classExpressions: Set[_ <: OWLClassExpression]): OWLDisjointUnionAxiom =
      DisjointUnion(Set.empty[OWLAnnotation], aClass, classExpressions)

    def apply(annotations: OWLAnnotation*)(aClass: OWLClass, classExpressions: OWLClassExpression*): OWLDisjointUnionAxiom =
      DisjointUnion(annotations.toSet, aClass, classExpressions.toSet)

    def apply(aClass: OWLClass, classExpressions: OWLClassExpression*): OWLDisjointUnionAxiom =
      DisjointUnion(Set.empty[OWLAnnotation], aClass, classExpressions.toSet)

    def unapply(axiom: OWLDisjointUnionAxiom): Option[(Set[OWLAnnotation], OWLClass, Set[_ <: OWLClassExpression])] =
      Option((axiom.getAnnotations.asScala.toSet, axiom.getOWLClass, axiom.getClassExpressions.asScala.toSet))
  }

  object HasKey {
    // Object and data properties are passed to the OWL API as a single merged set;
    // unapply splits them back out via the axiom's typed accessors.
    def apply(annotations: Set[OWLAnnotation], classExpression: OWLClassExpression, objectProperties: Set[OWLObjectPropertyExpression], dataProperties: Set[OWLDataPropertyExpression]): OWLHasKeyAxiom =
      factory.getOWLHasKeyAxiom(classExpression, (objectProperties ++ dataProperties).asJava, annotations.asJava)

    def apply(annotations: OWLAnnotation*)(classExpression: OWLClassExpression, properties: OWLPropertyExpression*): OWLHasKeyAxiom =
      factory.getOWLHasKeyAxiom(classExpression, properties.toSet.asJava, annotations.toSet.asJava)

    def apply(classExpression: OWLClassExpression, properties: OWLPropertyExpression*): OWLHasKeyAxiom =
      factory.getOWLHasKeyAxiom(classExpression, properties.toSet.asJava)

    def unapply(axiom: OWLHasKeyAxiom): Option[(Set[OWLAnnotation], OWLClassExpression, Set[OWLObjectPropertyExpression], Set[OWLDataPropertyExpression])] =
      Option(axiom.getAnnotations.asScala.toSet, axiom.getClassExpression, axiom.getObjectPropertyExpressions.asScala.toSet, axiom.getDataPropertyExpressions.asScala.toSet)
  }
}
/**
 * Common constructor overloads and extractor for n-ary OWL class axioms
 * (EquivalentClasses, DisjointClasses). Implementors supply only the primary
 * Set-based `apply`; the remaining overloads delegate to it.
 */
trait NaryClassAxiom[T <: OWLNaryClassAxiom] {

  // Primary constructor, provided by each concrete axiom object.
  def apply(annotations: Set[OWLAnnotation], classExpressions: Set[_ <: OWLClassExpression]): T

  def apply(classExpressions: Set[_ <: OWLClassExpression]): T =
    apply(Set.empty[OWLAnnotation], classExpressions)

  def apply(annotations: OWLAnnotation*)(classExpressions: OWLClassExpression*): T =
    apply(annotations.toSet, classExpressions.toSet)

  def apply(classExpressions: OWLClassExpression*): T =
    apply(Set.empty[OWLAnnotation], classExpressions.toSet)

  def unapply(axiom: T): Option[(Set[OWLAnnotation], Set[_ <: OWLClassExpression])] =
    Option((axiom.getAnnotations.asScala.toSet, axiom.getClassExpressions.asScala.toSet))
}
package pages.theme
import net.liftweb.http.js.JsCmds._
import net.liftweb.util.Helpers
/** Small view helpers for the BWA theme pages (Lift). */
trait BWAUtils {

  /**
   * Renders a relative timestamp ("3 minutes ago") for the given epoch millis:
   * emits an empty span with a fresh id plus a moment.js snippet that fills it in
   * on the client.
   */
  def timeFrom(ts: Long) = {
    val elemId = Helpers.nextFuncName
    val fillIn = s"$$('#$elemId').text(moment.unix(${ts / 1000}).fromNow());"
    <span id={elemId}></span> ++ Script(Run(fillIn))
  }
}
| slynx-fw/slynx-demo | app/pages/theme/utils.scala | Scala | apache-2.0 | 274 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.yggdrasil
package table
import quasar.blueeyes._, json._
import quasar.precog.common._
import scalaz._, Scalaz._
import quasar.precog.TestSupport._
// import org.scalacheck._, Gen._, Arbitrary._
import SampleData._
import TableModule._
/** Ugh, without this import it still compiles but the tests
* no longer pass (specifically "heterogeneous sort keys case 2")
*/
import PrecogJValueOrder._
trait BlockSortSpec extends SpecificationLike with ScalaCheck {
def testSortDense(sample: SampleData, sortOrder: DesiredSortOrder, unique: Boolean, sortKeys: JPath*) = {
val module = BlockStoreTestModule.empty[Need]
val jvalueOrdering = scalaz.Order[JValue].toScalaOrdering
val desiredJValueOrder = if (sortOrder.isAscending) jvalueOrdering else jvalueOrdering.reverse
val globalIdPath = JPath(".globalId")
val original = if (unique) {
sample.data.map(_.toJValue).map { jv => JArray(sortKeys.map(_.extract(jv \\ "value")).toList) -> jv }.toMap.toList.unzip._2.toStream
} else {
sample.data.map(_.toJValue)
}
// We have to add in and then later remove the global Id (insert
// order) to match real sort semantics for disambiguation of equal
// values
val sorted = original.zipWithIndex.map {
case (jv, i) if sortOrder.isAscending => JValue.unsafeInsert(jv, globalIdPath, JNum(i))
case (jv, i) if !sortOrder.isAscending => JValue.unsafeInsert(jv, globalIdPath, JNum(-i))
}.sortBy { v =>
JArray(sortKeys.map(_.extract(v \\ "value")).toList ::: List(v \\ "globalId")).asInstanceOf[JValue]
}(desiredJValueOrder).map(_.delete(globalIdPath).get).toList
val cSortKeys = sortKeys map { CPath(_) }
val resultM = for {
sorted <- module.fromSample(sample).sort(module.sortTransspec(cSortKeys: _*), sortOrder)
json <- sorted.toJson
} yield (json, sorted)
val (result, resultTable) = resultM.copoint
result.toList.map(_.toJValue) must_== sorted
resultTable.size mustEqual ExactSize(sorted.size)
}
def checkSortDense(sortOrder: DesiredSortOrder) = {
implicit val gen = sample(objectSchema(_, 3))
prop { (sample: SampleData) => {
val Some((_, schema)) = sample.schema
testSortDense(sample, sortOrder, false, schema.map(_._1).head)
}}
}
// Simple test of sorting on homogeneous data
def homogeneousSortSample = {
val sampleData = SampleData(
(JParser.parseUnsafe("""[
{
"value":{
"uid":"joe",
"u":false,
"md":"t",
"l":[]
},
"key":[1]
},
{
"value":{
"uid":"al",
"u":false,
"md":"t",
"l":[]
},
"key":[2]
}
]""") --> classOf[JArray]).elements.toStream.flatMap(RValue.fromJValue),
Some(
(1 , List(JPath(".uid") -> CString, JPath(".u") -> CBoolean, JPath(".md") -> CString, JPath(".l") -> CEmptyArray))
)
)
testSortDense(sampleData, SortDescending, false, JPath(".uid"))
}
// Simple test of sorting on homogeneous data with objects
def homogeneousSortSampleWithNonexistentSortKey = {
val sampleData = SampleData(
(JParser.parseUnsafe("""[
{"key":[2],"value":6},
{"key":[1],"value":5}
]""") --> classOf[JArray]).elements.toStream.flatMap(RValue.fromJValue),
Some(
(1 , List(JPath(".") -> CString))
)
)
testSortDense(sampleData, SortDescending, false, JPath(".uid"))
}
// Simple test of partially undefined sort key data
def partiallyUndefinedSortSample = {
val sampleData = SampleData(
(JParser.parseUnsafe("""[
{
"value":{
"uid":"ted",
"rzp":{ },
"hW":1.0,
"fa":null
},
"key":[1]
},
{
"value":{
"rzp":{ },
"hW":2.0,
"fa":null
},
"key":[1]
}
]""") --> classOf[JArray]).elements.toStream.flatMap(RValue.fromJValue),
Some(
(1, List(JPath(".uid") -> CString, JPath(".fa") -> CNull, JPath(".hW") -> CDouble, JPath(".rzp") -> CEmptyObject))
)
)
testSortDense(sampleData, SortAscending, false, JPath(".uid"), JPath(".hW"))
}
def heterogeneousBaseValueTypeSample = {
val sampleData = SampleData(
(JParser.parseUnsafe("""[
{
"value": [0, 1],
"key":[1]
},
{
"value":{
"uid": "tom",
"abc": 2
},
"key":[2]
}
]""") --> classOf[JArray]).elements.toStream.flatMap(RValue.fromJValue),
Some(
(1, List(JPath("[0]") -> CLong, JPath("[1]") -> CLong, JPath(".uid") -> CString, JPath("abc") -> CLong))
)
)
testSortDense(sampleData, SortAscending, false, JPath(".uid"))
}
def badSchemaSortSample = {
val sampleData = SampleData(
(JParser.parseUnsafe("""[
{
"value":{
"vxu":[],
"q":-103811160446995821.5,
"u":5.548109504404496E+307
},
"key":[1.0,1.0]
},
{
"value":{
"vxu":[],
"q":-8.40213736307813554E+18,
"u":8.988465674311579E+307
},
"key":[1.0,2.0]
},
{
"value":{
"m":[],
"f":false
},
"key":[2.0,1.0]
}
]""") --> classOf[JArray]).elements.toStream.flatMap(RValue.fromJValue),
Some((2,List(
JPath(".m") -> CEmptyArray,
JPath(".f") -> CBoolean,
JPath(".u") -> CDouble,
JPath(".q") -> CNum,
JPath(".vxu") -> CEmptyArray))))
testSortDense(sampleData, SortAscending, false, JPath("q"))
}
// Simple test of sorting on heterogeneous data
def heterogeneousSortSample2 = {
val sampleData = SampleData(
(JParser.parseUnsafe("""[
{"key":[1,4,3],"value":{"b0":["",{"alxk":-1},-5.170005125478374E+307],"y":{"pvbT":[-1458654748381439976,{}]}}},
{"key":[1,4,4],"value":{"y":false,"qvd":[],"aden":{}}},
{"key":[3,3,3],"value":{"b0":["gxy",{"alxk":-1},6.614267528783459E+307],"y":{"pvbT":[1,{}]}}}
]""") --> classOf[JArray]).elements.toStream.flatMap(RValue.fromJValue),
None)
testSortDense(sampleData, SortDescending, false, JPath(".y"))
}
// Simple test of sorting on heterogeneous data
def heterogeneousSortSampleDescending = {
val sampleData = SampleData(
(JParser.parseUnsafe("""[
{"key":[2],"value":{"y":false}},
{"key":[3],"value":{"y":{"pvbT":1}}}
]""") --> classOf[JArray]).elements.toStream.flatMap(RValue.fromJValue),
None)
testSortDense(sampleData, SortDescending, false, JPath(".y"))
}
// Simple test of sorting on heterogeneous data
def heterogeneousSortSampleAscending = {
val sampleData = SampleData(
(JParser.parseUnsafe("""[
{"key":[2],"value":{"y":false}},
{"key":[3],"value":{"y":{"pvbT":1}}}
]""") --> classOf[JArray]).elements.toStream.flatMap(RValue.fromJValue),
None)
testSortDense(sampleData, SortAscending, false, JPath(".y"))
}
// Simple test of heterogeneous sort keys
def heterogeneousSortSample = {
val sampleData = SampleData(
(JParser.parseUnsafe("""[
{
"value":{
"uid": 12,
"f":{
"bn":[null],
"wei":1.0
},
"ljz":[null,["W"],true],
"jmy":4.639428637939817E307
},
"key":[1,2,2]
},
{
"value":{
"uid": 1.5,
"f":{
"bn":[null],
"wei":5.615997508833152E307
},
"ljz":[null,[""],false],
"jmy":-2.612503123965922E307
},
"key":[2,1,1]
}
]""") --> classOf[JArray]).elements.toStream.flatMap(RValue.fromJValue),
Some(
(3, List(JPath(".uid") -> CLong,
JPath(".uid") -> CDouble,
JPath(".f.bn[0]") -> CNull,
JPath(".f.wei") -> CDouble,
JPath(".ljz[0]") -> CNull,
JPath(".ljz[1][0]") -> CString,
JPath(".ljz[2]") -> CBoolean,
JPath(".jmy") -> CDouble))
)
)
testSortDense(sampleData, SortAscending, false, JPath(".uid"))
}
def secondHetSortSample = {
val sampleData = SampleData(
(JParser.parseUnsafe("""[
{
"value":[1.0,0,{
}],
"key":[3.0]
}, {
"value":{
"e":null,
"chl":-1.0,
"zw1":-4.611686018427387904E-27271
},
"key":[1.0]
}, {
"value":{
"e":null,
"chl":-8.988465674311579E+307,
"zw1":81740903825956729.9
},
"key":[2.0]
}]""") --> classOf[JArray]).elements.toStream.flatMap(RValue.fromJValue),
Some(
(1, List(JPath(".e") -> CNull,
JPath(".chl") -> CNum,
JPath(".zw1") -> CNum,
JPath("[0]") -> CLong,
JPath("[1]") -> CLong,
JPath("[2]") -> CEmptyObject))
)
)
testSortDense(sampleData, SortAscending, false, JPath(".zw1"))
}
  /* The following data set results in three separate JDBM
   * indices due to formats. This exposed a bug in mergeProjections
   * where we weren't properly inverting the cell matrix reorder
   * once one of the index slices expired. See commit
   * a253d47f3f6d09fd39afc2986c529e84e5443e7f for details
   */
  // Regression fixture: 21 rows whose value shapes (bare long vs. object of
  // three numeric fields) split across multiple indices; sorting on
  // ".zbtQhnpnun" forces the multi-cell merge path described above.
  def threeCellMerge = {
    val sampleData = SampleData(
      (JParser.parseUnsafe("""[
         {
           "value":-2355162409801206381,
           "key":[1.0,1.0,11.0]
         }, {
           "value":416748368221569769,
           "key":[12.0,10.0,5.0]
         }, {
           "value":1,
           "key":[9.0,13.0,2.0]
         }, {
           "value":4220813543874929309,
           "key":[13.0,10.0,11.0]
         }, {
           "value":{
             "viip":8.988465674311579E+307,
             "ohvhwN":-1.911181119089705905E+11774,
             "zbtQhnpnun":-4364598680493823671
           },
           "key":[8.0,12.0,6.0]
         }, {
           "value":{
             "viip":-8.610170336058498E+307,
             "ohvhwN":0.0,
             "zbtQhnpnun":-3072439692643750408
           },
           "key":[3.0,1.0,12.0]
         }, {
           "value":{
             "viip":1.0,
             "ohvhwN":1.255850949484045134E-25873,
             "zbtQhnpnun":-2192537798839555684
           },
           "key":[12.0,10.0,4.0]
         }, {
           "value":{
             "viip":-1.0,
             "ohvhwN":1E-18888,
             "zbtQhnpnun":-1
           },
           "key":[2.0,4.0,11.0]
         }, {
           "value":{
             "viip":1.955487389945603E+307,
             "ohvhwN":-2.220603033978414186E+19,
             "zbtQhnpnun":-1
           },
           "key":[6.0,11.0,5.0]
         }, {
           "value":{
             "viip":-4.022335964233546E+307,
             "ohvhwN":0E+1,
             "zbtQhnpnun":-1
           },
           "key":[8.0,7.0,13.0]
         }, {
           "value":{
             "viip":1.0,
             "ohvhwN":-4.611686018427387904E+50018,
             "zbtQhnpnun":0
           },
           "key":[1.0,13.0,12.0]
         }, {
           "value":{
             "viip":0.0,
             "ohvhwN":4.611686018427387903E+26350,
             "zbtQhnpnun":0
           },
           "key":[2.0,7.0,7.0]
         }, {
           "value":{
             "viip":-6.043665565176412E+307,
             "ohvhwN":-4.611686018427387904E+27769,
             "zbtQhnpnun":0
           },
           "key":[2.0,11.0,6.0]
         }, {
           "value":{
             "viip":-1.0,
             "ohvhwN":-1E+36684,
             "zbtQhnpnun":0
           },
           "key":[6.0,4.0,8.0]
         }, {
           "value":{
             "viip":-1.105552122908816E+307,
             "ohvhwN":6.78980055408249814E-41821,
             "zbtQhnpnun":1
           },
           "key":[13.0,6.0,11.0]
         }, {
           "value":{
             "viip":1.0,
             "ohvhwN":3.514965842146513368E-43185,
             "zbtQhnpnun":1133522166006977485
           },
           "key":[13.0,11.0,13.0]
         }, {
           "value":{
             "viip":8.988465674311579E+307,
             "ohvhwN":2.129060503704072469E+45099,
             "zbtQhnpnun":1232928328066014683
           },
           "key":[11.0,3.0,6.0]
         }, {
           "value":{
             "viip":6.651090528711015E+307,
             "ohvhwN":-1.177821034245149979E-49982,
             "zbtQhnpnun":2406980638624125853
           },
           "key":[4.0,5.0,7.0]
         }, {
           "value":{
             "viip":4.648002254349813E+307,
             "ohvhwN":4.611686018427387903E-42682,
             "zbtQhnpnun":2658995085512919727
           },
           "key":[12.0,2.0,8.0]
         }, {
           "value":{
             "viip":0.0,
             "ohvhwN":4.611686018427387903E-33300,
             "zbtQhnpnun":3464601040437655780
           },
           "key":[8.0,10.0,4.0]
         }, {
           "value":{
             "viip":-8.988465674311579E+307,
             "ohvhwN":1E-42830,
             "zbtQhnpnun":3709226396529427859
           },
           "key":[10.0,1.0,4.0]
         }
      ]""") --> classOf[JArray]).elements.toStream.flatMap(RValue.fromJValue),
      Some(
        (3, List(JPath(".zbtQhnpnun") -> CLong,
                 JPath(".ohvhwN") -> CNum,
                 JPath(".viip") -> CNum))
      )
    )
    testSortDense(sampleData, SortAscending, false, JPath(".zbtQhnpnun"))
  }
def uniqueSort = {
val sampleData = SampleData(
(JParser.parseUnsafe("""[
{ "key" : [2], "value" : { "foo" : 10 } },
{ "key" : [1], "value" : { "foo" : 10 } }
]""") --> classOf[JArray]).elements.toStream.flatMap(RValue.fromJValue),
Some(
(1 , List())
)
)
testSortDense(sampleData, SortAscending, false, JPath(".foo"))
}
def emptySort = {
val sampleData = SampleData(
(JParser.parseUnsafe("""[]""") --> classOf[JArray]).elements.toStream.flatMap(RValue.fromJValue),
Some(
(1 , List())
)
)
testSortDense(sampleData, SortAscending, false, JPath(".foo"))
}
}
| jedesah/Quasar | yggdrasil/src/test/scala/quasar/yggdrasil/table/BlockSortSpec.scala | Scala | apache-2.0 | 14,976 |
package com.twitter.zipkin.storage.hbase.mapping
import com.twitter.zipkin.storage.hbase.utils.{HBaseTable, IDGenerator}
import org.apache.hadoop.hbase.util.Bytes
/**
 * Mapping entry for a service name stored in the HBase mapping table.
 *
 * NOTE(review): `value: Array[Byte]` inside a case class means generated
 * equals/hashCode compare the array by reference, not by content — confirm
 * no caller relies on structural equality of ServiceMapping instances.
 */
case class ServiceMapping(id: Long, value: Array[Byte], mappingTable: HBaseTable, idGen: IDGenerator) extends Mapping {
  // Service mappings are roots of the mapping hierarchy: no parent.
  val parent: Option[Mapping] = None
  val annotationMapper = new AnnotationMapper(this)
  val spanNameMapper = new SpanNameMapper(this)
  // Decoded on first access; `value` holds the encoded service name bytes.
  lazy val name = Bytes.toString(value)
}
| pteichman/zipkin | zipkin-hbase/src/main/scala/com/twitter/zipkin/storage/hbase/mapping/ServiceMapping.scala | Scala | apache-2.0 | 465 |
package org.http4s
package headers
/** Verifies the `Content-Type` header companion against the shared header laws. */
class ContentTypeHeaderSpec extends HeaderLaws {
  checkAll("Content-Type", headerLaws(`Content-Type`))
}
| aeons/http4s | tests/src/test/scala/org/http4s/headers/ContentTypeHeaderSpec.scala | Scala | apache-2.0 | 142 |
package at.logic.gapt.integration_tests
import at.logic.gapt.examples.LinearExampleProof
import at.logic.gapt.expr._
import at.logic.gapt.expr.fol.Utils
import at.logic.gapt.expr.hol.containsQuantifier
import at.logic.gapt.proofs.{ Sequent, Ant }
import at.logic.gapt.proofs.expansionTrees.FOLInstanceTermEncoding
import at.logic.gapt.cutintro._
import at.logic.gapt.proofs.lkNew.quantRulesNumber
import at.logic.gapt.provers.basicProver.BasicProver
import at.logic.gapt.provers.prover9.Prover9
import org.specs2.mutable._
class CutIntroTest extends Specification {

  /**
   * The expected termset of the linear example proof of size n:
   * the numerals n-1, n-2, ..., 0, in that order (empty for n == 0).
   *
   * Built from a descending Range instead of non-tail recursion so large n
   * cannot overflow the stack.
   */
  private def LinearExampleTermset( n: Int ): List[FOLTerm] =
    ( n - 1 to 0 by -1 ).toList map Utils.numeral

  "CutIntroduction" should {
    "extract and decompose the termset of the linear example proof (n = 4)" in {
      if ( !Prover9.isInstalled ) skipped( "Prover9 is not installed" )
      val proof = LinearExampleProof( 4 )

      // Strip the outer function symbol from each instance term.
      val ( termset, _ ) = FOLInstanceTermEncoding( proof )
      val set = termset collect { case FOLFunction( _, List( arg ) ) => arg }

      CutIntroduction.one_cut_one_quantifier( proof, false ) must beSome
      set must contain( exactly( LinearExampleTermset( 4 ): _* ) )
    }

    "introduce two cuts into linear example proof with improveSolutionLK" in {
      // fun(n, t) = s^n(t): n-fold application of the successor symbol.
      def fun( n: Int, t: FOLTerm ): FOLTerm = if ( n == 0 ) t else FOLFunction( "s", fun( n - 1, t ) :: Nil )

      val proof = LinearExampleProof( 8 )

      // Eigenvariables of the two cuts and the base numeral.
      val a1 = FOLVar( "α_1" )
      val a2 = FOLVar( "α_2" )
      val zero = FOLConst( "0" )

      // Instance terms for the quantified end-sequent formulas.
      val u1 = a1
      val u2 = fun( 1, a1 )
      val us = for ( f <- proof.endSequent )
        yield f.asInstanceOf[FOLFormula] -> ( if ( containsQuantifier( f ) ) List( List( u1 ), List( u2 ) ) else List( List() ) )

      // Substitution terms for the two cut eigenvariables.
      val s11 = a2
      val s12 = fun( 2, a2 )
      val s21 = zero
      val s22 = fun( 4, zero )
      val ss = ( a1 :: Nil, ( s11 :: Nil ) :: ( s12 :: Nil ) :: Nil ) :: ( a2 :: Nil, ( s21 :: Nil ) :: ( s22 :: Nil ) :: Nil ) :: Nil

      val grammar = new SchematicExtendedHerbrandSequent( us, ss )
      val ehs = ExtendedHerbrandSequent( grammar, CutIntroduction.computeCanonicalSolution( grammar ) )
      val prover = BasicProver
      val result_new = improveSolutionLK( ehs, prover, hasEquality = false )
      val r_proof = CutIntroduction.buildProofWithCut( result_new, prover )

      // expected result: P(x) -> P(s^2(x)) and P(x) -> P(s^4(x)) as cut formulas.
      val cf1 = All( a1, FOLAtom( "P", a1 ) --> FOLAtom( "P", fun( 2, a1 ) ) )
      val cf2 = All( a2, FOLAtom( "P", a2 ) --> FOLAtom( "P", fun( 4, a2 ) ) )

      result_new.cutFormulas must beEqualTo( cf1 :: cf2 :: Nil )
      quantRulesNumber( r_proof ) must_== grammar.size
    }
  }
}
| loewenheim/gapt | src/test/scala/at/logic/gapt/integration_tests/CutIntroTest.scala | Scala | gpl-3.0 | 2,783 |
package play.api.libs.json
import org.specs2.mutable._
import play.api.libs.json._
import play.api.libs.json.Json._
import play.api.libs.functional.syntax._
import scala.util.control.Exception._
import java.text.ParseException
import play.api.data.validation.ValidationError
/**
 * Behavioural tests for play-json: AST equality, recursive/optional Formats,
 * numeric precision, null handling, pretty-printing and the functional
 * Writes combinators.
 */
object JsonSpec extends Specification {
  // Recursive model: `friends` refers back to User.
  case class User(id: Long, name: String, friends: List[User])
  // lazyFormat defers evaluation of UserFormat so the self-reference is legal
  // at construction time.
  implicit val UserFormat: Format[User] = (
    (__ \\ 'id).format[Long] and
    (__ \\ 'name).format[String] and
    (__ \\ 'friends).lazyFormat(Reads.list(UserFormat), Writes.list(UserFormat))
  )(User, unlift(User.unapply))
  case class Car(id: Long, models: Map[String, String])
  implicit val CarFormat = (
    (__ \\ 'id).format[Long] and
    (__ \\ 'models).format[Map[String, String]]
  )(Car, unlift(Car.unapply))
  import java.util.Date
  import java.text.SimpleDateFormat
  val dateFormat = "yyyy-MM-dd'T'HH:mm:ss'Z'" // Iso8601 format (forgot timezone stuff)
  val dateParser = new SimpleDateFormat(dateFormat)
  case class Post(body: String, created_at: Option[Date])
  // created_at is doubly optional on read (missing key vs. explicit null);
  // inmap flattens the resulting Option[Option[Date]] back to Option[Date].
  implicit val PostFormat = (
    (__ \\ 'body).format[String] and
    (__ \\ 'created_at).formatNullable[Option[Date]](
      Format(
        Reads.optionWithNull(Reads.dateReads(dateFormat)),
        Writes.optionWithNull(Writes.dateWrites(dateFormat))
      )
    ).inmap( optopt => optopt.flatten, (opt: Option[Date]) => Some(opt) )
  )(Post, unlift(Post.unapply))
  "JSON" should {
    // JsObject equality must be structural and order-insensitive, but still
    // sensitive to differing values or missing fields.
    "equals JsObject independently of field order" in {
      Json.obj(
        "field1" -> 123,
        "field2" -> "beta",
        "field3" -> Json.obj(
          "field31" -> true,
          "field32" -> 123.45,
          "field33" -> Json.arr("blabla", 456L, JsNull)
        )
      ) must beEqualTo(
        Json.obj(
          "field2" -> "beta",
          "field3" -> Json.obj(
            "field31" -> true,
            "field33" -> Json.arr("blabla", 456L, JsNull),
            "field32" -> 123.45
          ),
          "field1" -> 123
        )
      )
      Json.obj(
        "field1" -> 123,
        "field2" -> "beta",
        "field3" -> Json.obj(
          "field31" -> true,
          "field32" -> 123.45,
          "field33" -> Json.arr("blabla", JsNull)
        )
      ) must not equalTo(
        Json.obj(
          "field2" -> "beta",
          "field3" -> Json.obj(
            "field31" -> true,
            "field33" -> Json.arr("blabla", 456L),
            "field32" -> 123.45
          ),
          "field1" -> 123
        )
      )
      Json.obj(
        "field1" -> 123,
        "field2" -> "beta",
        "field3" -> Json.obj(
          "field31" -> true,
          "field32" -> 123.45,
          "field33" -> Json.arr("blabla", 456L, JsNull)
        )
      ) must not equalTo(
        Json.obj(
          "field3" -> Json.obj(
            "field31" -> true,
            "field33" -> Json.arr("blabla", 456L, JsNull),
            "field32" -> 123.45
          ),
          "field1" -> 123
        )
      )
    }
    "serialize and deserialize maps properly" in {
      val c = Car(1, Map("ford" -> "1954 model"))
      val jsonCar = toJson(c)
      jsonCar.as[Car] must equalTo(c)
    }
    // Round-trip through the recursive UserFormat defined above.
    "serialize and deserialize" in {
      val luigi = User(1, "Luigi", List())
      val kinopio = User(2, "Kinopio", List())
      val yoshi = User(3, "Yoshi", List())
      val mario = User(0, "Mario", List(luigi, kinopio, yoshi))
      val jsonMario = toJson(mario)
      jsonMario.as[User] must equalTo(mario)
    }
    "Complete JSON should create full Post object" in {
      val postJson = """{"body": "foobar", "created_at": "2011-04-22T13:33:48Z"}"""
      val expectedPost = Post("foobar", Some(dateParser.parse("2011-04-22T13:33:48Z")))
      val resultPost = Json.parse(postJson).as[Post]
      resultPost must equalTo(expectedPost)
    }
    "Optional parameters in JSON should generate post w/o date" in {
      val postJson = """{"body": "foobar"}"""
      val expectedPost = Post("foobar", None)
      val resultPost = Json.parse(postJson).as[Post]
      resultPost must equalTo(expectedPost)
    }
    // An explicit null for created_at must read as None, not fail.
    "Invalid parameters shoud be ignored" in {
      val postJson = """{"body": "foobar", "created_at":null}"""
      val expectedPost = Post("foobar", None)
      val resultPost = Json.parse(postJson).as[Post]
      resultPost must equalTo(expectedPost)
    }
    "Serialize long integers correctly" in {
      val t = 1330950829160L
      val m = Map("timestamp" -> t)
      val jsonM = toJson(m)
      (jsonM \\ "timestamp").as[Long] must equalTo(t)
      (jsonM.toString must equalTo("{\\"timestamp\\":1330950829160}"))
    }
    "Serialize and deserialize BigDecimals" in {
      val n = BigDecimal("12345678901234567890.42")
      val json = toJson(n)
      json must equalTo (JsNumber(n))
      fromJson[BigDecimal](json) must equalTo(JsSuccess(n))
    }
    "Not lose precision when parsing BigDecimals" in {
      val n = BigDecimal("12345678901234567890.123456789")
      val json = toJson(n)
      parse(stringify(json)) must equalTo(json)
    }
    "Not lose precision when parsing big integers" in {
      // By big integers, we just mean integers that overflow long, since Jackson has different code paths for them
      // from decimals
      val i = BigDecimal("123456789012345678901234567890")
      val json = toJson(i)
      parse(stringify(json)) must equalTo(json)
    }
    "Serialize and deserialize Lists" in {
      val xs: List[Int] = (1 to 5).toList
      val json = arr(1, 2, 3, 4, 5)
      toJson(xs) must equalTo (json)
      fromJson[List[Int]](json) must equalTo (JsSuccess(xs))
    }
    "Map[String,String] should be turned into JsValue" in {
      val f = toJson(Map("k"->"v"))
      f.toString must equalTo("{\\"k\\":\\"v\\"}")
    }
    "Can parse recursive object" in {
      val recursiveJson = """{"foo": {"foo":["bar"]}, "bar": {"foo":["bar"]}}"""
      val expectedJson = JsObject(List(
        "foo" -> JsObject(List(
          "foo" -> JsArray(List[JsValue](JsString("bar")))
        )),
        "bar" -> JsObject(List(
          "foo" -> JsArray(List[JsValue](JsString("bar")))
        ))
      ))
      val resultJson = Json.parse(recursiveJson)
      resultJson must equalTo(expectedJson)
    }
    "Can parse null values in Object" in {
      val postJson = """{"foo": null}"""
      val parsedJson = Json.parse(postJson)
      val expectedJson = JsObject(List("foo" -> JsNull))
      parsedJson must equalTo(expectedJson)
    }
    "Can parse null values in Array" in {
      val postJson = """[null]"""
      val parsedJson = Json.parse(postJson)
      val expectedJson = JsArray(List(JsNull))
      parsedJson must equalTo(expectedJson)
    }
    // The expected string below encodes the exact whitespace produced by
    // prettyPrint; do not re-indent it.
    "JSON pretty print" in {
      val js = Json.obj(
        "key1" -> "toto",
        "key2" -> Json.obj("key21" -> "tata", "key22" -> 123),
        "key3" -> Json.arr(1, "tutu")
      )
      Json.prettyPrint(js) must beEqualTo("""{
  "key1" : "toto",
  "key2" : {
    "key21" : "tata",
    "key22" : 123
  },
  "key3" : [ 1, "tutu" ]
}""")
    }
    "null root object should be parsed as JsNull" in {
      parse("null") must_== JsNull
    }
  }
  "JSON Writes" should {
    "write list/seq/set/map" in {
      import util._
      import Writes._
      Json.toJson(List(1, 2, 3)) must beEqualTo(Json.arr(1, 2, 3))
      Json.toJson(Set("alpha", "beta", "gamma")) must beEqualTo(Json.arr("alpha", "beta", "gamma"))
      Json.toJson(Seq("alpha", "beta", "gamma")) must beEqualTo(Json.arr("alpha", "beta", "gamma"))
      Json.toJson(Map("key1" -> "value1", "key2" -> "value2")) must beEqualTo(Json.obj("key1" -> "value1", "key2" -> "value2"))
      // Tupled writes: each component of the 4-tuple is written under its own key.
      implicit val myWrites = (
        (__ \\ 'key1).write(constraints.list[Int]) and
        (__ \\ 'key2).write(constraints.set[String]) and
        (__ \\ 'key3).write(constraints.seq[String]) and
        (__ \\ 'key4).write(constraints.map[String])
      ).tupled
      Json.toJson( List(1, 2, 3),
        Set("alpha", "beta", "gamma"),
        Seq("alpha", "beta", "gamma"),
        Map("key1" -> "value1", "key2" -> "value2")
      ) must beEqualTo(
        Json.obj(
          "key1" -> Json.arr(1, 2, 3),
          "key2" -> Json.arr("alpha", "beta", "gamma"),
          "key3" -> Json.arr("alpha", "beta", "gamma"),
          "key4" -> Json.obj("key1" -> "value1", "key2" -> "value2")
        )
      )
    }
    // Writes may target nested paths: attr1/attr2 land under the "data" object.
    "write in 2nd level" in {
      case class TestCase(id: String, attr1: String, attr2: String)
      val js = Json.obj(
        "id" -> "my-id",
        "data" -> Json.obj(
          "attr1" -> "foo",
          "attr2" -> "bar"
        )
      )
      implicit val testCaseWrites: Writes[TestCase] = (
        (__ \\ "id").write[String] and
        (__ \\ "data" \\ "attr1").write[String] and
        (__ \\ "data" \\ "attr2").write[String]
      )(unlift(TestCase.unapply))
      Json.toJson(TestCase("my-id", "foo", "bar")) must beEqualTo(js)
    }
  }
}
| michaelahlers/team-awesome-wedding | vendor/play-2.2.1/framework/src/play-json/src/test/scala/play/api/libs/json/JsonSpec.scala | Scala | mit | 8,943 |
package com.cloudray.scalapress.search
/** @author Stephen Samuel */
/**
 * One page of search output: the matching item references, the facet
 * breakdown, and the total hit count across all pages.
 */
case class SearchResult(refs: Seq[ItemRef] = Nil,
                        facets: Seq[Facet] = Nil,
                        count: Long = 0)
/**
 * Lightweight reference to an indexed item.
 *
 * @param attributes  attribute values keyed by attribute id
 * @param folders     ids of the folders containing the item
 * @param prioritized whether the item should be ranked ahead of its peers
 */
case class ItemRef(id: Long,
                   itemType: Long,
                   name: String,
                   status: String,
                   attributes: Map[Long, String],
                   folders: Seq[Long],
                   prioritized: Boolean = false) {
  // Bare @deprecated (no arguments) is itself deprecated; give callers the
  // replacement and a since marker. TODO confirm the release version.
  @deprecated("use itemType instead", "")
  def objectType = itemType
}
| vidyacraghav/scalapress | src/main/scala/com/cloudray/scalapress/search/SearchResult.scala | Scala | apache-2.0 | 529 |
package org.apache.spark.ml.parity.clustering
import org.apache.spark.ml.parity.SparkParityBase
import org.apache.spark.ml.clustering.KMeans
import org.apache.spark.ml.feature.{StringIndexer, VectorAssembler}
import org.apache.spark.ml.{Pipeline, Transformer}
import org.apache.spark.sql.DataFrame
/**
 * Parity test: a Spark pipeline ending in KMeans must behave identically
 * after MLeap serialization (checked by SparkParityBase).
 *
 * Created by hollinwilkins on 10/30/16.
 */
class KMeansParitySpec extends SparkParityBase {
  // Columns under test: two numerics plus a categorical FICO score group.
  override val dataset: DataFrame = baseDataset.select("dti", "loan_amount", "fico_score_group_fnl")
  // Pipeline: index the categorical column, assemble the feature vector,
  // then fit k-means on it.
  override val sparkTransformer: Transformer = new Pipeline().setStages(Array(new StringIndexer().
    setInputCol("fico_score_group_fnl").
    setOutputCol("fico_index"),
    new VectorAssembler().
      setInputCols(Array("fico_index", "dti")).
      setOutputCol("features"),
    new KMeans().
      setFeaturesCol("features").
      setPredictionCol("prediction"))).fit(dataset)
  // Params excluded from the serialized-model comparison (training-only
  // settings that are not expected to round-trip).
  override val unserializedParams = Set("stringOrderType", "initMode", "initSteps", "maxIter", "tol", "k", "seed")
}
| combust-ml/mleap | mleap-spark/src/test/scala/org/apache/spark/ml/parity/clustering/KMeansParitySpec.scala | Scala | apache-2.0 | 1,002 |
package angular
import com.greencatsoft.angularjs._
import com.greencatsoft.angularjs.core.Scope
import com.greencatsoft.angularjs.internal.ServiceProxy
import scala.language.experimental.macros
import scala.scalajs.js
import scala.scalajs.js.annotation.JSExportDescendentClasses
import scalatags.Text
/** Scalatags attribute aliases for Angular directive attributes. */
trait DirectiveTags extends Text.Cap with Text.Aggregate with Text.Attrs {
  val ngModel = "ng-model".attr
  val ngClick = "ng-click".attr
}
/** An element directive whose template is rendered from a Scalatags tree. */
trait ScalaTagsDirective extends ElementDirective with TemplateSourceDirective with DirectiveTags {
  /** The Scalatags tree to render; implementors supply this. */
  def tag: Tag
  // Rendered once on first access and cached for the directive's lifetime.
  override lazy val template = tag.toString()
}
/** A Scalatags directive with an isolated scope, presumably backing a component. */
trait ComponentDirective extends ScalaTagsDirective with IsolatedScope {
  // Macro-expanded wrapper around controller class A (see ServiceProxy.newClassWrapper).
  protected def proxy[A <: Controller[_]]: js.Any = macro ServiceProxy.newClassWrapper[A]
}
@JSExportDescendentClasses
abstract class ComponentController[S <: Scope](scope: S) extends AbstractController(scope) | tmonney/scalajs-ionic | scalajs/src/main/scala/angular/BaseDirective.scala | Scala | mit | 922 |
package health
import akka.actor.{ActorSystem, Props}
import akka.io.IO
import spray.can.Http
import akka.pattern.ask
import akka.util.Timeout
/** Application entry point: boots the actor system and binds the HTTP checker service. */
object Boot {

  // Explicit main instead of `extends App`: the App trait's delayed
  // initialization makes field-initialization order surprising and is
  // discouraged for non-trivial entry points.
  def main(args: Array[String]): Unit = {
    // NOTE(review): "healt-check" looks like a typo for "health-check"; left
    // unchanged because external tooling may reference the actor-system name.
    implicit val system = ActorSystem("healt-check")

    val service = system.actorOf(Props[CheckerActor], "checker-service")

    implicit val timeout = Timeout(Environment.timeout)

    // Bind on all interfaces. The ask returns a Future that is not awaited;
    // bind failures surface only through the actor system's logging.
    IO(Http) ? Http.Bind(service, interface = "0.0.0.0", port = Environment.port)
  }
}
| ExpatConnect/health | src/main/scala/health/Boot.scala | Scala | mit | 433 |
/**
* Copyright (C) 2007 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.model
import org.orbeon.oxf.xforms.event.{XFormsEventObserver, EventHandlerImpl, XFormsEventHandler}
import org.orbeon.oxf.xforms.submission.XFormsModelSubmission
import org.orbeon.oxf.xforms._
import org.orbeon.xforms.XFormsId
/**
 * Action handler nested directly within an XForms model or submission.
 * Its effective id and XPath evaluation context are derived from the parent
 * observer rather than carried by the action itself.
 */
class XFormsModelAction(parent: XFormsEventObserver, eventHandler: EventHandlerImpl) extends XFormsEventHandler with XFormsObject {
    // Effective id combines the parent's effective id with this handler's static id.
    val getEffectiveId = XFormsId.getRelatedEffectiveId(parent.getEffectiveId, eventHandler.staticId)
    // Container and document are delegated to the parent observer.
    def container = parent.container
    def containingDocument = parent.containingDocument
    // This is called by EventHandlerImpl when determining the XPath context for nested event handlers
    def bindingContext = parent match {
        case model: XFormsModel ⇒
            // Use the model's inner context
            model.getDefaultEvaluationContext
        case submission: XFormsModelSubmission ⇒
            // Evaluate the binding of the submission element based on the model's inner context
            // NOTE: When the submission actually starts processing, the binding will be re-evaluated
            val contextStack = new XFormsContextStack(submission.container, submission.getModel.getDefaultEvaluationContext)
            contextStack.pushBinding(submission.getSubmissionElement, submission.getEffectiveId, submission.getModel.getResolutionScope)
            contextStack.getCurrentBindingContext
        case _ ⇒
            // We know we are either nested directly within the model, or within a submission
            throw new IllegalStateException
    }
} | brunobuzzi/orbeon-forms | xforms/jvm/src/main/scala/org/orbeon/oxf/xforms/model/XFormsModelAction.scala | Scala | lgpl-2.1 | 2,161 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package utils
import models.status.SubmissionStatus
import org.mockito.Matchers.any
import org.mockito.Mockito.when
import org.scalatestplus.mockito.MockitoSugar
import services.StatusService
import scala.concurrent.Future
/**
 * Test mixin providing a mocked [[StatusService]] plus a helper to stub the
 * application status it reports.
 */
trait StatusMocks extends MockitoSugar {

  // Explicit type annotation: implicit definitions should always declare
  // their type (mandatory in Scala 3; avoids implicit-search inference
  // surprises in Scala 2).
  implicit val mockStatusService: StatusService = mock[StatusService]

  /** Stubs `getStatus` on the given (implicit) service to yield `status`. */
  def mockApplicationStatus(status: SubmissionStatus)(implicit service: StatusService) = when {
    service.getStatus(any[Option[String]], any(), any())(any(), any())
  } thenReturn Future.successful(status)
}
| hmrc/amls-frontend | test/utils/StatusMocks.scala | Scala | apache-2.0 | 1,140 |
/*
* Copyright 2001-2014 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalatest.SharedHelpers.{EventRecordingReporter, thisLineNumber}
import scala.concurrent.{Promise, ExecutionContext, Future}
import org.scalatest.concurrent.SleepHelper
import org.scalatest.events.{InfoProvided, MarkupProvided}
import org.scalatest.exceptions.DuplicateTestNameException
import scala.util.Success
class AsyncFlatSpecSpec extends FunSpec {
describe("AsyncFlatSpec") {
it("can be used for tests that return Future under parallel async test execution") {
class ExampleSpec extends AsyncFlatSpec with ParallelTestExecution {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
//SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
val a = 1
it should "test 1" in {
Future {
assert(a == 1)
}
}
it should "test 2" in {
Future {
assert(a == 2)
}
}
it should "test 3" in {
Future {
pending
}
}
it should "test 4" in {
Future {
cancel
}
}
it should "test 5" ignore {
Future {
cancel
}
}
override def newInstance = new ExampleSpec
}
val rep = new EventRecordingReporter
val spec = new ExampleSpec
val status = spec.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 4)
assert(rep.testSucceededEventsReceived.length == 1)
assert(rep.testSucceededEventsReceived(0).testName == "should test 1")
assert(rep.testFailedEventsReceived.length == 1)
assert(rep.testFailedEventsReceived(0).testName == "should test 2")
assert(rep.testPendingEventsReceived.length == 1)
assert(rep.testPendingEventsReceived(0).testName == "should test 3")
assert(rep.testCanceledEventsReceived.length == 1)
assert(rep.testCanceledEventsReceived(0).testName == "should test 4")
assert(rep.testIgnoredEventsReceived.length == 1)
assert(rep.testIgnoredEventsReceived(0).testName == "should test 5")
}
it("can be used for tests that did not return Future under parallel async test execution") {
class ExampleSpec extends AsyncFlatSpec with ParallelTestExecution {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
//SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
val a = 1
it should "test 1" in {
assert(a == 1)
}
it should "test 2" in {
assert(a == 2)
}
it should "test 3" in {
pending
}
it should "test 4" in {
cancel
}
it should "test 5" ignore {
cancel
}
override def newInstance = new ExampleSpec
}
val rep = new EventRecordingReporter
val spec = new ExampleSpec
val status = spec.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 4)
assert(rep.testSucceededEventsReceived.length == 1)
assert(rep.testSucceededEventsReceived(0).testName == "should test 1")
assert(rep.testFailedEventsReceived.length == 1)
assert(rep.testFailedEventsReceived(0).testName == "should test 2")
assert(rep.testPendingEventsReceived.length == 1)
assert(rep.testPendingEventsReceived(0).testName == "should test 3")
assert(rep.testCanceledEventsReceived.length == 1)
assert(rep.testCanceledEventsReceived(0).testName == "should test 4")
assert(rep.testIgnoredEventsReceived.length == 1)
assert(rep.testIgnoredEventsReceived(0).testName == "should test 5")
}
it("can be used with is for pending tests that don't return a Future") {
class ExampleSpec extends AsyncFlatSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
//SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
val a = 1
it should "test 1" is {
pending
}
it should "test 2" ignore {
pending
}
}
val rep = new EventRecordingReporter
val spec = new ExampleSpec
val status = spec.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 1)
assert(rep.testSucceededEventsReceived.length == 0)
assert(rep.testFailedEventsReceived.length == 0)
assert(rep.testPendingEventsReceived.length == 1)
assert(rep.testPendingEventsReceived(0).testName == "should test 1")
assert(rep.testCanceledEventsReceived.length == 0)
assert(rep.testIgnoredEventsReceived.length == 1)
assert(rep.testIgnoredEventsReceived(0).testName == "should test 2")
}
it("should run tests that return Future in serial by default") {
@volatile var count = 0
class ExampleSpec extends AsyncFlatSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
//SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
it should "test 1" in {
Future {
SleepHelper.sleep(30)
assert(count == 0)
count = 1
succeed
}
}
it should "test 2" in {
Future {
assert(count == 1)
SleepHelper.sleep(50)
count = 2
succeed
}
}
it should "test 3" in {
Future {
assert(count == 2)
}
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 3)
assert(rep.testSucceededEventsReceived.length == 3)
}
it("should run tests that does not return Future in serial by default") {
@volatile var count = 0
class ExampleSpec extends AsyncFlatSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
//SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
it should "test 1" in {
SleepHelper.sleep(30)
assert(count == 0)
count = 1
succeed
}
it should "test 2" in {
assert(count == 1)
SleepHelper.sleep(50)
count = 2
succeed
}
it should "test 3" in {
assert(count == 2)
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 3)
assert(rep.testSucceededEventsReceived.length == 3)
}
// SKIP-SCALATESTJS,NATIVE-START
it("should run tests and its future in same main thread when use SerialExecutionContext") {
var mainThread = Thread.currentThread
var test1Thread: Option[Thread] = None
var test2Thread: Option[Thread] = None
var onCompleteThread: Option[Thread] = None
class ExampleSpec extends AsyncFlatSpec {
it should "test 1" in {
Future {
test1Thread = Some(Thread.currentThread)
succeed
}
}
it should "test 2" in {
Future {
test2Thread = Some(Thread.currentThread)
succeed
}
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
status.whenCompleted { s =>
onCompleteThread = Some(Thread.currentThread)
}
status.waitUntilCompleted()
assert(test1Thread.isDefined)
assert(test1Thread.get == mainThread)
assert(test2Thread.isDefined)
assert(test2Thread.get == mainThread)
assert(onCompleteThread.isDefined)
assert(onCompleteThread.get == mainThread)
}
it("should run tests and its true async future in the same thread when use SerialExecutionContext") {
var mainThread = Thread.currentThread
@volatile var test1Thread: Option[Thread] = None
@volatile var test2Thread: Option[Thread] = None
var onCompleteThread: Option[Thread] = None
class ExampleSpec extends AsyncFlatSpec {
it should "test 1" in {
val promise = Promise[Assertion]
val timer = new java.util.Timer
timer.schedule(
new java.util.TimerTask {
def run(): Unit = {
promise.complete(Success(succeed))
}
},
1000
)
promise.future.map { s =>
test1Thread = Some(Thread.currentThread)
s
}
}
it should "test 2" in {
val promise = Promise[Assertion]
val timer = new java.util.Timer
timer.schedule(
new java.util.TimerTask {
def run(): Unit = {
promise.complete(Success(succeed))
}
},
500
)
promise.future.map { s =>
test2Thread = Some(Thread.currentThread)
s
}
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
status.whenCompleted { s =>
onCompleteThread = Some(Thread.currentThread)
}
status.waitUntilCompleted()
assert(test1Thread.isDefined)
assert(test1Thread.get == mainThread)
assert(test2Thread.isDefined)
assert(test2Thread.get == mainThread)
assert(onCompleteThread.isDefined)
assert(onCompleteThread.get == mainThread)
}
it("should not run out of stack space with nested futures when using SerialExecutionContext") {
class ExampleSpec extends AsyncFlatSpec {
// Note we get a StackOverflowError with the following execution
// context.
// override implicit def executionContext: ExecutionContext = new ExecutionContext { def execute(runnable: Runnable) = runnable.run; def reportFailure(cause: Throwable) = () }
def sum(xs: List[Int]): Future[Int] =
xs match {
case Nil => Future.successful(0)
case x :: xs => Future(x).flatMap(xx => sum(xs).map(xxx => xx + xxx))
}
it should "test 1" in {
val fut: Future[Int] = sum((1 to 50000).toList)
fut.map(total => assert(total == 1250025000))
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
status.waitUntilCompleted()
assert(!rep.testSucceededEventsReceived.isEmpty)
}
// SKIP-SCALATESTJS,NATIVE-END
    // Staggered sleeps: if the three tests ran concurrently, "test 3" would
    // complete first; serial execution must preserve declaration order.
    it("should run tests that returns Future and report their result in serial") {
      class ExampleSpec extends AsyncFlatSpec {
        //SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
        //SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
        it should "test 1" in {
          Future {
            SleepHelper.sleep(60)
            succeed
          }
        }
        it should "test 2" in {
          Future {
            SleepHelper.sleep(30)
            succeed
          }
        }
        it should "test 3" in {
          Future {
            succeed
          }
        }
      }
      val rep = new EventRecordingReporter
      val suite = new ExampleSpec
      val status = suite.run(None, Args(reporter = rep))
      // SKIP-SCALATESTJS,NATIVE-START
      status.waitUntilCompleted()
      // SKIP-SCALATESTJS,NATIVE-END
      // Events must arrive in declaration order despite earlier tests sleeping longer.
      assert(rep.testStartingEventsReceived.length == 3)
      assert(rep.testStartingEventsReceived(0).testName == "should test 1")
      assert(rep.testStartingEventsReceived(1).testName == "should test 2")
      assert(rep.testStartingEventsReceived(2).testName == "should test 3")
      assert(rep.testSucceededEventsReceived.length == 3)
      assert(rep.testSucceededEventsReceived(0).testName == "should test 1")
      assert(rep.testSucceededEventsReceived(1).testName == "should test 2")
      assert(rep.testSucceededEventsReceived(2).testName == "should test 3")
    }
    // Same serial-ordering guarantee for synchronous (non-Future) test bodies.
    it("should run tests that does not return Future and report their result in serial") {
      class ExampleSpec extends AsyncFlatSpec {
        //SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
        //SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
        it should "test 1" in {
          SleepHelper.sleep(60)
          succeed
        }
        it should "test 2" in {
          SleepHelper.sleep(30)
          succeed
        }
        it should "test 3" in {
          succeed
        }
      }
      val rep = new EventRecordingReporter
      val suite = new ExampleSpec
      val status = suite.run(None, Args(reporter = rep))
      // SKIP-SCALATESTJS,NATIVE-START
      status.waitUntilCompleted()
      // SKIP-SCALATESTJS,NATIVE-END
      assert(rep.testStartingEventsReceived.length == 3)
      assert(rep.testStartingEventsReceived(0).testName == "should test 1")
      assert(rep.testStartingEventsReceived(1).testName == "should test 2")
      assert(rep.testStartingEventsReceived(2).testName == "should test 3")
      assert(rep.testSucceededEventsReceived.length == 3)
      assert(rep.testSucceededEventsReceived(0).testName == "should test 1")
      assert(rep.testSucceededEventsReceived(1).testName == "should test 2")
      assert(rep.testSucceededEventsReceived(2).testName == "should test 3")
    }
    // An info(...) in the spec body (outside any test) fires a standalone InfoProvided.
    it("should send an InfoProvided event for an info in main spec body") {
      class MySuite extends AsyncFlatSpec {
        info(
          "hi there"
        )
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      // SKIP-SCALATESTJS,NATIVE-START
      status.waitUntilCompleted()
      // SKIP-SCALATESTJS,NATIVE-END
      val infoList = reporter.infoProvidedEventsReceived
      assert(infoList.size == 1)
      assert(infoList(0).message == "hi there")
    }
    // An info(...) inside a test body is NOT fired standalone: it is recorded
    // and attached to the test's TestSucceeded event instead.
    it("should send an InfoProvided event for an info in test body") {
      class MySuite extends AsyncFlatSpec {
        //SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
        //SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
        "test feature" should "test 1" in {
          info("hi there")
          succeed
        }
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      // SKIP-SCALATESTJS,NATIVE-START
      status.waitUntilCompleted()
      // SKIP-SCALATESTJS,NATIVE-END
      val infoList = reporter.infoProvidedEventsReceived
      assert(infoList.size == 0)
      val testSucceededList = reporter.testSucceededEventsReceived
      assert(testSucceededList.size == 1)
      assert(testSucceededList(0).recordedEvents.size == 1)
      val recordedEvent = testSucceededList(0).recordedEvents(0)
      assert(recordedEvent.isInstanceOf[InfoProvided])
      val infoProvided = recordedEvent.asInstanceOf[InfoProvided]
      assert(infoProvided.message == "hi there")
    }
    // Recording also works when the info(...) runs asynchronously inside the Future.
    it("should send an InfoProvided event for an info in Future returned by scenario body") {
      class MySuite extends AsyncFlatSpec {
        //SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
        //SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
        "test feature" should "test 1" in {
          Future {
            info("hi there")
            succeed
          }
        }
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      // SKIP-SCALATESTJS,NATIVE-START
      status.waitUntilCompleted()
      // SKIP-SCALATESTJS,NATIVE-END
      val infoList = reporter.infoProvidedEventsReceived
      assert(infoList.size == 0)
      val testSucceededList = reporter.testSucceededEventsReceived
      assert(testSucceededList.size == 1)
      assert(testSucceededList(0).recordedEvents.size == 1)
      val recordedEvent = testSucceededList(0).recordedEvents(0)
      assert(recordedEvent.isInstanceOf[InfoProvided])
      val infoProvided = recordedEvent.asInstanceOf[InfoProvided]
      assert(infoProvided.message == "hi there")
    }
    // Unlike info(...), a note(...) is always delivered immediately as a
    // standalone NoteProvided event, regardless of where it is called.
    it("should send a NoteProvided event for a note in main spec body") {
      class MySuite extends AsyncFlatSpec {
        note(
          "hi there"
        )
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      // SKIP-SCALATESTJS,NATIVE-START
      status.waitUntilCompleted()
      // SKIP-SCALATESTJS,NATIVE-END
      val noteList = reporter.noteProvidedEventsReceived
      assert(noteList.size == 1)
      assert(noteList(0).message == "hi there")
    }
    // note(...) inside a test body is still fired immediately (not recorded).
    it("should send a NoteProvided event for a note in test body") {
      class MySuite extends AsyncFlatSpec {
        //SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
        //SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
        "test feature" should "test 1" in {
          note("hi there")
          succeed
        }
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      // SKIP-SCALATESTJS,NATIVE-START
      status.waitUntilCompleted()
      // SKIP-SCALATESTJS,NATIVE-END
      val noteList = reporter.noteProvidedEventsReceived
      assert(noteList.size == 1)
      assert(noteList(0).message == "hi there")
    }
    // Immediate delivery also holds when note(...) runs inside the returned Future.
    it("should send a NoteProvided event for a note in Future returned by test body") {
      class MySuite extends AsyncFlatSpec {
        //SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
        //SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
        "test feature" should "test 1" in {
          Future {
            note("hi there")
            succeed
          }
        }
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      // SKIP-SCALATESTJS,NATIVE-START
      status.waitUntilCompleted()
      // SKIP-SCALATESTJS,NATIVE-END
      val noteList = reporter.noteProvidedEventsReceived
      assert(noteList.size == 1)
      assert(noteList(0).message == "hi there")
    }
    // Like note(...), an alert(...) is always delivered immediately as a
    // standalone AlertProvided event, wherever it is invoked.
    it("should send an AlertProvided event for an alert in main spec body") {
      class MySuite extends AsyncFlatSpec {
        alert(
          "hi there"
        )
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      // SKIP-SCALATESTJS,NATIVE-START
      status.waitUntilCompleted()
      // SKIP-SCALATESTJS,NATIVE-END
      val alertList = reporter.alertProvidedEventsReceived
      assert(alertList.size == 1)
      assert(alertList(0).message == "hi there")
    }
    // alert(...) in a test body fires immediately (not recorded with the test).
    it("should send an AlertProvided event for an alert in test body") {
      class MySuite extends AsyncFlatSpec {
        //SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
        //SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
        "test feature" should "test 1" in {
          alert("hi there")
          succeed
        }
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      // SKIP-SCALATESTJS,NATIVE-START
      status.waitUntilCompleted()
      // SKIP-SCALATESTJS,NATIVE-END
      val alertList = reporter.alertProvidedEventsReceived
      assert(alertList.size == 1)
      assert(alertList(0).message == "hi there")
    }
    // Immediate delivery also holds when alert(...) runs inside the returned Future.
    it("should send an AlertProvided event for an alert in Future returned by test body") {
      class MySuite extends AsyncFlatSpec {
        //SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
        //SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
        "test feature" should "test 1" in {
          Future {
            alert("hi there")
            succeed
          }
        }
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      // SKIP-SCALATESTJS,NATIVE-START
      status.waitUntilCompleted()
      // SKIP-SCALATESTJS,NATIVE-END
      val alertList = reporter.alertProvidedEventsReceived
      assert(alertList.size == 1)
      assert(alertList(0).message == "hi there")
    }
    // markup(...) in the spec body (outside any test) fires a standalone MarkupProvided.
    it("should send a MarkupProvided event for a markup in main spec body") {
      class MySuite extends AsyncFlatSpec {
        markup(
          "hi there"
        )
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      // SKIP-SCALATESTJS,NATIVE-START
      status.waitUntilCompleted()
      // SKIP-SCALATESTJS,NATIVE-END
      val markupList = reporter.markupProvidedEventsReceived
      assert(markupList.size == 1)
      assert(markupList(0).text == "hi there")
    }
    // Like info(...), markup(...) inside a test body is recorded and attached
    // to the TestSucceeded event rather than fired standalone.
    it("should send a MarkupProvided event for a markup in test body") {
      class MySuite extends AsyncFlatSpec {
        //SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
        //SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
        "test feature" should "test 1" in {
          markup("hi there")
          succeed
        }
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      // SKIP-SCALATESTJS,NATIVE-START
      status.waitUntilCompleted()
      // SKIP-SCALATESTJS,NATIVE-END
      val markupList = reporter.markupProvidedEventsReceived
      assert(markupList.size == 0)
      val testSucceededList = reporter.testSucceededEventsReceived
      assert(testSucceededList.size == 1)
      assert(testSucceededList(0).recordedEvents.size == 1)
      val recordedEvent = testSucceededList(0).recordedEvents(0)
      assert(recordedEvent.isInstanceOf[MarkupProvided])
      val markupProvided = recordedEvent.asInstanceOf[MarkupProvided]
      assert(markupProvided.text == "hi there")
    }
    // Recording also works when markup(...) runs inside the returned Future.
    it("should send a MarkupProvided event for a markup in Future returned by scenario body") {
      class MySuite extends AsyncFlatSpec {
        //SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
        //SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
        "test feature" should "test 1" in {
          Future {
            markup("hi there")
            succeed
          }
        }
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      // SKIP-SCALATESTJS,NATIVE-START
      status.waitUntilCompleted()
      // SKIP-SCALATESTJS,NATIVE-END
      val markupList = reporter.markupProvidedEventsReceived
      assert(markupList.size == 0)
      val testSucceededList = reporter.testSucceededEventsReceived
      assert(testSucceededList.size == 1)
      assert(testSucceededList(0).recordedEvents.size == 1)
      val recordedEvent = testSucceededList(0).recordedEvents(0)
      assert(recordedEvent.isInstanceOf[MarkupProvided])
      val markupProvided = recordedEvent.asInstanceOf[MarkupProvided]
      assert(markupProvided.text == "hi there")
    }
    // Registering two tests with identical names must fail eagerly at suite
    // construction time. NOTE: the assertion below uses `thisLineNumber - 6`,
    // which depends on the exact line spacing of this block — do not insert
    // or remove lines between the duplicate registration and that assertion.
    it("should generate a DuplicateTestNameException is detected") {
      class TestSpec extends AsyncFlatSpec {
        behavior of "a feature"
        it should "test 1" in { succeed }
        it should "test 1" in { succeed }
      }
      val e = intercept[DuplicateTestNameException] {
        new TestSpec
      }
      assert("AsyncFlatSpecSpec.scala" == e.failedCodeFileName.get)
      assert(e.failedCodeLineNumber.get == thisLineNumber - 6)
      assert(!e.cause.isDefined)
    }
    // The suite's execution context can be swapped by overriding executionContext;
    // tests still run and report normally under the replacement context.
    it("should allow other execution context to be used") {
      class TestSpec extends AsyncFlatSpec {
        // SKIP-SCALATESTJS,NATIVE-START
        override implicit val executionContext = scala.concurrent.ExecutionContext.Implicits.global
        // SKIP-SCALATESTJS,NATIVE-END
        // SCALATESTJS-ONLY override implicit val executionContext = scala.scalajs.concurrent.JSExecutionContext.runNow
        val a = 1
        "feature 1" should "test A" in {
          Future { assert(a == 1) }
        }
        "feature 2" should "test B" in {
          Future { assert(a == 1) }
        }
        "feature 3" should "test C" in {
          Future { assert(a == 1) }
        }
      }
      val suite = new TestSpec
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      // SKIP-SCALATESTJS,NATIVE-START
      status.waitUntilCompleted()
      // SKIP-SCALATESTJS,NATIVE-END
      assert(reporter.scopeOpenedEventsReceived.length == 3)
      assert(reporter.scopeClosedEventsReceived.length == 3)
      assert(reporter.testStartingEventsReceived.length == 3)
      assert(reporter.testSucceededEventsReceived.length == 3)
    }
}
}
| dotty-staging/scalatest | scalatest-test/src/test/scala/org/scalatest/AsyncFlatSpecSpec.scala | Scala | apache-2.0 | 27,935 |
package io.youi.path
/** Path action that closes the current sub-path back to its starting point. */
object ClosePath extends PathAction {
  override def draw(context: Context, x: Double, y: Double, scaleX: Double, scaleY: Double): Unit = {
    // The position and scale arguments are irrelevant for a close operation.
    context.close()
  }

  override def toString: String = "ClosePath"
}
| outr/youi | ui/js/src/main/scala/io/youi/path/ClosePath.scala | Scala | mit | 225 |
package views.html.helper
import play.api.templates.Html
import play.api.mvc.{Call}
import play.api.{Play, Mode}
import controllers.routes
/** Make the app explicit for testing */
trait RequiresApp {
  // Captures the currently running Play application; mixing this in makes the
  // app an overridable member, so tests can substitute their own instance.
  implicit val app = play.api.Play.current
}
/**
* Resolves the path to a script depending on the current environment.
* Uses uglified file (-min) in production.
*/
object mainScriptSrc extends RequiresApp {
  // Dev and Test serve the plain sources for easier debugging; only Prod
  // switches to the uglified "-min" folder.
  def apply(folder: String = "javascripts", scriptName: String): String =
    app.mode match {
      case Mode.Dev | Mode.Test => s"${folder}/${scriptName}"
      case Mode.Prod            => s"${folder}-min/${scriptName}"
    }
}
| myclabs/CarbonDB-UI | app/views/mainScriptSrc.scala | Scala | gpl-3.0 | 661 |
package mesosphere.marathon
package integration
import mesosphere.AkkaIntegrationTest
import mesosphere.marathon.integration.facades.ITEnrichedTask
import mesosphere.marathon.integration.facades.MarathonFacade._
import mesosphere.marathon.integration.facades.MesosFacade.{ ITMesosState, ITResources }
import mesosphere.marathon.integration.setup.{ EmbeddedMarathonTest, RestResult }
import mesosphere.marathon.raml.{ App, AppUpdate }
import mesosphere.marathon.state.PathId
import scala.collection.immutable.Seq
import scala.util.Try
// End-to-end tests for resident tasks (apps with persistent local volumes):
// deployment, volume re-attachment across restarts, reserved-resource
// accounting against Mesos, and scale/restart/config-change operations.
@IntegrationTest
class ResidentTaskIntegrationTest extends AkkaIntegrationTest with EmbeddedMarathonTest {
  import Fixture._
  "ResidentTaskIntegrationTest" should {
    "resident task can be deployed and write to persistent volume" in new Fixture {
      Given("An app that writes into a persistent volume")
      val containerPath = "persistent-volume"
      val app = residentApp(
        id = appId("resident-task-can-be-deployed-and-write-to-persistent-volume"),
        containerPath = containerPath,
        cmd = s"""echo "data" > $containerPath/data""")
      When("A task is launched")
      val result = createAsynchronously(app)
      Then("It writes successfully to the persistent volume and finishes")
      waitForStatusUpdates(StatusUpdate.TASK_RUNNING)
      waitForDeployment(result)
      waitForStatusUpdates(StatusUpdate.TASK_FINISHED)
    }
    "resident task can be deployed along with constraints" in new Fixture {
      // background: Reserved tasks may not be considered while making sure constraints are met, because they
      // would prevent launching a task because there `is` already a task (although not launched)
      Given("A resident app that uses a hostname:UNIQUE constraints")
      val containerPath = "persistent-volume"
      val unique = raml.Constraints("hostname" -> "UNIQUE")
      val app = residentApp(
        id = appId("resident-task-that-uses-hostname-unique"),
        containerPath = containerPath,
        cmd = """sleep 1""",
        constraints = unique)
      When("A task is launched")
      val result = createAsynchronously(app)
      Then("It it successfully launched")
      waitForStatusUpdates(StatusUpdate.TASK_RUNNING)
      waitForDeployment(result)
    }
    "persistent volume will be re-attached and keep state" in new Fixture {
      Given("An app that writes into a persistent volume")
      val containerPath = "persistent-volume"
      val app = residentApp(
        id = appId("resident-task-with-persistent-volumen-will-be-reattached-and-keep-state"),
        containerPath = containerPath,
        cmd = s"""echo data > $containerPath/data && sleep 1000""")
      When("a task is launched")
      val result = createAsynchronously(app)
      Then("it successfully writes to the persistent volume and then finishes")
      waitForStatusUpdates(StatusUpdate.TASK_RUNNING)
      waitForDeployment(result)
      And("default residency values are set")
      val deployedApp = marathon.app(PathId(app.id))
      val residency = deployedApp.value.app.residency.get
      residency.taskLostBehavior shouldEqual raml.TaskLostBehavior.WaitForever
      residency.relaunchEscalationTimeoutSeconds shouldEqual 3600L
      When("the app is suspended")
      suspendSuccessfully(PathId(app.id))
      And("a new task is started that checks for the previously written file")
      // deploy a new version that checks for the data written the above step
      val update = marathon.updateApp(
        PathId(app.id),
        AppUpdate(
          instances = Some(1),
          cmd = Some(s"""test -e $containerPath/data && sleep 2""")
        )
      )
      update should be(OK)
      // we do not wait for the deployment to finish here to get the task events
      waitForStatusUpdates(StatusUpdate.TASK_RUNNING)
      waitForDeployment(update)
      waitForStatusUpdates(StatusUpdate.TASK_FINISHED)
    }
    "resident task is launched completely on reserved resources" in new Fixture {
      Given("A clean state of the cluster since we check reserved resources")
      cleanUp()
      And("A resident app")
      val app = residentApp(
        id = appId("resident-task-is-launched-completely-on-reserved-resources"))
      When("A task is launched")
      createSuccessfully(app)
      Then("used and reserved resources correspond to the app")
      val state: RestResult[ITMesosState] = mesos.state
      withClue("used_resources") {
        state.value.agents.head.usedResources should equal(itMesosResources)
      }
      withClue("reserved_resources") {
        state.value.agents.head.reservedResourcesByRole.get("foo") should equal(Some(itMesosResources))
      }
      When("the app is suspended")
      suspendSuccessfully(PathId(app.id))
      Then("there are no used resources anymore but there are the same reserved resources")
      val state2: RestResult[ITMesosState] = mesos.state
      withClue("used_resources") {
        state2.value.agents.head.usedResources should be(empty)
      }
      withClue("reserved_resources") {
        state2.value.agents.head.reservedResourcesByRole.get("foo") should equal(Some(itMesosResources))
      }
      // we check for a blank slate of mesos reservations after each test
      // TODO: Once we wait for the unreserves before finishing the StopApplication deployment step,
      // we should test that here
    }
    "Scale Up" in new Fixture {
      Given("A resident app with 0 instances")
      val app = createSuccessfully(residentApp(
        id = appId("scale-up-resident-app-with-zero-instances"),
        instances = 0))
      When("We scale up to 5 instances")
      scaleToSuccessfully(PathId(app.id), 5)
      Then("exactly 5 tasks have been created")
      val all = allTasks(PathId(app.id))
      all.count(_.launched) shouldBe 5 withClue (s"Found ${all.size}/5 tasks: ${all}")
    }
    "Scale Down" in new Fixture {
      Given("a resident app with 5 instances")
      val app = createSuccessfully(residentApp(
        id = appId("scale-down-resident-app-with-five-instances"),
        instances = 5))
      When("we scale down to 0 instances")
      suspendSuccessfully(PathId(app.id))
      Then("all tasks are suspended")
      // Resident tasks are kept (suspended) rather than removed, so 5 remain.
      val all = allTasks(PathId(app.id))
      all.size shouldBe 5 withClue (s"Found ${all.size}/5 tasks: ${all}")
      all.count(_.launched) shouldBe 0 withClue (s"${all.count(_.launched)} launched tasks (should be 0)")
      all.count(_.suspended) shouldBe 5 withClue (s"${all.count(_.suspended)} suspended tasks (should be 5)")
    }
    "Restart" in new Fixture {
      Given("a resident app with 5 instances")
      val app = createSuccessfully(
        residentApp(
          id = appId("restart-resident-app-with-five-instances"),
          instances = 5
        )
      )
      val launchedTasks = allTasks(PathId(app.id))
      launchedTasks should have size 5
      When("we restart the app")
      val newVersion = restartSuccessfully(app) withClue ("The app did not restart.")
      val all = allTasks(PathId(app.id))
      logger.info("tasks after relaunch: {}", all.mkString(";"))
      Then("no extra task was created")
      all.size shouldBe 5 withClue (s"Found ${all.size}/5 tasks: ${all}")
      And("exactly 5 instances are running")
      all.count(_.launched) shouldBe 5 withClue (s"${all.count(_.launched)} launched tasks (should be 5)")
      And("all 5 tasks are restarted and of the new version")
      all.map(_.version).forall(_.contains(newVersion)) shouldBe true withClue (s"5 launched tasks should have new version ${newVersion}: ${all}")
    }
    "Config Change" in new Fixture {
      Given("a resident app with 5 instances")
      val app = createSuccessfully(
        residentApp(
          id = appId("config-change-resident-app-with-five-instances"),
          instances = 5
        )
      )
      val launchedTasks = allTasks(PathId(app.id))
      launchedTasks should have size 5
      When("we change the config")
      val newVersion = updateSuccessfully(PathId(app.id), AppUpdate(cmd = Some("sleep 1234"))).toString
      val all = allTasks(PathId(app.id))
      logger.info("tasks after config change: {}", all.mkString(";"))
      Then("no extra task was created")
      all should have size 5
      And("exactly 5 instances are running")
      all.filter(_.launched) should have size 5
      And("all 5 tasks are of the new version")
      all.map(_.version).forall(_.contains(newVersion)) shouldBe true
    }
  }
  // Helpers shared by the tests above: app construction, deployment waiting,
  // scaling shortcuts and task queries against the Marathon facade.
  class Fixture {
    // Tiny resource footprint so the reserved/used resource assertions stay exact.
    val cpus: Double = 0.001
    val mem: Double = 1.0
    val disk: Double = 1.0
    val gpus: Double = 0.0
    val persistentVolumeSize = 2L
    val itMesosResources = ITResources(
      "mem" -> mem,
      "cpus" -> cpus,
      "disk" -> (disk + persistentVolumeSize),
      "gpus" -> gpus
    )
    def appId(suffix: String): PathId = PathId(s"/$testBasePath/app-$suffix")
    // Creates the app and blocks until its deployment has finished.
    def createSuccessfully(app: App): App = {
      waitForDeployment(createAsynchronously(app))
      app
    }
    // Creates the app and returns immediately with the creation result.
    def createAsynchronously(app: App): RestResult[App] = {
      val result = marathon.createAppV2(app)
      result should be(Created)
      extractDeploymentIds(result) should have size 1
      result
    }
    def scaleToSuccessfully(appId: PathId, instances: Int): Seq[ITEnrichedTask] = {
      val result = marathon.updateApp(appId, AppUpdate(instances = Some(instances)))
      result should be(OK)
      waitForDeployment(result)
      waitForTasks(appId, instances)
    }
    def suspendSuccessfully(appId: PathId): Seq[ITEnrichedTask] = scaleToSuccessfully(appId, 0)
    def updateSuccessfully(appId: PathId, update: AppUpdate): VersionString = {
      val result = marathon.updateApp(appId, update)
      result should be(OK)
      waitForDeployment(result)
      result.value.version.toString
    }
    def restartSuccessfully(app: App): VersionString = {
      val result = marathon.restartApp(PathId(app.id))
      result should be(OK)
      waitForDeployment(result)
      result.value.version.toString
    }
    def allTasks(appId: PathId): Seq[ITEnrichedTask] = {
      Try(marathon.tasks(appId)).map(_.value).getOrElse(Nil)
    }
    def launchedTasks(appId: PathId): Seq[ITEnrichedTask] = allTasks(appId).filter(_.launched)
    def suspendedTasks(appId: PathId): Seq[ITEnrichedTask] = allTasks(appId).filter(_.suspended)
  }
  object Fixture {
    type VersionString = String
    // Mesos task status update names observed via waitForStatusUpdates.
    object StatusUpdate {
      val TASK_FINISHED = "TASK_FINISHED"
      val TASK_RUNNING = "TASK_RUNNING"
      val TASK_FAILED = "TASK_FAILED"
    }
    /**
      * Resident Tasks reside in the TaskTracker even after they terminate and after the associated app is deleted.
      * To prevent spurious state in the above test cases, each test case should use a unique appId.
      */
    object IdGenerator {
      private[this] var index: Int = 0
      def generate(): String = {
        index += 1
        index.toString
      }
    }
  }
}
| guenter/marathon | src/test/scala/mesosphere/marathon/integration/ResidentTaskIntegrationTest.scala | Scala | apache-2.0 | 10,976 |
package com.gu.notificationschedule.notifications
import com.gu.notificationschedule.NotificationScheduleConfig
import com.gu.notificationschedule.cloudwatch.CloudWatchMetrics
import com.gu.notificationschedule.dynamo.NotificationsScheduleEntry
import okhttp3._
import org.apache.http.client.utils.URIBuilder
import org.apache.logging.log4j.{LogManager, Logger}
import scala.util.{Failure, Success, Try}
class RequestNotificationException(message: String) extends Exception(message)
trait RequestNotification {
  // Sends the given scheduled notification. Returns Success(()) when the
  // endpoint accepted it, Failure otherwise.
  def apply(nowEpoch: Long, notificationsScheduleEntry: NotificationsScheduleEntry): Try[Unit]
}
/**
 * Posts scheduled notifications to the configured push-topics endpoint over
 * HTTP, recording request timing via CloudWatch metrics.
 */
class RequestNotificationImpl(
  config: NotificationScheduleConfig,
  okHttpClient: OkHttpClient,
  cloudWatchMetrics: CloudWatchMetrics
) extends RequestNotification {
  private val logger: Logger = LogManager.getLogger(classOf[RequestNotificationImpl])
  // Resolved once; URIBuilder validates the configured URL eagerly at construction.
  private val url = new URIBuilder(config.pushTopicsUrl).build().toURL
  private val jsonMediaType = MediaType.parse("application/json; charset=utf-8")
  private val authHeaderValue = s"Bearer ${config.apiKey}"

  // NOTE: nowEpoch is part of the trait contract but is not used by this
  // implementation — the notification payload is sent as stored.
  def apply(nowEpoch: Long, notificationsScheduleEntry: NotificationsScheduleEntry): Try[Unit] = {
    cloudWatchMetrics.timeTry("notification-request", () =>
      tryRequestNotification(notificationsScheduleEntry) match {
        case Success(Some(response)) => {
          try {
            if (response.isSuccessful) {
              logger.info("Success: request: {}\n Got response {} ", notificationsScheduleEntry: Any, response: Any)
              Success(())
            }
            else {
              logger.warn("Unsuccessful response: request: {}\nGot response {} ", notificationsScheduleEntry: Any, response: Any)
              Failure(new RequestNotificationException(s"Unsuccessful response.\nRequest: $notificationsScheduleEntry\nResponse: $response"))
            }
          }
          finally {
            // Always release the OkHttp connection, whatever the status code.
            Option(response.body).foreach(_.close)
          }
        }
        case Success(None) => {
          logger.warn("No response: request: {}", notificationsScheduleEntry)
          Failure(new RequestNotificationException(s"Missing response.\nRequest: $notificationsScheduleEntry\n"))
        }
        case Failure(t) => {
          logger.warn(s"Failed request: $notificationsScheduleEntry", t)
          Failure(t)
        }
      }
    )
  }

  // Executes the POST synchronously; wraps both the call and any thrown
  // exception in Try, and the (possibly null) response in Option.
  private def tryRequestNotification(notificationsScheduleEntry: NotificationsScheduleEntry): Try[Option[Response]] = Try {
    Option(okHttpClient.newCall(new Request.Builder()
      .url(url)
      .header("Authorization", authHeaderValue)
      .post(RequestBody.create(
        jsonMediaType,
        notificationsScheduleEntry.notification
      ))
      .build()).execute())
  }
}
| guardian/mobile-n10n | schedulelambda/src/main/scala/com/gu/notificationschedule/notifications/RequestNotification.scala | Scala | apache-2.0 | 2,864 |
/*
* Copyright 2015 Otto (GmbH & Co KG)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.flinkspector.datastream.functions
import java.util.concurrent.TimeUnit
import io.flinkspector.CoreSpec
import io.flinkspector.datastream.input.EventTimeInputBuilder
import io.flinkspector.datastream.input.time.After
import org.apache.flink.api.common.ExecutionConfig
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.typeutils.TypeExtractor
import org.apache.flink.streaming.api.functions.source.SourceFunction
import org.apache.flink.streaming.api.operators.StreamingRuntimeContext
import org.apache.flink.streaming.api.watermark.Watermark
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord
import org.mockito.Mockito._
import scala.collection.JavaConversions._
// Unit tests for ParallelFromStreamRecordsFunction: record emission, input
// partitioning across parallel subtasks, and watermark generation behaviour.
class ParallelFromStreamRecordsFunctionSpec extends CoreSpec {
  val config = new ExecutionConfig()
  val typeInfo: TypeInformation[StreamRecord[String]] =
    TypeExtractor.getForObject(new StreamRecord[String]("t", 0))
  val serializer = typeInfo.createSerializer(config)
  "the source" should "produce input" in {
    val input = valuesToRecords(List("1", "2", "3", "4", "5")).toList
    val source = new ParallelFromStreamRecordsFunction[String](serializer, input)
    // Single subtask: the source should emit every record itself.
    val streamContext = mock[StreamingRuntimeContext]
    when(streamContext.getNumberOfParallelSubtasks).thenReturn(1)
    when(streamContext.getIndexOfThisSubtask).thenReturn(0)
    source.setRuntimeContext(streamContext)
    val ctx = mock[SourceFunction.SourceContext[String]]
    when(ctx.getCheckpointLock).thenReturn(config, null)
    source.run(ctx)
    verify(ctx).collectWithTimestamp("1", 0)
    verify(ctx).collectWithTimestamp("2", 0)
    verify(ctx).collectWithTimestamp("3", 0)
    verify(ctx).collectWithTimestamp("4", 0)
    verify(ctx).collectWithTimestamp("5", 0)
  }
  it should "produce input in parallel" in {
    val input = valuesToRecords(List("1", "2", "3", "4", "5")).toList
    val source1 = new ParallelFromStreamRecordsFunction[String](serializer, input)
    val source2 = new ParallelFromStreamRecordsFunction[String](serializer, input)
    // Two subtasks sharing one collecting context: together they must emit
    // each record exactly once (verify() defaults to exactly one invocation).
    val streamContext1 = mock[StreamingRuntimeContext]
    when(streamContext1.getNumberOfParallelSubtasks).thenReturn(2)
    when(streamContext1.getIndexOfThisSubtask).thenReturn(0)
    val streamContext2 = mock[StreamingRuntimeContext]
    when(streamContext2.getNumberOfParallelSubtasks).thenReturn(2)
    when(streamContext2.getIndexOfThisSubtask).thenReturn(1)
    source1.setRuntimeContext(streamContext1)
    source2.setRuntimeContext(streamContext2)
    val ctx = mock[SourceFunction.SourceContext[String]]
    when(ctx.getCheckpointLock).thenReturn(config, config)
    source1.run(ctx)
    source2.run(ctx)
    verify(ctx).collectWithTimestamp("1", 0)
    verify(ctx).collectWithTimestamp("2", 0)
    verify(ctx).collectWithTimestamp("3", 0)
    verify(ctx).collectWithTimestamp("4", 0)
    verify(ctx).collectWithTimestamp("5", 0)
  }
  it should "emit continuously rising watermarks" in {
    // Records spaced one second apart in event time.
    val input = EventTimeInputBuilder.startWith("1")
      .emit("2", After.period(1, TimeUnit.SECONDS))
      .emit("3", After.period(1, TimeUnit.SECONDS))
      .emit("4", After.period(1, TimeUnit.SECONDS))
      .emit("5", After.period(1, TimeUnit.SECONDS))
      .emit("6", After.period(1, TimeUnit.SECONDS))
    val source = new ParallelFromStreamRecordsFunction[String](serializer, input.getInput)
    val streamContext = mock[StreamingRuntimeContext]
    when(streamContext.getNumberOfParallelSubtasks).thenReturn(1)
    when(streamContext.getIndexOfThisSubtask).thenReturn(0)
    source.setRuntimeContext(streamContext)
    val ctx = mock[SourceFunction.SourceContext[String]]
    when(ctx.getCheckpointLock).thenReturn(config, null)
    source.run(ctx)
    verify(ctx).collectWithTimestamp("1", 0)
    verify(ctx).collectWithTimestamp("2", 1000)
    verify(ctx).collectWithTimestamp("3", 2000)
    verify(ctx).collectWithTimestamp("4", 3000)
    verify(ctx).collectWithTimestamp("5", 4000)
    verify(ctx).emitWatermark(new Watermark(1000))
    verify(ctx).emitWatermark(new Watermark(2000))
    verify(ctx).emitWatermark(new Watermark(3000))
    verify(ctx).emitWatermark(new Watermark(4000))
    verify(ctx).emitWatermark(new Watermark(5000))
  }
  it should "emit continously rising watermarks with MaxValue" in {
    // flushOpenWindowsOnTermination + flag on the source make the final
    // watermark Long.MaxValue so that any open windows are fired.
    val input = EventTimeInputBuilder.startWith("1")
      .emit("2", After.period(1, TimeUnit.SECONDS))
      .emit("3", After.period(1, TimeUnit.SECONDS))
      .emit("4", After.period(1, TimeUnit.SECONDS))
      .emit("5", After.period(1, TimeUnit.SECONDS))
      .emit("6", After.period(1, TimeUnit.SECONDS))
      .flushOpenWindowsOnTermination()
    val source = new ParallelFromStreamRecordsFunction[String](serializer, input.getInput, true)
    val streamContext = mock[StreamingRuntimeContext]
    when(streamContext.getNumberOfParallelSubtasks).thenReturn(1)
    when(streamContext.getIndexOfThisSubtask).thenReturn(0)
    source.setRuntimeContext(streamContext)
    val ctx = mock[SourceFunction.SourceContext[String]]
    when(ctx.getCheckpointLock).thenReturn(config, null)
    source.run(ctx)
    verify(ctx).collectWithTimestamp("1", 0)
    verify(ctx).collectWithTimestamp("2", 1000)
    verify(ctx).collectWithTimestamp("3", 2000)
    verify(ctx).collectWithTimestamp("4", 3000)
    verify(ctx).collectWithTimestamp("5", 4000)
    verify(ctx).collectWithTimestamp("6", 5000)
    verify(ctx).emitWatermark(new Watermark(1000))
    verify(ctx).emitWatermark(new Watermark(2000))
    verify(ctx).emitWatermark(new Watermark(3000))
    verify(ctx).emitWatermark(new Watermark(4000))
    verify(ctx).emitWatermark(new Watermark(Long.MaxValue))
  }
}
| ottogroup/flink-spector | flinkspector-datastream/src/test/scala/io/flinkspector/datastream/functions/ParallelFromStreamRecordsFunctionSpec.scala | Scala | apache-2.0 | 6,274 |
/*
* Copyright 2007-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package record
package field
import scala.xml._
import net.liftweb.util._
import net.liftweb.common._
import net.liftweb.proto._
import net.liftweb.http.{S}
import java.util.regex._
import Helpers._
import S._
object EmailField {
  /** The configurable e-mail regex, re-read from ProtoRules on every access. */
  def emailPattern = ProtoRules.emailRegexPattern.vend

  /** True when the whole string matches the configured e-mail pattern. */
  def validEmailAddr_?(email: String): Boolean = {
    val matcher = emailPattern.matcher(email)
    matcher.matches
  }
}
/**
 * Validation behaviour mixed into both the required and optional e-mail
 * fields: appends an e-mail-format check to the field's validation chain.
 */
trait EmailTypedField extends TypedField[String] {
  // Yields no errors for a syntactically valid address or — when the field is
  // optional — for an empty value; otherwise produces a localized error.
  private def validateEmail(emailValue: ValueType): List[FieldError] = {
    toBoxMyType(emailValue) match {
      case Full(email) if (optional_? && email.isEmpty) => Nil
      case Full(email) if EmailField.validEmailAddr_?(email) => Nil
      // NOTE(review): relies on an implicit NodeSeq -> List[FieldError]
      // conversion being in scope; the Text node becomes the error message.
      case _ => Text(S.?("invalid.email.address"))
    }
  }

  // Prepend the e-mail check to whatever validations the base field defines.
  override def validations = validateEmail _ :: Nil
}
/** Required e-mail field: a [[StringField]] whose value must be a valid e-mail address. */
class EmailField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) owner: OwnerType, maxLength: Int)
  extends StringField[OwnerType](owner, maxLength) with EmailTypedField
/** Optional e-mail field: empty values are accepted, non-empty values must be valid addresses. */
class OptionalEmailField[OwnerType <: Record[OwnerType]](@deprecatedName('rec) owner: OwnerType, maxLength: Int)
  extends OptionalStringField[OwnerType](owner, maxLength) with EmailTypedField
| lift/framework | persistence/record/src/main/scala/net/liftweb/record/field/EmailField.scala | Scala | apache-2.0 | 1,793 |
/*
* Copyright 2017-2018 47 Degrees, LLC. <http://www.47deg.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package freestyle.free
import _root_.fs2._
import _root_.fs2.util.{Attempt, Catchable, Free, Monad, Suspendable}
import cats.{~>, MonadError, Monoid}
import cats.free.{Free => CFree}
/**
 * Freestyle integration for fs2: defines an effect type layering fs2's `Free`
 * over `cats.free.Free[Attempt, ?]`, the type-class instances fs2 needs to
 * interpret it, and a `@free` algebra for running streams.
 */
object fs2 {
  // Effect stack for streams in this module: fs2's Free suspended into a cats
  // Free over Attempt (Either[Throwable, ?]), so failures are reified as values.
  type Eff[A] = Free[CFree[Attempt, ?], A]

  // fs2 `Catchable` instance for `cats.free.Free[Attempt, ?]`; errors are
  // represented by lifting a `Left(err)` into the Attempt layer.
  implicit val catsFreeAttemptCatchable: Catchable[CFree[Attempt, ?]] =
    new Catchable[CFree[Attempt, ?]] {
      def pure[A](a: A): CFree[Attempt, A] = CFree.pure(a)
      // NOTE(review): wraps the value in a successful Attempt; failures are
      // surfaced when the Attempt layer is folded, not here.
      def attempt[A](fa: CFree[Attempt, A]): CFree[Attempt, Attempt[A]] = fa.map(Attempt(_))
      def fail[A](err: Throwable): CFree[Attempt, A] = CFree.liftF(Left(err))
      def flatMap[A, B](a: CFree[Attempt, A])(f: A => CFree[Attempt, B]): CFree[Attempt, B] =
        a.flatMap(f)
    }

  // Shared Monad base for the two Eff instances below.
  private[fs2] sealed class EffMonad extends Monad[Eff] {
    def pure[A](a: A): Eff[A] = Free.pure(a)
    def flatMap[A, B](a: Eff[A])(f: A => Eff[B]): Eff[B] = a.flatMap(f)
  }

  // Catchable[Eff]: delegates to fs2 Free's own attempt/fail.
  implicit val effCatchable: Catchable[Eff] = new EffMonad with Catchable[Eff] {
    def attempt[A](fa: Eff[A]): Eff[Attempt[A]] = fa.attempt
    def fail[A](err: Throwable): Eff[A] = Free.fail(err)
  }

  // Suspendable[Eff]: evaluation is already deferred by Free, so `suspend`
  // simply forces the by-name argument.
  implicit val effSuspendable: Suspendable[Eff] = new EffMonad with Suspendable[Eff] {
    def suspend[A](fa: => Eff[A]): Eff[A] = fa
  }

  /** Freestyle algebra exposing the standard fs2 stream "run" operations. */
  @free sealed trait StreamM {
    def run[A](s: Stream[Eff, A]): FS[Unit]
    def runLog[A](s: Stream[Eff, A]): FS[Vector[A]]
    def runFold[A, B](z: B, f: (B, A) => B)(s: Stream[Eff, A]): FS[B]
    def runLast[A](s: Stream[Eff, A]): FS[Option[A]]
  }

  trait Implicits {
    /**
     * Handler interpreting `StreamM` into any `F` with a
     * `MonadError[F, Throwable]`: each operation runs the stream to an `Eff`
     * program and folds the Attempt layer, re-raising captured throwables
     * through `ME.raiseError`.
     */
    implicit def freeStyleFs2StreamHandler[F[_]](
        implicit ME: MonadError[F, Throwable]
    ): StreamM.Handler[F] = {
      // Natural transformation collapsing Attempt into F's error channel.
      val attemptF = λ[Attempt ~> F](_.fold(ME.raiseError, ME.pure))
      new StreamM.Handler[F] {
        def run[A](s: Stream[Eff, A]): F[Unit] =
          s.run.run.foldMap(attemptF)
        def runLog[A](s: Stream[Eff, A]): F[Vector[A]] =
          s.runLog.run.foldMap(attemptF)
        def runFold[A, B](z: B, f: (B, A) => B, s: Stream[Eff, A]): F[B] =
          s.runFold(z)(f).run.foldMap(attemptF)
        def runLast[A](s: Stream[Eff, A]): F[Option[A]] =
          s.runLast.run.foldMap(attemptF)
      }
    }
  }

  object implicits extends Implicits {
    /** Syntax for lifting an fs2 stream into a freestyle program. */
    implicit class Fs2FreeSyntax[A](private val s: Stream[Eff, A]) extends AnyVal {
      // Runs the stream and combines all emitted values with A's Monoid.
      def liftFS[F[_]](implicit MA: Monoid[A], SF: StreamM[F]): FreeS[F, A] =
        liftFSPar.freeS
      def liftFSPar[F[_]](implicit MA: Monoid[A], SF: StreamM[F]): FreeS.Par[F, A] =
        SF.runFold(MA.empty, MA.combine)(s)
    }
  }
}
| frees-io/freestyle | modules/integrations/fs2/shared/src/main/scala/free/fs2.scala | Scala | apache-2.0 | 3,242 |
package controllers
import akka.stream.scaladsl.{Source,Sink}
import akka.util.ByteString
import org.specs2.mock.Mockito
import org.specs2.specification.Scope
import scala.concurrent.Future
import com.overviewdocs.blobstorage.BlobStorage
import com.overviewdocs.models.BlobStorageRef
import com.overviewdocs.test.factories.{PodoFactory=>factory}
import controllers.backend.{DocumentSetFileBackend,File2Backend}
/**
 * Unit tests for DocumentSetFileController's #head and #show actions, run
 * against mocked backends and blob storage.
 */
class DocumentSetFileControllerSpec extends ControllerSpecification with Mockito {
  // Shared fixture: a controller wired to mock backends and mock blob storage.
  trait BaseScope extends Scope {
    val mockDocumentSetFileBackend = mock[DocumentSetFileBackend]
    val mockFile2Backend = smartMock[File2Backend]
    val mockBlobStorage = smartMock[BlobStorage]
    val controller = new DocumentSetFileController(mockDocumentSetFileBackend, mockFile2Backend, mockBlobStorage, fakeControllerComponents)
  }

  "#head" should {
    trait HeadScope extends BaseScope {
      val documentSetId = 123L
      // 20 bytes standing in for a SHA-1 digest.
      val sha1 = Array(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19).map(_.toByte)
      // lazy so each test can stub the backend before the request runs
      lazy val result = controller.head(documentSetId, sha1)(fakeAuthorizedRequest)
    }

    "return 204 No Content when a match exists" in new HeadScope {
      mockDocumentSetFileBackend.existsByIdAndSha1(documentSetId, sha1) returns Future.successful(true)
      h.status(result) must beEqualTo(h.NO_CONTENT)
    }

    "return 404 Not Found when no match exists" in new HeadScope {
      mockDocumentSetFileBackend.existsByIdAndSha1(documentSetId, sha1) returns Future.successful(false)
      h.status(result) must beEqualTo(h.NOT_FOUND)
    }
  }

  "#show" should {
    trait ShowScope extends BaseScope {
      val documentSetId = 123L
      // A stored file with a blob reference the controller should stream back.
      val file2 = factory.file2(filename="a file.doc", contentType="application/content-type", blob=Some(BlobStorageRef("s3:hi:there", 8)))
      lazy val result = controller.show(documentSetId, file2.id)(fakeAuthorizedRequest)
      // Stubs blob storage to serve `s` (UTF-8 bytes) for the given location.
      def mockBytes(location: String, s: String): Unit = {
        mockBlobStorage.get(location) returns Source.single(ByteString(s.getBytes("utf-8")))
      }
    }

    "return 404 Not Found when DocumentSetFile2 does not exist" in new ShowScope {
      mockDocumentSetFileBackend.existsForRoot(documentSetId, file2.id) returns Future.successful(false)
      h.status(result) must beEqualTo(h.NOT_FOUND)
    }

    "return data" in new ShowScope {
      mockDocumentSetFileBackend.existsForRoot(documentSetId, file2.id) returns Future.successful(true)
      mockFile2Backend.lookup(file2.id) returns Future.successful(Some(file2))
      mockBytes("s3:hi:there", "hi there")
      // Content type, RFC 5987 encoded filename, and body all come from file2.
      h.contentType(result) must beSome("application/content-type")
      h.header(h.CONTENT_DISPOSITION, result) must beSome("attachment; filename*=UTF-8''a%20file.doc")
      h.contentAsString(result) must beEqualTo("hi there")
    }
  }
}
| overview/overview-server | web/test/controllers/DocumentSetFileControllerSpec.scala | Scala | agpl-3.0 | 2,831 |
/*
* Copyright (c) 2014. Regents of the University of California
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.berkeley.cs.amplab.avocado.preprocessing
import org.apache.commons.configuration.SubnodeConfiguration
import org.apache.spark.rdd.RDD
import edu.berkeley.cs.amplab.adam.avro.ADAMRecord
import edu.berkeley.cs.amplab.adam.rdd.AdamContext._
/**
 * Preprocessing stage that flags duplicate reads. Duplicate marking needs no
 * stage-specific settings, so the configuration argument is ignored.
 */
object MarkDuplicates extends PreprocessingStage {

  val stageName = "markDuplicates"

  /** Delegates directly to ADAM's duplicate-marking transformation. */
  def apply(rdd: RDD[ADAMRecord], config: SubnodeConfiguration): RDD[ADAMRecord] =
    rdd.adamMarkDuplicates()
}
| fnothaft/avocado | avocado-core/src/main/scala/edu/berkeley/cs/amplab/avocado/preprocessing/MarkDuplicates.scala | Scala | apache-2.0 | 1,153 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of code snippets matching specific criteria, giving a basic overview of the dataset's contents without deeper analysis.