code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1
value | license stringclasses 15
values | size int64 5 1M |
|---|---|---|---|---|---|
package epic.sequences
import breeze.util.Index
import breeze.numerics
import breeze.linalg.{argmax, softmax, DenseVector}
import epic.framework._
import java.util
import collection.mutable.ArrayBuffer
import breeze.features.FeatureVector
import epic.constraints.{LabeledSpanConstraints, TagConstraints}
import epic.util.{Optional, CacheBroker}
import breeze.optimize.FirstOrderMinimizer.OptParams
import breeze.optimize.CachedBatchDiffFunction
import epic.features.WordFeaturizer
/**
* A Linear Chain Conditional Random Field. Useful for POS tagging, etc.
*
* As usual in Epic, all the heavy lifting is done in the companion object and Marginals.
*
* CRFs can produce [[epic.sequences.TaggedSequence]] from an input sequence of words.
* They can also produce marginals, etc.
* @author dlwh
*/
@SerialVersionUID(1L)
trait CRF[L, W] extends Serializable {
  /** Index over the full set of labels this CRF can assign. */
  def labelIndex : Index[L]
  /** Distinguished label acting as the synthetic predecessor of the first position. */
  def startSymbol: L
  /** Binds the model to one input sequence, yielding per-position transition scores. */
  def anchor(w: IndexedSeq[W]):CRF.Anchoring[L, W]
  /** Forward-backward marginal over all labelings of `w`. */
  def marginal(w: IndexedSeq[W]) = {
    CRF.Marginal(anchor(w))
  }
  /** Degenerate marginal that puts all of its mass on the supplied gold tag sequence. */
  def goldMarginal(tags: IndexedSeq[L], w: IndexedSeq[W]):CRF.Marginal[L, W] = {
    CRF.Marginal.goldMarginal(anchor(w), tags)
  }
  /** Viterbi (max-product) decode: the single best-scoring tag sequence for `w`. */
  def bestSequence(w: IndexedSeq[W], id: String = ""): TaggedSequence[L, W] = {
    CRF.viterbi(anchor(w), id)
  }
}
object CRF {
/**
 * Builds a CRF from a corpus of TaggedSequences using reasonable defaults.
 * @param data gold-tagged training sequences
 * @param startSymbol label used as the synthetic predecessor of the first position
 * @param gazetteer gazetteer-based features; defaults to the empty gazetteer
 * @param opt optimizer configuration used to fit the weights
 * @tparam L label type
 * @return a trained CRF over String tokens
 */
def buildSimple[L](data: IndexedSeq[TaggedSequence[L, String]],
                   startSymbol: L,
                   gazetteer: Gazetteer[Any, String] = Gazetteer.empty[Any, String],
                   wordFeaturizer: Optional[WordFeaturizer[String]] = None,
                   transitionFeaturizer: Optional[WordFeaturizer[String]] = None,
                   opt: OptParams = OptParams(),
                   hashFeatures: Double = 1.0)(implicit cache: CacheBroker = CacheBroker()):CRF[L, String] = {
  // index features and build the model, then fit weights by minimizing the (batch-cached) objective
  val model: CRFModel[L, String] = new TaggedSequenceModelFactory[L](startSymbol, gazetteer = gazetteer, wordFeaturizer = wordFeaturizer, transitionFeaturizer = transitionFeaturizer, hashFeatureScale = hashFeatures).makeModel(data)
  val obj = new ModelObjective(model, data)
  val cached = new CachedBatchDiffFunction(obj)
  val weights = opt.minimize(cached, obj.initialWeightVector(randomize = false))
  model.extractCRF(weights)
}
/**
 * Trains a binary inside/outside CRF: every tag is collapsed to a Boolean that is
 * true iff the tag differs from `outsideSymbol`, then [[buildSimple]] is applied.
 */
def buildIOModel[L](data: IndexedSeq[TaggedSequence[L, String]],
                    outsideSymbol: L,
                    gazetteer: Gazetteer[Any, String] = Gazetteer.empty[Any, String],
                    opt: OptParams = OptParams()): CRF[Boolean, String] = {
  // collapse each label to whether it is *not* the designated outside symbol
  val binarized: IndexedSeq[TaggedSequence[Boolean, String]] =
    data.map(s => s.copy(tags = s.tags.map(_ != outsideSymbol)))
  buildSimple(binarized, startSymbol = false, gazetteer = gazetteer, opt = opt)
}
/** Per-sentence view of the model: transition scores and per-position label constraints. */
trait Anchoring[L, W] extends TagConstraints[L] {
  /** The input sequence this anchoring is bound to. */
  def words : IndexedSeq[W]
  def length: Int = words.length
  /** Log-potential for labeling position pos with cur given previous label prev; the
    * forward/backward code treats Double.NegativeInfinity as "forbidden". */
  def scoreTransition(pos: Int, prev: Int, cur: Int):Double
  def labelIndex: Index[L]
  def startSymbol: L
  /** Indices of labels allowed at pos. */
  def validSymbols(pos: Int): Set[Int]
  override def allowedTags(pos: Int): Set[Int] = validSymbols(pos)
  /** Combines two anchorings by summing their transition scores; identity anchorings are collapsed. */
  def *(other: Anchoring[L, W]):Anchoring[L, W] = {
    (this, other) match {
      case (x: IdentityAnchoring[L, W], _) => other
      case (_, x: IdentityAnchoring[L, W]) => this
      case (x, y) => new ProductAnchoring(this, other)
    }
  }
}
/** Posterior distribution over labelings of a single sentence. */
trait Marginal[L, W] extends VisitableMarginal[TransitionVisitor[L, W]] {
  def anchoring: Anchoring[L, W]
  def words: IndexedSeq[W] = anchoring.words
  def length: Int = anchoring.length
  /** Visits spans with non-zero score, useful for expected counts */
  def visit( f: TransitionVisitor[L, W])
  /** normalized probability of seeing segment with transition */
  def transitionMarginal(pos: Int, prev:Int, cur: Int):Double
  /** Log of the partition function (normalizer) for this sentence. */
  def logPartition: Double
  def positionMarginal(pos: Int, label: L):Double = positionMarginal(pos, anchoring.labelIndex(label))
  /** Vector of per-label marginals at pos, one entry per label in labelIndex. */
  def positionMarginal(pos: Int):DenseVector[Double] = DenseVector.tabulate(anchoring.labelIndex.size)(positionMarginal(pos, _))
  /** Marginal probability of `label` at `pos`, computed by summing the
    * transition marginals over every possible previous label. */
  def positionMarginal(pos: Int, label: Int):Double = {
    var prev = 0
    val numLabels: Int = anchoring.labelIndex.size
    var sum = 0.0
    while(prev < numLabels) {
      sum += transitionMarginal(pos, prev, label)
      prev += 1
    }
    sum
  }
}
object Marginal {
/** Runs forward-backward on the anchoring and exposes the resulting marginals. */
def apply[L, W](scorer: Anchoring[L, W]):Marginal[L, W] = {
  val forwardScores: Array[Array[Double]] = this.forwardScores(scorer)
  val backwardScore: Array[Array[Double]] = this.backwardScores(scorer)
  // log-sum over all complete paths = log partition function
  val partition = softmax(forwardScores.last)
  val _s = scorer
  new Marginal[L, W] {
    def anchoring: Anchoring[L, W] = _s
    /** Calls f with the marginal count of every transition whose probability is non-zero. */
    def visit(f: TransitionVisitor[L, W]) {
      val numLabels = scorer.labelIndex.size
      var pos = 0
      while (pos < length) {
        var label = 0
        while (label < numLabels) {
          // skip labels that cannot be completed to a full path (backward score is -inf)
          if(!backwardScore(pos+1)(label).isInfinite) {
            var prevLabel = 0
            while (prevLabel < numLabels) {
              val score = transitionMarginal(pos, prevLabel, label)
              if(score != 0.0)
                f(pos, prevLabel, label, score)
              prevLabel += 1
            }
          }
          label += 1
        }
        pos += 1
      }
    }
    /** Normalized probability of seeing the transition prev -> cur at pos. */
    def transitionMarginal(pos: Int, prev: Int, cur: Int): Double = {
      val withoutTrans = forwardScores(pos)(prev) + backwardScore(pos+1)(cur)
      if(withoutTrans.isInfinite) 0.0
      else math.exp(withoutTrans + anchoring.scoreTransition(pos, prev, cur) - logPartition)
    }
    def logPartition: Double = partition
    // println(words + " " + partition)
  }
}
/** Builds a degenerate marginal that places probability 1.0 on the gold tag path;
  * its logPartition is the score of that path (used for the gold term of the objective). */
def goldMarginal[L, W](scorer: Anchoring[L, W], tags: IndexedSeq[L]):Marginal[L, W] = {
  var lastSymbol = scorer.labelIndex(scorer.startSymbol)
  var score = 0.0
  for( (l, pos) <- tags.zipWithIndex) {
    val symbol = scorer.labelIndex(l)
    assert(symbol != -1, s"$l not in index: ${scorer.labelIndex}")
    score += scorer.scoreTransition(pos, lastSymbol, symbol)
    lastSymbol = symbol
  }
  val s = scorer
  new Marginal[L, W] {
    def anchoring: Anchoring[L, W] = s
    /** Visits exactly the gold transitions, each with count 1.0. */
    def visit(f: TransitionVisitor[L, W]) {
      var lastSymbol = scorer.labelIndex(scorer.startSymbol)
      for( (l,pos) <- tags.zipWithIndex) {
        val symbol = scorer.labelIndex(l)
        f.apply(pos, lastSymbol, symbol, 1.0)
        lastSymbol = symbol
      }
    }
    // gold label indices with the start symbol prepended, so indexedSymbols(pos) is the label before pos
    val indexedSymbols = scorer.labelIndex(scorer.startSymbol) +: tags.map(scorer.labelIndex(_))
    /** 1.0 on the gold transition at pos, 0.0 everywhere else. */
    def transitionMarginal(pos: Int, prev: Int, cur: Int): Double = {
      numerics.I(prev == indexedSymbols(pos) && cur == indexedSymbols(pos + 1))
    }
    def logPartition: Double = score
  }
}
/**
 * Forward pass of forward-backward.
 * @param scorer
 * @return forwardScores(i)(label) = logSum of all partial paths reaching the fencepost
 *         before position i in state label; forwardScores(0) is the start state
 */
private def forwardScores[L, W](scorer: CRF.Anchoring[L, W]): Array[Array[Double]] = {
  val length = scorer.length
  val numLabels = scorer.labelIndex.size
  // total weight (logSum) for reaching the fence post before position i with label l. i.e. forward(0) is the start state
  val forwardScores = Array.fill(length+1, numLabels)(Double.NegativeInfinity)
  forwardScores(0)(scorer.labelIndex(scorer.startSymbol)) = 0.0
  // scratch buffer collecting the addends of each cell's log-sum-exp
  val cache = new Array[Double](numLabels * length)
  // forward
  for(i <- 0 until length) {
    val cur = forwardScores(i+1)
    for ( next <- scorer.validSymbols(i)) {
      var offset = 0
      // position 0 can only be entered from the start symbol
      for ( previous <- if(i == 0) IndexedSeq(scorer.labelIndex(scorer.startSymbol)) else scorer.validSymbols(i-1)) {
        val score = scorer.scoreTransition(i, previous, next) + forwardScores(i)(previous)
        if(score != Double.NegativeInfinity) {
          cache(offset) = score
          offset += 1
        }
      }
      // log-sum-exp over the collected (finite) scores
      cur(next) = softmax.array(cache, offset)
    }
  }
  forwardScores
}
/**
 * computes the sum of all completions of derivations, starting with label l at pos.
 * at the end of the sequence
 * @param scorer
 * @tparam L
 * @tparam W
 * @return backwardScore(pos)(label)
 */
private def backwardScores[L, W](scorer: CRF.Anchoring[L, W]): Array[Array[Double]] = {
  val length = scorer.length
  val numLabels = scorer.labelIndex.size
  // total completion weight (logSum) for being in state l at fencepost i + 1,
  val backwardScores = Array.fill(length+1, numLabels)(Double.NegativeInfinity)
  // every state at the final fencepost completes trivially with weight 0.0 (log 1)
  util.Arrays.fill(backwardScores(length), 0.0)
  val accumArray = new Array[Double](numLabels)
  // note: i stops at 1 -- backwardScores(0) is never read, since transition
  // marginals only consult backwardScore(pos + 1) with pos >= 0
  for(i <- (length-1) until 0 by -1) {
    val cur = backwardScores(i)
    for ( curLabel <- scorer.validSymbols(i-1)) {
      var offset = 0
      for( next <- scorer.validSymbols(i)) {
        val nextScore = backwardScores(i+1)(next)
        val score = scorer.scoreTransition(i, curLabel, next) + nextScore
        if(score != Double.NegativeInfinity) {
          accumArray(offset) = score
          offset += 1
        }
      }
      // log-sum-exp over the collected (finite) completions
      cur(curLabel) = softmax(new DenseVector(accumArray, 0, 1, offset))
    }
  }
  backwardScores
}
}
/** Callback invoked with the (expected) count of each transition; used to accumulate statistics. */
trait TransitionVisitor[L, W] {
  def apply(pos: Int, prev: Int, cur: Int, count: Double)
}
/** Produces per-sentence featurizers; the label/symbol accessors mirror those on [[CRF]]. */
trait IndexedFeaturizer[L, W] {
  /** Binds the featurizer to one input sequence. */
  def anchor(w: IndexedSeq[W]):AnchoredFeaturizer[L, W]
  def startSymbol: L
  def labelIndex: Index[L]
  def featureIndex: Index[Feature]
}
/** Featurizer bound to a single sentence. */
trait AnchoredFeaturizer[L, W] {
  def featureIndex: Index[Feature]
  /** Features firing for the transition prev -> cur into position pos. */
  def featuresForTransition(pos: Int, prev: Int, cur: Int):FeatureVector
  /** Indices of labels allowed at pos. */
  def validSymbols(pos: Int):Set[Int]
}
/**
 * Viterbi (max-product) decode: returns the single best-scoring tag sequence
 * under the given anchoring.
 * @param scorer per-sentence transition scores and label constraints
 * @param id identifier attached to the returned TaggedSequence
 */
def viterbi[L, W](scorer: Anchoring[L ,W], id: String=""):TaggedSequence[L, W] = {
  val length = scorer.length
  val numLabels = scorer.labelIndex.size
  // best (max) weight for reaching the fence post before position i with label l; forward(0) is the start state
  val forwardScores = Array.fill(length+1, numLabels)(Double.NegativeInfinity)
  forwardScores(0)(scorer.labelIndex(scorer.startSymbol)) = 0.0
  // backPointer(i)(l) = argmax previous label for entering position i with label l
  val backPointer = Array.fill(length, numLabels)(-1)
  // forward Viterbi pass
  for(i <- 0 until length) {
    val cur = forwardScores(i+1)
    for ( next <- scorer.validSymbols(i)) {
      var currentMax = Double.NegativeInfinity
      var currentArgMax = -1
      // At i == 0 the only legal predecessor is the start symbol; asking the anchoring
      // for validSymbols(-1) would be out of bounds. Mirrors Marginal.forwardScores.
      val previousSymbols: Iterable[Int] =
        if (i == 0) IndexedSeq(scorer.labelIndex(scorer.startSymbol)) else scorer.validSymbols(i-1)
      for ( previous <- previousSymbols) {
        val score = scorer.scoreTransition(i, previous, next) + forwardScores(i)(previous)
        if(score > currentMax) {
          currentMax = score
          currentArgMax = previous
        }
      }
      assert(!currentMax.isNaN)
      assert(!currentMax.isInfinite)
      cur(next) = currentMax
      backPointer(i)(next) = currentArgMax
    }
  }
  // follow the backpointers from the best final label to recover the tag sequence
  val tags = ArrayBuffer[L]()
  def rec(end: Int, label: Int) {
    tags += scorer.labelIndex.get(label)
    if(end > 0) {
      val bestCurrentLabel = backPointer(end)(label)
      rec(end-1, bestCurrentLabel)
    }
  }
  // guard the empty sequence: without it, rec(-1, ...) would emit one spurious tag
  if (length > 0) {
    rec(length-1, (0 until numLabels).maxBy(forwardScores(length)(_)))
  }
  TaggedSequence(tags.reverse, scorer.words, id)
}
/** Minimum-risk (posterior) decode: independently picks the label with the highest
  * position marginal at each position. */
def posteriorDecode[L, W](m: Marginal[L, W], id: String = "") = {
  val labelIndex = m.anchoring.labelIndex
  val numLabels = labelIndex.size
  val bestLabels = for (pos <- 0 until m.length) yield {
    val best = (0 until numLabels).maxBy(label => m.positionMarginal(pos, label))
    labelIndex.get(best)
  }
  TaggedSequence(bestLabels, m.words, id)
}
/** Anchoring whose transition scores are the sum of two component anchorings'. */
case class ProductAnchoring[L, W](a: Anchoring[L ,W], b: Anchoring[L, W]) extends Anchoring[L, W] {
  // reference check is a fast path before the potentially expensive structural equality
  if((a.labelIndex ne b.labelIndex) && (a.labelIndex != b.labelIndex)) throw new IllegalArgumentException("Elements of product anchoring must have the same labelIndex!")
  if(a.startSymbol != b.startSymbol) throw new IllegalArgumentException("Elements of product anchoring must have the same startSymbol!")
  def words: IndexedSeq[W] = a.words
  def scoreTransition(i: Int, prev: Int, cur: Int): Double = {
    var score = a.scoreTransition(i, prev, cur)
    // if a forbids the transition (-inf), keep it forbidden and skip b's work
    if (score != Double.NegativeInfinity) {
      score += b.scoreTransition(i, prev, cur)
    }
    score
  }
  // NOTE(review): only a's constraints are consulted; presumably b's are at least as permissive -- confirm
  def validSymbols(pos: Int): Set[Int] = a.validSymbols(pos)
  def labelIndex: Index[L] = a.labelIndex
  def startSymbol: L = a.startSymbol
}
/** Anchoring that scores every transition 0.0; acts as the identity element for [[Anchoring.*]]. */
class IdentityAnchoring[L, W](val words: IndexedSeq[W], val validSyms: IndexedSeq[Set[Int]], val labelIndex: Index[L], val startSymbol: L) extends Anchoring[L, W] {
  def scoreTransition(pos: Int, prev: Int, cur: Int): Double = 0.0
  def validSymbols(pos: Int): Set[Int] = validSyms(pos)
  // NOTE(review): not part of the Anchoring trait and unused here; looks like a SemiCRF leftover -- confirm
  def canStartLongSegment(pos: Int): Boolean = true
}
}
| followyourheart/epic | src/main/scala/epic/sequences/CRF.scala | Scala | apache-2.0 | 13,322 |
package io.getquill.context.jdbc.sqlite
import io.getquill.{ JdbcContext, Literal, SqliteDialect, TestEntities }
import io.getquill.context.jdbc.TestEncoders
import io.getquill.context.sql.TestDecoders
object testContext extends JdbcContext[SqliteDialect, Literal]("testSqliteDB") with TestEntities with TestEncoders with TestDecoders
| jcranky/quill | quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/TestContext.scala | Scala | apache-2.0 | 337 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources
import org.apache.spark.internal.Logging
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.catalog.BucketSpec
import org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.planning.ScanOperation
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.{FileSourceScanExec, SparkPlan}
import org.apache.spark.sql.execution.datasources.FileFormat.METADATA_NAME
import org.apache.spark.sql.types.{DoubleType, FloatType, StructType}
import org.apache.spark.util.collection.BitSet
/**
* A strategy for planning scans over collections of files that might be partitioned or bucketed
* by user specified columns.
*
* At a high level planning occurs in several phases:
* - Split filters by when they need to be evaluated.
* - Prune the schema of the data requested based on any projections present. Today this pruning
* is only done on top level columns, but formats should support pruning of nested columns as
* well.
* - Construct a reader function by passing filters and the schema into the FileFormat.
* - Using a partition pruning predicates, enumerate the list of files that should be read.
* - Split the files into tasks and construct a FileScanRDD.
* - Add any projection or filters that must be evaluated after the scan.
*
* Files are assigned into tasks using the following algorithm:
* - If the table is bucketed, group files by bucket id into the correct number of partitions.
* - If the table is not bucketed or bucketing is turned off:
* - If any file is larger than the threshold, split it into pieces based on that threshold
* - Sort the files by decreasing file size.
* - Assign the ordered files to buckets using the following algorithm. If the current partition
* is under the threshold with the addition of the next file, add it. If not, open a new bucket
* and add it. Proceed to the next file.
*/
object FileSourceStrategy extends Strategy with PredicateHelper with Logging {
// Bucket pruning applies iff the relation is bucketed on exactly one column
// and there is more than one bucket to choose between.
private def shouldPruneBuckets(bucketSpec: Option[BucketSpec]): Boolean =
  bucketSpec.exists(spec => spec.bucketColumnNames.length == 1 && spec.numBuckets > 1)
/**
 * Computes the set of bucket ids that could contain rows satisfying `expr`.
 * Predicates the analysis cannot reason about conservatively select every bucket.
 */
private def getExpressionBuckets(
    expr: Expression,
    bucketColumnName: String,
    numBuckets: Int): BitSet = {

  // bucket id that a literal value of the bucket column hashes to
  def getBucketNumber(attr: Attribute, v: Any): Int = {
    BucketingUtils.getBucketIdFromValue(attr, numBuckets, v)
  }

  def getBucketSetFromIterable(attr: Attribute, iter: Iterable[Any]): BitSet = {
    val matchedBuckets = new BitSet(numBuckets)
    iter
      .map(v => getBucketNumber(attr, v))
      .foreach(bucketNum => matchedBuckets.set(bucketNum))
    matchedBuckets
  }

  def getBucketSetFromValue(attr: Attribute, v: Any): BitSet = {
    val matchedBuckets = new BitSet(numBuckets)
    matchedBuckets.set(getBucketNumber(attr, v))
    matchedBuckets
  }

  expr match {
    case expressions.Equality(a: Attribute, Literal(v, _)) if a.name == bucketColumnName =>
      getBucketSetFromValue(a, v)
    case expressions.In(a: Attribute, list)
      if list.forall(_.isInstanceOf[Literal]) && a.name == bucketColumnName =>
      getBucketSetFromIterable(a, list.map(e => e.eval(EmptyRow)))
    case expressions.InSet(a: Attribute, hset) if a.name == bucketColumnName =>
      getBucketSetFromIterable(a, hset)
    case expressions.IsNull(a: Attribute) if a.name == bucketColumnName =>
      getBucketSetFromValue(a, null)
    // NaN is handled per floating-point type so it maps to the bucket its literal hashes to
    case expressions.IsNaN(a: Attribute)
      if a.name == bucketColumnName && a.dataType == FloatType =>
      getBucketSetFromValue(a, Float.NaN)
    case expressions.IsNaN(a: Attribute)
      if a.name == bucketColumnName && a.dataType == DoubleType =>
      getBucketSetFromValue(a, Double.NaN)
    // conjunction intersects candidate buckets; disjunction unions them
    case expressions.And(left, right) =>
      getExpressionBuckets(left, bucketColumnName, numBuckets) &
      getExpressionBuckets(right, bucketColumnName, numBuckets)
    case expressions.Or(left, right) =>
      getExpressionBuckets(left, bucketColumnName, numBuckets) |
      getExpressionBuckets(right, bucketColumnName, numBuckets)
    case _ =>
      // cannot reason about this predicate: keep every bucket
      val matchedBuckets = new BitSet(numBuckets)
      matchedBuckets.setUntil(numBuckets)
      matchedBuckets
  }
}
/**
 * Computes the set of buckets worth scanning for the given filters, or None
 * when pruning is impossible and all buckets must be read.
 */
private def genBucketSet(
    normalizedFilters: Seq[Expression],
    bucketSpec: BucketSpec): Option[BitSet] = {
  if (normalizedFilters.isEmpty) {
    return None
  }

  // callers ensure exactly one bucket column (see shouldPruneBuckets)
  val bucketColumnName = bucketSpec.bucketColumnNames.head
  val numBuckets = bucketSpec.numBuckets

  val normalizedFiltersAndExpr = normalizedFilters
    .reduce(expressions.And)
  val matchedBuckets = getExpressionBuckets(normalizedFiltersAndExpr, bucketColumnName,
    numBuckets)

  val numBucketsSelected = matchedBuckets.cardinality()

  logInfo {
    s"Pruned ${numBuckets - numBucketsSelected} out of $numBuckets buckets."
  }

  // None means all the buckets need to be scanned
  if (numBucketsSelected == numBuckets) {
    None
  } else {
    Some(matchedBuckets)
  }
}
/** Plans a file scan for a HadoopFsRelation: splits filters by where they can be applied,
  * prunes partitions/buckets/columns, and wraps the scan with post-scan filter/project nodes. */
def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
  case ScanOperation(projects, filters,
    l @ LogicalRelation(fsRelation: HadoopFsRelation, _, table, _)) =>
    // Filters on this relation fall into four categories based on where we can use them to avoid
    // reading unneeded data:
    //  - partition keys only - used to prune directories to read
    //  - bucket keys only - optionally used to prune files to read
    //  - keys stored in the data only - optionally used to skip groups of data in files
    //  - filters that need to be evaluated again after the scan
    val filterSet = ExpressionSet(filters)

    // only deterministic predicates are safe to push below the scan
    val normalizedFilters = DataSourceStrategy.normalizeExprs(
      filters.filter(_.deterministic), l.output)

    val partitionColumns =
      l.resolve(
        fsRelation.partitionSchema, fsRelation.sparkSession.sessionState.analyzer.resolver)
    val partitionSet = AttributeSet(partitionColumns)

    // this partitionKeyFilters should be the same with the ones being executed in
    // PruneFileSourcePartitions
    val partitionKeyFilters = DataSourceStrategy.getPushedDownFilters(partitionColumns,
      normalizedFilters)

    // subquery expressions are filtered out because they can't be used to prune buckets or pushed
    // down as data filters, yet they would be executed
    val normalizedFiltersWithoutSubqueries =
      normalizedFilters.filterNot(SubqueryExpression.hasSubquery)

    val bucketSpec: Option[BucketSpec] = fsRelation.bucketSpec
    val bucketSet = if (shouldPruneBuckets(bucketSpec)) {
      genBucketSet(normalizedFiltersWithoutSubqueries, bucketSpec.get)
    } else {
      None
    }

    val dataColumns =
      l.resolve(fsRelation.dataSchema, fsRelation.sparkSession.sessionState.analyzer.resolver)

    // Partition keys are not available in the statistics of the files.
    // `dataColumns` might have partition columns, we need to filter them out.
    val dataColumnsWithoutPartitionCols = dataColumns.filterNot(partitionColumns.contains)
    // predicates touching partition columns only contribute their data-column conjuncts
    val dataFilters = normalizedFiltersWithoutSubqueries.flatMap { f =>
      if (f.references.intersect(partitionSet).nonEmpty) {
        extractPredicatesWithinOutputSet(f, AttributeSet(dataColumnsWithoutPartitionCols))
      } else {
        Some(f)
      }
    }
    val supportNestedPredicatePushdown =
      DataSourceUtils.supportNestedPredicatePushdown(fsRelation)
    val pushedFilters = dataFilters
      .flatMap(DataSourceStrategy.translateFilter(_, supportNestedPredicatePushdown))
    logInfo(s"Pushed Filters: ${pushedFilters.mkString(",")}")

    // Predicates with both partition keys and attributes need to be evaluated after the scan.
    val afterScanFilters = filterSet -- partitionKeyFilters.filter(_.references.nonEmpty)
    logInfo(s"Post-Scan Filters: ${afterScanFilters.mkString(",")}")

    val filterAttributes = AttributeSet(afterScanFilters)
    val requiredExpressions: Seq[NamedExpression] = filterAttributes.toSeq ++ projects
    val requiredAttributes = AttributeSet(requiredExpressions)

    // prune data columns down to those the filters/projections actually need
    val readDataColumns =
      dataColumns
        .filter(requiredAttributes.contains)
        .filterNot(partitionColumns.contains)
    val outputSchema = readDataColumns.toStructType
    logInfo(s"Output Data Schema: ${outputSchema.simpleString(5)}")

    val metadataStructOpt = requiredAttributes.collectFirst {
      case MetadataAttribute(attr) => attr
    }

    // TODO (yaohua): should be able to prune the metadata struct only containing what needed
    val metadataColumns = metadataStructOpt.map { metadataStruct =>
      metadataStruct.dataType.asInstanceOf[StructType].fields.map { field =>
        MetadataAttribute(field.name, field.dataType)
      }.toSeq
    }.getOrElse(Seq.empty)

    // outputAttributes should also include the metadata columns at the very end
    val outputAttributes = readDataColumns ++ partitionColumns ++ metadataColumns

    val scan =
      FileSourceScanExec(
        fsRelation,
        outputAttributes,
        outputSchema,
        partitionKeyFilters.toSeq,
        bucketSet,
        None,
        dataFilters,
        table.map(_.identifier))

    // extra Project node: wrap flat metadata columns to a metadata struct
    val withMetadataProjections = metadataStructOpt.map { metadataStruct =>
      val metadataAlias =
        Alias(CreateStruct(metadataColumns), METADATA_NAME)(exprId = metadataStruct.exprId)
      execution.ProjectExec(
        scan.output.dropRight(metadataColumns.length) :+ metadataAlias, scan)
    }.getOrElse(scan)

    val afterScanFilter = afterScanFilters.toSeq.reduceOption(expressions.And)
    val withFilter = afterScanFilter
      .map(execution.FilterExec(_, withMetadataProjections))
      .getOrElse(withMetadataProjections)
    val withProjections = if (projects == withFilter.output) {
      withFilter
    } else {
      execution.ProjectExec(projects, withFilter)
    }
    withProjections :: Nil

  case _ => Nil
}
}
| holdenk/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FileSourceStrategy.scala | Scala | apache-2.0 | 11,322 |
package com.eevolution.context.dictionary.infrastructure.repository
import com.eevolution.context.dictionary.domain.model.PackageImportDetail
import com.eevolution.context.dictionary.infrastructure.db.DbContext._
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: emeris.hernandez@e-evolution.com, http://www.e-evolution.com , http://github.com/EmerisScala
* Created by emeris.hernandez@e-evolution.com , www.e-evolution.com on 26/10/17.
*/
/**
 * Package Import Detail Mapping.
 *
 * Quill mapping of the [[PackageImportDetail]] entity onto the AD_Package_Imp_Detail table;
 * each pair binds one Scala field to its database column name.
 */
trait PackageImportDetailMapping {
  val queryPackageImportDetail = quote {
    querySchema[PackageImportDetail]("AD_Package_Imp_Detail",
      _.packageImportDetailId-> "AD_Package_Imp_Detail_ID",
      _.tenantId-> "AD_Client_ID",
      _.organizationId-> "AD_Org_ID",
      _.isActive-> "IsActive",
      _.created-> "Created",
      _.createdBy-> "CreatedBy",
      _.updated-> "Updated",
      _.updatedBy-> "UpdatedBy",
      _.name-> "Name",
      _.packageImportId-> "AD_Package_Imp_ID",
      _.originalId-> "AD_Original_ID",
      _.backupId-> "AD_Backup_ID",
      _.action-> "Action",
      _.success-> "Success",
      _.typePackage-> "Type",
      _.tableName-> "TableName",
      _.entityId-> "AD_Table_ID",
      _.uninstall-> "Uninstall",
      _.uuid-> "UUID")
  }
}
| adempiere/ADReactiveSystem | dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/repository/PackageImportDetailMapping.scala | Scala | gpl-3.0 | 1,978 |
/*
* Copyright 2015 Webtrends (http://www.webtrends.com)
*
* See the LICENCE.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webtrends.harness.component.socko
import java.net.{HttpURLConnection, URL}
import akka.actor.ActorRef
import akka.util.Timeout
import com.webtrends.harness.command.AddCommand
import com.webtrends.harness.component.socko.client._
import com.webtrends.harness.component.socko.command.TestCommand
import com.webtrends.harness.service.test.TestHarness
import org.junit.runner.RunWith
import org.specs2.runner.JUnitRunner
import scala.concurrent.Await
import scala.concurrent.duration._
/**
* Tests the functionality around the Commands and the mixin socko traits
*
* @author Michael Cuthbert on 2/2/15.
*/
@RunWith(classOf[JUnitRunner])
class CommandSpec extends SockoTestBase {

  var sockoClient:Option[ActorRef] = None
  val commandManager = TestHarness.harness.get.commandManager.get
  commandManager ! AddCommand("TestCommand", classOf[TestCommand])
  // wait a second for the command to be registered in the CommandManager
  Thread.sleep(1000)

  sequential

  /** Lazily resolves (and caches) the socko client actor. */
  def getClient : ActorRef = {
    sockoClient match {
      case Some(sc) => sc
      case None =>
        val d = Duration(2L, "seconds")
        implicit val timeout = Timeout(d)
        val a = actorSystem.actorSelection(sockoManager.path.toStringWithoutAddress + "/" + SockoClient.SockoClientName).resolveOne()
        sockoClient = Some(Await.result(a, d))
        sockoClient.get
    }
  }

  /**
   * Issues a single HTTP request with the given method against the test command
   * endpoint and returns the response status code as a string. Factors out the
   * connection boilerplate that was previously duplicated in every example.
   */
  private def statusFor(method: String): String = {
    val url = new URL("http://127.0.0.1:8080/_wt_internal/testcommand")
    val conn = url.openConnection().asInstanceOf[HttpURLConnection]
    conn.setRequestMethod(method)
    getResponseContent(conn).status
  }

  "Commands with Socko Mixin Traits" should {
    "handle a GET request" in {
      statusFor("GET") mustEqual "200"
    }

    "handle a POST request" in {
      statusFor("POST") mustEqual "200"
    }

    "handle a PUT request" in {
      statusFor("PUT") mustEqual "200"
    }

    "handle a HEAD request" in {
      statusFor("HEAD") mustEqual "200"
    }

    "handle a OPTIONS request" in {
      statusFor("OPTIONS") mustEqual "200"
    }
  }
}
| Webtrends/wookiee-socko | src/test/scala/com/webtrends/harness/component/socko/CommandSpec.scala | Scala | apache-2.0 | 3,656 |
import leon.annotation._
import leon.lang._
object InsertionSort {
  // cons-list over BigInt, defined locally so Leon can reason about it
  sealed abstract class List
  case class Cons(head: BigInt,tail: List) extends List
  case class Nil() extends List

  // local option type (Leon-friendly stand-in for scala.Option[BigInt])
  sealed abstract class OptInt
  case class Some(value: BigInt) extends OptInt
  case class None() extends OptInt

  /** Length of the list; the postcondition lets Leon assume non-negativity. */
  def size(l : List) : BigInt = (l match {
    case Nil() => BigInt(0)
    case Cons(_, xs) => 1 + size(xs)
  }) ensuring(_ >= 0)

  /** Set of elements in the list (order and multiplicity are forgotten). */
  def contents(l: List): Set[BigInt] = l match {
    case Nil() => Set.empty
    case Cons(x,xs) => contents(xs) ++ Set(x)
  }

  /** Smallest element of the list, or None() for the empty list. */
  def min(l : List) : OptInt = l match {
    case Nil() => None()
    case Cons(x, xs) => min(xs) match {
      case None() => Some(x)
      case Some(x2) => if(x < x2) Some(x) else Some(x2)
    }
  }

  // verification property: the head of a non-empty list is never below its minimum
  def minProp0(l : List) : Boolean = (l match {
    case Nil() => true
    case c @ Cons(x, xs) => min(c) match {
      case None() => false
      case Some(m) => x >= m
    }
  }) holds

  // verification property: on a sorted list, the head *is* the minimum
  // (the size bound keeps the proof search tractable)
  def minProp1(l : List) : Boolean = {
    require(isSorted(l) && size(l) <= 5)
    l match {
      case Nil() => true
      case c @ Cons(x, xs) => min(c) match {
        case None() => false
        case Some(m) => x == m
      }
    }
  } holds

  /** Non-decreasing order check. */
  def isSorted(l: List): Boolean = l match {
    case Nil() => true
    case Cons(x, Nil()) => true
    case Cons(x, Cons(y, ys)) => x <= y && isSorted(Cons(y, ys))
  }

  /* Inserting element 'e' into a sorted list 'l' produces a sorted list with
   * the expected content and size */
  def sortedIns(e: BigInt, l: List): List = {
    require(isSorted(l))
    l match {
      case Nil() => Cons(e,Nil())
      case Cons(x,xs) => if (x <= e) Cons(x,sortedIns(e, xs)) else Cons(e, l)
    }
  } ensuring(res => contents(res) == contents(l) ++ Set(e)
                 && isSorted(res)
                 && size(res) == size(l) + 1
            )

  /* Inserting element 'e' into a sorted list 'l' produces a sorted list with
   * the expected content and size */
  // NOTE: the precondition is deliberately commented out so the verifier reports
  // the postcondition violation -- do not "fix" this; it is the buggy example
  def buggySortedIns(e: BigInt, l: List): List = {
    // require(isSorted(l))
    l match {
      case Nil() => Cons(e,Nil())
      case Cons(x,xs) => if (x <= e) Cons(x,buggySortedIns(e, xs)) else Cons(e, l)
    }
  } ensuring(res => contents(res) == contents(l) ++ Set(e)
                 && isSorted(res)
                 && size(res) == size(l) + 1
            )

  /* Insertion sort yields a sorted list of same size and content as the input
   * list */
  def sort(l: List): List = (l match {
    case Nil() => Nil()
    case Cons(x,xs) => sortedIns(x, sort(xs))
  }) ensuring(res => contents(res) == contents(l)
                  && isSorted(res)
                  && size(res) == size(l)
             )

  // excluded from verification: plain runnable demo
  @ignore
  def main(args: Array[String]): Unit = {
    val ls: List = Cons(5, Cons(2, Cons(4, Cons(5, Cons(1, Cons(8,Nil()))))))
    println(ls)
    println(sort(ls))
  }
}
| regb/leon | testcases/verification/list-algorithms/InsertionSort.scala | Scala | gpl-3.0 | 2,863 |
/** MACHINE-GENERATED FROM AVRO SCHEMA. DO NOT EDIT DIRECTLY */
package example.idl.model
import org.apache.avro.Schema
import org.oedura.scavro.{AvroMetadata, AvroReader, AvroSerializeable}
import other.ns.model.{ExternalDependency, Suit}
import example.idl.{DependentRecord => JDependentRecord, DependentRecord2 => JDependentRecord2, DependentRecord3 => JDependentRecord3, Embedded => JEmbedded}
import other.ns.{ExternalDependency => JExternalDependency, Suit => JSuit}
// NOTE: this file is machine-generated from the Avro schema (see header);
// changes made here will be lost on regeneration -- edit the schema source instead.
sealed trait ImportProtocol extends AvroSerializeable with Product with Serializable

/** Scala-side record wrapping the generated Java [[JDependentRecord]]. */
final case class DependentRecord(dependency: ExternalDependency, number: Int) extends AvroSerializeable with ImportProtocol {
  type J = JDependentRecord
  override def toAvro: JDependentRecord = {
    new JDependentRecord(dependency.toAvro, number)
  }
}

final object DependentRecord {
  implicit def reader = new AvroReader[DependentRecord] {
    override type J = JDependentRecord
  }
  implicit val metadata: AvroMetadata[DependentRecord, JDependentRecord] = new AvroMetadata[DependentRecord, JDependentRecord] {
    override val avroClass: Class[JDependentRecord] = classOf[JDependentRecord]
    override val schema: Schema = JDependentRecord.getClassSchema()
    override val fromAvro: (JDependentRecord) => DependentRecord = {
      (j: JDependentRecord) => DependentRecord(ExternalDependency.metadata.fromAvro(j.getDependency), j.getNumber.toInt)
    }
  }
}
// NOTE(review): machine-generated by avrohugger — regenerate from the IDL rather than hand-editing.
final case class DependentRecord2(dependency: Suit.Value, name: String) extends AvroSerializeable with ImportProtocol {
  type J = JDependentRecord2
  // Maps the Scala Suit enumeration to the Java enum expected by Avro.
  override def toAvro: JDependentRecord2 = {
    new JDependentRecord2(dependency match {
      case Suit.SPADES => JSuit.SPADES
      case Suit.DIAMONDS => JSuit.DIAMONDS
      case Suit.CLUBS => JSuit.CLUBS
      case Suit.HEARTS => JSuit.HEARTS
    }, name)
  }
}
final object DependentRecord2 {
  // Scavro wiring: reader + metadata bind this case class to its Java counterpart.
  implicit def reader = new AvroReader[DependentRecord2] {
    override type J = JDependentRecord2
  }
  implicit val metadata: AvroMetadata[DependentRecord2, JDependentRecord2] = new AvroMetadata[DependentRecord2, JDependentRecord2] {
    override val avroClass: Class[JDependentRecord2] = classOf[JDependentRecord2]
    override val schema: Schema = JDependentRecord2.getClassSchema()
    // Inverse of toAvro: maps the Java enum back to the Scala enumeration.
    override val fromAvro: (JDependentRecord2) => DependentRecord2 = {
      (j: JDependentRecord2) => DependentRecord2(j.getDependency match {
        case JSuit.SPADES => Suit.SPADES
        case JSuit.DIAMONDS => Suit.DIAMONDS
        case JSuit.CLUBS => Suit.CLUBS
        case JSuit.HEARTS => Suit.HEARTS
      }, j.getName.toString)
    }
  }
}
// NOTE(review): machine-generated by avrohugger — regenerate from the IDL rather than hand-editing.
final case class DependentRecord3(dependency: Embedded, value: Boolean) extends AvroSerializeable with ImportProtocol {
  type J = JDependentRecord3
  // Converts to the Avro-generated Java class used on the wire.
  override def toAvro: JDependentRecord3 = {
    new JDependentRecord3(dependency.toAvro, value)
  }
}
final object DependentRecord3 {
  // Scavro wiring: reader + metadata bind this case class to its Java counterpart.
  implicit def reader = new AvroReader[DependentRecord3] {
    override type J = JDependentRecord3
  }
  implicit val metadata: AvroMetadata[DependentRecord3, JDependentRecord3] = new AvroMetadata[DependentRecord3, JDependentRecord3] {
    override val avroClass: Class[JDependentRecord3] = classOf[JDependentRecord3]
    override val schema: Schema = JDependentRecord3.getClassSchema()
    // Converts back from the Java representation produced by Avro deserialization.
    override val fromAvro: (JDependentRecord3) => DependentRecord3 = {
      (j: JDependentRecord3) => DependentRecord3(Embedded.metadata.fromAvro(j.getDependency), j.getValue)
    }
  }
} | julianpeeters/avrohugger | avrohugger-core/src/test/expected/scavro/example/idl/model/ImportProtocol.scala | Scala | apache-2.0 | 3,482 |
package pl.touk.nussknacker.k8s.manager.deployment
import net.ceedubs.ficus.Ficus
import net.ceedubs.ficus.Ficus._
import net.ceedubs.ficus.readers.ArbitraryTypeReader._
import net.ceedubs.ficus.readers.ValueReader
import pl.touk.nussknacker.k8s.manager.deployment.K8sScalingConfig._
/** Maps a requested scenario parallelism to concrete Kubernetes scaling options. */
trait K8sScalingOptionsDeterminer {
  def determine(parallelism: Int): K8sScalingOptions
}
case class K8sScalingOptions(replicasCount: Int, noOfTasksInReplica: Int)
object K8sScalingOptionsDeterminer {

  // Four tasks per replica is a sensible default: containers commonly get ~4 CPUs.
  val defaultTasksPerReplica = 4

  val defaultScalingDeterminer: K8sScalingOptionsDeterminer =
    new DividingParallelismK8sScalingOptionsDeterminer(DividingParallelismConfig(defaultTasksPerReplica))

  /** Picks the determiner matching the (optional) scaling configuration; absent or
    * empty configuration falls back to the dividing-parallelism default. */
  def apply(config: Option[K8sScalingConfig]): K8sScalingOptionsDeterminer =
    config.fold(defaultScalingDeterminer) {
      case NotDefinedConfig                    => defaultScalingDeterminer
      case fixed: FixedReplicasCountConfig     => new FixedReplicasCountK8sScalingOptionsDeterminer(fixed)
      case dividing: DividingParallelismConfig => new DividingParallelismK8sScalingOptionsDeterminer(dividing)
    }
}
/** Always deploys the configured number of replicas; tasks are spread evenly across them (rounded up). */
class FixedReplicasCountK8sScalingOptionsDeterminer(config: FixedReplicasCountConfig) extends K8sScalingOptionsDeterminer {

  override def determine(parallelism: Int): K8sScalingOptions = {
    val replicas = config.fixedReplicasCount
    val tasksInReplica = math.ceil(parallelism.toDouble / replicas).toInt
    K8sScalingOptions(replicasCount = replicas, noOfTasksInReplica = tasksInReplica)
  }
}
/** Derives the replica count by dividing parallelism by the configured tasks-per-replica budget. */
class DividingParallelismK8sScalingOptionsDeterminer(config: DividingParallelismConfig) extends K8sScalingOptionsDeterminer {

  override def determine(parallelism: Int): K8sScalingOptions = {
    val replicas = math.ceil(parallelism.toDouble / config.tasksPerReplica).toInt
    // Re-balance: with the replica count now fixed, compute the per-replica task share (rounded up).
    val tasksInReplica = math.ceil(parallelism.toDouble / replicas).toInt
    K8sScalingOptions(replicasCount = replicas, noOfTasksInReplica = tasksInReplica)
  }
}
// Scaling configuration ADT: exactly one strategy (or none) may be present in the config.
sealed trait K8sScalingConfig
object K8sScalingConfig {
  case object NotDefinedConfig extends K8sScalingConfig
  case class FixedReplicasCountConfig(fixedReplicasCount: Int) extends K8sScalingConfig
  case class DividingParallelismConfig(tasksPerReplica: Int) extends K8sScalingConfig
  val fixedReplicasCountPath = "fixedReplicasCount"
  val tasksPerReplicaPath = "tasksPerReplica"
  // Ficus reader: the pair of hasPath checks enforces that the two options are mutually exclusive.
  implicit def valueReader: ValueReader[K8sScalingConfig] = Ficus.configValueReader.map { config =>
    (config.hasPath(fixedReplicasCountPath), config.hasPath(tasksPerReplicaPath)) match {
      case (false, false) => NotDefinedConfig
      case (true, false) => config.as[FixedReplicasCountConfig]
      case (false, true) => config.as[DividingParallelismConfig]
      case (true, true) => throw new IllegalArgumentException(s"You can specify only one scaling config option: either $fixedReplicasCountPath or $tasksPerReplicaPath")
    }
  }
} | TouK/nussknacker | engine/lite/k8sDeploymentManager/src/main/scala/pl/touk/nussknacker/k8s/manager/deployment/K8sScalingOptionsDeterminer.scala | Scala | apache-2.0 | 2,941 |
/*
* Accio is a platform to launch computer science experiments.
* Copyright (C) 2016-2018 Vincent Primault <v.primault@ucl.ac.uk>
*
* Accio is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Accio is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Accio. If not, see <http://www.gnu.org/licenses/>.
*/
package fr.cnrs.liris.lumos.transport
import com.twitter.finagle.Service
import com.twitter.util.{Duration, Future}
import fr.cnrs.liris.lumos.domain.thrift
import fr.cnrs.liris.lumos.server.LumosService.{GetInfo, GetJob, ListJobs, PushEvent}
import fr.cnrs.liris.lumos.server._
import scala.collection.mutable
/**
 * Test double for LumosService that records every pushed event in memory,
 * simulating `latency` per push.
 *
 * Fix: previously `eventsOf` read the mutable buffer with no lock, and the
 * `synchronized` inside `pushEvent` locked the anonymous `Service` instance
 * (its `this`), not this collector — so concurrent pushes and reads raced.
 * Both paths now synchronize on the enclosing collector instance.
 */
private[transport] class LumosServiceCollector(latency: Duration) extends LumosService.ServicePerEndpoint {
  // Guarded by `this` (the LumosServiceCollector instance).
  private[this] val collector = mutable.ListBuffer.empty[thrift.Event]

  /** Returns an immutable snapshot of the events recorded for the given parent. */
  def eventsOf(parent: String): Seq[thrift.Event] = synchronized {
    collector.filter(_.parent == parent).toList
  }

  override def pushEvent: Service[PushEvent.Args, PushEventResponse] = new Service[PushEvent.Args, PushEventResponse] {
    override def apply(request: PushEvent.Args): Future[PushEventResponse] = LumosServiceCollector.this.synchronized {
      // Simulated transport latency; held under the lock, as in the original, to serialize pushes.
      Thread.sleep(latency.inMillis)
      collector += request.req.event
      Future.value(PushEventResponse())
    }
  }

  // Endpoints below are intentionally unimplemented for this test collector.
  override def getInfo: Service[GetInfo.Args, GetInfoResponse] = ???
  override def getJob: Service[GetJob.Args, GetJobResponse] = ???
  override def listJobs: Service[ListJobs.Args, ListJobsResponse] = ???
}
package com.wuyuntao.aeneas.migration.dsl
import com.datastax.driver.core.RegularStatement
import com.datastax.driver.core.SimpleStatement
/** Mix-in exposing migration primitives on top of a Cassandra statement executor. */
trait DbModifier extends TableModifier with ColumnModifier with TypeModifier {
  // Implementors decide how/where the statement is executed (session, batch, ...).
  def executeStatement(statement: RegularStatement)
  // Convenience: runs raw CQL by wrapping it in a SimpleStatement.
  def executeSql(sql: String) = {
    executeStatement(new SimpleStatement(sql))
  }
} | wuyuntao/Aeneas | aeneas-migration/src/main/scala/com/wuyuntao/aeneas/migration/dsl/DbModifier.scala | Scala | apache-2.0 | 370 |
package challenge7
import core._
/*
* A state data type that represents the threading
* of some state value through computations in some effect M.
*/
// NOTE(review): the ??? bodies in this file are exercise stubs by design (challenge repo);
// do not fill in solutions here.
case class StateT[M[_], S, A](run: S => M[(S, A)]) {
  /*
   * Exercise 7.1:
   *
   * Implement map for StateT[M, S, A].
   *
   * The following laws must hold:
   * 1) r.map(z => z) == r
   * 2) r.map(z => f(g(z))) == r.map(g).map(f)
   *
   */
  def map[B](f: A => B)(implicit M: Monad[M]): StateT[M, S, B] =
    ???
  /*
   * Exercise 7.2:
   *
   * Implement flatMap (a.k.a. bind, a.k.a. >>=).
   *
   * The following law must hold:
   *   r.flatMap(f).flatMap(g) == r.flatMap(z => f(z).flatMap(g))
   *
   */
  def flatMap[B](f: A => StateT[M, S, B])(implicit M: Monad[M]): StateT[M, S, B] =
    ???
}
object StateT {
  // NOTE(review): the ??? bodies are exercise stubs by design; solutions belong with the learner.
  /*
   * Exercise 7.3:
   *
   * Implement value (a.k.a. return, point, pure).
   *
   * Hint: Try using StateT constructor.
   */
  def value[M[_]: Monad, S, A](a: => A): StateT[M, S, A] =
    ???
  /*
   * Exercise 7.4:
   *
   * Implement get.
   *
   * Get provides access to the current state (S).
   *
   * Hint: Try using StateT constructor.
   */
  def get[M[_]: Monad, S]: StateT[M, S, S] =
    ???
  /*
   * Exercise 7.5:
   *
   * Implement gets.
   *
   * Gets provides access to a view of the current state (S).
   *
   * Hint: Try building on get.
   */
  def gets[M[_]: Monad, S, A](f: S => A): StateT[M, S, A] =
    ???
  /*
   * Exercise 7.6:
   *
   * Implement modify.
   *
   * Update the current state and produce no value.
   *
   * Hint: Try using State constructor.
   */
  def modify[M[_]: Monad, S](f: S => S): StateT[M, S, Unit] =
    ???
  /*
   * Exercise 7.7:
   *
   * Implement put.
   *
   * Clobber the current state and produce no value.
   *
   * Hint: Try building on modify.
   */
  def put[M[_]: Monad, S](s: S): StateT[M, S, Unit] =
    ???
  // Type-lambda helpers: partially apply StateT's type parameters so instances
  // for Monad (fix F and S) and MonadTrans (fix S only) can be expressed.
  class StateT_[F[_], S] {
    type l[a] = StateT[F, S, a]
  }
  class StateT__[S] {
    type l[f[_], a] = StateT[f, S, a]
  }
  implicit def StateTMonad[F[_], S](implicit F: Monad[F]): Monad[StateT_[F, S]#l] =
    new Monad[StateT_[F, S]#l] {
      def point[A](a: => A) = StateT(s => F.point((s, a)))
      def bind[A, B](a: StateT[F, S, A])(f: A => StateT[F, S, B]) = a flatMap f
    }
  /*
   * Exercise 7.8:
   *
   * Implement monad trans instance.
   *
   * Hint: Try using StateT constructor and Monad[M].map(ga).
   */
  implicit def StateTMonadTrans[S]: MonadTrans[StateT__[S]#l] =
    ???
}
| Kimply/scala-challenge | src/main/scala/challenge7/StateT.scala | Scala | mit | 2,477 |
package applicative
import monad.Functor
import monad.Monad.Id
import monoid.Monoid
import monoid.Monoid.Foldable
/**
* Created by ariwaranosai on 16/8/27.
*
*/
/**
 * Applicative functor: effects combined pointwise, where later effects cannot
 * depend on the results of earlier ones. `map2` and `unit` are the primitives;
 * everything else is derived from them.
 */
trait Applicative[F[_]] extends Functor[F] { self =>
  /** Primitive: combine two effectful values with a pure binary function. */
  def map2[A, B, C](fa: F[A], fb: F[B])(f: (A, B) => C): F[C]
  /** `map2` re-derived from `apply`/`unit`, kept for reference. */
  def map2_[A, B, C](fa: F[A], fb: F[B])(f: (A, B) => C): F[C] =
    apply(apply(unit[A => B => C](f.curried))(fa))(fb)
  /** Primitive: lift a pure value into the effect. */
  def unit[A](a: => A): F[A]
  /** Applies an effectful function to an effectful argument. */
  def apply[A, B](fab: F[A => B])(fa: F[A]): F[B] =
    map2(fab, fa)((f, x) => f(x))
  def map[A, B](fa: F[A])(f: A => B): F[B] = map2(fa, unit(()))((x, y) => f(x))
  def map_[A, B](fa: F[A])(f: A => B): F[B] =
    apply(unit[A => B](f))(fa)
  def map3[A, B, C, D](fa: F[A],
                       fb: F[B],
                       fc: F[C])(f: (A, B, C) => D): F[D] = {
    apply(apply(apply(unit[A => B => C => D](f.curried))(fa))(fb))(fc)
  }
  /** Product of two applicatives: runs both side by side over the same structure. */
  def product[G[_]](G: Applicative[G]): Applicative[({type f[x] = (F[x], G[x])})#f] =
    new Applicative[({type f[x] = (F[x], G[x])})#f] {
      override def unit[A](a: => A): (F[A], G[A]) = (self.unit(a), G.unit(a))
      override def map2[A, B, C](fa: (F[A], G[A]), fb: (F[B], G[B]))(f: (A, B) => C): (F[C], G[C]) =
        (self.map2(fa._1, fb._1)((x, y) => f(x, y)),
          G.map2(fa._2, fb._2)((x, y) => f(x, y)))
    }
  /** Composition of two applicatives: nests G inside F (applicatives always compose). */
  def compose[G[_]](G: Applicative[G]): Applicative[({type f[x] = F[G[x]]})#f] =
    new Applicative[({type f[x] = F[G[x]]})#f] {
      override def map2[A, B, C](fa: F[G[A]], fb: F[G[B]])(f: (A, B) => C): F[G[C]] = {
        self.map2(fa, fb)((x, y) => G.map2(x, y)((k, v) => f(k, v)))
      }
      override def unit[A](a: => A): F[G[A]] = self.unit(G.unit(a))
    }
  /** Runs `f` over the list, collecting the results inside the effect. */
  def traverse[A, B](as: List[A])(f: A => F[B]): F[List[B]] =
    as.foldRight(unit(List[B]()))((x, xs) => map2(f(x), xs)(_ :: _))
  def sequence[A](fas: List[F[A]]): F[List[A]] =
    traverse(fas)(identity)
  /** Sequences a map of effects, preserving keys. */
  def sequence[K, V](ofa: Map[K, F[V]]): F[Map[K, V]] =
    ofa.foldRight(unit(Map[K, V]()))((x, fa) => map2(fa, x._2)((ys, y) => ys.updated(x._1, y)))
  /** Repeats `fa` exactly `n` times and collects the results.
   * Bug fix: the previous version traversed `(0 to n)`, which has n + 1
   * elements and therefore produced one copy too many. */
  def replicateM[A](n: Int, fa: F[A]): F[List[A]] =
    sequence(List.fill(n)(fa))
  def product[A, B](fa: F[A], fb: F[B]): F[(A, B)] =
    map2(fa, fb)((_, _))
}
// Validation: like Either, but its applicative accumulates every failure instead of short-circuiting.
sealed trait Validation[+E, +A]
// head/tail split guarantees a Failure carries at least one error.
case class Failure[E](head: E, tail: Vector[E] = Vector()) extends Validation[E, Nothing]
case class Success[A](a: A) extends Validation[Nothing, A]
object Applicative {
  // Applicative for infinite streams: unit repeats forever, map2 zips pointwise.
  val streamApplicative = new Applicative[Stream] {
    override def unit[A](a: => A): Stream[A] = Stream.continually(a)
    override def map2[A, B, C](a: Stream[A], b: Stream[B])(f: (A, B) => C): Stream[C] =
      a zip b map f.tupled
    // sequence
    // Stream(1, ?) Stream(2, ?) Stream(3, ?) => Stream(List(1,2,3), ?)
  }
  // Unlike a monad, this applicative combines failures from BOTH sides via map2.
  def validationApplicative[E]: Applicative[({type f[x] = Validation[E, x]})#f] = new Applicative[({type f[x] = Validation[E, x]})#f] {
    override def map2[A, B, C](fa: Validation[E, A], fb: Validation[E, B])(f: (A, B) => C): Validation[E, C] =
      (fa, fb) match {
        case (Success(x), Success(y)) => Success(f(x, y))
        case (Success(_), Failure(h, tl)) => Failure(h, tl)
        case (Failure(h, tl), Success(_)) => Failure(h, tl)
        // Both failed: keep the first head and append all of the second failure's errors.
        case (Failure(hx, tlx), Failure(hy, tly)) => Failure(hx, (tlx :+ hy) ++ tly)
      }
    override def unit[A](a: => A): Validation[E, A] = Success(a)
  }
}
// A Monad is an Applicative whose map/map2 are re-derived from flatMap; unit + flatMap are primitive.
trait Monad[F[_]] extends Applicative[F] {
  def flatMap[A, B](fa: F[A])(f: A => F[B]): F[B]
  // Collapses one level of effect nesting.
  def join[A](ffa: F[F[A]]): F[A] = flatMap(ffa)(fa => fa)
  // Kleisli composition of effectful functions.
  def compose[A, B, C](f: A => F[B], g: B => F[C]): A => F[C] =
    a => flatMap(f(a))(g)
  override def map[A, B](fa: F[A])(f: A => B): F[B] =
    flatMap(fa)((a: A) => unit(f(a)))
  override def map2[A, B, C](fa: F[A], fb: F[B])(f: (A, B) => C): F[C] =
    flatMap(fa)(a => map(fb)(b => f(a, b)))
}
object Monad {
  /** Monad instance for Either with a fixed error type E: Right chains, Left short-circuits. */
  def eitherMonad[E]: Monad[({type f[x] = Either[E, x]})#f] = new Monad[({type f[x] = Either[E, x]})#f] {
    override def unit[A](a: => A): Either[E, A] = Right(a)
    override def flatMap[A, B](fa: Either[E, A])(f: (A) => Either[E, B]): Either[E, B] =
      fa.fold(e => Left(e), a => f(a))
  }
}
// Traverse: traverse/sequence are mutually defined, so an instance must override at least one.
// map and foldMap are both recovered from traverse via special applicatives (Id and Const).
trait Traverse[F[_]] extends Functor[F] with Foldable[F] { self =>
  def traverse[G[_]: Applicative, A, B](fa: F[A])(f: A => G[B]): G[F[B]] =
    sequence(map(fa)(f))
  def sequence[G[_]: Applicative, A](fga: F[G[A]]): G[F[A]] =
    traverse(fga)(identity)
  // Identity "effect": lets traverse degenerate into plain map.
  type Id[A] = A
  implicit val idMonad = new Monad[Id] {
    def unit[A](a: => A): Id[A] = a
    override def flatMap[A, B](fa: Id[A])(f: (A) => Id[B]): Id[B] = f(fa)
  }
  override def map[A, B](fa: F[A])(f: (A) => B): F[B] =
    traverse[Id, A, B](fa)(x => idMonad.unit(f(x)))
  // Constant "effect": ignores the value slot, so traversing just combines monoid values.
  type Const[M, B] = M
  implicit def monoidApplicative[M](M: Monoid[M]) = new Applicative[({type f[x] = Const[M, x]})#f] {
    override def unit[A](a: => A): Const[M, A] = M.zero
    override def map2[A, B, C](fa: Const[M, A], fb: Const[M, B])(f: (A, B) => C): Const[M, C] =
      M.op(fa, fb)
  }
  override def foldMap[A, B](as: F[A])(f: (A) => B)(m: Monoid[B]): B =
    traverse[({type f[x] = Const[B, x]})#f, A, Nothing](as)(f)(monoidApplicative(m))
}
// Companion reserved for Traverse instances (currently empty).
object Traverse {
}
| ariwaranosai/FPinScala | src/main/scala/applicative/Applicative.scala | Scala | mit | 5,217 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import kafka.utils.ZkUtils._
import kafka.utils.CoreUtils._
import kafka.utils.{Json, SystemTime, Logging}
import org.I0Itec.zkclient.exception.ZkNodeExistsException
import org.I0Itec.zkclient.IZkDataListener
import kafka.controller.ControllerContext
import kafka.controller.KafkaController
/**
* This class handles zookeeper based leader election based on an ephemeral path. The election module does not handle
* session expiration, instead it assumes the caller will handle it by probably try to re-elect again. If the existing
* leader is dead, this class will handle automatic re-election and if it succeeds, it invokes the leader state change
* callback
*/
class ZookeeperLeaderElector(controllerContext: ControllerContext,
                             electionPath: String,
                             onBecomingLeader: () => Unit,
                             onResigningAsLeader: () => Unit,
                             brokerId: Int)
  extends LeaderElector with Logging {
  // -1 means "no known leader". Mutations happen under controllerContext.controllerLock
  // (startup and the ZK listener callbacks); elect/resign are expected to be called under it too.
  var leaderId = -1
  // create the election path in ZK, if one does not exist
  val index = electionPath.lastIndexOf("/")
  if (index > 0)
    makeSurePersistentPathExists(controllerContext.zkClient, electionPath.substring(0, index))
  val leaderChangeListener = new LeaderChangeListener
  // Registers the ZK listener first, then runs an election, all under the controller lock.
  def startup {
    inLock(controllerContext.controllerLock) {
      controllerContext.zkClient.subscribeDataChanges(electionPath, leaderChangeListener)
      elect
    }
  }
  // Reads the current controller id from ZK; -1 when the election znode is absent.
  private def getControllerID(): Int = {
    readDataMaybeNull(controllerContext.zkClient, electionPath)._1 match {
       case Some(controller) => KafkaController.parseControllerId(controller)
       case None => -1
    }
  }
  // Attempts to become leader by creating the ephemeral election znode.
  // Returns whether this broker is the leader afterwards.
  def elect: Boolean = {
    val timestamp = SystemTime.milliseconds.toString
    val electString = Json.encode(Map("version" -> 1, "brokerid" -> brokerId, "timestamp" -> timestamp))
    leaderId = getControllerID
    /*
     * We can get here during the initial startup and the handleDeleted ZK callback. Because of the potential race condition,
     * it's possible that the controller has already been elected when we get here. This check will prevent the following
     * createEphemeralPath method from getting into an infinite loop if this broker is already the controller.
     */
    if(leaderId != -1) {
       debug("Broker %d has been elected as leader, so stopping the election process.".format(leaderId))
       return amILeader
    }
    try {
      createEphemeralPathExpectConflictHandleZKBug(controllerContext.zkClient, electionPath, electString, brokerId,
        (controllerString : String, leaderId : Any) => KafkaController.parseControllerId(controllerString) == leaderId.asInstanceOf[Int],
        controllerContext.zkSessionTimeout)
      info(brokerId + " successfully elected as leader")
      leaderId = brokerId
      onBecomingLeader()
    } catch {
      case e: ZkNodeExistsException =>
        // If someone else has written the path, then
        leaderId = getControllerID
        if (leaderId != -1)
          debug("Broker %d was elected as leader instead of broker %d".format(leaderId, brokerId))
        else
          warn("A leader has been elected but just resigned, this will result in another round of election")
      case e2: Throwable =>
        error("Error while electing or becoming leader on broker %d".format(brokerId), e2)
        // On unexpected failure, delete our (possibly half-created) znode to trigger a clean re-election.
        resign()
    }
    amILeader
  }
  def close = {
    leaderId = -1
  }
  def amILeader : Boolean = leaderId == brokerId
  // Gives up leadership by removing the ephemeral election znode.
  def resign() = {
    leaderId = -1
    deletePath(controllerContext.zkClient, electionPath)
  }
  /**
   * We do not have session expiration listen in the ZkElection, but assuming the caller who uses this module will
   * have its own session expiration listener and handler
   */
  class LeaderChangeListener extends IZkDataListener with Logging {
    /**
     * Called when the leader information stored in zookeeper has changed. Record the new leader in memory
     * @throws Exception On any error.
     */
    @throws(classOf[Exception])
    def handleDataChange(dataPath: String, data: Object) {
      inLock(controllerContext.controllerLock) {
        val amILeaderBeforeDataChange = amILeader
        leaderId = KafkaController.parseControllerId(data.toString)
        info("New leader is %d".format(leaderId))
        // The old leader needs to resign leadership if it is no longer the leader
        if (amILeaderBeforeDataChange && !amILeader)
          onResigningAsLeader()
      }
    }
    /**
     * Called when the leader information stored in zookeeper has been delete. Try to elect as the leader
     * @throws Exception
     *             On any error.
     */
    @throws(classOf[Exception])
    def handleDataDeleted(dataPath: String) {
      inLock(controllerContext.controllerLock) {
        debug("%s leader change listener fired for path %s to handle data deleted: trying to elect as a leader"
          .format(brokerId, dataPath))
        if(amILeader)
          onResigningAsLeader()
        elect
      }
    }
  }
}
| usakey/kafka | core/src/main/scala/kafka/server/ZookeeperLeaderElector.scala | Scala | apache-2.0 | 5,879 |
// Copyright: 2017 Sam Halliday
// License: http://www.gnu.org/licenses/lgpl.html
import sbt._
import sbt.Keys._
object NeoJmhKeys {
  /** Where you put your JMH code. */
  val Jmh = config("jmh") extend Test
  /** Selects the JMH benchmark-source generator backend. */
  val neoJmhGenerator = settingKey[String]("Available: `reflection` or `asm`")
}
/**
* https://github.com/ktoso/sbt-jmh/ rewritten as an idiomatic sbt
* Configuration (not requiring a separate Project).
*/
object NeoJmh extends AutoPlugin {
  import NeoJmhKeys._
  val autoImport = NeoJmhKeys
  // Hidden internal config: the generated JMH sources are compiled and run here,
  // keeping them out of the user-visible Jmh configuration.
  val JmhInternal = (config("jmh-internal") extend Test).hide
  val generateJmhSourcesAndResources = taskKey[(Seq[File], Seq[File])]("Generate benchmark JMH Java code and resources")
  override def requires = plugins.JvmPlugin
  // noTrigger: the user opts in per-project via enablePlugins(NeoJmh).
  override def trigger = noTrigger
  override def projectConfigurations = Seq(Jmh, JmhInternal)
  override def projectSettings = inConfig(Jmh)(
    Defaults.testSettings ++ Seq(
      // `jmh:run` delegates to the internal config, which has the generated code on its classpath.
      run := (run in JmhInternal).evaluated,
      neoJmhGenerator := "reflection"
    )
  ) ++ inConfig(JmhInternal)(
    Defaults.testSettings ++ Seq(
      mainClass in run := Some("org.openjdk.jmh.Main"),
      // JMH benchmarks must run forked (JMH requirement for measurement isolation).
      fork in run := true,
      dependencyClasspath ++= (fullClasspath in Jmh).value,
      sourceGenerators += generateJmhSourcesAndResources.map { case (sources, _) => sources },
      resourceGenerators += generateJmhSourcesAndResources.map { case (_, res) => res },
      generateJmhSourcesAndResources := generateBenchmarkSourcesAndResources.value
    )
  ) ++ Seq(
    libraryDependencies ++= Seq(
      "jmh-core",
      "jmh-generator-bytecode",
      "jmh-generator-reflection",
      "jmh-generator-asm"
    ).map("org.openjdk.jmh" % _ % "1.19" % Jmh.name)
  )
  // Runs JMH's bytecode generator over the compiled Jmh classes, producing the
  // synthetic benchmark sources/resources. Cached on the hash of the input class files.
  def generateBenchmarkSourcesAndResources: Def.Initialize[Task[(Seq[File], Seq[File])]] = Def.task {
    val s = streams.value
    val cacheDir = crossTarget.value / "jmh-cache"
    val bytecodeDir = (classDirectory in Jmh).value
    val sourceDir = sourceManaged.value
    val resourceDir = resourceManaged.value
    val generator = (neoJmhGenerator in Jmh).value
    val classpath = dependencyClasspath.value
    val inputs: Set[File] = (bytecodeDir ** "*").filter(_.isFile).get.toSet
    val cachedGeneration = FileFunction.cached(cacheDir, FilesInfo.hash) { _ =>
      // Start from clean output dirs so stale generated files never linger.
      IO.delete(sourceDir)
      IO.createDirectory(sourceDir)
      IO.delete(resourceDir)
      IO.createDirectory(resourceDir)
      val options = ForkOptions(
        runJVMOptions = Nil,
        envVars = Map.empty,
        workingDirectory = Some(baseDirectory.value)
      )
      // Forked run of JMH's generator; a non-empty result is an error message.
      new ForkRun(options).run(
        "org.openjdk.jmh.generators.bytecode.JmhBytecodeGenerator",
        Attributed.data(classpath),
        List(bytecodeDir.getPath, sourceDir.getPath, resourceDir.getPath, generator),
        s.log
      ).foreach(sys.error)
      ((sourceDir ** "*").filter(_.isFile) +++ (resourceDir ** "*").filter(_.isFile)).get.toSet
    }
    // Split outputs back into (sources, resources) by which directory they landed in.
    cachedGeneration(inputs).toSeq.partition(f => IO.relativizeFile(sourceDir, f).nonEmpty)
  }
}
| vovapolu/scala-data-classes | project/NeoJmh.scala | Scala | lgpl-3.0 | 3,024 |
package tul.poiis.decision_tree
import scala.collection.mutable.ListBuffer
object InputParsers {
  // Builds one Movie from a header-keyed CSV row, returning it with its numeric Id.
  // Assumes the headers popularity/budget/vote_average/release_date/Id are all present.
  def parseMovieEntry(csv_entry: Map[String, String]): (Int, Movie) = {
    val fieldsList = ListBuffer[MovieField]()
    fieldsList += new PopularityField(csv_entry("popularity"))
    fieldsList += new BudgetField(csv_entry("budget"))
    fieldsList += new VoteAverageField(csv_entry("vote_average"))
    fieldsList += new ReleaseYearField(csv_entry("release_date"))
    (csv_entry("Id").toInt, new Movie(fieldsList.toList))
  }
  // Loads the movies CSV into an Id -> Movie map (breakOut builds the Map directly,
  // skipping an intermediate List).
  def readMoviesFile(filepath: String) : Map[Int, Movie] ={
    val reader = MyCsvReader.reader(filepath)
    val csv_list = reader.allWithHeaders()
    reader.close()
    csv_list.map { entry =>
      val parseResult: (Int, Movie) = parseMovieEntry(entry)
      parseResult._1 -> parseResult._2
    }(collection.breakOut): Map[Int, Movie]
  }
  // NOTE(review): assumes headerless train rows are [id, personId, movieId, grade] — confirm against the data files.
  def parseTrainEntry(csv_entry: List[String], moviesMap: Map[Int, Movie]): (Int, Evaluation) ={
    val personId = csv_entry(1)
    val movieId = csv_entry(2)
    val grade = csv_entry(3)
    (personId.toInt, new Evaluation(grade = grade.toInt, movie = moviesMap(movieId.toInt)))
  }
  // Reads the training set and groups the evaluations by person id.
  def readTrainSetFile(filepath: String, moviesMap: Map[Int, Movie]): Map[Int, List[Evaluation]] ={
    val reader = MyCsvReader.reader(filepath)
    val csv_list: List[List[String]] = reader.all()
    reader.close()
    val parsedTuples = csv_list.map { entry =>
      val parseResult: (Int, Evaluation) = parseTrainEntry(entry, moviesMap)
      parseResult._1 -> parseResult._2
    }
    parsedTuples.groupBy(_._1).mapValues(_.map(_._2))
  }
  // Parses an "unknown" row into (evalId, personId, movieId).
  def parseUnknownEntry(csv_entry: List[String]): (Int, Int, Int) ={
    val evalId = csv_entry(0)
    val personId = csv_entry(1)
    val movieId = csv_entry(2)
    (evalId.toInt, personId.toInt, movieId.toInt)
  }
  // Reads every row of the unknowns CSV, preserving file order.
  def readUnknowns(filepath: String): List[(Int, Int, Int)] ={
    val reader = MyCsvReader.reader(filepath)
    val csv_list: List[List[String]] = reader.all()
    reader.close()
    csv_list.map { entry =>
      parseUnknownEntry(entry)
    }
  }
}
| CucumisSativus/decision-tree-movie-evaluator | src/main/scala/tul/poiis/decision_tree/InputParsers.scala | Scala | mit | 2,074 |
package com.sksamuel.elastic4s
/** Controls how Elasticsearch interprets the version number supplied with a request. */
sealed trait VersionType

object VersionType {

  case object External extends VersionType
  case object ExternalGte extends VersionType
  @deprecated("to be removed in 7.0", "6.0")
  case object Force extends VersionType
  case object Internal extends VersionType

  // Accepted spellings, lower-cased; anything else resolves to Internal.
  private val byName: Map[String, VersionType] = Map(
    "external" -> External,
    "externalgte" -> ExternalGte,
    "external_gte" -> ExternalGte,
    "force" -> Force
  )

  /** Case-insensitive lookup of a version-type name; unknown names fall back to [[Internal]]. */
  def valueOf(str: String): VersionType = byName.getOrElse(str.toLowerCase, Internal)

  // Java-style aliases kept for source compatibility.
  def EXTERNAL = External
  def EXTERNAL_GTE = ExternalGte
  def INTERNAL = Internal
}
| Tecsisa/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/VersionType.scala | Scala | apache-2.0 | 725 |
/*
* Copyright 2014 Databricks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storlets.spark.csv.util
import java.sql.Timestamp
import java.text.SimpleDateFormat
import scala.util.control.Exception._
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types._
private[csv] object InferSchema {
  /**
   * Similar to the JSON schema inference.
   * [[org.apache.spark.sql.execution.datasources.json.InferSchema]]
   *     1. Infer type of each row
   *     2. Merge row types to find common type
   *     3. Replace any null types with string type
   */
  def apply(
      tokenRdd: RDD[Array[String]],
      header: Array[String],
      nullValue: String = "",
      dateFormatter: SimpleDateFormat = null): StructType = {
    // Start every column at NullType; aggregate widens per-row, then merges across partitions.
    val startType: Array[DataType] = Array.fill[DataType](header.length)(NullType)
    val rootTypes: Array[DataType] = tokenRdd.aggregate(startType)(
      inferRowType(nullValue, dateFormatter),
      mergeRowTypes)
    val structFields = header.zip(rootTypes).map { case (thisHeader, rootType) =>
      // Columns that were null/empty everywhere default to StringType.
      val dType = rootType match {
        case z: NullType => StringType
        case other => other
      }
      StructField(thisHeader, dType, nullable = true)
    }
    StructType(structFields)
  }
  // Folds one tokenized row into the running per-column types.
  private def inferRowType(nullValue: String, dateFormatter: SimpleDateFormat)
      (rowSoFar: Array[DataType], next: Array[String]): Array[DataType] = {
    var i = 0
    while (i < math.min(rowSoFar.length, next.length)) {  // May have columns on right missing.
      rowSoFar(i) = inferField(rowSoFar(i), next(i), nullValue, dateFormatter)
      i+=1
    }
    rowSoFar
  }
  // Combines per-partition column types; zipAll pads the shorter side with NullType.
  private[csv] def mergeRowTypes(
      first: Array[DataType],
      second: Array[DataType]): Array[DataType] = {
    first.zipAll(second, NullType, NullType).map { case ((a, b)) =>
      findTightestCommonType(a, b).getOrElse(NullType)
    }
  }
  /**
   * Infer type of string field. Given known type Double, and a string "1", there is no
   * point checking if it is an Int, as the final type must be Double or higher.
   */
  private[csv] def inferField(typeSoFar: DataType,
      field: String,
      nullValue: String = "",
      dateFormatter: SimpleDateFormat = null): DataType = {
    // Each tryParseX falls through to the next-wider type on failure.
    def tryParseInteger(field: String): DataType = if ((allCatch opt field.toInt).isDefined) {
      IntegerType
    } else {
      tryParseLong(field)
    }
    def tryParseLong(field: String): DataType = if ((allCatch opt field.toLong).isDefined) {
      LongType
    } else {
      tryParseDouble(field)
    }
    def tryParseDouble(field: String): DataType = {
      if ((allCatch opt field.toDouble).isDefined) {
        DoubleType
      } else {
        tryParseTimestamp(field)
      }
    }
    def tryParseTimestamp(field: String): DataType = {
      if (dateFormatter != null) {
        // This case infers a custom `dataFormat` is set.
        if ((allCatch opt dateFormatter.parse(field)).isDefined){
          TimestampType
        } else {
          tryParseBoolean(field)
        }
      } else {
        // We keep this for backwords competibility.
        if ((allCatch opt Timestamp.valueOf(field)).isDefined) {
          TimestampType
        } else {
          tryParseBoolean(field)
        }
      }
    }
    def tryParseBoolean(field: String): DataType = {
      if ((allCatch opt field.toBoolean).isDefined) {
        BooleanType
      } else {
        stringType()
      }
    }
    // Defining a function to return the StringType constant is necessary in order to work around
    // a Scala compiler issue which leads to runtime incompatibilities with certain Spark versions;
    // see issue #128 for more details.
    def stringType(): DataType = {
      StringType
    }
    // Null-like fields never narrow nor widen the type seen so far.
    if (field == null || field.isEmpty || field == nullValue) {
      typeSoFar
    } else {
      typeSoFar match {
        case NullType => tryParseInteger(field)
        case IntegerType => tryParseInteger(field)
        case LongType => tryParseLong(field)
        case DoubleType => tryParseDouble(field)
        case TimestampType => tryParseTimestamp(field)
        case BooleanType => tryParseBoolean(field)
        case StringType => StringType
        case other: DataType =>
          throw new UnsupportedOperationException(s"Unexpected data type $other")
      }
    }
  }
  /**
   * Copied from internal Spark api
   * [[org.apache.spark.sql.catalyst.analysis.HiveTypeCoercion]]
   */
  private val numericPrecedence: IndexedSeq[DataType] =
    IndexedSeq[DataType](
      ByteType,
      ShortType,
      IntegerType,
      LongType,
      FloatType,
      DoubleType,
      TimestampType,
      DecimalType.Unlimited)
  /**
   * Copied from internal Spark api
   * [[org.apache.spark.sql.catalyst.analysis.HiveTypeCoercion]]
   */
  val findTightestCommonType: (DataType, DataType) => Option[DataType] = {
    case (t1, t2) if t1 == t2 => Some(t1)
    case (NullType, t1) => Some(t1)
    case (t1, NullType) => Some(t1)
    case (StringType, t2) => Some(StringType)
    case (t1, StringType) => Some(StringType)
    // Promote numeric types to the highest of the two and all numeric types to unlimited decimal
    case (t1, t2) if Seq(t1, t2).forall(numericPrecedence.contains) =>
      val index = numericPrecedence.lastIndexWhere(t => t == t1 || t == t2)
      Some(numericPrecedence(index))
    case _ => None
  }
}
| eranr/spark-storlets | src/main/scala/org/apache/storlets/spark/csv/util/InferSchema.scala | Scala | apache-2.0 | 5,891 |
package skinny
/**
* Pagination builder.
*/
object Pagination {
  // Entry point for the `Pagination.page(2).per(20)` ordering.
  def page(pageNo: Int): PaginationPageNoBuilder = {
    PaginationPageNoBuilder(pageNo = Option(pageNo))
  }
  // Entry point for the `Pagination.per(20).page(2)` ordering.
  def per(pageSize: Int): PaginationPageSizeBuilder = {
    PaginationPageSizeBuilder(pageSize = Option(pageSize))
  }
}
/**
* Pagination builder.
*/
case class PaginationPageNoBuilder(pageNo: Option[Int] = None) {
  // pageNo is always Some when built via Pagination.page (Option(int) is never None).
  // NOTE(review): .get would throw if this is constructed directly with None — consider guarding.
  def per(pageSize: Int): Pagination = Pagination(pageNo = pageNo.get, pageSize = pageSize)
}
/**
* Pagination builder.
*/
case class PaginationPageSizeBuilder(pageSize: Option[Int] = None) {
  // pageSize is always Some when built via Pagination.per (Option(int) is never None).
  // NOTE(review): .get would throw if this is constructed directly with None — consider guarding.
  def page(pageNo: Int): Pagination = Pagination(pageNo = pageNo, pageSize = pageSize.get)
}
/**
* Pagination parameters.
*/
/** Immutable pagination window: a 1-based page number plus a page size. */
case class Pagination(pageSize: Int, pageNo: Int) {
  /** Number of rows to skip before the first row of this page. */
  def offset: Int = pageSize * (pageNo - 1)
  /** Maximum number of rows on this page (alias of pageSize). */
  def limit: Int = pageSize
}
package com.spike.giantdataanalysis.spark.example.rdd
import com.spike.giantdataanalysis.spark.support.Datasets
object RDDCreation {
  // NOTE(review): `sc` (presumably a SparkContext) is neither defined nor imported in this file —
  // it must be provided elsewhere (e.g. a package object); verify before relying on this example.
  def main(args: Array[String]): Unit = {
    // Two ways to create an RDD: load from an external data source, or parallelize a collection in the driver.
    val inputFile = Datasets.DATA_DIR + "sonnets.txt"
    val input = sc.textFile(inputFile)
    println(input.toDebugString)
    val lines = sc.parallelize(List("pandas", "i like pandas"))
    println(lines.toDebugString)
  }
} | zhoujiagen/giant-data-analysis | data-management-infrastructure/scala-infrastructure-apache-spark/src/main/scala/com/spike/giantdataanalysis/spark/example/rdd/RDDCreation.scala | Scala | mit | 499 |
/*
* Copyright (C) 2009-2018 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.mvc
trait RequestExtractors extends AcceptExtractors {

  /**
   * Extractor allowing two extractors to be applied to the same request.
   * Example of use:
   * {{{
   * request match {
   *   case Accepts.Json() & Accepts.Html() => "This request accepts both JSON and HTML"
   * }
   * }}}
   */
  object & {
    // Hands the same request to both sides of the `&` pattern.
    def unapply(request: RequestHeader): Option[(RequestHeader, RequestHeader)] =
      Some(request -> request)
  }
}
/**
* Define a set of extractors allowing to pattern match on the Accept HTTP header of a request
*/
trait AcceptExtractors {

  /**
   * Common extractors to check if a request accepts JSON, Html, etc.
   * Example of use:
   * {{{
   * request match {
   *   case Accepts.Json() => Ok(toJson(value))
   *   case _ => Ok(views.html.show(value))
   * }
   * }}}
   */
  object Accepts {
    import play.api.http.MimeTypes
    val Json: Accepting       = Accepting(MimeTypes.JSON)
    val Html: Accepting       = Accepting(MimeTypes.HTML)
    val Xml: Accepting        = Accepting(MimeTypes.XML)
    val JavaScript: Accepting = Accepting(MimeTypes.JAVASCRIPT)
  }
}
/**
* Convenient class to generate extractors checking if a given mime type matches the Accept header of a request.
* Example of use:
* {{{
* val AcceptsMp3 = Accepting("audio/mp3")
* }}}
* Then:
* {{{
* request match {
* case AcceptsMp3() => ...
* }
* }}}
*/
case class Accepting(mimeType: String) {
// Extractor form: matches when the request's Accept header accepts this mime type.
def unapply(request: RequestHeader): Boolean = request.accepts(mimeType)
// Overload for matching a single parsed media range against this mime type.
def unapply(mediaRange: play.api.http.MediaRange): Boolean = mediaRange.accepts(mimeType)
}
| Shenker93/playframework | framework/src/play/src/main/scala/play/api/mvc/RequestExtractors.scala | Scala | apache-2.0 | 1,598 |
/**
* Copyright (C) 2016 Verizon. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.verizon.bda.trapezium.framework.manager
import java.io.File
import com.typesafe.config.{Config, ConfigFactory, ConfigList, ConfigObject}
import com.verizon.bda.trapezium.framework.ApplicationManager
import org.slf4j.LoggerFactory
import scala.collection.JavaConverters.asScalaBufferConverter
/**
* Created by Pankaj on 2/17/16.
*/
/**
 * Loads and exposes the typesafe configuration for a single workflow.
 *
 * The config is read from `<configDir>/<workflow>.conf` when the application
 * config defines a config directory, otherwise from `<workflow>.conf` on the
 * classpath. Accessors are lazy, so a missing optional section only warns (or
 * fails) when it is actually used.
 *
 * @param workflow name of the workflow whose configuration should be loaded
 */
class WorkflowConfig(val workflow: String) extends Serializable {

  val logger = LoggerFactory.getLogger(this.getClass)

  // Root config object for this workflow, with substitutions resolved.
  val workflowConfig: Config = {
    val appConfig = ApplicationManager.getConfig()
    logger.info(s"Config directory is ${appConfig.configDir}")
    if (appConfig.configDir != null) {
      val workflowConfigFilePath = s"${appConfig.configDir}/${workflow}.conf"
      val workflowConfigFile: File = new File(workflowConfigFilePath)
      ConfigFactory.parseFile(workflowConfigFile)
    } else {
      ConfigFactory.load(s"${workflow}.conf")
    }
  }.resolve()

  lazy val runMode = workflowConfig.getString("runMode")

  // Whether the workflow should run once and exit; defaults to false.
  lazy val singleRun = {
    if (workflowConfig.hasPath("singleRun")) {
      workflowConfig.getString("singleRun").toBoolean
    } else {
      false
    }
  }

  /**
   * Possible values - NATIVE / CUSTOM
   * NATIVE - Kafka spark bridge
   * CUSTOM - Custom HADR implementation for Kafka spark
   */
  lazy val bridgeType = {
    if (workflowConfig.hasPath("bridgeType")) {
      workflowConfig.getString("bridgeType")
    } else {
      "NATIVE"
    }
  }

  // Kafka consumer poll time in milliseconds (default 1000).
  lazy val pollTime = {
    val defaultPollTime: Long = 1000 // in millisecs
    if (workflowConfig.hasPath("kafka.consumer.pollTime")) {
      try {
        workflowConfig.getString("kafka.consumer.pollTime").toLong
      } catch {
        case ex: Exception =>
          // Re-read the offending value with the *correct* key: the original code
          // used "kafka.pollTime" here, which would itself throw inside this handler.
          logger.warn(s" Invalid value for 'pollTime' - ${workflowConfig.getString("kafka.consumer.pollTime")}. Setting default pollTime as ${defaultPollTime}", ex)
          defaultPollTime
      }
    } else {
      logger.debug("Setting default pollTime for kafka config")
      defaultPollTime
    }
  }

  // Wait between consecutive Kafka polls in milliseconds (default 1000).
  lazy val waitBetweenPolls = {
    val defaultWaitBetweenPolls: Long = 1000 // in millisecs
    if (workflowConfig.hasPath("kafka.consumer.waitBetweenPolls")) {
      try {
        workflowConfig.getString("kafka.consumer.waitBetweenPolls").toLong
      } catch {
        case ex: Exception =>
          logger.warn(s" Invalid value for 'waitBetweenPolls' - ${workflowConfig.getString("kafka.consumer.waitBetweenPolls")}. Setting default waitBetweenPolls as ${defaultWaitBetweenPolls}", ex)
          defaultWaitBetweenPolls
      }
    } else {
      logger.debug("Setting default waitBetweenPolls for kafka config")
      defaultWaitBetweenPolls
    }
  }

  // Maximum record size (default 0). Parsed uniformly as Long: the original mixed
  // an Int branch with Long branches, silently widening the lazy val's type to AnyVal.
  lazy val maxRecordSize: Long = {
    val defaultMaxRecordSize: Long = 0
    if (workflowConfig.hasPath("kafka.consumer.maxRecordSize")) {
      try {
        workflowConfig.getString("kafka.consumer.maxRecordSize").toLong
      } catch {
        case ex: Exception =>
          logger.warn(s" Invalid value for 'maxRecordSize' - ${workflowConfig.getString("kafka.consumer.maxRecordSize")}. Setting default maxRecordSize as ${defaultMaxRecordSize}", ex)
          defaultMaxRecordSize
      }
    } else {
      logger.debug("Setting default maxRecordSize for kafka config")
      defaultMaxRecordSize
    }
  }

  // Mandatory: where the workflow reads its data from (e.g. KAFKA / HDFS).
  lazy val dataSource =
    try {
      workflowConfig.getString("dataSource")
    } catch {
      case ex: Throwable =>
        // Single formatted message: the original passed two plain strings to
        // logger.error, so slf4j silently dropped the second argument.
        logger.error(s"Invalid config file: dataSource must be present for ${workflow}")
        throw ex
    }

  // Optional: workflow this one synchronizes with; null when absent.
  lazy val syncWorkflow =
    try {
      workflowConfig.getString("syncWorkflow")
    } catch {
      case ex: Throwable =>
        logger.warn(s"Config property syncWorkflow not defined." +
          s" This means ${workflow} does not depend on any other workflow.")
        null
    }

  // Optional: list of workflows this one depends on; null when absent.
  lazy val dependentWorkflows =
    try {
      workflowConfig.getConfig("dependentWorkflows").getStringList("workflows")
    } catch {
      case ex: Throwable =>
        logger.warn(s"Config property dependentWorkflows not defined." +
          s" This means ${workflow} does not depend on any other workflow.")
        null
    }

  // How often (ms) to check dependent workflows; defaults to one minute.
  lazy val dependentFrequencyToCheck =
    try {
      workflowConfig.getConfig("dependentWorkflows").getLong("frequencyToCheck")
    } catch {
      case ex: Throwable =>
        logger.warn(s"Config property frequencyToCheck not defined for ${workflow}." +
          s" Using the default of 60000 ms.")
        60000L
    }

  // Mandatory: at least one transaction describes the work to perform.
  lazy val transactions =
    try {
      workflowConfig.getConfigList("transactions")
    } catch {
      case ex: Throwable =>
        logger.error(s"Invalid config file: at least one transaction must be specified for ${workflow}")
        throw ex
    }

  // Required in STREAM mode only. NOTE: evaluates to Unit in other run modes,
  // matching the original inferred type for backward compatibility.
  lazy val hdfsStream =
    try {
      workflowConfig.getConfig("hdfsStream")
    } catch {
      case ex: Throwable =>
        runMode match {
          case "STREAM" =>
            logger.error(s"Invalid config file: hdfsStream must be present for $runMode")
            throw ex
          case _ =>
            logger.warn(s"Missing entry in config file: hdfsStream is not present. Ignoring as it is not required in $runMode")
        }
    }

  // Required when the data source is KAFKA. Same Unit-on-absence caveat as hdfsStream.
  lazy val kafkaTopicInfo =
    try {
      workflowConfig.getConfig("kafkaTopicInfo")
    } catch {
      case ex: Throwable =>
        dataSource match {
          case "KAFKA" =>
            logger.error(s"Invalid config file: kafkaTopicInfo must be present for $dataSource")
            throw ex
          case _ =>
            logger.warn(s"Missing entry in config file: kafkaTopicInfo is not present. Ignoring as it is not required for ${dataSource}")
        }
    }

  // Optional HDFS batch section; evaluates to Unit when absent (kept as-is).
  lazy val hdfsFileBatch =
    try {
      workflowConfig.getConfig("hdfsFileBatch")
    } catch {
      case ex: Throwable =>
        logger.warn("Missing entry in config file: hdfsFileBatch is not present.")
    }

  // Optional HTTP server section; null when absent.
  lazy val httpServer =
    try {
      workflowConfig.getConfig("httpServer")
    } catch {
      case ex: Throwable =>
        logger.warn("Missing entry in config file: httpServer is not present.")
        null
    }

  /**
   * Looks up the file format configured for the named batch input.
   * Falls back to "text" when the batch entry or its fileFormat is missing.
   */
  def fileFormat(inputName: String): String = {
    var fileFormat = "text"
    try {
      val batchInfoList: ConfigList = hdfsFileBatch.asInstanceOf[Config].getList("batchInfo")
      batchInfoList.asScala.foreach { batchConfig =>
        val batchData = batchConfig.asInstanceOf[ConfigObject].toConfig
        val name = batchData.getString("name")
        if (name.equals(inputName)) {
          fileFormat = batchData.getString("fileFormat")
        }
      }
    } catch {
      case ex: Throwable =>
        logger.warn(s"No file format present. Using default text")
    }
    fileFormat
  }
}
| Verizon/trapezium | framework/src/main/scala/com/verizon/bda/trapezium/framework/manager/WorkflowConfig.scala | Scala | apache-2.0 | 7,941 |
package org.jetbrains.plugins.scala
package lang.refactoring.move
import java.lang.Boolean
import java.util
import com.intellij.psi.{PsiClass, PsiElement}
import com.intellij.refactoring.move.moveClassesOrPackages.MoveAllClassesInFileHandler
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.settings.ScalaApplicationSettings
/**
* Nikolay.Tropin
* 10/25/13
*/
// IntelliJ "move class" extension point: decides, per class, whether moving the
// class should move the whole containing file.
class MoveScalaClassesInFileHandler extends MoveAllClassesInFileHandler {
def processMoveAllClassesInFile(allClasses: util.Map[PsiClass, Boolean],
psiClass: PsiClass,
elementsToMove: PsiElement*): Unit = {
psiClass.getContainingFile match {
// When the "move companion" setting is on and the file holds exactly the class
// and its companion, mark the class so both move together as a file.
case file: ScalaFile if ScalaApplicationSettings.getInstance().MOVE_COMPANION =>
val classesInFile = file.typeDefinitions.toSet
ScalaPsiUtil.getBaseCompanionModule(psiClass) match {
case Some(companion) if !elementsToMove.contains(companion) && classesInFile == Set(psiClass, companion) =>
allClasses.put(psiClass, true)
case _ =>
}
// NOTE(review): allClasses.get(psiClass) is auto-unboxed (java.lang.Boolean) and
// would NPE if the key is absent — presumably the platform pre-populates the map; confirm.
case file: ScalaFile if allClasses.get(psiClass) =>
//if move destination contains file with such name, we will try to move classes, not a whole file
val moveDestination = psiClass.getUserData(ScalaMoveUtil.MOVE_DESTINATION)
if (moveDestination.findFile(file.getName) != null)
allClasses.put(psiClass, false)
case _ =>
}
}
}
| LPTK/intellij-scala | src/org/jetbrains/plugins/scala/lang/refactoring/move/MoveScalaClassesInFileHandler.scala | Scala | apache-2.0 | 1,557 |
/*
* Part of NDLA article-api.
* Copyright (C) 2019 NDLA
*
* See LICENSE
*/
package no.ndla.articleapi.integration
import java.util.concurrent.Executors
import com.typesafe.scalalogging.LazyLogging
import no.ndla.articleapi.ArticleApiProperties.SearchHost
import no.ndla.articleapi.model.domain.{Article, ArticleType}
import no.ndla.articleapi.model.api.SearchException
import no.ndla.articleapi.service.ConverterService
import no.ndla.network.NdlaClient
import org.json4s.Formats
import org.json4s.ext.EnumNameSerializer
import org.json4s.native.Serialization.write
import scalaj.http.{Http, HttpRequest, HttpResponse}
import scala.concurrent.{ExecutionContext, ExecutionContextExecutorService, Future}
import scala.util.{Failure, Success, Try}
trait SearchApiClient {
  this: NdlaClient with ConverterService =>
  val searchApiClient: SearchApiClient

  class SearchApiClient(SearchApiBaseUrl: String = s"http://$SearchHost") extends LazyLogging {

    private val InternalEndpoint = s"$SearchApiBaseUrl/intern"
    private val indexTimeout = 1000 * 30 // ms; applied to both connect and read timeouts

    // Single shared executor for fire-and-forget indexing calls. The original
    // implementation created a fresh single-thread ExecutorService on *every*
    // indexArticle call and never shut it down, leaking one live thread per
    // indexed article.
    private implicit lazy val indexExecutionContext: ExecutionContextExecutorService =
      ExecutionContext.fromExecutorService(Executors.newSingleThreadExecutor)

    /**
     * Asynchronously pushes the article to search-api, logging the outcome.
     * Returns the article unchanged so it can be used in a fluent chain.
     */
    def indexArticle(article: Article): Article = {
      implicit val formats: Formats =
        org.json4s.DefaultFormats +
          new EnumNameSerializer(ArticleType)

      val future = postWithData[Article, Article](s"$InternalEndpoint/article/", article)
      future.onComplete {
        case Success(Success(_)) =>
          logger.info(s"Successfully indexed article with id: '${article.id
            .getOrElse(-1)}' and revision '${article.revision.getOrElse(-1)}' in search-api")
        case Failure(e) =>
          logger.error(s"Failed to indexed article with id: '${article.id
            .getOrElse(-1)}' and revision '${article.revision.getOrElse(-1)}' in search-api", e)
        case Success(Failure(e)) =>
          logger.error(s"Failed to indexed article with id: '${article.id
            .getOrElse(-1)}' and revision '${article.revision.getOrElse(-1)}' in search-api", e)
      }
      article
    }

    // POSTs `data` serialized as JSON and parses the response as A, on the
    // execution context supplied by the caller (the class-level one above).
    private def postWithData[A, B <: AnyRef](endpointUrl: String, data: B, params: (String, String)*)(
        implicit mf: Manifest[A],
        format: org.json4s.Formats,
        executionContext: ExecutionContext): Future[Try[A]] = {
      Future {
        ndlaClient.fetchWithForwardedAuth[A](
          Http(endpointUrl)
            .postData(write(data))
            .timeout(indexTimeout, indexTimeout)
            .method("POST")
            .params(params.toMap)
            .header("content-type", "application/json")
        )
      }
    }

    /** Synchronously deletes the article from the search index; returns the id. */
    def deleteArticle(id: Long): Long = {
      ndlaClient.fetchRawWithForwardedAuth(
        Http(s"$InternalEndpoint/article/$id")
          .timeout(indexTimeout, indexTimeout)
          .method("DELETE")
      )
      id
    }
  }
}
| NDLANO/article-api | src/main/scala/no/ndla/articleapi/integration/SearchApiClient.scala | Scala | gpl-3.0 | 2,941 |
// code-examples/TypeLessDoMore/semicolon-example-script.scala
// Trailing equals sign indicates more code on next line
// Semicolon-inference demo: a trailing `=` tells the parser the definition continues.
def equalsign = {
val reallySuperLongValueNameThatGoesOnForeverSoYouNeedANewLine =
"wow that was a long value name"
println(reallySuperLongValueNameThatGoesOnForeverSoYouNeedANewLine)
}
// Trailing opening curly brace indicates more code on next line
// Semicolon-inference demo: a trailing `{` likewise continues the definition.
def equalsign2(s: String) = {
println("equalsign2: " + s)
}
// Trailing comma, operator, etc. indicates more code on next line
// Semicolon-inference demo: a trailing comma or operator keeps the statement open.
def commas(s1: String,
s2: String) = {
println("comma: " + s1 +
", " + s2)
}
| XClouded/t4f-core | scala/src/tmp/TypeLessDoMore/semicolon-example-script.scala | Scala | apache-2.0 | 613 |
// Marker object; its singleton type (Bar.type) is referenced as a classOf argument below.
object Bar
// Positive compiler test: `classOf` of a singleton type must be accepted as a
// class-valued annotation argument.
trait Foo {
@AnnotationWithClassType(cls = classOf[Bar.type])
def function: Any = ???
}
| scala/scala | test/files/pos/classOfObjectType/Foo.scala | Scala | apache-2.0 | 105 |
package dotty.tools.dotc
package transform
import core._
import Symbols._, Types._, Contexts._, SymDenotations._, DenotTransformers._, Flags._
import util.Positions._
import SymUtils._
import StdNames._, NameOps._
// Shared helpers for the mixin-related transform phases: creating concrete
// implementations of trait members inside the implementing class `cls`, and
// building `super` references from it.
class MixinOps(cls: ClassSymbol, thisTransform: DenotTransformer)(implicit ctx: Context) {
import ast.tpd._
val superCls: Symbol = cls.superClass
val mixins: List[ClassSymbol] = cls.mixins
// Copies `member` into `cls` as a concrete term symbol (Deferred cleared, info
// seen from cls.thisType), entered after `thisTransform` so earlier phases do
// not observe it. Annotations are carried over.
def implementation(member: TermSymbol): TermSymbol = {
val res = member.copy(
owner = cls,
name = member.name.stripScala2LocalSuffix,
flags = member.flags &~ Deferred,
info = cls.thisType.memberInfo(member)).enteredAfter(thisTransform).asTerm
res.addAnnotations(member.annotations)
res
}
// Builds a `super[...].target` selection from `cls`. Constructors of non-trait
// owners use an unqualified `super`; everything else qualifies by the owner.
def superRef(target: Symbol, pos: Position = cls.pos): Tree = {
val sup = if (target.isConstructor && !target.owner.is(Trait))
Super(This(cls), tpnme.EMPTY, true)
else
Super(This(cls), target.owner.name.asTypeName, false, target.owner)
//println(i"super ref $target on $sup")
ast.untpd.Select(sup.withPos(pos), target.name)
.withType(NamedType.withFixedSym(sup.tpe, target))
//sup.select(target)
}
/** Is `sym` a member of implementing class `cls`? */
def isCurrent(sym: Symbol) = cls.info.member(sym.name).hasAltWith(_.symbol == sym)
// A forwarder is needed for a current, non-private/non-accessor/non-deferred
// method that is concrete somewhere in its override chain or owned by a Scala 2
// class. NOTE(review): precise criteria depend on compiler internals not visible here.
def needsForwarder(meth: Symbol): Boolean = {
def needsDisambiguation = !meth.allOverriddenSymbols.forall(_ is Deferred)
meth.is(Method, butNot = PrivateOrAccessorOrDeferred) &&
isCurrent(meth) &&
(needsDisambiguation || meth.owner.is(Scala2x))
}
final val PrivateOrAccessorOrDeferred = Private | Accessor | Deferred
// Curried helper: applies `target` through a super-call with the given type
// and value argument lists.
def forwarder(target: Symbol) = (targs: List[Type]) => (vrefss: List[List[Tree]]) =>
superRef(target).appliedToTypes(targs).appliedToArgss(vrefss)
}
| folone/dotty | src/dotty/tools/dotc/transform/MixinOps.scala | Scala | bsd-3-clause | 1,821 |
package com.sksamuel.elastic4s.fields
import com.sksamuel.elastic4s.ext.OptionImplicits._
// Elasticsearch dynamic-template field mapping. The reported `type` is the
// literal "{dynamic_type}" placeholder, which Elasticsearch substitutes with
// the detected type at index time.
case class DynamicField(override val name: String,
analyzer: Option[String] = None,
boost: Option[Double] = None,
coerce: Option[Boolean] = None,
copyTo: Seq[String] = Nil,
docValues: Option[Boolean] = None,
enabled: Option[Boolean] = None,
fielddata: Option[Boolean] = None,
fields: List[ElasticField] = Nil,
format: Option[String] = None,
ignoreAbove: Option[Int] = None,
ignoreMalformed: Option[Boolean] = None,
index: Option[Boolean] = None,
indexOptions: Option[String] = None,
locale: Option[String] = None,
norms: Option[Boolean] = None,
nullValue: Option[String] = None,
scalingFactor: Option[Double] = None,
similarity: Option[String] = None,
store: Option[Boolean] = None,
termVector: Option[String] = None,
meta: Map[String, String] = Map.empty) extends ElasticField {
override def `type`: String = "{dynamic_type}"
// Builder-style copies. Note: `analyzer` uses Option(name) (null-safe), while the
// rest use `.some` (always wraps); `stored` and `store` both exist and set the same
// field — kept as-is for source compatibility.
def analyzer(name: String): DynamicField = copy(analyzer = Option(name))
def boost(boost: Double): DynamicField = copy(boost = boost.some)
def copyTo(copyTo: String*): DynamicField = copy(copyTo = copyTo.toList)
def copyTo(copyTo: Iterable[String]): DynamicField = copy(copyTo = copyTo.toList)
def fielddata(fielddata: Boolean): DynamicField = copy(fielddata = fielddata.some)
def fields(fields: ElasticField*): DynamicField = copy(fields = fields.toList)
def fields(fields: Iterable[ElasticField]): DynamicField = copy(fields = fields.toList)
def stored(store: Boolean): DynamicField = copy(store = store.some)
def index(index: Boolean): DynamicField = copy(index = index.some)
def indexOptions(indexOptions: String): DynamicField = copy(indexOptions = indexOptions.some)
def norms(norms: Boolean): DynamicField = copy(norms = norms.some)
def termVector(termVector: String): DynamicField = copy(termVector = termVector.some)
def store(store: Boolean): DynamicField = copy(store = store.some)
def similarity(similarity: String): DynamicField = copy(similarity = similarity.some)
}
| sksamuel/elastic4s | elastic4s-domain/src/main/scala/com/sksamuel/elastic4s/fields/DynamicField.scala | Scala | apache-2.0 | 2,534 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.conf
import java.io.InputStream
import com.typesafe.scalalogging.LazyLogging
import scala.util.control.NonFatal
import scala.util.{Failure, Try}
import scala.xml.XML
object ConfigLoader extends LazyLogging {

  val GEOMESA_CONFIG_FILE_PROP = "geomesa.config.file"
  val GEOMESA_CONFIG_FILE_NAME = "geomesa-site.xml"

  private val EmbeddedConfigFile = "org/locationtech/geomesa/geomesa-site.xml.template"

  // Loaded once: embedded defaults first, overridden by the user's file (if any).
  lazy val Config: Map[String, (String, Boolean)] = {
    val file = Option(System.getProperty(GEOMESA_CONFIG_FILE_PROP)).getOrElse(GEOMESA_CONFIG_FILE_NAME)
    // load defaults first then overwrite with user values (if any)
    loadConfig(EmbeddedConfigFile) ++ loadConfig(file)
  }

  /**
   * Loads a config resource from the classpath, returning key -> (value, isFinal).
   * Returns an empty map when the resource is missing or unreadable.
   */
  def loadConfig(path: String): Map[String, (String, Boolean)] = {
    val input = getClass.getClassLoader.getResourceAsStream(path)
    val config: Map[String, (String, Boolean)] =
      if (input == null) {
        Map.empty
      } else {
        try {
          logger.debug(s"Loading config: $path")
          loadConfig(input, path)
        } catch {
          case NonFatal(e) =>
            logger.warn(s"Error reading config file at: $path", e)
            Map.empty
        } finally {
          // The original implementation never released the stream; always close it.
          try { input.close() } catch {
            case NonFatal(e) => logger.warn(s"Error closing config file at: $path", e)
          }
        }
      }
    logger.trace(s"Loaded ${config.mkString(",")}")
    config
  }

  /**
   * Parses `<property>` elements from an XML config stream into
   * key -> (value, isFinal). Entries with null/empty values are dropped.
   * The caller owns (and must close) the input stream.
   */
  def loadConfig(input: InputStream, path: String): Map[String, (String, Boolean)] = {
    val xml = XML.load(input)
    val properties = xml \\\\ "configuration" \\\\ "property"
    properties.flatMap { prop =>
      // Use try here so if we fail on a property the rest can still load
      val pair = Try {
        val key = (prop \\ "name").text
        val value = (prop \\ "value").text
        // don't overwrite properties, this gives commandline params preference
        val isFinal: Boolean = (prop \\ "final").text.toBoolean
        key -> (value, isFinal)
      }
      pair match {
        case Failure(e) => logger.warn(s"Unable to load property from: $path\\n$prop", e)
        case _ => // no-op
      }
      pair.toOption.filter { case (_, (v, _)) => v != null && v.nonEmpty }
    }.toMap
  }
}
| ddseapy/geomesa | geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/conf/ConfigLoader.scala | Scala | apache-2.0 | 2,575 |
package com.cterm2.miniflags
import net.minecraft.world.WorldServer
import cpw.mods.fml.common.eventhandler._
import cpw.mods.fml.common.gameevent.PlayerEvent
import net.minecraft.entity.player.EntityPlayerMP
import cpw.mods.fml.relauncher.{SideOnly, Side}
import net.minecraftforge.common.DimensionManager
/** Forge/FML player lifecycle handlers that push flag-link state to the client. */
object FMLEvents {

  /** Pushes all links for the player's current world to that player's client. */
  private def resyncLinks(player: EntityPlayerMP): Unit =
    ObjectManager.instanceForWorld(player.worldObj) foreach (_.synchronizeAllLinks(player))

  /** Initial full sync when a player logs in. */
  @SubscribeEvent
  def onPlayerJoin(event: PlayerEvent.PlayerLoggedInEvent): Unit = {
    ModInstance.logger.info("Player logged in: Initial syncing...")
    resyncLinks(event.player.asInstanceOf[EntityPlayerMP])
  }

  /** Resync after a dimension change, since links are scoped to the player's world. */
  @SubscribeEvent
  def onPlayerChangedDimension(event: PlayerEvent.PlayerChangedDimensionEvent): Unit = {
    ModInstance.logger.info(s"Player Changed Dimension(${event.fromDim}->${event.toDim}): Resyncing...")
    resyncLinks(event.player.asInstanceOf[EntityPlayerMP])
  }
}
| Pctg-x8/miniflags | src/FMLEvents.scala | Scala | lgpl-2.1 | 943 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct
import uk.gov.hmrc.ct.box.{Calculated, CtBoxIdentifier, CtInteger, NotInPdf}
import uk.gov.hmrc.ct.computations.calculations.ExpensesCalculator
import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever
// Calculated box (not user input): total general administrative expenses;
// excluded from rendered PDF output (NotInPdf).
case class CATO16(value: Int) extends CtBoxIdentifier(name = "General Administrative Expenses") with CtInteger with NotInPdf
object CATO16 extends Calculated[CATO16, ComputationsBoxRetriever] with ExpensesCalculator {

  /** Derives CATO16 by totalling the administrative expense boxes CP25..CP37. */
  override def calculate(fieldValueRetriever: ComputationsBoxRetriever): CATO16 = {
    val r = fieldValueRetriever
    calculateGeneralAdministrativeExpenses(
      cp25 = r.cp25(),
      cp26 = r.cp26(),
      cp27 = r.cp27(),
      cp28 = r.cp28(),
      cp29 = r.cp29(),
      cp30 = r.cp30(),
      cp31 = r.cp31(),
      cp32 = r.cp32(),
      cp33 = r.cp33(),
      cp34 = r.cp34(),
      cp35 = r.cp35(),
      cp36 = r.cp36(),
      cp37 = r.cp37())
  }
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/CATO16.scala | Scala | apache-2.0 | 2,168 |
package org.bitcoins.core.script.interpreter.testprotocol
import org.bitcoins.core.protocol.script.{ScriptSignature, ScriptPubKey}
import org.bitcoins.core.script.constant.ScriptToken
/**
* Created by chris on 3/14/16.
*/
trait ScriptSignatureCoreTestCase {
/**
* The asm tokens parsed directly from the core test case. These can differ
* from the asm representation derived from `scriptSignature`'s bytes.
*
* @return
*/
def asm : Seq[ScriptToken]
/**
* The underlying script signature parsed from the core test case. It is kept
* separately because script_valid.json has no single canonical format for
* script signatures: ordinary ScriptSignatures re-derive their asm from the
* underlying hex/byte representation every time, which does not work for
* these test cases.
*
* @return
*/
def scriptSignature : ScriptSignature
}
// Concrete carrier for the parsed (asm, scriptSignature) pair of a core test case.
case class ScriptSignatureCoreTestCaseImpl(asm : Seq[ScriptToken], scriptSignature : ScriptSignature) extends ScriptSignatureCoreTestCase
| SuredBits/bitcoin-s-sidechains | src/test/scala/org/bitcoins/core/script/interpreter/testprotocol/ScriptSignatureCoreTestCase.scala | Scala | mit | 1,036 |
package com.twitter.finatra.http.integration.tweetexample.test
import com.fasterxml.jackson.databind.JsonNode
import com.twitter.finagle.http.Status
import com.twitter.finatra.http.integration.tweetexample.main.TweetsEndpointServer
import com.twitter.finatra.http.test.EmbeddedHttpServer
import com.twitter.inject.server.FeatureTest
import com.twitter.util.{Await, Future, FuturePool}
// End-to-end feature tests for the tweets example server: exercises JSON
// transformation, request validation, streaming responses, and admin routes
// against an embedded HTTP server.
class TweetsControllerIntegrationTest extends FeatureTest {
override val server = new EmbeddedHttpServer(
new TweetsEndpointServer)
"get tweet 1" in {
val tweet =
server.httpGetJson[Map[String, Long]](
"/tweets/1",
headers = Map("X-UserId" -> "123"),
andExpect = Status.Ok)
tweet("idonly") should equal(1) //confirm response was transformed by registered TweetMessageBodyWriter
}
"post valid tweet" in {
server.httpPost(
"/tweets/",
"""
{
"custom_id": 5,
"username": "bob",
"tweet_msg": "hello"
}""",
headers = Map("X-UserId" -> "123"),
andExpect = Status.Ok,
withBody = "tweet with id 5 is valid")
}
// Validation failures below must surface as 400s with field-level error messages.
"post tweet with missing field" in {
server.httpPost(
"/tweets/",
"""
{
"custom_id": 5,
"tweet_msg": "hello"
}""",
headers = Map("X-UserId" -> "123"),
andExpect = Status.BadRequest,
withErrors = Seq("username is a required field"))
}
"post tweet with field validation issue" in {
server.httpPost(
"/tweets/",
"""
{
"custom_id": 0,
"username": "foo",
"tweet_msg": "hello"
}""",
headers = Map("X-UserId" -> "123"),
andExpect = Status.BadRequest,
withErrors = Seq("custom_id [0] is not greater than or equal to 1"))
}
"post tweet with method validation issue" in {
server.httpPost(
"/tweets/",
"""
{
"custom_id": 5,
"username": "foo",
"tweet_msg": "hello"
}""",
headers = Map("X-UserId" -> "123"),
andExpect = Status.BadRequest,
withErrors = Seq("username cannot be foo"))
}
// Streaming endpoints: JSON array, custom Buf conversion, and manual writes.
"get streaming json" in {
server.httpGet(
"/tweets/streaming_json",
headers = Map("X-UserId" -> "123"),
andExpect = Status.Ok,
withJsonBody =
"""
[
{
"id" : 1,
"user" : "Bob",
"msg" : "whats up"
},
{
"id" : 2,
"user" : "Sally",
"msg" : "yo"
},
{
"id" : 3,
"user" : "Fred",
"msg" : "hey"
}
]
""")
}
"get streaming custom toBuf" in {
server.httpGet(
"/tweets/streaming_custom_tobuf",
headers = Map("X-UserId" -> "123"),
andExpect = Status.Ok,
withBody = "ABC")
}
"get streaming manual writes" in {
server.httpGet(
"/tweets/streaming_manual_writes",
headers = Map("X-UserId" -> "123"),
andExpect = Status.Ok,
withBody = "helloworld")
}
"get admin yo" in {
server.httpGet(
"/admin/finatra/yo",
andExpect = Status.Ok,
withBody = "yo yo")
}
val pool = FuturePool.unboundedPool
// Fires 500 concurrent requests to check the server under parallel load.
"get hello in parallel" in {
Await.result {
Future.collect {
for (i <- 1 to 500) yield {
pool {
sayHello()
}
}
}
}
}
"get admin users" in {
server.httpGet(
"/admin/finatra/users/123",
withBody = "123 from data://prod, 123 from data://staging")
}
"get ping" in {
server.httpGet(
"/admin/ping",
withBody = "pong")
}
"get health" in {
server.httpGet(
"/health",
routeToAdminServer = true,
withBody = "OK\\n")
}
"verify max request size overridden" in {
val registry = server.httpGetJson[JsonNode](
"/admin/registry.json")
val maxRequestSize = registry.get("registry").get("flags").get("maxRequestSize").textValue()
maxRequestSize should equal("10485760.bytes")
}
// Helper used by the parallel test; `suppress = true` keeps per-request logging quiet.
def sayHello() = {
server.httpGet(
"/tweets/hello",
headers = Map("X-UserId" -> "123"),
andExpect = Status.Ok,
withBody = "hello world",
suppress = true)
}
}
| tempbottle/finatra | http/src/test/scala/com/twitter/finatra/http/integration/tweetexample/test/TweetsControllerIntegrationTest.scala | Scala | apache-2.0 | 4,260 |
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.zoo.pipeline.api.keras.layers
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, IdentityOutputShape}
import com.intel.analytics.bigdl.nn.keras.KerasLayer
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.Shape
import com.intel.analytics.zoo.pipeline.api.Net
import com.intel.analytics.zoo.pipeline.api.keras.layers.utils.KerasUtils
import scala.reflect.ClassTag
/**
* Applies the hard tanh function element-wise to the input.
*
* ⎧ maxValue, if x > maxValue
* f(x) = ⎨ minValue, if x < minValue
* ⎩ x, otherwise
*
* When you use this layer as the first layer of a model, you need to provide
* the argument inputShape (a Single Shape, does not include the batch dimension).
*
* Remark: This layer is from Torch and wrapped in Keras style.
*
* @param minValue The minimum threshold value. Default is -1.
* @param maxValue The maximum threshold value. Default is 1.
* @param inputShape A Single Shape, does not include the batch dimension.
* @tparam T The numeric type of parameter(e.g. weight, bias). Only support float/double now.
*/
class HardTanh[T: ClassTag](
val minValue: Double = -1,
val maxValue: Double = 1,
val inputShape: Shape = null)(implicit ev: TensorNumeric[T])
extends KerasLayer[Tensor[T], Tensor[T], T](KerasUtils.addBatch(inputShape))
with IdentityOutputShape with Net {
// Wraps BigDL's HardTanh in a Keras-style layer; output shape equals input
// shape (IdentityOutputShape), so no shape computation is needed here.
override def doBuild(inputShape: Shape): AbstractModule[Tensor[T], Tensor[T], T] = {
val layer = com.intel.analytics.bigdl.nn.HardTanh(minValue, maxValue)
layer.asInstanceOf[AbstractModule[Tensor[T], Tensor[T], T]]
}
}
object HardTanh {
  /** Factory mirroring the class constructor; see [[HardTanh]] for parameter semantics. */
  def apply[@specialized(Float, Double) T: ClassTag](
      minValue: Double = -1,
      maxValue: Double = 1,
      inputShape: Shape = null)(implicit ev: TensorNumeric[T]): HardTanh[T] =
    new HardTanh[T](minValue, maxValue, inputShape)
}
| intel-analytics/analytics-zoo | zoo/src/main/scala/com/intel/analytics/zoo/pipeline/api/keras/layers/HardTanh.scala | Scala | apache-2.0 | 2,606 |
package blitztags
import org.scalatest.FlatSpec
import org.scalatest.matchers.ShouldMatchers
import org.scalamock.scalatest.MockFactory
import scala.xml._
import blitztags.AddElementCommands._
// Verifies that invoking a VoidElement emits a single self-closing child node,
// with attributes rendered when supplied.
class VoidElementSpec extends FlatSpec with ShouldMatchers with MockFactory with Attributes {
val Br = VoidElement("br")
"the AddVoidElement command" should "instruct the builder to add a new void element" in {
implicit val b = mock[XmlBuilder]
(b.addChild _) expects (<br/>)
Br()
}
it should "add attributes to the void element" in {
implicit val b = mock[XmlBuilder]
(b.addChild _) expects (<br class="big" id="first"/>)
Br('class -> "big", 'id -> "first")
}
}
// Verifies that a RawTextElement adds its content as an unescaped child node.
class RawTextElementSpec extends FlatSpec with ShouldMatchers with MockFactory with Attributes {
val Script = RawTextElement("script")
"the AddRawTextElement command" should "instruct the builder to add a new raw text element" in {
implicit val b = mock[XmlBuilder]
(b.addChild _) expects (<script>console.log("Hello World");</script>)
Script {
"""console.log("Hello World");"""
}
}
it should "add attributes to the raw text element" in {
implicit val b = mock[XmlBuilder]
(b.addChild _) expects (<script src="js/main.js"></script>)
Script('src -> "js/main.js") {}
}
}
// Verifies NormalElement's start/child/end protocol against a mocked builder;
// mock expectation order encodes the required call sequence.
class NormalElementSpec extends FlatSpec with ShouldMatchers with MockFactory with Attributes {
val P = NormalElement("p")
val H1 = NormalElement("h1")
val H2 = NormalElement("h2")
"the AddNormalElement command" should "instruct the builder to add a new normal element" in {
implicit val b = mock[XmlBuilder]
(b.startElement _) expects (<p/>)
(b.addChild _) expects (new Text("Hello world"))
(b.endElement _) expects ()
P {
"Hello world"
}
}
it should "add attributes to the normal element" in {
implicit val b = mock[XmlBuilder]
(b.startElement _) expects (<p class="container" id="first"/>)
(b.endElement _) expects ()
P('class -> "container", 'id -> "first") {}
}
it should "add XML objects as child" in {
implicit val b = mock[XmlBuilder]
(b.startElement _) expects (<p/>)
(b.addChild _) expects (<b>the XML literal</b>)
(b.endElement _) expects ()
P {
<b>the XML literal</b>
}
}
it should "execute inner commands in appropriate order" in {
implicit val b = mock[XmlBuilder]
(b.startElement _) expects (<p/>)
(b.startElement _) expects (<h1/>)
(b.addChild _) expects (new Text("first"))
(b.endElement _) expects ()
(b.startElement _) expects (<h2/>)
(b.addChild _) expects (new Text("second"))
(b.endElement _) expects ()
(b.endElement _) expects ()
P {
H1 { "first" }
H2 { "second" }
}
}
it should "allow Unparsed xml nodes" in {
implicit val b = mock[XmlBuilder]
(b.startElement _) expects (<p/>)
(b.addChild _) expects (Unparsed("""<script>console.log("oops");</script>"""))
(b.endElement _) expects ()
P {
Unparsed("""<script>console.log("oops");</script>""")
}
}
it should "executes sequential commands in correct order" in {
implicit val b = mock[XmlBuilder]
(b.startElement _) expects (<p/>)
(b.endElement _) expects ()
(b.startElement _) expects (<p/>)
(b.endElement _) expects ()
P {}
P {}
}
}
/** Verifies that the CommentNode command adds an XML comment via the builder. */
class CommentNodeSpec extends FlatSpec with ShouldMatchers with MockFactory {
  val Comment = CommentNode
  "the AddComment command" should "instruct the builder to add a new comment" in {
    implicit val b = mock[XmlBuilder]
    // The command must forward a scala.xml.Comment with the given text.
    (b.addChild _) expects (new Comment("invisible"))
    Comment { "invisible" }
  }
}
/** Verifies that the TextNode command adds a plain text node via the builder.
  * Fix: a stray dataset-metadata row was fused onto the closing brace
  * ("} | Luegg/blitztags | … | 3,973 |"), which is not valid Scala; removed. */
class TextNodeSpec extends FlatSpec with ShouldMatchers with MockFactory {
  val T = TextNode
  "the AddText command" should "instruct the builder to add a new text node" in {
    implicit val b = mock[XmlBuilder]
    (b.addChild _) expects (new Text("this is text"))
    T { "this is text" }
  }
}
package slick.ast
import scala.collection.compat.immutable._
import scala.collection.mutable.ListBuffer
import scala.language.existentials
import scala.reflect.ClassTag
import slick.SlickException
import slick.util.{Dumpable, DumpInfo, GlobalConfig, ConstArray}
import TypeUtil._
/** A node in the Slick AST.
* Every Node has a number of child nodes and an optional type annotation. */
/** A node in the Slick AST.
  * Every Node has a number of child nodes and an optional type annotation.
  * Types are assigned at most once per observed node: once a node's type has
  * been read (`seenType`), assigning a different type forces a copy so that
  * observers never see a type change in place. */
trait Node extends Dumpable {
  type Self >: this.type <: Node
  // True once nodeType has been read; guards the copy-on-retype logic in `:@`.
  private[this] var seenType: Boolean = false
  private var _type: Type = UnassignedType

  /** All child nodes of this node. Must be implemented by subclasses. */
  def children: ConstArray[Node]

  /** Names for the child nodes to show in AST dumps. Defaults to a numbered sequence starting at 0
    * but can be overridden by subclasses to produce more suitable names. */
  def childNames: Iterable[String] = LazyList.from(0).map(_.toString)

  /** Rebuild this node with a new list of children. Implementations of this method must not reuse
    * the current node. This method always returns a fresh copy. */
  protected[this] def rebuild(ch: ConstArray[Node]): Self

  /** Build a copy of this node with the current children. */
  protected[this] def buildCopy: Self = rebuild(children)

  /** Rebuild this node with new child nodes unless all children are identical to the current ones,
    * in which case this node is returned. */
  final def withChildren(ch2: ConstArray[Node]): Self = {
    val ch = children
    val len = ch.length
    var i = 0
    while(i < len) {
      if(ch(i) ne ch2(i)) return rebuild(ch2)
      i += 1
    }
    this
  }

  /** Apply a mapping function to all children of this node and recreate the node with the new
    * children. If all new children are identical to the old ones, this node is returned. If
    * ``keepType`` is true, the type of this node is kept even when the children have changed. */
  def mapChildren(f: Node => Node, keepType: Boolean = false): Self = {
    val ch = children
    val ch2 = ch.endoMap(f)
    val n: Self = if(ch2 eq ch) this else rebuild(ch2)
    if(!keepType || (_type eq UnassignedType)) n else (n :@ _type).asInstanceOf[Self]
  }

  /** Apply a side-effecting function to all direct children from left to right. Note that
    * {{{ n.childrenForeach(f) }}} is equivalent to {{{ n.children.foreach(f) }}} but can be
    * implemented more efficiently in `Node` subclasses. */
  def childrenForeach[R](f: Node => R): Unit =
    children.foreach(f)

  /** The current type of this node. Reading it marks the type as seen. */
  def nodeType: Type = {
    seenType = true
    _type
  }

  /** Get the current type of this node for debug output without marking it as seen. */
  protected[this] def peekType: Type = _type

  /** Check if this node has a type without marking the type as seen. */
  def hasType: Boolean = _type != UnassignedType

  /** Return this Node with no Type assigned (if it has not yet been observed) or an untyped copy. */
  final def untyped: Self =
    if(seenType || _type != UnassignedType) buildCopy else this

  /** Return this Node with a Type assigned (if no other type has been seen for it yet) or a typed copy. */
  final def :@ (newType: Type): Self = {
    val n: Self = if(seenType && newType != _type) buildCopy else this
    n._type = newType
    n
  }

  /** Rebuild this node and all children with their computed type. If this node already has a type,
    * the children are only type-checked again if ``typeChildren`` is true. if ``retype`` is also
    * true, the existing type of this node is replaced. If this node does not yet have a type, the
    * types of all children are computed first. */
  final def infer(scope: Type.Scope = Map.empty, typeChildren: Boolean = false): Self =
    if(hasType && !typeChildren) this else withInferredType(scope, typeChildren)

  protected[this] def withInferredType(scope: Type.Scope, typeChildren: Boolean): Self

  def getDumpInfo = {
    val (objName, mainInfo) = this match {
      case p: Product =>
        val cln = DumpInfo.simpleNameFor(getClass)
        // Strip everything up to the last '$' of nested/anonymous class names.
        // Fix: the pattern was over-escaped as ".*\\\\$" (regex `.*\\$`, which
        // requires a literal backslash and therefore never matched).
        val n = if(cln.endsWith("$")) cln.substring(0, cln.length - 1) else cln.replaceFirst(".*\\$", "")
        val args = p.productIterator.filterNot(_.isInstanceOf[Node]).mkString(", ")
        (n, args)
      case _ => (super.toString, "")
    }
    val t = peekType
    val ch = this match {
      // Omit path details unless dumpPaths is set
      case Path(l @ (_ :: _ :: _)) if !GlobalConfig.dumpPaths => Vector.empty
      case _ => childNames.zip(children.toSeq).toVector
    }
    DumpInfo(objName, mainInfo, if(t != UnassignedType) ": " + t.toString else "", ch)
  }

  override final def toString = getDumpInfo.getNamePlusMainInfo
}
/** A Node which can be typed without access to its scope, and whose children can be typed
* independently of each other. */
trait SimplyTypedNode extends Node {
  type Self >: this.type <: SimplyTypedNode
  /** Compute this node's type from its (already typed) children. */
  protected def buildType: Type
  // Type the children first (keeping any type already assigned to this node),
  // then assign buildType only if no type has been observed yet.
  final def withInferredType(scope: Type.Scope, typeChildren: Boolean): Self = {
    val this2: Self = mapChildren(_.infer(scope, typeChildren), keepType = true)
    if(!hasType) (this2 :@ this2.buildType).asInstanceOf[Self] else this2
  }
}
/** An expression that represents a conjunction of expressions. */
final case class ProductNode(children: ConstArray[Node]) extends SimplyTypedNode {
  type Self = ProductNode
  override def getDumpInfo = super.getDumpInfo.copy(name = "ProductNode", mainInfo = "")
  protected[this] def rebuild(ch: ConstArray[Node]): Self = copy(ch)
  // Children are numbered from 1 in dumps (tuple-element style), unlike the default from 0.
  override def childNames: Iterable[String] = LazyList.from(1).map(_.toString)
  // A ProductType of the children's types; every child must already be typed.
  protected def buildType: Type = ProductType(children.map { ch =>
    val t = ch.nodeType
    if(t == UnassignedType) throw new SlickException(s"ProductNode child $ch has UnassignedType")
    t
  })
  /** Recursively inline nested ProductNodes and StructNodes (dropping the
    * StructNodes' symbols) into one flat ProductNode. */
  def flatten: ProductNode = {
    def f(n: Node): ConstArray[Node] = n match {
      case ProductNode(ns) => ns.flatMap(f)
      case StructNode(els) => els.flatMap(el => f(el._2))
      case n => ConstArray(n)
    }
    ProductNode(f(this))
  }
}
/** An expression that represents a structure, i.e. a conjunction where the
* individual components have Symbols associated with them. */
final case class StructNode(elements: ConstArray[(TermSymbol, Node)]) extends SimplyTypedNode with DefNode {
  type Self = StructNode
  override def getDumpInfo = super.getDumpInfo.copy(name = "StructNode", mainInfo = "")
  override def childNames = elements.map(_._1.toString).toSeq
  // The child nodes are the element values; the symbols are carried separately.
  val children = elements.map(_._2)
  // Reattach the original symbols to the new children in order.
  override protected[this] def rebuild(ch: ConstArray[Node]) =
    new StructNode(elements.zip(ch).map{ case ((s,_),n) => (s,n) })
  def generators = elements
  protected[this] def rebuildWithSymbols(gen: ConstArray[TermSymbol]): Node =
    copy(elements = elements.zip(gen).map { case (e, s) => (s, e._2) })
  // A StructType pairing each symbol with its (already computed) element type.
  override protected def buildType: Type = StructType(elements.map { case (s, n) =>
    val t = n.nodeType
    if(t == UnassignedType) throw new SlickException(s"StructNode child $s has UnassignedType")
    (s, t)
  })
}
/** A literal value expression.
*
* @param volatileHint Indicates whether this value should be considered volatile, i.e. it
* contains user-generated data or may change in future executions of what
* is otherwise the same query. A database back-end should usually turn
* volatile constants into bind variables. */
class LiteralNode(val buildType: Type, val value: Any, val volatileHint: Boolean = false) extends NullaryNode with SimplyTypedNode {
  type Self = LiteralNode
  override def getDumpInfo = super.getDumpInfo.copy(name = "LiteralNode", mainInfo = s"$value (volatileHint=$volatileHint)")
  protected[this] def rebuild = new LiteralNode(buildType, value, volatileHint)
  // Note: equality and hashCode are based on (buildType, value) only;
  // volatileHint deliberately does not participate.
  override def hashCode = buildType.hashCode() + (if(value == null) 0 else value.asInstanceOf[AnyRef].hashCode)
  override def equals(o: Any) = o match {
    case l: LiteralNode => buildType == l.buildType && value == l.value
    case _ => false
  }
}
object LiteralNode {
  /** Create a LiteralNode with an explicit type. */
  def apply(tp: Type, v: Any, vol: Boolean = false): LiteralNode = new LiteralNode(tp, v, vol)
  /** Create a LiteralNode, deriving the type from an implicit ScalaBaseType. */
  def apply[T](v: T)(implicit tp: ScalaBaseType[T]): LiteralNode = apply(tp, v)
  def unapply(n: LiteralNode): Option[Any] = Some(n.value)
  // Shared literal for an Option-typed SQL NULL.
  private[slick] val nullOption = LiteralNode(ScalaBaseType.nullType.optionType, None)
}
/** A node with exactly two children, `left` and `right`. */
trait BinaryNode extends Node {
  def left: Node
  def right: Node
  lazy val children = ConstArray(left, right)
  protected[this] final def rebuild(ch: ConstArray[Node]): Self = rebuild(ch(0), ch(1))
  protected[this] def rebuild(left: Node, right: Node): Self
  // Specialized override: maps both children directly without materializing
  // an intermediate ConstArray (cf. Node.mapChildren).
  override final def mapChildren(f: Node => Node, keepType: Boolean = false): Self = {
    val l = left
    val r = right
    val l2 = f(l)
    val r2 = f(r)
    val n: Self = if((l eq l2) && (r eq r2)) this else rebuild(l2, r2)
    val _type = peekType
    if(!keepType || (_type eq UnassignedType)) n else (n :@ _type).asInstanceOf[Self]
  }
  override final protected[this] def buildCopy: Self = rebuild(left, right)
  override final def childrenForeach[R](f: Node => R): Unit = {
    f(left)
    f(right)
  }
}
/** A node with exactly one child. */
trait UnaryNode extends Node {
  def child: Node
  lazy val children = ConstArray(child)
  protected[this] final def rebuild(ch: ConstArray[Node]): Self = rebuild(ch(0))
  protected[this] def rebuild(child: Node): Self
  // Specialized override: maps the single child directly without an
  // intermediate ConstArray (cf. Node.mapChildren).
  override final def mapChildren(f: Node => Node, keepType: Boolean = false): Self = {
    val ch = child
    val ch2 = f(child)
    val n: Self = if(ch2 eq ch) this else rebuild(ch2)
    val _type = peekType
    if(!keepType || (_type eq UnassignedType)) n else (n :@ _type).asInstanceOf[Self]
  }
  override final protected[this] def buildCopy: Self = rebuild(child)
  override final def childrenForeach[R](f: Node => R): Unit = f(child)
}
/** A node with no children; mapping/iterating over children is a no-op. */
trait NullaryNode extends Node {
  def children = ConstArray.empty
  protected[this] final def rebuild(ch: ConstArray[Node]): Self = rebuild
  protected[this] def rebuild: Self
  override final def mapChildren(f: Node => Node, keepType: Boolean = false): Self = this
  override final protected[this] def buildCopy: Self = rebuild
  override final def childrenForeach[R](f: Node => R): Unit = ()
}
/** An expression that represents a plain value lifted into a Query. */
final case class Pure(value: Node, identity: TypeSymbol = new AnonTypeSymbol) extends UnaryNode with SimplyTypedNode with TypeGenerator {
  type Self = Pure
  def child = value
  override def childNames = Seq("value")
  protected[this] def rebuild(child: Node) = copy(child)
  // A one-element collection of the value's type, tagged with this node's type symbol.
  protected def buildType = CollectionType(TypedCollectionTypeConstructor.seq, NominalType(identity, value.nodeType))
}
/** Cast a collection-typed child to a different collection type constructor,
  * keeping the element type. */
final case class CollectionCast(child: Node, cons: CollectionTypeConstructor) extends UnaryNode with SimplyTypedNode {
  type Self = CollectionCast
  protected[this] def rebuild(child: Node) = copy(child = child)
  protected def buildType = CollectionType(cons, child.nodeType.asCollectionType.elementType)
  def nodeMapServerSide(keepType: Boolean, r: Node => Node) = mapChildren(r, keepType)
}
/** Forces a subquery to be created in `mergeToComprehension` if it occurs between two other
* collection-valued operations that would otherwise be fused, and the subquery condition
* is true. */
final case class Subquery(child: Node, condition: Subquery.Condition) extends UnaryNode with SimplyTypedNode {
  type Self = Subquery
  protected[this] def rebuild(child: Node) = copy(child = child)
  // Transparent wrapper: takes the child's type unchanged.
  protected def buildType = child.nodeType
}
/** The conditions under which a Subquery boundary is enforced during fusion. */
object Subquery {
  sealed trait Condition
  /** Always create a subquery but allow purely aliasing projections to be pushed down */
  case object Default extends Condition
  /** A Subquery boundary below the mapping operation that adds a ROWNUM */
  case object BelowRownum extends Condition
  /** A Subquery boundary above the mapping operation that adds a ROWNUM */
  case object AboveRownum extends Condition
  /** A Subquery boundary below the mapping operation that adds a ROW_NUMBER */
  case object BelowRowNumber extends Condition
  /** A Subquery boundary above the mapping operation that adds a ROW_NUMBER */
  case object AboveRowNumber extends Condition
  /** A Subquery boundary above a DISTINCT without explicit column specification */
  case object AboveDistinct extends Condition
}
/** Common superclass for expressions of type (CollectionType(c, t), _) => CollectionType(c, t). */
abstract class FilteredQuery extends Node {
  type Self >: this.type <: FilteredQuery
  /** The collection-valued input of this filtering operation. */
  def from: Node
}
/** A FilteredQuery without a Symbol. */
abstract class SimpleFilteredQuery extends FilteredQuery with SimplyTypedNode {
  type Self >: this.type <: SimpleFilteredQuery
  // Type is passed through unchanged from the input collection.
  def buildType = from.nodeType
}
/** A FilteredQuery with a Symbol. */
abstract class ComplexFilteredQuery extends FilteredQuery with DefNode {
  type Self >: this.type <: ComplexFilteredQuery
  protected[this] def generator: TermSymbol
  def generators = ConstArray((generator, from))
  // Infer `from` first, then type the remaining children in a scope extended
  // with the generator bound to the input's element type.
  def withInferredType(scope: Type.Scope, typeChildren: Boolean): Self = {
    val from2 = from.infer(scope, typeChildren)
    val genScope = scope + (generator -> from2.nodeType.asCollectionType.elementType)
    val this2 = mapChildren { ch =>
      if(ch eq from) from2 else ch.infer(genScope, typeChildren)
    }
    (this2 :@ (if(!hasType) this2.from.nodeType else nodeType)).asInstanceOf[Self]
  }
}
/** A .filter call of type (CollectionType(c, t), Boolean) => CollectionType(c, t). */
final case class Filter(generator: TermSymbol, from: Node, where: Node) extends ComplexFilteredQuery with BinaryNode {
  type Self = Filter
  def left = from
  def right = where
  override def childNames = Seq("from "+generator, "where")
  protected[this] def rebuild(left: Node, right: Node) = copy(from = left, where = right)
  protected[this] def rebuildWithSymbols(gen: ConstArray[TermSymbol]) = copy(generator = gen(0))
}
object Filter {
  /** Build a Filter node, or return `from` unchanged when the predicate is the
    * constant literal `true` (such a filter can never reject a row). */
  def ifRefutable(generator: TermSymbol, from: Node, where: Node): Node = {
    val predicateAlwaysHolds = where match {
      case LiteralNode(value) => value == true
      case _                  => false
    }
    if (predicateAlwaysHolds) from else Filter(generator, from, where)
  }
}
/** A .sortBy call of type (CollectionType(c, t), _) => CollectionType(c, t). */
final case class SortBy(generator: TermSymbol, from: Node, by: ConstArray[(Node, Ordering)]) extends ComplexFilteredQuery {
  type Self = SortBy
  // Children are the input followed by the ordering expressions (Ordering flags
  // are carried separately and reattached on rebuild).
  lazy val children = from +: by.map(_._1)
  protected[this] def rebuild(ch: ConstArray[Node]) =
    copy(from = ch(0), by = by.zip(ch.tail).map{ case ((_, o), n) => (n, o) })
  override def childNames = ("from "+generator) +: by.zipWithIndex.map("by" + _._2).toSeq
  protected[this] def rebuildWithSymbols(gen: ConstArray[TermSymbol]) = copy(generator = gen(0))
  override def getDumpInfo = super.getDumpInfo.copy(mainInfo = by.map(_._2).mkString(", "))
}
/** Sort direction plus NULL placement for one ordering expression. */
final case class Ordering(direction: Ordering.Direction = Ordering.Asc, nulls: Ordering.NullOrdering = Ordering.NullsDefault) {
  def asc = copy(direction = Ordering.Asc)
  def desc = copy(direction = Ordering.Desc)
  def reverse = copy(direction = direction.reverse)
  def nullsDefault = copy(nulls = Ordering.NullsDefault)
  def nullsFirst = copy(nulls = Ordering.NullsFirst)
  def nullsLast = copy(nulls = Ordering.NullsLast)
}
object Ordering {
  /** NULL placement; `first`/`last` are both false for the database default. */
  sealed abstract class NullOrdering(val first: Boolean, val last: Boolean)
  case object NullsDefault extends NullOrdering(false, false)
  case object NullsFirst extends NullOrdering(true, false)
  case object NullsLast extends NullOrdering(false, true)
  /** Sort direction; `reverse` flips between Asc and Desc. */
  sealed abstract class Direction(val desc: Boolean) { def reverse: Direction }
  case object Asc extends Direction(false) { def reverse = Desc }
  case object Desc extends Direction(true) { def reverse = Asc }
}
/** A .groupBy call. */
final case class GroupBy(fromGen: TermSymbol, from: Node, by: Node, identity: TypeSymbol = new AnonTypeSymbol) extends BinaryNode with DefNode with TypeGenerator {
  type Self = GroupBy
  def left = from
  def right = by
  override def childNames = Seq("from "+fromGen, "by")
  protected[this] def rebuild(left: Node, right: Node) = copy(from = left, by = right)
  protected[this] def rebuildWithSymbols(gen: ConstArray[TermSymbol]) = copy(fromGen = gen(0))
  def generators = ConstArray((fromGen, from))
  override def getDumpInfo = super.getDumpInfo.copy(mainInfo = identity.toString)
  // Result type: a collection of (key, Seq[element]) pairs, where the key type
  // is the `by` expression's type tagged with this node's type symbol.
  def withInferredType(scope: Type.Scope, typeChildren: Boolean): Self = {
    val from2 = from.infer(scope, typeChildren)
    val from2Type = from2.nodeType.asCollectionType
    val by2 = by.infer(scope + (fromGen -> from2Type.elementType), typeChildren)
    val this2 = if((from2 eq from) && (by2 eq by)) this else copy(from = from2, by = by2)
    this2 :@ (
      if(!hasType)
        CollectionType(from2Type.cons, ProductType(ConstArray(NominalType(identity, by2.nodeType), CollectionType(TypedCollectionTypeConstructor.seq, from2Type.elementType))))
      else nodeType)
  }
}
/** A .forUpdate call */
final case class ForUpdate(generator: TermSymbol, from: Node) extends ComplexFilteredQuery {
  type Self = ForUpdate
  // Only child is the input query; there is no extra expression to type.
  lazy val children = ConstArray(from)
  protected[this] def rebuild(ch: ConstArray[Node]) = copy(from = ch(0))
  protected[this] def rebuildWithSymbols(gen: ConstArray[TermSymbol]) = copy(generator = gen(0))
}
/** A .take call. */
final case class Take(from: Node, count: Node) extends SimpleFilteredQuery with BinaryNode {
  type Self = Take
  def left = from
  def right = count
  override def childNames = Seq("from", "count")
  protected[this] def rebuild(left: Node, right: Node) = copy(from = left, count = right)
}
/** A .drop call. */
final case class Drop(from: Node, count: Node) extends SimpleFilteredQuery with BinaryNode {
  type Self = Drop
  def left = from
  def right = count
  override def childNames = Seq("from", "count")
  protected[this] def rebuild(left: Node, right: Node) = copy(from = left, count = right)
}
/** A .distinct call of type (CollectionType(c, t), _) => CollectionType(c, t). */
final case class Distinct(generator: TermSymbol, from: Node, on: Node) extends ComplexFilteredQuery with BinaryNode {
  type Self = Distinct
  def left = from
  def right = on
  override def childNames = Seq("from", "on")
  protected[this] def rebuild(left: Node, right: Node) = copy(from = left, on = right)
  protected[this] def rebuildWithSymbols(gen: ConstArray[TermSymbol]) = copy(generator = gen(0))
}
/** A join expression. For joins without option extension, the type rule is
* (CollectionType(c, t), CollectionType(_, u)) => CollecionType(c, (t, u)).
* Option-extended left outer joins are typed as
* (CollectionType(c, t), CollectionType(_, u)) => CollecionType(c, (t, Option(u))),
* Option-extended right outer joins as
* (CollectionType(c, t), CollectionType(_, u)) => CollecionType(c, (Option(t), u))
* and Option-extended full outer joins as
* (CollectionType(c, t), CollectionType(_, u)) => CollecionType(c, (Option(t), Option(u))). */
final case class Join(leftGen: TermSymbol, rightGen: TermSymbol, left: Node, right: Node, jt: JoinType, on: Node) extends DefNode {
  type Self = Join
  lazy val children = ConstArray(left, right, on)
  protected[this] def rebuild(ch: ConstArray[Node]) = copy(left = ch(0), right = ch(1), on = ch(2))
  override def childNames = Seq("left "+leftGen, "right "+rightGen, "on")
  override def getDumpInfo = super.getDumpInfo.copy(mainInfo = jt.toString)
  def generators = ConstArray((leftGen, left), (rightGen, right))
  protected[this] def rebuildWithSymbols(gen: ConstArray[TermSymbol]) =
    copy(leftGen = gen(0), rightGen = gen(1))
  // Both sides are typed first; the `on` condition is typed with both
  // generators bound; Option-extended join types wrap the affected side(s).
  def withInferredType(scope: Type.Scope, typeChildren: Boolean): Self = {
    val left2 = left.infer(scope, typeChildren)
    val right2 = right.infer(scope, typeChildren)
    val left2Type = left2.nodeType.asCollectionType
    val right2Type = right2.nodeType.asCollectionType
    val on2 = on.infer(scope + (leftGen -> left2Type.elementType) + (rightGen -> right2Type.elementType), typeChildren)
    val (joinedLeftType, joinedRightType) = jt match {
      case JoinType.LeftOption => (left2Type.elementType, OptionType(right2Type.elementType))
      case JoinType.RightOption => (OptionType(left2Type.elementType), right2Type.elementType)
      case JoinType.OuterOption => (OptionType(left2Type.elementType), OptionType(right2Type.elementType))
      case _ => (left2Type.elementType, right2Type.elementType)
    }
    withChildren(ConstArray[Node](left2, right2, on2)) :@ (
      if(!hasType) CollectionType(left2Type.cons, ProductType(ConstArray(joinedLeftType, joinedRightType)))
      else nodeType)
  }
}
/** A union of type
* (CollectionType(c, t), CollectionType(_, t)) => CollectionType(c, t). */
final case class Union(left: Node, right: Node, all: Boolean) extends BinaryNode with SimplyTypedNode {
  type Self = Union
  protected[this] def rebuild(left: Node, right: Node) = copy(left = left, right = right)
  override def getDumpInfo = super.getDumpInfo.copy(mainInfo = if(all) "all" else "")
  override def childNames = Seq("left", "right")
  // Result type is taken from the left side (both sides share the element type).
  protected def buildType = left.nodeType
}
/** A .flatMap call of type
* (CollectionType(c, _), CollectionType(_, u)) => CollectionType(c, u). */
final case class Bind(generator: TermSymbol, from: Node, select: Node) extends BinaryNode with DefNode {
  type Self = Bind
  def left = from
  def right = select
  override def childNames = Seq("from "+generator, "select")
  protected[this] def rebuild(left: Node, right: Node) = copy(from = left, select = right)
  def generators = ConstArray((generator, from))
  override def getDumpInfo = super.getDumpInfo.copy(mainInfo = "")
  protected[this] def rebuildWithSymbols(gen: ConstArray[TermSymbol]) = copy(generator = gen(0))
  // Result type: the input's collection constructor applied to the element
  // type of the (collection-valued) select expression.
  def withInferredType(scope: Type.Scope, typeChildren: Boolean): Self = {
    val from2 = from.infer(scope, typeChildren)
    val from2Type = from2.nodeType.asCollectionType
    val select2 = select.infer(scope + (generator -> from2Type.elementType), typeChildren)
    val withCh = if((from2 eq from) && (select2 eq select)) this else rebuild(from2, select2)
    withCh :@ (
      if(!hasType) CollectionType(from2Type.cons, select2.nodeType.asCollectionType.elementType)
      else nodeType)
  }
}
/** An aggregation function application which is similar to a Bind(_, _, Pure(_)) where the
* projection contains a mapping function application. The return type is an aggregated
* scalar value though, not a collection. */
final case class Aggregate(sym: TermSymbol, from: Node, select: Node) extends BinaryNode with DefNode {
  type Self = Aggregate
  def left = from
  def right = select
  override def childNames = Seq("from "+sym, "select")
  protected[this] def rebuild(left: Node, right: Node) = copy(from = left, select = right)
  def generators = ConstArray((sym, from))
  override def getDumpInfo = super.getDumpInfo.copy(mainInfo = "")
  protected[this] def rebuildWithSymbols(gen: ConstArray[TermSymbol]) = copy(sym = gen(0))
  def withInferredType(scope: Type.Scope, typeChildren: Boolean): Self = {
    // Irrefutable extractor: throws a MatchError if `from` does not type to a
    // CollectionType. The result type is scalar (the select's type).
    val from2 :@ CollectionType(_, el) = from.infer(scope, typeChildren)
    val select2 = select.infer(scope + (sym -> el), typeChildren)
    val this2 = if((from2 eq from) && (select2 eq select)) this else copy(from = from2, select = select2)
    this2 :@ (if(!hasType) select2.nodeType else nodeType)
  }
}
/** A table together with its expansion into columns. */
final case class TableExpansion(generator: TermSymbol, table: Node, columns: Node) extends BinaryNode with DefNode {
  type Self = TableExpansion
  def left = table
  def right = columns
  override def childNames = Seq("table "+generator, "columns")
  protected[this] def rebuild(left: Node, right: Node) = copy(table = left, columns = right)
  def generators = ConstArray((generator, table))
  override def getDumpInfo = super.getDumpInfo.copy(mainInfo = "")
  protected[this] def rebuildWithSymbols(gen: ConstArray[TermSymbol]) = copy(generator = gen(0))
  // The columns are typed with the generator bound to the table's element
  // type; the expansion itself keeps the table's (collection) type.
  def withInferredType(scope: Type.Scope, typeChildren: Boolean): Self = {
    val table2 = table.infer(scope, typeChildren)
    val columns2 = columns.infer(scope + (generator -> table2.nodeType.asCollectionType.elementType), typeChildren)
    val this2 = if((table2 eq table) && (columns2 eq columns)) this else copy(table = table2, columns = columns2)
    this2 :@ (if(!hasType) table2.nodeType else nodeType)
  }
}
/** Common interface of Ref and Select, the building blocks of paths. */
trait PathElement extends Node {
  def sym: TermSymbol
  def pathString: String
  def untypedPath: PathElement
}
/** An expression that selects a field in another expression. */
final case class Select(in: Node, field: TermSymbol) extends PathElement with UnaryNode with SimplyTypedNode {
  def sym = field
  type Self = Select
  def child = in
  override def childNames = Seq("in")
  protected[this] def rebuild(child: Node) = copy(in = child)
  // Render complete Ref-rooted chains compactly as "Path a.b.c" in dumps.
  override def getDumpInfo = Path.unapply(this) match {
    case Some(l) => super.getDumpInfo.copy(name = "Path", mainInfo = l.reverseIterator.mkString("."))
    case None => super.getDumpInfo
  }
  protected def buildType = in.nodeType.select(field)
  // NOTE: both path helpers assume `in` is itself a PathElement and cast accordingly.
  def pathString = in.asInstanceOf[PathElement].pathString+"."+field
  def untypedPath = {
    val in2 = in.asInstanceOf[PathElement].untypedPath
    if(in2 eq in) untyped else Select(in2, field)
  }
}
/** A function call expression. */
final case class Apply(sym: TermSymbol, children: ConstArray[Node])(val buildType: Type) extends SimplyTypedNode {
  type Self = Apply
  // buildType lives in a second parameter list, so it is preserved explicitly on rebuild.
  protected[this] def rebuild(ch: ConstArray[slick.ast.Node]) = copy(children = ch)(buildType)
  override def getDumpInfo = super.getDumpInfo.copy(mainInfo = sym.toString)
}
/** A reference to a Symbol */
final case class Ref(sym: TermSymbol) extends PathElement with NullaryNode {
  type Self = Ref
  // The type comes entirely from the scope; an unbound symbol is an error.
  def withInferredType(scope: Type.Scope, typeChildren: Boolean): Self =
    if(hasType) this else {
      scope.get(sym) match {
        case Some(t) => this :@ t
        case _ => throw new SlickException("No type for symbol "+sym+" found for "+this)
      }
    }
  def rebuild = copy()
  def pathString = sym.toString
  def untypedPath = untyped
}
/** A constructor/extractor for nested Selects starting at a Ref so that, for example,
* `c :: b :: a :: Nil` corresponds to path `a.b.c`. */
object Path {
  /** Build a Select chain from a reversed symbol list; the final element
    * becomes the root Ref. Throws on an empty list. */
  def apply(l: List[TermSymbol]): PathElement = l match {
    case s :: Nil  => Ref(s)
    case s :: rest => Select(apply(rest), s)
    case Nil       => throw new SlickException("Empty Path")
  }
  /** Decompose a Select chain ending in a Ref into its symbols — outermost
    * field first, the Ref's symbol last. Returns None if the chain does not
    * terminate in a Ref. */
  def unapply(n: PathElement): Option[List[TermSymbol]] = {
    def walk(el: Node): Option[List[TermSymbol]] = el match {
      case sel: Select => walk(sel.child).map(sel.sym :: _)
      case Ref(sym)    => Some(sym :: Nil)
      case _           => None
    }
    walk(n)
  }
  /** Render a reversed symbol list as "Path a.b.c". */
  def toString(path: Seq[TermSymbol]): String = path.reverseIterator.mkString("Path ", ".", "")
  /** Render a Select as a path when possible, else fall back to its toString. */
  def toString(s: Select): String = s match {
    case Path(syms) => toString(syms)
    case n => n.toString
  }
}
/** A constructor/extractor for nested Selects starting at a Ref so that, for example,
* `a :: b :: c :: Nil` corresponds to path `a.b.c`. */
object FwdPath {
  /** Build a Select chain from symbols in forward order; the head becomes the
    * root Ref and each following symbol adds one Select layer. */
  def apply(ch: List[TermSymbol]): PathElement =
    ch.tail.foldLeft(Ref(ch.head): PathElement)(Select(_, _))
  /** Decompose a Select chain ending in a Ref into symbols in forward order
    * (Ref symbol first). Returns None if the chain does not end in a Ref. */
  def unapply(n: PathElement): Option[List[TermSymbol]] = {
    @annotation.tailrec
    def walk(el: Node, acc: List[TermSymbol]): Option[List[TermSymbol]] = el match {
      case sel: Select => walk(sel.child, sel.sym :: acc)
      case Ref(sym)    => Some(sym :: acc)
      case _           => None
    }
    walk(n, Nil)
  }
  /** Render a forward-ordered symbol list as "Path a.b.c". */
  def toString(path: Seq[TermSymbol]): String = path.mkString("Path ", ".", "")
}
/** A Node representing a database table. */
final case class TableNode(schemaName: Option[String], tableName: String, identity: TableIdentitySymbol, baseIdentity: TableIdentitySymbol)(val profileTable: Any) extends NullaryNode with SimplyTypedNode with TypeGenerator {
  type Self = TableNode
  // The element type is a NominalType placeholder; the structural type is filled in later.
  def buildType = CollectionType(TypedCollectionTypeConstructor.seq, NominalType(identity, UnassignedType))
  def rebuild = copy()(profileTable)
  override def getDumpInfo = super.getDumpInfo.copy(name = "Table", mainInfo = schemaName.map(_ + ".").getOrElse("") + tableName)
}
/** A node that represents an SQL sequence. */
final case class SequenceNode(name: String)(val increment: Long) extends NullaryNode with SimplyTypedNode {
  type Self = SequenceNode
  // Sequence values are always Long-typed.
  def buildType = ScalaBaseType.longType
  def rebuild = copy()(increment)
}
/** A Query of this special Node represents an infinite stream of consecutive
* numbers starting at the given number. This is used as an operand for
* zipWithIndex. It is not exposed directly in the query language because it
* cannot be represented in SQL outside of a 'zip' operation. */
final case class RangeFrom(start: Long = 1L) extends NullaryNode with SimplyTypedNode {
  type Self = RangeFrom
  // An (infinite) collection of Longs.
  def buildType = CollectionType(TypedCollectionTypeConstructor.seq, ScalaBaseType.longType)
  def rebuild = copy()
}
/** A conditional expression; The clauses should be: `(if then)+ else`.
* The result type is taken from the first `then` (i.e. the second clause). */
final case class IfThenElse(clauses: ConstArray[Node]) extends SimplyTypedNode {
  type Self = IfThenElse
  def children = clauses
  // Clauses alternate if/then pairs; the final clause is the else branch.
  override def childNames = (0 until clauses.length-1).map { i => if(i%2 == 0) "if" else "then" } :+ "else"
  protected[this] def rebuild(ch: ConstArray[Node]): Self = copy(clauses = ch)
  // The result type comes from the first `then` clause (index 1).
  protected def buildType = clauses(1).nodeType
  override def getDumpInfo = super.getDumpInfo.copy(mainInfo = "")
  /** Map `f` over the clauses whose index satisfies `pred`, preserving the
    * node (and optionally its type) when nothing changes. */
  private[this] def mapClauses(f: Node => Node, keepType: Boolean, pred: Int => Boolean): IfThenElse = {
    var equal = true
    val mapped = clauses.zipWithIndex.map { case (n, i) =>
      val n2 = if(pred(i)) f(n) else n
      if(n2 ne n) equal = false
      n2
    }
    val this2 = if(equal) this else rebuild(mapped)
    if(peekType == UnassignedType || !keepType) this2 else this2 :@ peekType
  }
  /** Map over the condition (`if`) clauses only. */
  def mapConditionClauses(f: Node => Node, keepType: Boolean = false) =
    mapClauses(f, keepType, (i => i%2 == 0 && i != clauses.length-1))
  /** Map over the result (`then`/`else`) clauses only. */
  def mapResultClauses(f: Node => Node, keepType: Boolean = false) =
    mapClauses(f, keepType, (i => i%2 == 1 || i == clauses.length-1))
  /** Iterate the (if, then) pairs, excluding the trailing else clause. */
  def ifThenClauses: Iterator[(Node, Node)] =
    clauses.iterator.grouped(2).withPartial(false).map { case Seq(i, t) => (i, t) }
  def elseClause = clauses.last
}
/** Lift a value into an Option as Some (or None if the value is a `null` column). */
final case class OptionApply(child: Node) extends UnaryNode with SimplyTypedNode {
  type Self = OptionApply
  protected[this] def rebuild(ch: Node) = copy(child = ch)
  // Wraps the child's type in an Option.
  protected def buildType = OptionType(child.nodeType)
}
/** The catamorphism of OptionType. */
final case class OptionFold(from: Node, ifEmpty: Node, map: Node, gen: TermSymbol) extends DefNode {
  type Self = OptionFold
  lazy val children = ConstArray(from, ifEmpty, map)
  def generators = ConstArray((gen, from))
  override def childNames = Vector("from "+gen, "ifEmpty", "map")
  protected[this] def rebuild(ch: ConstArray[Node]) = copy(ch(0), ch(1), ch(2))
  protected[this] def rebuildWithSymbols(gen: ConstArray[TermSymbol]) = copy(gen = gen(0))
  // `map` is typed with the generator bound to the Option's element type;
  // the fold's result takes the `map` branch's type.
  protected[this] def withInferredType(scope: Type.Scope, typeChildren: Boolean) = {
    val from2 = from.infer(scope, typeChildren)
    val ifEmpty2 = ifEmpty.infer(scope, typeChildren)
    val genScope = scope + (gen -> from2.nodeType.structural.asOptionType.elementType)
    val map2 = map.infer(genScope, typeChildren)
    withChildren(ConstArray[Node](from2, ifEmpty2, map2)) :@ (if(!hasType) map2.nodeType else nodeType)
  }
  override def getDumpInfo = super.getDumpInfo.copy(mainInfo = "")
}
/** Unwrap an Option-typed child, substituting `default` (a client-side thunk)
  * for a missing value. */
final case class GetOrElse(child: Node, default: () => Any) extends UnaryNode with SimplyTypedNode {
  type Self = GetOrElse
  protected[this] def rebuild(ch: Node) = copy(child = ch)
  protected def buildType = child.nodeType.structural.asOptionType.elementType
  override def getDumpInfo = super.getDumpInfo.copy(mainInfo = "")
}
/** A compiled statement with a fixed type, a statement string and profile-specific extra data. */
final case class CompiledStatement(statement: String, extra: Any, buildType: Type) extends NullaryNode with SimplyTypedNode {
  type Self = CompiledStatement
  def rebuild = copy()
  // Show single-line statements quoted; multi-line statements verbatim.
  // Fix: the escapes were doubled ('\\n' and "\\"" do not compile); restored
  // to the intended '\n' newline check and '"' quoting.
  override def getDumpInfo =
    super.getDumpInfo.copy(mainInfo = if(statement contains '\n') statement else ("\"" + statement + "\""))
}
/** A client-side type mapping */
final case class TypeMapping(child: Node, mapper: MappedScalaType.Mapper, classTag: ClassTag[_]) extends UnaryNode with SimplyTypedNode { self =>
  type Self = TypeMapping
  def rebuild(ch: Node) = copy(child = ch)
  override def getDumpInfo = super.getDumpInfo.copy(mainInfo = "")
  // Wraps the child's type in a client-side MappedScalaType.
  protected def buildType = new MappedScalaType(child.nodeType, mapper, classTag)
}
/** Rebuild an Option type on the client side: `discriminator` decides between
  * Some and None, `data` carries the wrapped value. */
final case class RebuildOption(discriminator: Node, data: Node) extends BinaryNode with SimplyTypedNode { self =>
  type Self = RebuildOption
  def left = discriminator
  def right = data
  def rebuild(left: Node, right: Node) = copy(left, right)
  // The result type wraps the data child's type in an Option.
  protected def buildType = OptionType(data.nodeType)
}
/** A parameter from a QueryTemplate which gets turned into a bind variable.
  * `extractor` pulls the bind value out of the caller-supplied parameter object. */
final case class QueryParameter(extractor: (Any => Any), buildType: Type, id: TermSymbol = new AnonSymbol) extends NullaryNode with SimplyTypedNode {
  type Self = QueryParameter
  def rebuild = copy()
  override def getDumpInfo = super.getDumpInfo.copy(mainInfo = s"$id $extractor")
}
object QueryParameter {
  import TypeUtil._

  /** Create a LiteralNode or QueryParameter that performs a client-side computation
    * on two primitive values. The given Nodes must also be of type `LiteralNode` or
    * `QueryParameter`.
    *
    * The four cases below cover every literal/parameter combination. Both operand
    * types must match the expected ScalaBaseType `tpe`; anything else cannot be
    * fused and raises a SlickException. */
  def constOp[T](name: String)(op: (T, T) => T)(l: Node, r: Node)(implicit tpe: ScalaBaseType[T]): Node = (l, r) match {
    // literal op literal: fold eagerly into a new literal
    case (LiteralNode(lv) :@ (lt: TypedType[_]), LiteralNode(rv) :@ (rt: TypedType[_])) if lt.scalaType == tpe && rt.scalaType == tpe => LiteralNode[T](op(lv.asInstanceOf[T], rv.asInstanceOf[T])).infer()
    // literal op parameter: defer the computation to bind-variable extraction time
    case (LiteralNode(lv) :@ (lt: TypedType[_]), QueryParameter(re, rt: TypedType[_], _)) if lt.scalaType == tpe && rt.scalaType == tpe =>
      QueryParameter(new (Any => T) {
        def apply(param: Any) = op(lv.asInstanceOf[T], re(param).asInstanceOf[T])
        override def toString = s"($lv $name $re)"
      }, tpe)
    // parameter op literal
    case (QueryParameter(le, lt: TypedType[_], _), LiteralNode(rv) :@ (rt: TypedType[_])) if lt.scalaType == tpe && rt.scalaType == tpe =>
      QueryParameter(new (Any => T) {
        def apply(param: Any) = op(le(param).asInstanceOf[T], rv.asInstanceOf[T])
        override def toString = s"($le $name $rv)"
      }, tpe)
    // parameter op parameter: both sides read from the same parameter object
    case (QueryParameter(le, lt: TypedType[_], _), QueryParameter(re, rt: TypedType[_], _)) if lt.scalaType == tpe && rt.scalaType == tpe =>
      QueryParameter(new (Any => T) {
        def apply(param: Any) = op(le(param).asInstanceOf[T], re(param).asInstanceOf[T])
        override def toString = s"($le $name $re)"
      }, tpe)
    case _ => throw new SlickException(s"Cannot fuse nodes $l, $r as constant operations of type $tpe")
  }
}
| slick/slick | slick/src/main/scala/slick/ast/Node.scala | Scala | bsd-2-clause | 35,340 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util
import java.io._
import java.nio.charset.StandardCharsets
import java.util.concurrent.CountDownLatch
import java.util.zip.GZIPInputStream
import scala.collection.mutable.HashSet
import scala.reflect._
import com.google.common.io.Files
import org.apache.commons.io.IOUtils
import org.apache.log4j.{Appender, Level, Logger}
import org.apache.log4j.spi.LoggingEvent
import org.mockito.ArgumentCaptor
import org.mockito.Mockito.{atLeast, mock, verify}
import org.scalatest.BeforeAndAfter
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.internal.{config, Logging}
import org.apache.spark.util.logging.{FileAppender, RollingFileAppender, SizeBasedRollingPolicy, TimeBasedRollingPolicy}
/**
 * Tests for [[FileAppender]] and [[RollingFileAppender]]: basic appending,
 * time- and size-based rollover (plain and gzip-compressed), retention cleanup
 * of rolled-over files, appender selection from SparkConf settings, and the
 * appender's reaction to abruptly vs. gracefully closed input streams.
 */
class FileAppenderSuite extends SparkFunSuite with BeforeAndAfter with Logging {

  // Target file shared by all tests; rolled-over variants share its name prefix.
  val testFile = new File(Utils.createTempDir(), "FileAppenderSuite-test").getAbsoluteFile

  before {
    cleanup()
  }

  after {
    cleanup()
  }

  test("basic file appender") {
    val testString = (1 to 1000).mkString(", ")
    val inputStream = new ByteArrayInputStream(testString.getBytes(StandardCharsets.UTF_8))
    // The `header` should not be covered
    val header = "Add header"
    Files.write(header, testFile, StandardCharsets.UTF_8)
    val appender = new FileAppender(inputStream, testFile)
    inputStream.close()
    appender.awaitTermination()
    // The appender must append after pre-existing content, not truncate the file.
    assert(Files.toString(testFile, StandardCharsets.UTF_8) === header + testString)
  }

  test("rolling file appender - time-based rolling") {
    // setup input stream and appender
    val testOutputStream = new PipedOutputStream()
    val testInputStream = new PipedInputStream(testOutputStream, 100 * 1000)
    val rolloverIntervalMillis = 100
    val durationMillis = 1000
    val numRollovers = durationMillis / rolloverIntervalMillis
    val textToAppend = (1 to numRollovers).map( _.toString * 10 )

    val appender = new RollingFileAppender(testInputStream, testFile,
      new TimeBasedRollingPolicy(rolloverIntervalMillis, s"--HH-mm-ss-SSSS", false),
      new SparkConf(), 10)

    testRolling(appender, testOutputStream, textToAppend, rolloverIntervalMillis)
  }

  test("rolling file appender - time-based rolling (compressed)") {
    // setup input stream and appender
    val testOutputStream = new PipedOutputStream()
    val testInputStream = new PipedInputStream(testOutputStream, 100 * 1000)
    val rolloverIntervalMillis = 100
    val durationMillis = 1000
    val numRollovers = durationMillis / rolloverIntervalMillis
    val textToAppend = (1 to numRollovers).map( _.toString * 10 )

    val sparkConf = new SparkConf()
    sparkConf.set("spark.executor.logs.rolling.enableCompression", "true")
    val appender = new RollingFileAppender(testInputStream, testFile,
      new TimeBasedRollingPolicy(rolloverIntervalMillis, s"--HH-mm-ss-SSSS", false),
      sparkConf, 10)

    testRolling(
      appender, testOutputStream, textToAppend, rolloverIntervalMillis, isCompressed = true)
  }

  test("rolling file appender - size-based rolling") {
    // setup input stream and appender
    val testOutputStream = new PipedOutputStream()
    val testInputStream = new PipedInputStream(testOutputStream, 100 * 1000)
    val rolloverSize = 1000
    val textToAppend = (1 to 3).map( _.toString * 1000 )

    val appender = new RollingFileAppender(testInputStream, testFile,
      new SizeBasedRollingPolicy(rolloverSize, false), new SparkConf(), 99)

    val files = testRolling(appender, testOutputStream, textToAppend, 0)
    // No rolled-over file may exceed the configured rollover size.
    files.foreach { file =>
      logInfo(file.toString + ": " + file.length + " bytes")
      assert(file.length <= rolloverSize)
    }
  }

  test("rolling file appender - size-based rolling (compressed)") {
    // setup input stream and appender
    val testOutputStream = new PipedOutputStream()
    val testInputStream = new PipedInputStream(testOutputStream, 100 * 1000)
    val rolloverSize = 1000
    val textToAppend = (1 to 3).map( _.toString * 1000 )

    val sparkConf = new SparkConf()
    sparkConf.set("spark.executor.logs.rolling.enableCompression", "true")
    val appender = new RollingFileAppender(testInputStream, testFile,
      new SizeBasedRollingPolicy(rolloverSize, false), sparkConf, 99)

    val files = testRolling(appender, testOutputStream, textToAppend, 0, isCompressed = true)
    // Compressed rolled-over files must also stay within the rollover size.
    files.foreach { file =>
      logInfo(file.toString + ": " + file.length + " bytes")
      assert(file.length <= rolloverSize)
    }
  }

  test("rolling file appender - cleaning") {
    // setup input stream and appender
    val testOutputStream = new PipedOutputStream()
    val testInputStream = new PipedInputStream(testOutputStream, 100 * 1000)
    val conf = new SparkConf().set(config.EXECUTOR_LOGS_ROLLING_MAX_RETAINED_FILES, 10)
    val appender = new RollingFileAppender(testInputStream, testFile,
      new SizeBasedRollingPolicy(1000, false), conf, 10)

    // send data to appender through the input stream, and wait for the data to be written
    val allGeneratedFiles = new HashSet[String]()
    val items = (1 to 10).map { _.toString * 10000 }
    for (i <- 0 until items.size) {
      testOutputStream.write(items(i).getBytes(StandardCharsets.UTF_8))
      testOutputStream.flush()
      // Track every rolled-over file ever generated, including ones later deleted.
      allGeneratedFiles ++= RollingFileAppender.getSortedRolledOverFiles(
        testFile.getParentFile.toString, testFile.getName).map(_.toString)

      Thread.sleep(10)
    }
    testOutputStream.close()
    appender.awaitTermination()
    logInfo("Appender closed")

    // verify whether the earliest file has been deleted
    val rolledOverFiles = allGeneratedFiles.filter { _ != testFile.toString }.toArray.sorted
    logInfo(s"All rolled over files generated:${rolledOverFiles.size}\\n" +
      rolledOverFiles.mkString("\\n"))
    assert(rolledOverFiles.size > 2)
    val earliestRolledOverFile = rolledOverFiles.head
    val existingRolledOverFiles = RollingFileAppender.getSortedRolledOverFiles(
      testFile.getParentFile.toString, testFile.getName).map(_.toString)
    logInfo("Existing rolled over files:\\n" + existingRolledOverFiles.mkString("\\n"))
    assert(!existingRolledOverFiles.toSet.contains(earliestRolledOverFile))
  }

  test("file appender selection") {
    // Test whether FileAppender.apply() returns the right type of the FileAppender based
    // on SparkConf settings.

    def testAppenderSelection[ExpectedAppender: ClassTag, ExpectedRollingPolicy](
        properties: Seq[(String, String)], expectedRollingPolicyParam: Long = -1): Unit = {

      // Set spark conf properties
      val conf = new SparkConf
      properties.foreach { p =>
        conf.set(p._1, p._2)
      }

      // Create and test file appender
      val testOutputStream = new PipedOutputStream()
      val testInputStream = new PipedInputStream(testOutputStream)
      val appender = FileAppender(testInputStream, testFile, conf)
      // assert(appender.getClass === classTag[ExpectedAppender].getClass)
      assert(appender.getClass.getSimpleName ===
        classTag[ExpectedAppender].runtimeClass.getSimpleName)
      // For rolling appenders, also verify the policy's interval/size parameter.
      if (appender.isInstanceOf[RollingFileAppender]) {
        val rollingPolicy = appender.asInstanceOf[RollingFileAppender].rollingPolicy
        val policyParam = if (rollingPolicy.isInstanceOf[TimeBasedRollingPolicy]) {
          rollingPolicy.asInstanceOf[TimeBasedRollingPolicy].rolloverIntervalMillis
        } else {
          rollingPolicy.asInstanceOf[SizeBasedRollingPolicy].rolloverSizeBytes
        }
        assert(policyParam === expectedRollingPolicyParam)
      }
      testOutputStream.close()
      appender.awaitTermination()
    }

    // Helpers building the SparkConf key/value pairs for each rolling setting.
    def rollingStrategy(strategy: String): Seq[(String, String)] =
      Seq(config.EXECUTOR_LOGS_ROLLING_STRATEGY.key -> strategy)
    def rollingSize(size: String): Seq[(String, String)] =
      Seq(config.EXECUTOR_LOGS_ROLLING_MAX_SIZE.key -> size)
    def rollingInterval(interval: String): Seq[(String, String)] =
      Seq(config.EXECUTOR_LOGS_ROLLING_TIME_INTERVAL.key -> interval)

    val msInDay = 24 * 60 * 60 * 1000L
    val msInHour = 60 * 60 * 1000L
    val msInMinute = 60 * 1000L

    // test no strategy -> no rolling
    testAppenderSelection[FileAppender, Any](Seq.empty)

    // test time based rolling strategy
    testAppenderSelection[RollingFileAppender, Any](rollingStrategy("time"), msInDay)
    testAppenderSelection[RollingFileAppender, TimeBasedRollingPolicy](
      rollingStrategy("time") ++ rollingInterval("daily"), msInDay)
    testAppenderSelection[RollingFileAppender, TimeBasedRollingPolicy](
      rollingStrategy("time") ++ rollingInterval("hourly"), msInHour)
    testAppenderSelection[RollingFileAppender, TimeBasedRollingPolicy](
      rollingStrategy("time") ++ rollingInterval("minutely"), msInMinute)
    testAppenderSelection[RollingFileAppender, TimeBasedRollingPolicy](
      rollingStrategy("time") ++ rollingInterval("123456789"), 123456789 * 1000L)
    // invalid interval falls back to a plain FileAppender
    testAppenderSelection[FileAppender, Any](
      rollingStrategy("time") ++ rollingInterval("xyz"))

    // test size based rolling strategy
    testAppenderSelection[RollingFileAppender, SizeBasedRollingPolicy](
      rollingStrategy("size") ++ rollingSize("123456789"), 123456789)
    testAppenderSelection[FileAppender, Any](rollingSize("xyz"))

    // test illegal strategy
    testAppenderSelection[FileAppender, Any](rollingStrategy("xyz"))
  }

  test("file appender async close stream abruptly") {
    // Test FileAppender reaction to closing InputStream using a mock logging appender
    val mockAppender = mock(classOf[Appender])
    val loggingEventCaptor = ArgumentCaptor.forClass(classOf[LoggingEvent])

    // Make sure only logging errors
    val logger = Logger.getRootLogger
    val oldLogLevel = logger.getLevel
    logger.setLevel(Level.ERROR)
    try {
      logger.addAppender(mockAppender)

      val testOutputStream = new PipedOutputStream()
      val testInputStream = new PipedInputStream(testOutputStream)

      // Close the stream before appender tries to read will cause an IOException
      testInputStream.close()
      testOutputStream.close()
      val appender = FileAppender(testInputStream, testFile, new SparkConf)

      appender.awaitTermination()

      // If InputStream was closed without first stopping the appender, an exception will be logged
      verify(mockAppender, atLeast(1)).doAppend(loggingEventCaptor.capture)
      val loggingEvent = loggingEventCaptor.getValue
      assert(loggingEvent.getThrowableInformation !== null)
      assert(loggingEvent.getThrowableInformation.getThrowable.isInstanceOf[IOException])
    } finally {
      // Restore the original log level even if assertions fail.
      logger.setLevel(oldLogLevel)
    }
  }

  test("file appender async close stream gracefully") {
    // Test FileAppender reaction to closing InputStream using a mock logging appender
    val mockAppender = mock(classOf[Appender])
    val loggingEventCaptor = ArgumentCaptor.forClass(classOf[LoggingEvent])

    // Make sure only logging errors
    val logger = Logger.getRootLogger
    val oldLogLevel = logger.getLevel
    logger.setLevel(Level.ERROR)
    try {
      logger.addAppender(mockAppender)

      val testOutputStream = new PipedOutputStream()
      val testInputStream = new PipedInputStream(testOutputStream) with LatchedInputStream

      // Close the stream before appender tries to read will cause an IOException
      testInputStream.close()
      testOutputStream.close()
      val appender = FileAppender(testInputStream, testFile, new SparkConf)

      // Stop the appender before an IOException is called during read
      testInputStream.latchReadStarted.await()
      appender.stop()
      testInputStream.latchReadProceed.countDown()

      appender.awaitTermination()

      // Make sure no IOException errors have been logged as a result of appender closing gracefully
      verify(mockAppender, atLeast(0)).doAppend(loggingEventCaptor.capture)
      import scala.collection.JavaConverters._
      loggingEventCaptor.getAllValues.asScala.foreach { loggingEvent =>
        assert(loggingEvent.getThrowableInformation === null
          || !loggingEvent.getThrowableInformation.getThrowable.isInstanceOf[IOException])
      }
    } finally {
      logger.setLevel(oldLogLevel)
    }
  }

  /**
   * Run the rolling file appender with data and see whether all the data was written correctly
   * across rolled over files.
   */
  def testRolling(
      appender: FileAppender,
      outputStream: OutputStream,
      textToAppend: Seq[String],
      sleepTimeBetweenTexts: Long,
      isCompressed: Boolean = false
    ): Seq[File] = {
    // send data to appender through the input stream, and wait for the data to be written
    val expectedText = textToAppend.mkString("")
    for (i <- 0 until textToAppend.size) {
      outputStream.write(textToAppend(i).getBytes(StandardCharsets.UTF_8))
      outputStream.flush()
      Thread.sleep(sleepTimeBetweenTexts)
    }
    logInfo("Data sent to appender")
    outputStream.close()
    appender.awaitTermination()
    logInfo("Appender closed")

    // verify whether all the data written to rolled over files is same as expected
    val generatedFiles = RollingFileAppender.getSortedRolledOverFiles(
      testFile.getParentFile.toString, testFile.getName)
    logInfo("Generate files: \\n" + generatedFiles.mkString("\\n"))
    assert(generatedFiles.size > 1)
    if (isCompressed) {
      // At least one rolled-over file must carry the gzip suffix.
      assert(
        generatedFiles.filter(_.getName.endsWith(RollingFileAppender.GZIP_LOG_SUFFIX)).size > 0)
    }
    // Concatenate all rolled-over content (decompressing gzip files) in order.
    val allText = generatedFiles.map { file =>
      if (file.getName.endsWith(RollingFileAppender.GZIP_LOG_SUFFIX)) {
        val inputStream = new GZIPInputStream(new FileInputStream(file))
        try {
          IOUtils.toString(inputStream, StandardCharsets.UTF_8)
        } finally {
          IOUtils.closeQuietly(inputStream)
        }
      } else {
        Files.toString(file, StandardCharsets.UTF_8)
      }
    }.mkString("")
    assert(allText === expectedText)
    generatedFiles
  }

  /** Delete all the generated rolled over files */
  def cleanup(): Unit = {
    testFile.getParentFile.listFiles.filter { file =>
      file.getName.startsWith(testFile.getName)
    }.foreach { _.delete() }
  }

  /** Used to synchronize when read is called on a stream */
  private trait LatchedInputStream extends PipedInputStream {
    val latchReadStarted = new CountDownLatch(1)
    val latchReadProceed = new CountDownLatch(1)
    // Signals that a read has begun, then blocks until the test allows it to proceed.
    abstract override def read(): Int = {
      latchReadStarted.countDown()
      latchReadProceed.await()
      super.read()
    }
  }
}
| goldmedal/spark | core/src/test/scala/org/apache/spark/util/FileAppenderSuite.scala | Scala | apache-2.0 | 15,370 |
package ch28_heap
import scala.util.control.Breaks._
/** A 1-based array-backed binary max-heap of Ints.
  *
  * Slot 0 of the backing array is unused; valid elements live in
  * indices 1..elementCount, so the heap can hold at most capacity - 1 values.
  */
class Heap(val capacity: Int, var elementCount: Int = 0) {

  /** Builds a heap from an existing array.
    *
    * @param arrayParam source values (0-based)
    * @param bottomUp   if true, build by repeated insert (sift-up);
    *                   otherwise use Floyd's O(n) top-down heapify.
    */
  def this(arrayParam: Array[Int], bottomUp: Boolean) = {
    this(arrayParam.length + 1)
    if (bottomUp) {
      arrayParam.foreach(this.insert)
    } else {
      // copy data into the 1-based backing array of the heap
      for (i <- arrayParam.indices) {
        array(i + 1) = arrayParam(i)
      }
      elementCount = arrayParam.length
      // Floyd's heap construction: sift down every non-leaf node, from the last
      // parent (elementCount / 2) up to the root.
      // Bug fix: the old code passed stopIndex = elementCount - 1, which left
      // the last element out of the heapify range (e.g. Array(1,2,3,4) did not
      // produce a valid max-heap). The last valid index is elementCount.
      for (i <- elementCount / 2 to 1 by -1) {
        heapifyTopDown(i, elementCount)
      }
    }
  }

  require(capacity > 0, "capacity should be > 0")
  val array: Array[Int] = new Array[Int](capacity)

  /** Inserts a value, sifting it up until the max-heap property holds.
    * @throws IllegalStateException if the heap is full. */
  def insert(data: Int): Unit = {
    if (elementCount == capacity - 1) {
      throw new IllegalStateException("heap full")
    }
    elementCount += 1
    array(elementCount) = data
    // heapify bottom up:
    // compare the element with its parent node i/2 until parent >= child,
    // which keeps the maximum value at the root.
    var i = elementCount
    while (i / 2 > 0 && array(i) > array(i / 2)) {
      val temp = array(i)
      array(i) = array(i / 2)
      array(i / 2) = temp
      i = i / 2
    }
  }

  /** Removes and returns the maximum (root) value.
    * The last leaf is moved to the root and sifted back down. */
  def removeMax(): Int = {
    require(elementCount > 0, "heap is empty")
    val result = array(1)
    array(1) = array(elementCount)
    elementCount -= 1
    heapifyTopDown(1, elementCount)
    result
  }

  /** Sifts the element at startIndex down until neither child is larger.
    * stopIndex is the last valid heap index (inclusive). */
  private[this] def heapifyTopDown(startIndex: Int, stopIndex: Int) = {
    var pointer = startIndex
    breakable {
      while (true) {
        var maxPos = pointer
        val left = pointer * 2
        val right = pointer * 2 + 1
        if (left <= stopIndex && array(left) > array(maxPos)) {
          maxPos = left
        }
        // Bug fix: guard the right child with its own index (right <= stopIndex).
        // The old guard (left <= stopIndex) could read array(stopIndex + 1),
        // one slot past the logical end of the heap.
        if (right <= stopIndex && array(right) > array(maxPos)) {
          maxPos = right
        }
        if (maxPos == pointer) {
          break
        }
        // swap the parent and the larger child
        val temp = array(pointer)
        array(pointer) = array(maxPos)
        array(maxPos) = temp
        // continue sifting from the child's position
        pointer = maxPos
      }
    }
  }
}
object Heap {

  /** Sorts the given array in ascending order by building a max-heap
    * (bottom-up) and draining it with removeMax, filling from the back. */
  def heapSort(array: Array[Int]): Array[Int] = {
    val heap = new Heap(array, true)
    val sorted = new Array[Int](array.length)
    // removeMax yields values in descending order, so write back-to-front.
    var i = sorted.length - 1
    while (i >= 0) {
      sorted(i) = heap.removeMax()
      i -= 1
    }
    sorted
  }
}
package msgpack4z
import scalaprops._
import scala.util.control.NonFatal
import scalaz._
/** Base class for msgpack4z round-trip property tests. Concrete subclasses
  * supply the packer/unpacker implementations under test. */
abstract class SpecBase extends Scalaprops {

  /** Asserts that evaluating `f` throws.
    * A sentinel exception is thrown after a normal completion of `f` and is
    * recognized by reference identity (`e ne ex`), so only the "no exception
    * was thrown" case escapes this method. */
  protected[this] final def expectException[A](f: => A): Unit = {
    val ex = new Exception("expect Exception")
    try {
      val _ = f
      throw ex
    } catch {
      // Swallow any non-fatal exception thrown by `f`; let the sentinel propagate.
      case NonFatal(e) if e ne ex =>
    }
  }

  // Implementations under test, provided by subclasses.
  protected[this] def packer(): MsgPacker
  protected[this] def unpacker(bytes: Array[Byte]): MsgUnpacker

  // Restrict generated Symbols and Strings to alphanumeric characters
  // so failing cases stay readable.
  implicit final val symbolGen: Gen[Symbol] =
    Gen.alphaNumString.map(Symbol(_))

  implicit final val stringGen: Gen[String] =
    Gen.alphaNumString

  /** Property: packing `a` to bytes and unpacking it back round-trips.
    * `roundtripz` returning None signals success; when `checkHashCode` is set,
    * the decoded value's hash code must also match the original's. */
  final def checkRoundTripBytes[A](checkHashCode: Boolean)(implicit A: MsgpackCodec[A], G: Gen[A], E: Equal[A]): Property =
    Property.forAll { (a: A) =>
      try {
        A.roundtripz(a, packer(), unpacker _) match {
          case None =>
            if (checkHashCode) {
              // Decode once more and compare hash codes as well.
              A.unpackAndClose(unpacker(A.toBytes(a, packer()))) match {
                case \\/-(value) =>
                  a.## == value.##
                case -\\/(e) =>
                  throw e
              }
            } else {
              true
            }
          case Some(\\/-(b)) =>
            // Round trip produced a different value.
            println("fail roundtrip bytes " + a + " " + b)
            false
          case Some(-\\/(e)) =>
            // Decoding failed outright.
            println(e)
            false
        }
      } catch {
        case NonFatal(e) =>
          // Print the failing input and a readable stack trace before rethrowing.
          println(a)
          println(e.getStackTrace.map("\\tat " + _).mkString("\\n" + e.toString + "\\n", "\\n", "\\n"))
          throw e
      }
    }

  // Convenience variants of the round-trip law.
  final def checkLawz[A](implicit A: MsgpackCodec[A], G: Gen[A], E: Equal[A]) =
    checkRoundTripBytes(true)(A, G, E)

  final def checkLaw[A](implicit A: MsgpackCodec[A], G: Gen[A]) =
    checkRoundTripBytes(true)(A, G, Equal.equalA[A])

  final def checkLawWithoutHashCode[A](implicit A: MsgpackCodec[A], G: Gen[A], E: Equal[A]) =
    checkRoundTripBytes(false)(A, G, E)
}
| msgpack4z/msgpack4z-core | src/test/scala/msgpack4z/SpecBase.scala | Scala | mit | 1,925 |
package sp.domain
/** An immutable assignment of values to state variables, keyed by ID. */
case class State(state: Map[ID, SPValue]) {
  /** Returns a new State with the given (id -> value) pair added or overwritten. */
  def add(tuple: (ID, SPValue)) = State(state + tuple)
  /** Returns a new State with every pair of `m` added or overwritten. */
  def add(m: Map[ID, SPValue]) = State(state ++ m)
}
// TODO: Move to internal relation identification
/** Accumulates, per state variable ID, the set of values observed for it. */
case class States(states: Map[ID, Set[SPValue]]) {
  /** All values seen for `id`; throws NoSuchElementException if unknown. */
  def apply(id: ID): Set[SPValue] = states(id)
  /** All values seen for `id`, or None if unknown. */
  def get(id: ID): Option[Set[SPValue]] = states.get(id)
  /** Adds a single observed value for `id`. */
  def add(id: ID, value: SPValue): States = add(id, Set(value))
  /** Adds a set of observed values for `id`, merging with any existing set. */
  def add(id: ID, value: Set[SPValue]): States = {
    val xs = get(id).getOrElse(Set()) ++ value
    States(states + (id -> xs))
  }
  /** Merges every (id -> value) of the given State into the per-id value sets. */
  def add(s: State): States = {
    // Fix: `s` is already statically typed as State; the previous
    // `s.asInstanceOf[State]` cast was redundant and has been removed.
    val newMap = s.state.map { case (id, value) =>
      id -> (get(id).getOrElse(Set()) + value)
    }
    States(states ++ newMap)
  }
}
| kristoferB/SP | sp1/src/main/scala/sp/domain/State.scala | Scala | mit | 766 |
package com.tibidat
import scala.slick.jdbc.JdbcBackend.Database
import scala.slick.driver.H2Driver
/** Thin data-access model over a Slick [[DAL]] bound to a specific database.
  *
  * NOTE(review): a single implicit Session is created at construction time and
  * never closed here; lifecycle management is left to the caller. */
class Model(name: String, dal: DAL, db: Database) {

  import dal._
  import dal.profile.simple._

  // One shared session used implicitly by every operation of this instance.
  implicit val implicitSession = db.createSession

  // Schema management, delegating to the DAL.
  def createDB = dal.create
  def dropDB = dal.drop
  def purgeDB = dal.purge

  /** Inserts a new note with a freshly generated random UUID and returns it. */
  def addNote(content: String): Note = {
    val note = Note(java.util.UUID.randomUUID(), content)
    // insert result (row count) is intentionally ignored
    val result = notes.insert(note)
    note
  }

  /** Looks up a note by id.
    *
    * Side effect: when the stored note has `purged == None`, this read marks the
    * row purged (`Some(true)`) — presumably a read-once/ephemeral semantic;
    * TODO(review) confirm this update-on-read is intended. */
  def getNote(id: java.util.UUID): Option[Note] = {
    val m = notes.filter(_.id === id)
    val n = m.firstOption
    n match {
      case nn @ Some(n @ Note(_,_,None)) => {
        // Mark as purged on first retrieval; the caller still gets the
        // pre-update row (with purged = None).
        m.map(_.purged).update(Some(true))
        nn
      }
      case n @ Some(_) => {
        n
      }
      case None => None
    }
  }
}
/** Wires a Model to an in-memory H2 database and creates the schema on mix-in. */
trait DB {
  // NOTE(review): the schema is created eagerly during trait initialization, and
  // every mix-in shares the same in-memory JDBC URL ("servicetestdb"), so
  // components mixing this in may observe each other's data — confirm intended.
  val m = new Model("H2", new DAL(H2Driver),
    Database.forURL("jdbc:h2:mem:servicetestdb", driver = "org.h2.Driver"))
  m.createDB
}
| m2w/ephemeral | src/main/scala/com/tibidat/DB.scala | Scala | mit | 956 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.calcite
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rel.core._
import org.apache.calcite.rel.logical._
import org.apache.calcite.rel.{RelNode, RelShuttle}
import org.apache.calcite.rex._
import org.apache.calcite.sql.`type`.SqlTypeName
import org.apache.calcite.sql.fun.SqlStdOperatorTable
import org.apache.calcite.sql.fun.SqlStdOperatorTable.FINAL
import org.apache.flink.api.common.typeinfo.SqlTimeTypeInfo
import org.apache.flink.table.api.{TableException, ValidationException}
import org.apache.flink.table.calcite.FlinkTypeFactory.{isRowtimeIndicatorType, _}
import org.apache.flink.table.catalog.BasicOperatorTable
import org.apache.flink.table.functions.sql.ProctimeSqlFunction
import org.apache.flink.table.plan.logical.rel._
import org.apache.flink.table.plan.nodes.LogicalSink
import org.apache.flink.table.plan.schema.TimeIndicatorRelDataType
import org.apache.calcite.rel.hint.RelHint
import java.util.{Collections => JCollections}
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.collection.mutable
/**
* Traverses a [[RelNode]] tree and converts fields with [[TimeIndicatorRelDataType]] type. If a
* time attribute is accessed for a calculation, it will be materialized. Forwarding is allowed in
* some cases, but not all.
*/
class RelTimeIndicatorConverter(rexBuilder: RexBuilder) extends RelShuttle {
val materializerUtils = new RexTimeIndicatorMaterializerUtils(rexBuilder)
  /** Intersect is not supported on streams; reject it outright. */
  override def visit(intersect: LogicalIntersect): RelNode =
    throw new TableException("Logical intersect in a stream environment is not supported yet.")
  /** Converts all union inputs and validates that time indicator columns line
    * up: a column must be a time indicator on every input or on none, and the
    * rowtime/proctime kind must agree across inputs. */
  override def visit(union: LogicalUnion): RelNode = {
    // visit children and update inputs
    val inputs = union.getInputs.map(_.accept(this))

    // make sure that time indicator types match
    val inputTypes = inputs.map(_.getRowType)

    // column types of the first input serve as the reference
    val head = inputTypes.head.getFieldList.map(_.getType)

    val isValid = inputTypes.forall { t =>
      val fieldTypes = t.getFieldList.map(_.getType)

      fieldTypes.zip(head).forall { case (l, r) =>
        // check if time indicators match
        if (isTimeIndicatorType(l) && isTimeIndicatorType(r)) {
          val leftTime = l.asInstanceOf[TimeIndicatorRelDataType].isEventTime
          val rightTime = r.asInstanceOf[TimeIndicatorRelDataType].isEventTime
          leftTime == rightTime
        }
        // one side is not an indicator
        else if (isTimeIndicatorType(l) || isTimeIndicatorType(r)) {
          false
        }
        // uninteresting types
        else {
          true
        }
      }
    }

    if (!isValid) {
      throw new ValidationException(
        "Union fields with time attributes have different types.")
    }

    LogicalUnion.create(inputs, union.all)
  }
  /** Delegates to convertAggregate, which materializes time attributes used as grouping keys or aggregate arguments. */
  override def visit(aggregate: LogicalAggregate): RelNode = convertAggregate(aggregate)
  /** Minus (EXCEPT) is not supported on streams; reject it outright. */
  override def visit(minus: LogicalMinus): RelNode =
    throw new TableException("Logical minus in a stream environment is not supported yet.")
override def visit(sort: LogicalSort): RelNode = {
val input = sort.getInput.accept(this)
LogicalSort.create(input, sort.collation, sort.offset, sort.fetch)
}
  /** Rewrites MATCH_RECOGNIZE: pattern definitions, measures and the interval
    * are run through the materializer, and output columns whose measure no
    * longer yields a time indicator are materialized in the output row type. */
  override def visit(matchRel: LogicalMatch): RelNode = {
    // visit children and update inputs
    val input = matchRel.getInput.accept(this)
    val materializer = createMaterializer(input)

    // update input expressions
    val patternDefs = matchRel.getPatternDefinitions.mapValues(_.accept(materializer))
    val measures = matchRel.getMeasures
      .mapValues(_.accept(materializer))

    val interval = if (matchRel.getInterval != null) {
      matchRel.getInterval.accept(materializer)
    } else {
      null
    }

    // a measured field stops being a time indicator if its rewritten
    // expression no longer has a time indicator type
    val isNoLongerTimeIndicator : String => Boolean = fieldName =>
      measures.get(fieldName).exists(r => !FlinkTypeFactory.isTimeIndicatorType(r.getType))

    // materialize all output types
    val outputType = materializerUtils.getRowTypeWithoutIndicators(
      matchRel.getRowType,
      isNoLongerTimeIndicator)

    LogicalMatch.create(
      input,
      outputType,
      matchRel.getPattern,
      matchRel.isStrictStart,
      matchRel.isStrictEnd,
      patternDefs,
      measures,
      matchRel.getAfter,
      matchRel.getSubsets.asInstanceOf[java.util.Map[String, java.util.TreeSet[String]]],
      matchRel.isAllRows,
      matchRel.getPartitionKeys,
      matchRel.getOrderKeys,
      interval)
  }
  /** Catch-all for node types without a dedicated overload: window (table)
    * aggregates, table aggregates, temporal table joins, uncollect, table
    * function scans and sinks. Unknown operators are rejected. */
  override def visit(other: RelNode): RelNode = other match {
    case uncollect: Uncollect =>
      // visit children and update inputs
      val input = uncollect.getInput.accept(this)
      Uncollect.create(uncollect.getTraitSet, input,
        uncollect.withOrdinality, JCollections.emptyList())

    case scan: LogicalTableFunctionScan =>
      scan

    case aggregate: LogicalWindowAggregate =>
      val convAggregate = convertAggregate(aggregate)

      LogicalWindowAggregate.create(
        aggregate.getWindow,
        aggregate.getNamedProperties,
        convAggregate)

    case windowTableAggregate: LogicalWindowTableAggregate =>
      // convert the underlying aggregate, then re-wrap as a window table aggregate
      val convAggregate = convertAggregate(windowTableAggregate.getCorrespondingAggregate)
      LogicalWindowTableAggregate.create(
        windowTableAggregate.getWindow,
        windowTableAggregate.getNamedProperties,
        convAggregate)

    case tableAggregate: LogicalTableAggregate =>
      val convAggregate = convertAggregate(tableAggregate.getCorrespondingAggregate)
      LogicalTableAggregate.create(convAggregate)

    case temporalTableJoin: LogicalTemporalTableJoin =>
      visit(temporalTableJoin)

    case sink: LogicalSink =>
      var newInput = sink.getInput.accept(this)
      var needsConversion = false

      // replace proctime indicator columns with a materializing PROCTIME() call
      val projects = newInput.getRowType.getFieldList.map { field =>
        if (isProctimeIndicatorType(field.getType)) {
          needsConversion = true
          rexBuilder.makeCall(ProctimeSqlFunction, new RexInputRef(field.getIndex, field.getType))
        } else {
          new RexInputRef(field.getIndex, field.getType)
        }
      }

      // add final conversion if necessary
      if (needsConversion) {
        newInput = LogicalProject.create(
          newInput, JCollections.emptyList[RelHint](),
          projects, newInput.getRowType.getFieldNames)
      }

      new LogicalSink(
        sink.getCluster,
        sink.getTraitSet,
        newInput,
        sink.sink,
        sink.sinkName)

    case _ =>
      throw new TableException(s"Unsupported logical operator: ${other.getClass.getSimpleName}")
  }
  /** Exchange is not supported on streams; reject it outright. */
  override def visit(exchange: LogicalExchange): RelNode =
    throw new TableException("Logical exchange in a stream environment is not supported yet.")
  // Table scans introduce time indicators but never consume them; pass through unchanged.
  override def visit(scan: TableScan): RelNode = scan
  /** Bare table function scans are not supported on streams. */
  override def visit(scan: TableFunctionScan): RelNode =
    throw new TableException("Table function scan in a stream environment is not supported yet.")
  // Literal values contain no time indicators to convert; pass through unchanged.
  override def visit(values: LogicalValues): RelNode = values
override def visit(filter: LogicalFilter): RelNode = {
// visit children and update inputs
val input = filter.getInput.accept(this)
val materializer = createMaterializer(input)
val condition = filter.getCondition.accept(materializer)
LogicalFilter.create(input, condition)
}
override def visit(project: LogicalProject): RelNode = {
// visit children and update inputs
val input = project.getInput.accept(this)
val materializer = createMaterializer(input)
val projects = project.getProjects.map(_.accept(materializer))
val fieldNames = project.getRowType.getFieldNames
LogicalProject.create(input, project.getHints, projects, fieldNames)
}
override def visit(join: LogicalJoin): RelNode = {
val left = join.getLeft.accept(this)
val right = join.getRight.accept(this)
val materializer = createMaterializer(left, right)
LogicalJoin.create(
left,
right,
join.getHints,
join.getCondition.accept(materializer),
join.getVariablesSet,
join.getJoinType)
}
  /** Rewrites a temporal table join: converts both sides, then materializes all
    * time attributes that must not remain indicators after the join. */
  def visit(temporalJoin: LogicalTemporalTableJoin): RelNode = {
    val left = temporalJoin.getLeft.accept(this)
    val right = temporalJoin.getRight.accept(this)

    val rewrittenTemporalJoin = temporalJoin.copy(temporalJoin.getTraitSet, List(left, right))

    // compute which output fields need to be materialized
    val indicesToMaterialize = gatherIndicesToMaterialize(rewrittenTemporalJoin, left, right)

    materializerUtils.projectAndMaterializeFields(rewrittenTemporalJoin, indicesToMaterialize)
  }
  /** Converts a correlate. When the right side is a table function scan, its
    * call expression is rewritten against the (converted) left input so that
    * accessed time attributes get materialized. */
  override def visit(correlate: LogicalCorrelate): RelNode = {
    // visit children and update inputs
    val inputs = correlate.getInputs.map(_.accept(this))

    val right = inputs(1) match {
      case scan: LogicalTableFunctionScan =>
        // visit children and update inputs
        val scanInputs = scan.getInputs.map(_.accept(this))

        // the function call references fields of the left input
        val materializer = createMaterializer(inputs.head)

        LogicalTableFunctionScan.create(
          scan.getCluster,
          scanInputs,
          scan.getCall.accept(materializer),
          scan.getElementType,
          scan.getRowType,
          scan.getColumnMappings)

      case _ =>
        inputs(1)
    }

    LogicalCorrelate.create(
      inputs.head,
      right,
      correlate.getCorrelationId,
      correlate.getRequiredColumns,
      correlate.getJoinType)
  }
private def gatherIndicesToMaterialize(
temporalJoin: Join,
left: RelNode,
right: RelNode)
: Set[Int] = {
// Materialize all of the time attributes from the right side of temporal join
var indicesToMaterialize =
(left.getRowType.getFieldCount until temporalJoin.getRowType.getFieldCount).toSet
if (!hasRowtimeAttribute(right.getRowType)) {
// No rowtime on the right side means that this must be a processing time temporal join
// and that we can not provide watermarks even if there is a rowtime time attribute
// on the left side (besides processing time attribute used for temporal join).
for (fieldIndex <- 0 until left.getRowType.getFieldCount) {
val fieldName = left.getRowType.getFieldNames.get(fieldIndex)
val fieldType = left.getRowType.getFieldList.get(fieldIndex).getType
if (isRowtimeIndicatorType(fieldType)) {
indicesToMaterialize += fieldIndex
}
}
}
indicesToMaterialize
}
  /** Returns true if any field of the given row type is a rowtime indicator. */
  private def hasRowtimeAttribute(rowType: RelDataType): Boolean = {
    rowType.getFieldList.exists(field => isRowtimeIndicatorType(field.getType))
  }
  /**
    * Rewrites an aggregation: materializes any time attributes used as aggregate
    * arguments or grouping keys (by merging into or inserting a project below the
    * aggregation) and replaces time indicator return types of aggregate calls
    * with plain timestamps.
    */
  private def convertAggregate(aggregate: Aggregate): LogicalAggregate = {
    // visit children and update inputs
    val input = aggregate.getInput.accept(this)
    // add a project to materialize aggregation arguments/grouping keys
    val indicesToMaterialize = gatherIndicesToMaterialize(aggregate, input)
    // only project if at least one touched field is still a time indicator
    val needsMaterialization = indicesToMaterialize.exists(idx =>
      isTimeIndicatorType(input.getRowType.getFieldList.get(idx).getType))
    // create project if necessary
    val projectedInput = if (needsMaterialization) {
      // insert or merge with input project if
      // a time attribute is accessed and needs to be materialized
      input match {
        // merge
        case lp: LogicalProject =>
          val projects = lp.getProjects.zipWithIndex.map { case (expr, idx) =>
            materializerUtils.materializeIfContains(expr, idx, indicesToMaterialize)
          }
          LogicalProject.create(
            lp.getInput,
            lp.getHints,
            projects,
            input.getRowType.getFieldNames)
        // new project
        case _ =>
          materializerUtils.projectAndMaterializeFields(input, indicesToMaterialize)
      }
    } else {
      // no project necessary
      input
    }
    // remove time indicator type as agg call return type
    val updatedAggCalls = aggregate.getAggCallList.map { call =>
      val callType = if (isTimeIndicatorType(call.getType)) {
        materializerUtils.getTimestamp
      } else {
        call.getType
      }
      AggregateCall.create(
        call.getAggregation,
        call.isDistinct,
        call.getArgList,
        call.filterArg,
        callType,
        call.name)
    }
    LogicalAggregate.create(
      projectedInput,
      aggregate.indicator,
      aggregate.getGroupSet,
      aggregate.getGroupSets,
      updatedAggCalls)
  }
private def gatherIndicesToMaterialize(aggregate: Aggregate, input: RelNode): Set[Int] = {
val indicesToMaterialize = mutable.Set[Int]()
// check arguments of agg calls
aggregate.getAggCallList.foreach(call => if (call.getArgList.size() == 0) {
// count(*) has an empty argument list
(0 until input.getRowType.getFieldCount).foreach(indicesToMaterialize.add)
} else {
// for other aggregations
call.getArgList.map(_.asInstanceOf[Int]).foreach(indicesToMaterialize.add)
})
// check grouping sets
aggregate.getGroupSets.foreach(set =>
set.asList().map(_.asInstanceOf[Int]).foreach(indicesToMaterialize.add)
)
indicesToMaterialize.toSet
}
  /**
    * Builds a [[RexTimeIndicatorMaterializer]] whose resolved input types are the
    * concatenated field types of the given (already converted) inputs.
    */
  private def createMaterializer(inputs: RelNode*): RexTimeIndicatorMaterializer = {
    // check if input field contains time indicator type
    // materialize field if no time indicator is present anymore
    // if input field is already materialized, change to timestamp type
    new RexTimeIndicatorMaterializer(
      rexBuilder,
      inputs.flatMap(_.getRowType.getFieldList.map(_.getType)))
  }
  /** Converts the input of a table modification and copies the node on top of it. */
  override def visit(modify: LogicalTableModify): RelNode = {
    val input = modify.getInput.accept(this)
    modify.copy(modify.getTraitSet, JCollections.singletonList(input))
  }
  /** Calc nodes are deliberately returned unchanged (no conversion implemented yet). */
  override def visit(calc: LogicalCalc): RelNode = {
    calc // Do nothing for Calc now.
  }
}
object RelTimeIndicatorConverter {

  /**
    * Converts the given plan, materializing time indicators where necessary.
    * Any processing-time indicators still present in the root's row type are
    * wrapped in a final projection that evaluates the proctime function;
    * plans rooted at a LogicalSink are returned as converted, since the sink
    * itself is handled inside [[RelTimeIndicatorConverter]].
    */
  def convert(rootRel: RelNode, rexBuilder: RexBuilder): RelNode = {
    val converter = new RelTimeIndicatorConverter(rexBuilder)
    val convertedRoot = rootRel.accept(converter)
    // the LogicalSink is converted in RelTimeIndicatorConverter before
    if (rootRel.isInstanceOf[LogicalSink]) {
      return convertedRoot
    }
    var needsConversion = false
    // materialize remaining proctime indicators
    val projects = convertedRoot.getRowType.getFieldList.map(field =>
      if (isProctimeIndicatorType(field.getType)) {
        needsConversion = true
        rexBuilder.makeCall(
          ProctimeSqlFunction,
          new RexInputRef(field.getIndex, field.getType))
      } else {
        new RexInputRef(field.getIndex, field.getType)
      }
    )
    // add final conversion if necessary
    if (needsConversion) {
      LogicalProject.create(
        convertedRoot,
        JCollections.emptyList[RelHint](),
        projects,
        convertedRoot.getRowType.getFieldNames)
    } else {
      convertedRoot
    }
  }

  /**
    * Materializes time indicator accesses in an expression.
    *
    * @param expr The expression in which time indicators are materialized.
    * @param rowType The input schema of the expression.
    * @param rexBuilder A RexBuilder.
    *
    * @return The expression with materialized time indicators.
    */
  def convertExpression(expr: RexNode, rowType: RelDataType, rexBuilder: RexBuilder): RexNode = {
    // check if input field contains time indicator type
    // materialize field if no time indicator is present anymore
    // if input field is already materialized, change to timestamp type
    val materializer = new RexTimeIndicatorMaterializer(
      rexBuilder,
      rowType.getFieldList.map(_.getType))
    expr.accept(materializer)
  }

  /**
    * Checks if the given call is a materialization call for either proctime or rowtime.
    * Proctime is materialized by a single-operand call to the proctime function;
    * rowtime is materialized by a CAST of a rowtime indicator to TIMESTAMP.
    */
  def isMaterializationCall(call: RexCall): Boolean = {
    val isProctimeCall: Boolean = {
      call.getOperator == ProctimeSqlFunction &&
        call.getOperands.size() == 1 &&
        isProctimeIndicatorType(call.getOperands.get(0).getType)
    }
    val isRowtimeCall: Boolean = {
      call.getOperator == SqlStdOperatorTable.CAST &&
        call.getOperands.size() == 1 &&
        isRowtimeIndicatorType(call.getOperands.get(0).getType) &&
        call.getType.getSqlTypeName == SqlTypeName.TIMESTAMP
    }
    isProctimeCall || isRowtimeCall
  }
}
/**
  * Takes `newResolvedInput` types of the [[RexNode]] and if those types have changed rewrites
  * the [[RexNode]] to make it consistent with new type. Also materializes time
  * indicator operands and return types of calls where required.
  */
class RexTimeIndicatorMaterializer(
    private val rexBuilder: RexBuilder,
    private val newResolvedInput: Seq[RelDataType])
  extends RexShuttle {

  private val materializerUtils = new RexTimeIndicatorMaterializerUtils(rexBuilder)

  /**
    * Re-types input references whose time indicator has been materialized
    * upstream; all other references pass through unchanged.
    */
  override def visitInputRef(inputRef: RexInputRef): RexNode = {
    // reference is interesting
    if (isTimeIndicatorType(inputRef.getType)) {
      val resolvedRefType = newResolvedInput(inputRef.getIndex)
      // input is a valid time indicator
      if (isTimeIndicatorType(resolvedRefType)) {
        inputRef
      }
      // input has been materialized
      else {
        new RexInputRef(inputRef.getIndex, resolvedRefType)
      }
    }
    // reference is a regular field
    else {
      super.visitInputRef(inputRef)
    }
  }

  /** True if the call extracts a MATCH_RECOGNIZE proctime/rowtime attribute. */
  private def isMatchTimeIndicator(call: RexNode): Boolean = {
    call match {
      case operand: RexCall if
        operand.getOperator == BasicOperatorTable.MATCH_PROCTIME ||
          operand.getOperator == BasicOperatorTable.MATCH_ROWTIME =>
        true
      case _ =>
        false
    }
  }

  /**
    * Rewrites a call: materializes time indicator operands (except for windowing
    * operators) and strips time indicator return types, with carve-outs for AS,
    * FINAL-wrapped match time attributes, and window auxiliary functions.
    */
  override def visitCall(call: RexCall): RexNode = {
    val updatedCall = super.visitCall(call).asInstanceOf[RexCall]
    // materialize operands with time indicators
    val materializedOperands = updatedCall.getOperator match {
      // skip materialization for special operators
      case BasicOperatorTable.SESSION |
        BasicOperatorTable.HOP |
        BasicOperatorTable.TUMBLE =>
        updatedCall.getOperands.toList
      case _ =>
        updatedCall.getOperands.map { materializerUtils.materialize }
    }
    // remove time indicator return type
    updatedCall.getOperator match {
      // we do not modify AS if operand has not been materialized
      case SqlStdOperatorTable.AS if
        isTimeIndicatorType(updatedCall.getOperands.get(0).getType) =>
        updatedCall
      // All calls in MEASURES and DEFINE are wrapped with FINAL/RUNNING, therefore
      // we should treat FINAL(MATCH_ROWTIME) and FINAL(MATCH_PROCTIME) as a time attribute
      // extraction
      case FINAL if
        updatedCall.getOperands.size() == 1 && isMatchTimeIndicator(updatedCall.getOperands.get(0)) =>
        updatedCall
      // do not modify window time attributes
      case BasicOperatorTable.TUMBLE_ROWTIME |
        BasicOperatorTable.TUMBLE_PROCTIME |
        BasicOperatorTable.HOP_ROWTIME |
        BasicOperatorTable.HOP_PROCTIME |
        BasicOperatorTable.SESSION_ROWTIME |
        BasicOperatorTable.SESSION_PROCTIME |
        BasicOperatorTable.MATCH_ROWTIME |
        BasicOperatorTable.MATCH_PROCTIME
        // since we materialize groupings on time indicators,
        // we cannot check the operands anymore but the return type at least
        if isTimeIndicatorType(updatedCall.getType) =>
        updatedCall
      // materialize function's result and operands
      case _ if isTimeIndicatorType(updatedCall.getType) =>
        updatedCall.clone(materializerUtils.getTimestamp, materializedOperands)
      // materialize function's operands only
      case _ =>
        updatedCall.clone(updatedCall.getType, materializedOperands)
    }
  }
}
/**
  * Helper class for shared logic of materializing time attributes in [[RelNode]] and [[RexNode]].
  */
class RexTimeIndicatorMaterializerUtils(rexBuilder: RexBuilder) {

  // plain (non-indicator, non-nullable) TIMESTAMP type used as materialization target
  private val timestamp = rexBuilder
    .getTypeFactory
    .asInstanceOf[FlinkTypeFactory]
    .createTypeFromTypeInfo(SqlTimeTypeInfo.TIMESTAMP, isNullable = false)

  /** Returns the plain TIMESTAMP type that materialized attributes receive. */
  def getTimestamp: RelDataType = {
    timestamp
  }

  /** Puts a projection on top of `input` that materializes the given field indices. */
  def projectAndMaterializeFields(input: RelNode, indicesToMaterialize: Set[Int]) : RelNode = {
    val projects = input.getRowType.getFieldList.map { field =>
      materializeIfContains(
        new RexInputRef(field.getIndex, field.getType),
        field.getIndex,
        indicesToMaterialize)
    }
    LogicalProject.create(
      input,
      JCollections.emptyList[RelHint](),
      projects,
      input.getRowType.getFieldNames)
  }

  /**
    * Copies `relType`, replacing time indicator field types with the plain
    * TIMESTAMP type for every field selected by `shouldMaterialize`.
    */
  def getRowTypeWithoutIndicators(
      relType: RelDataType,
      shouldMaterialize: String => Boolean): RelDataType = {
    val outputTypeBuilder = rexBuilder
      .getTypeFactory
      .asInstanceOf[FlinkTypeFactory]
      .builder()
    relType.getFieldList.asScala.foreach(field =>
      if (FlinkTypeFactory.isTimeIndicatorType(field.getType) && shouldMaterialize(field.getName)) {
        outputTypeBuilder.add(field.getName, timestamp)
      } else {
        outputTypeBuilder.add(field.getName, field.getType)
      }
    )
    outputTypeBuilder.build()
  }

  /** Materializes `expr` only if `index` is contained in `indicesToMaterialize`. */
  def materializeIfContains(expr: RexNode, index: Int, indicesToMaterialize: Set[Int]): RexNode = {
    if (indicesToMaterialize.contains(index)) {
      materialize(expr)
    }
    else {
      expr
    }
  }

  /**
    * Converts a time indicator expression into a regular timestamp: a rowtime
    * indicator is cast, a proctime indicator is wrapped in a proctime call;
    * non-indicator expressions pass through unchanged.
    */
  def materialize(expr: RexNode): RexNode = {
    if (isTimeIndicatorType(expr.getType)) {
      if (isRowtimeIndicatorType(expr.getType)) {
        // cast rowtime indicator to regular timestamp
        rexBuilder.makeAbstractCast(timestamp, expr)
      } else {
        // generate proctime access
        rexBuilder.makeCall(ProctimeSqlFunction, expr)
      }
    } else {
      expr
    }
  }
}
| rmetzger/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/calcite/RelTimeIndicatorConverter.scala | Scala | apache-2.0 | 22,495 |
package com.rasterfoundry.api.toolrun
import com.rasterfoundry.akkautil._
import com.rasterfoundry.datamodel._
import com.rasterfoundry.database.filter.Filterables._
import com.rasterfoundry.database.{ToolRunDao, UserDao}
import com.rasterfoundry.api.project.ProjectAuthorizationDirectives
import com.rasterfoundry.akkautil.PaginationDirectives
import de.heikoseeberger.akkahttpcirce.ErrorAccumulatingCirceSupport._
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Route
import cats.implicits._
import cats.effect.IO
import doobie.util.transactor.Transactor
import doobie._
import doobie.implicits._
import java.util.UUID
trait ToolRunRoutes
extends Authentication
with PaginationDirectives
with ToolRunQueryParametersDirective
with ToolRunAuthorizationDirective
with ProjectAuthorizationDirectives
with CommonHandlers
with UserErrorHandler {
val xa: Transactor[IO]
  /**
    * Route tree for the analysis (tool-run) API:
    *  - collection: list (GET) / create (POST)
    *  - item: get / update / delete by UUID
    *  - item "permissions": replace (PUT), add (POST), list (GET), delete (DELETE)
    *  - item "actions": list the caller's allowed actions
    */
  val toolRunRoutes: Route = handleExceptions(userExceptionHandler) {
    pathEndOrSingleSlash {
      get { listToolRuns } ~
        post { createToolRun }
    } ~
      pathPrefix(JavaUUID) { runId =>
        pathEndOrSingleSlash {
          get { getToolRun(runId) } ~
            put { updateToolRun(runId) } ~
            delete { deleteToolRun(runId) }
        } ~
          pathPrefix("permissions") {
            // NOTE(review): only PUT is wrapped in pathEndOrSingleSlash here; the
            // POST/GET/DELETE alternatives below would also match trailing path
            // segments under /permissions — confirm this asymmetry is intended.
            pathEndOrSingleSlash {
              put {
                replaceToolRunPermissions(runId)
              }
            } ~
              post {
                addToolRunPermission(runId)
              } ~
              get {
                listToolRunPermissions(runId)
              } ~
              delete {
                deleteToolRunPermissions(runId)
              }
          } ~
          pathPrefix("actions") {
            pathEndOrSingleSlash {
              get {
                listUserAnalysisActions(runId)
              }
            }
          }
      }
  }
  // TODO: clean up listToolRuns below — the nested token / map-token
  // authorization was written quickly while getting the share page working.
  /**
    * Lists analyses with pagination and query-parameter filters.
    *
    * When a project id is supplied, the request may be authorized via a map
    * token OR a project-scoped bearer token (so unauthenticated share-page
    * access is possible); the user — if a valid token is present — scopes the
    * related-analyses listing. Without a project id, the standard authenticated
    * listing with ownership/group filters is used.
    */
  def listToolRuns: Route = (withPagination & toolRunQueryParameters) {
    (page, runParams) =>
      runParams.toolRunParams.projectId match {
        case Some(projectId) =>
          (extractTokenHeader & extractMapTokenParam) { (tokenO, mapTokenO) =>
            (
              toolRunAuthProjectFromMapTokenO(mapTokenO, projectId) |
                projectAuthFromTokenO(tokenO, projectId)
            ) {
              complete {
                // resolve the requesting user from the bearer token, if any
                val userOQuery
                  : Option[doobie.ConnectionIO[User]] = tokenO flatMap {
                  token: String =>
                    verifyJWT(token.split(" ").last).toOption
                } map {
                  case (_, jwtClaims) =>
                    val userId = jwtClaims.getStringClaim("sub")
                    UserDao.unsafeGetUserById(userId)
                }
                val analysesQuery = userOQuery match {
                  case Some(userQ) =>
                    userQ.flatMap(
                      user =>
                        ToolRunDao.listAnalysesWithRelated(
                          Some(user),
                          page,
                          projectId,
                          runParams.toolRunParams.projectLayerId,
                          runParams.ownershipTypeParams.ownershipType,
                          runParams.groupQueryParameters.groupType,
                          runParams.groupQueryParameters.groupId
                        )
                    )
                  case _ =>
                    // anonymous access: no ownership/group filtering applies
                    ToolRunDao.listAnalysesWithRelated(
                      None,
                      page,
                      projectId,
                      runParams.toolRunParams.projectLayerId,
                      None,
                      None,
                      None
                    )
                }
                analysesQuery.transact(xa).unsafeToFuture
              }
            }
          }
        case _ =>
          authenticate { user =>
            complete {
              ToolRunDao
                .authQuery(
                  user,
                  ObjectType.Analysis,
                  runParams.ownershipTypeParams.ownershipType,
                  runParams.groupQueryParameters.groupType,
                  runParams.groupQueryParameters.groupId
                )
                .filter(runParams)
                .page(page)
                .transact(xa)
                .unsafeToFuture
            }
          }
      }
  }
def createToolRun: Route = authenticate { user =>
entity(as[ToolRun.Create]) { newRun =>
onSuccess(
ToolRunDao.insertToolRun(newRun, user).transact(xa).unsafeToFuture
) { toolRun =>
{
complete {
(StatusCodes.Created, toolRun)
}
}
}
}
}
  /** Returns the analysis with the given id if the user may view it; 404 when absent. */
  def getToolRun(runId: UUID): Route = authenticate { user =>
    authorizeAsync {
      ToolRunDao
        .authorized(user, ObjectType.Analysis, runId, ActionType.View)
        .transact(xa)
        .unsafeToFuture
    } {
      rejectEmptyResponse {
        complete(
          ToolRunDao.query
            .filter(runId)
            .selectOption
            .transact(xa)
            .unsafeToFuture
        )
      }
    }
  }
  /** Replaces the analysis with the request body; requires edit rights on it. */
  def updateToolRun(runId: UUID): Route = authenticate { user =>
    authorizeAsync {
      ToolRunDao
        .authorized(user, ObjectType.Analysis, runId, ActionType.Edit)
        .transact(xa)
        .unsafeToFuture
    } {
      entity(as[ToolRun]) { updatedRun =>
        onSuccess(
          ToolRunDao
            .updateToolRun(updatedRun, runId)
            .transact(xa)
            .unsafeToFuture
        ) {
          // 204 when exactly one row was updated, 404 otherwise
          completeSingleOrNotFound
        }
      }
    }
  }
  /** Deletes the analysis; requires delete rights on it. */
  def deleteToolRun(runId: UUID): Route = authenticate { user =>
    authorizeAsync {
      ToolRunDao
        .authorized(user, ObjectType.Analysis, runId, ActionType.Delete)
        .transact(xa)
        .unsafeToFuture
    } {
      onSuccess(
        ToolRunDao.query.filter(runId).delete.transact(xa).unsafeToFuture
      ) {
        // 204 when exactly one row was deleted, 404 otherwise
        completeSingleOrNotFound
      }
    }
  }
  /** Lists the ACL rules on an analysis; requires edit rights on it. */
  def listToolRunPermissions(toolRunId: UUID): Route = authenticate { user =>
    authorizeAsync {
      ToolRunDao
        .authorized(user, ObjectType.Analysis, toolRunId, ActionType.Edit)
        .transact(xa)
        .unsafeToFuture
    } {
      complete {
        ToolRunDao
          .getPermissions(toolRunId)
          .transact(xa)
          .unsafeToFuture
      }
    }
  }
def replaceToolRunPermissions(toolRunId: UUID): Route = authenticate { user =>
entity(as[List[ObjectAccessControlRule]]) { acrList =>
authorizeAsync {
(
ToolRunDao.authorized(
user,
ObjectType.Analysis,
toolRunId,
ActionType.Edit
),
acrList traverse { acr =>
ToolRunDao.isValidPermission(acr, user)
} map { _.foldLeft(true)(_ && _) }
).tupled
.map({ authTup =>
authTup._1 && authTup._2
})
.transact(xa)
.unsafeToFuture
} {
complete {
ToolRunDao
.replacePermissions(toolRunId, acrList)
.transact(xa)
.unsafeToFuture
}
}
}
}
def addToolRunPermission(toolRunId: UUID): Route = authenticate { user =>
entity(as[ObjectAccessControlRule]) { acr =>
authorizeAsync {
(
ToolRunDao
.authorized(user, ObjectType.Analysis, toolRunId, ActionType.Edit),
ToolRunDao.isValidPermission(acr, user)
).tupled
.map({ authTup =>
authTup._1 && authTup._2
})
.transact(xa)
.unsafeToFuture
} {
complete {
ToolRunDao
.addPermission(toolRunId, acr)
.transact(xa)
.unsafeToFuture
}
}
}
}
def listUserAnalysisActions(analysisId: UUID): Route = authenticate { user =>
authorizeAsync {
ToolRunDao
.authorized(user, ObjectType.Analysis, analysisId, ActionType.View)
.transact(xa)
.unsafeToFuture
} {
user.isSuperuser match {
case true => complete(List("*"))
case false =>
onSuccess(
ToolRunDao.query
.filter(analysisId)
.select
.transact(xa)
.unsafeToFuture
) { analysis =>
analysis.owner == user.id match {
case true => complete(List("*"))
case false =>
complete {
ToolRunDao
.listUserActions(user, analysisId)
.transact(xa)
.unsafeToFuture
}
}
}
}
}
}
  /** Removes all ACL rules from an analysis; requires edit rights on it. */
  def deleteToolRunPermissions(toolRunId: UUID): Route = authenticate { user =>
    authorizeAsync {
      ToolRunDao
        .authorized(user, ObjectType.Analysis, toolRunId, ActionType.Edit)
        .transact(xa)
        .unsafeToFuture
    } {
      complete {
        ToolRunDao
          .deletePermissions(toolRunId)
          .transact(xa)
          .unsafeToFuture
      }
    }
  }
}
| azavea/raster-foundry | app-backend/api/src/main/scala/toolrun/Routes.scala | Scala | apache-2.0 | 9,308 |
/**
* Copyright (C) 2009-2011 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.coffeebar.auth
/**
  * Credential check abstraction for the coffeebar application.
  *
  * Implementations decide how the supplied credentials are validated
  * (e.g. against an in-memory store or an external system).
  */
trait Authenticator {
  /**
    * Validates the given credentials.
    *
    * @param username the login name to check
    * @param password the password supplied by the user
    * @return true if the credentials are valid, false otherwise
    */
  def authenticate(username: String, password: String):Boolean
} | fusesource/coffeebar | src/main/scala/org/fusesource/coffeebar/auth/Authenticator.scala | Scala | apache-2.0 | 1,019 |
package reactivemongo.api
import scala.util.Try
import reactivemongo.bson.buffer.{ ReadableBuffer, WritableBuffer }
/**
  * Abstraction over a serialization layer: a concrete pack fixes the value,
  * document, reader and writer types and provides buffer (de)serialization
  * primitives built on top of them.
  */
trait SerializationPack { self: Singleton =>
  // the most general serialized value type
  type Value
  type ElementProducer
  // a document is a (structured) value
  type Document <: Value
  type Writer[A]
  type Reader[A]
  type NarrowValueReader[A]
  private[reactivemongo]type WidenValueReader[A]

  // identity (de)serializers for raw documents
  def IdentityWriter: Writer[Document]
  def IdentityReader: Reader[Document]

  def serialize[A](a: A, writer: Writer[A]): Document
  def deserialize[A](document: Document, reader: Reader[A]): A

  def writeToBuffer(buffer: WritableBuffer, document: Document): WritableBuffer
  def readFromBuffer(buffer: ReadableBuffer): Document

  /** Serializes `document` with `writer` and appends it to `buffer`. */
  def serializeAndWrite[A](buffer: WritableBuffer, document: A, writer: Writer[A]): WritableBuffer = writeToBuffer(buffer, serialize(document, writer))

  /** Reads one document from `buffer` and deserializes it with `reader`. */
  def readAndDeserialize[A](buffer: ReadableBuffer, reader: Reader[A]): A =
    deserialize(readFromBuffer(buffer), reader)

  import reactivemongo.core.protocol.Response
  import reactivemongo.core.netty.ChannelBufferReadableBuffer

  /**
    * Reads the first document of the given wire response (length-prefixed)
    * and deserializes it with `reader`.
    */
  final def readAndDeserialize[A](response: Response, reader: Reader[A]): A = {
    val buf = response.documents
    // the document's length prefix (int at the reader index) sizes the slice to read
    val channelBuf = ChannelBufferReadableBuffer(buf.readBytes(buf.getInt(buf.readerIndex)))
    readAndDeserialize(channelBuf, reader)
  }

  def writer[A](f: A => Document): Writer[A]

  def isEmpty(document: Document): Boolean

  def widenReader[T](r: NarrowValueReader[T]): WidenValueReader[T]

  def readValue[A](value: Value, reader: WidenValueReader[A]): Try[A]
}
/** The default serialization pack, backed by the BSON types of this driver. */
object BSONSerializationPack extends SerializationPack {
  import reactivemongo.bson._, buffer.DefaultBufferHandler

  type Value = BSONValue
  type ElementProducer = Producer[BSONElement]
  type Document = BSONDocument
  type Writer[A] = BSONDocumentWriter[A]
  type Reader[A] = BSONDocumentReader[A]
  type NarrowValueReader[A] = BSONReader[_ <: BSONValue, A]
  private[reactivemongo]type WidenValueReader[A] = UnsafeBSONReader[A]

  /** Reader that returns the document unchanged. */
  object IdentityReader extends Reader[Document] {
    def read(document: Document): Document = document
  }

  /** Writer that returns the document unchanged. */
  object IdentityWriter extends Writer[Document] {
    def write(document: Document): Document = document
  }

  def serialize[A](a: A, writer: Writer[A]): Document = writer.write(a)

  def deserialize[A](document: Document, reader: Reader[A]): A =
    reader.read(document)

  def writeToBuffer(buffer: WritableBuffer, document: Document): WritableBuffer = DefaultBufferHandler.writeDocument(document, buffer)

  // note: .get throws if the buffer does not contain a readable document
  def readFromBuffer(buffer: ReadableBuffer): Document =
    DefaultBufferHandler.readDocument(buffer).get

  /** Lifts a plain function into a document writer. */
  def writer[A](f: A => Document): Writer[A] = new BSONDocumentWriter[A] {
    def write(input: A): Document = f(input)
  }

  def isEmpty(document: Document) = document.isEmpty

  def widenReader[T](r: NarrowValueReader[T]): WidenValueReader[T] =
    r.widenReader

  def readValue[A](value: Value, reader: WidenValueReader[A]): Try[A] =
    reader.readTry(value)
}
| charleskubicek/ReactiveMongo | driver/src/main/scala/api/serializationpack.scala | Scala | apache-2.0 | 3,012 |
package breeze.optimize.linear
import org.scalatest._;
import org.scalatest.junit._;
import org.scalatest.prop._;
import org.junit.runner.RunWith
@RunWith(classOf[JUnitRunner])
class CompetitiveLinkingTest extends FunSuite with Checkers {
  test("sanity check") {
    // 3x4 cost matrix; the optimal assignment is (0->0, 1->3, 2->1)
    // with total weight 2.0 + 1.0 + 2.0 = 5.0
    val costs = Array(
      Seq(2., 4., 7., 9.),
      Seq(3., 9., 5., 1.),
      Seq(8., 2., 9., 7.))
    val (matching, weight) = CompetitiveLinking.extractMatching(costs.map(_.toSeq))
    assert(weight === 5.0)
    assert(matching(0) === 0)
    assert(matching(1) === 3)
    assert(matching(2) === 1)
  }
}
| tjhunter/scalanlp-core | learn/src/test/scala/breeze/optimize/linear/CompetitiveLinkingTest.scala | Scala | apache-2.0 | 550 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.runtime.batch.sql
import org.apache.flink.api.common.typeinfo.BasicTypeInfo.{INT_TYPE_INFO, LONG_TYPE_INFO, STRING_TYPE_INFO}
import org.apache.flink.api.common.typeinfo.LocalTimeTypeInfo.{LOCAL_DATE, LOCAL_DATE_TIME, LOCAL_TIME}
import org.apache.flink.api.common.typeinfo.PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO
import org.apache.flink.api.common.typeinfo.SqlTimeTypeInfo.{DATE, TIME, TIMESTAMP}
import org.apache.flink.api.common.typeinfo.Types
import org.apache.flink.api.common.typeinfo.Types.INSTANT
import org.apache.flink.api.java.typeutils._
import org.apache.flink.api.scala._
import org.apache.flink.table.api.{DataTypes, TableSchema, ValidationException}
import org.apache.flink.table.api.config.ExecutionConfigOptions
import org.apache.flink.table.data.{DecimalDataUtils, TimestampData}
import org.apache.flink.table.data.util.DataFormatConverters.LocalDateConverter
import org.apache.flink.table.planner.expressions.utils.{RichFunc1, RichFunc2, RichFunc3, SplitUDF}
import org.apache.flink.table.planner.factories.TestValuesTableFactory
import org.apache.flink.table.planner.plan.rules.physical.batch.BatchPhysicalSortRule
import org.apache.flink.table.planner.runtime.utils.BatchTableEnvUtil.parseFieldNames
import org.apache.flink.table.planner.runtime.utils.BatchTestBase.row
import org.apache.flink.table.planner.runtime.utils.TestData._
import org.apache.flink.table.planner.runtime.utils.UserDefinedFunctionTestUtils._
import org.apache.flink.table.planner.runtime.utils.{BatchTableEnvUtil, BatchTestBase, TestData, UserDefinedFunctionTestUtils}
import org.apache.flink.table.planner.utils.{DateTimeTestUtil, TestLegacyFilterableTableSource}
import org.apache.flink.table.planner.utils.DateTimeTestUtil._
import org.apache.flink.table.utils.DateTimeUtils.unixTimestampToLocalDateTime
import org.apache.flink.types.Row
import org.junit.Assert.assertEquals
import org.junit._
import org.junit.rules.ExpectedException
import java.nio.charset.StandardCharsets
import java.sql.{Date, Time, Timestamp}
import java.time.{Instant, LocalDate, LocalDateTime, LocalTime, ZoneId}
import java.util
import scala.collection.Seq
class CalcITCase extends BatchTestBase {
  // JUnit 4 ExpectedException rule; must be exposed through a @Rule-annotated getter
  var _expectedEx: ExpectedException = ExpectedException.none
  @Rule
  def expectedEx: ExpectedException = _expectedEx
  @Before
  override def before(): Unit = {
    super.before()
    // register the shared input tables used by the test cases in this suite
    registerCollection("Table3", data3, type3, "a, b, c", nullablesOfData3)
    registerCollection("NullTable3", nullData3, type3, "a, b, c", nullablesOfData3)
    registerCollection("SmallTable3", smallData3, type3, "a, b, c", nullablesOfData3)
    registerCollection("testTable", buildInData, buildInType, "a,b,c,d,e,f,g,h,i,j")
  }
  // SELECT * with a trivially-true IS NOT NULL predicate returns every row.
  @Test
  def testSelectStar(): Unit = {
    checkResult(
      "SELECT * FROM Table3 where a is not null",
      data3)
  }
  // Explicitly projecting every column is equivalent to SELECT *.
  @Test
  def testSimpleSelectAll(): Unit = {
    checkResult(
      "SELECT a, b, c FROM Table3",
      data3)
  }
  // A wide projection (each column repeated many times, in varying orders)
  // combined with a boolean-column filter that keeps only the first row.
  @Test
  def testManySelectWithFilter(): Unit = {
    val data = Seq(
      (true, 1, 2, 3, 4, 5, 6, 7),
      (false, 1, 2, 3, 4, 5, 6, 7)
    )
    BatchTableEnvUtil.registerCollection(tEnv, "MyT", data, "a, b, c, d, e, f, g, h")
    checkResult(
      """
        |SELECT
        | a, b, c, d, e, f, g, h,
        | a, b, c, g, d, e, f, h,
        | h, g, f, e, d, c, b, a,
        | h, f, e, d, g, c, b, a,
        | c, a, b, g, f, e, h, d,
        | a, b, c, d, e, f, g, h,
        | a, b, c, g, d, e, f, h,
        | h, g, f, e, d, c, b, a,
        | h, f, e, d, g, c, b, a,
        | c, a, b, g, f, e, h, d
        |FROM MyT WHERE a
      """.stripMargin,
      Seq(row(
        true, 1, 2, 3, 4, 5, 6, 7, true, 1, 2, 6, 3, 4, 5, 7, 7, 6, 5, 4, 3, 2, 1,
        true, 7, 5, 4, 3, 6, 2, 1, true, 2, true, 1, 6, 5, 4, 7, 3, true, 1, 2, 3,
        4, 5, 6, 7, true, 1, 2, 6, 3, 4, 5, 7, 7, 6, 5, 4, 3, 2, 1, true, 7, 5, 4,
        3, 6, 2, 1, true, 2, true, 1, 6, 5, 4, 7, 3
      )))
  }
  // The same wide, repeated projection over a three-row table without a filter.
  @Test
  def testManySelect(): Unit = {
    registerCollection(
      "ProjectionTestTable",
      projectionTestData, projectionTestDataType,
      "a, b, c, d, e, f, g, h",
      nullablesOfProjectionTestData)
    checkResult(
      """
        |SELECT
        | a, b, c, d, e, f, g, h,
        | a, b, c, g, d, e, f, h,
        | h, g, f, e, d, c, b, a,
        | h, f, e, d, g, c, b, a,
        | c, a, b, g, f, e, h, d,
        | a, b, c, d, e, f, g, h,
        | a, b, c, g, d, e, f, h,
        | h, g, f, e, d, c, b, a,
        | h, f, e, d, g, c, b, a,
        | c, a, b, g, f, e, h, d
        |FROM ProjectionTestTable
      """.stripMargin,
      Seq(
        row(
          1, 10, 100, "1", "10", "100", 1000, "1000",
          1, 10, 100, 1000, "1", "10", "100", "1000",
          "1000", 1000, "100", "10", "1", 100, 10, 1,
          "1000", "100", "10", "1", 1000, 100, 10, 1,
          100, 1, 10, 1000, "100", "10", "1000", "1",
          1, 10, 100, "1", "10", "100", 1000, "1000",
          1, 10, 100, 1000, "1", "10", "100", "1000",
          "1000", 1000, "100", "10", "1", 100, 10, 1,
          "1000", "100", "10", "1", 1000, 100, 10, 1,
          100, 1, 10, 1000, "100", "10", "1000", "1"),
        row(
          2, 20, 200, "2", "20", "200", 2000, "2000",
          2, 20, 200, 2000, "2", "20", "200", "2000",
          "2000", 2000, "200", "20", "2", 200, 20, 2,
          "2000", "200", "20", "2", 2000, 200, 20, 2,
          200, 2, 20, 2000, "200", "20", "2000", "2",
          2, 20, 200, "2", "20", "200", 2000, "2000",
          2, 20, 200, 2000, "2", "20", "200", "2000",
          "2000", 2000, "200", "20", "2", 200, 20, 2,
          "2000", "200", "20", "2", 2000, 200, 20, 2,
          200, 2, 20, 2000, "200", "20", "2000", "2"),
        row(
          3, 30, 300, "3", "30", "300", 3000, "3000",
          3, 30, 300, 3000, "3", "30", "300", "3000",
          "3000", 3000, "300", "30", "3", 300, 30, 3,
          "3000", "300", "30", "3", 3000, 300, 30, 3,
          300, 3, 30, 3000, "300", "30", "3000", "3",
          3, 30, 300, "3", "30", "300", 3000, "3000",
          3, 30, 300, 3000, "3", "30", "300", "3000",
          "3000", 3000, "300", "30", "3", 300, 30, 3,
          "3000", "300", "30", "3", 3000, 300, 30, 3,
          300, 3, 30, 3000, "300", "30", "3000", "3")
      ))
  }
  // Column aliases containing special characters can be referenced when quoted.
  @Test
  def testSelectWithNaming(): Unit = {
    checkResult(
      "SELECT `1-_./Ü`, b, c FROM (SELECT a as `1-_./Ü`, b, c FROM Table3)",
      data3)
  }
  // Referencing a non-existent column must fail validation.
  @Test(expected = classOf[ValidationException])
  def testInvalidFields(): Unit = {
    checkResult(
      "SELECT a, foo FROM Table3",
      data3)
  }
  // WHERE false yields an empty result.
  @Test
  def testAllRejectingFilter(): Unit = {
    checkResult(
      "SELECT * FROM Table3 WHERE false",
      Seq())
  }
  // WHERE true yields every row.
  @Test
  def testAllPassingFilter(): Unit = {
    checkResult(
      "SELECT * FROM Table3 WHERE true",
      data3)
  }
  // LIKE with a contains pattern on a string column.
  @Test
  def testFilterOnString(): Unit = {
    checkResult(
      "SELECT * FROM Table3 WHERE c LIKE '%world%'",
      Seq(
        row(3, 2L, "Hello world"),
        row(4, 3L, "Hello world, how are you?")
      ))
  }
  // MOD-based predicate keeps the even values of column a.
  @Test
  def testFilterOnInteger(): Unit = {
    checkResult(
      "SELECT * FROM Table3 WHERE MOD(a,2)=0",
      Seq(
        row(2, 2L, "Hello"),
        row(4, 3L, "Hello world, how are you?"),
        row(6, 3L, "Luke Skywalker"),
        row(8, 4L, "Comment#2"),
        row(10, 4L, "Comment#4"),
        row(12, 5L, "Comment#6"),
        row(14, 5L, "Comment#8"),
        row(16, 6L, "Comment#10"),
        row(18, 6L, "Comment#12"),
        row(20, 6L, "Comment#14")
      ))
  }
  // OR of two range predicates keeps only the extreme rows.
  @Test
  def testDisjunctivePredicate(): Unit = {
    checkResult(
      "SELECT * FROM Table3 WHERE a < 2 OR a > 20",
      Seq(
        row(1, 1L, "Hi"),
        row(21, 6L, "Comment#15")
      ))
  }
  // Conjunction of MOD predicates on two columns (a odd, b even).
  @Test
  def testFilterWithAnd(): Unit = {
    checkResult(
      "SELECT * FROM Table3 WHERE MOD(a,2)<>0 AND MOD(b,2)=0",
      Seq(
        row(3, 2L, "Hello world"),
        row(7, 4L, "Comment#1"),
        row(9, 4L, "Comment#3"),
        row(17, 6L, "Comment#11"),
        row(19, 6L, "Comment#13"),
        row(21, 6L, "Comment#15")
      ))
  }
  // DATE/TIME/TIMESTAMP literals: projection and comparison against local
  // date/time columns, with the literal on either side of the equality.
  @Test
  def testAdvancedDataTypes(): Unit = {
    val data = Seq(
      row(
        localDate("1984-07-12"),
        localTime("14:34:24"),
        localDateTime("1984-07-12 14:34:24")))
    registerCollection(
      "MyTable", data, new RowTypeInfo(LOCAL_DATE, LOCAL_TIME, LOCAL_DATE_TIME), "a, b, c")
    checkResult(
      "SELECT a, b, c, DATE '1984-07-12', TIME '14:34:24', " +
        "TIMESTAMP '1984-07-12 14:34:24' FROM MyTable",
      Seq(
        row(
          localDate("1984-07-12"),
          localTime("14:34:24"),
          localDateTime("1984-07-12 14:34:24"),
          localDate("1984-07-12"),
          localTime("14:34:24"),
          localDateTime("1984-07-12 14:34:24"))))
    checkResult(
      "SELECT a, b, c, DATE '1984-07-12', TIME '14:34:24', " +
        "TIMESTAMP '1984-07-12 14:34:24' FROM MyTable " +
        "WHERE a = '1984-07-12' and b = '14:34:24' and c = '1984-07-12 14:34:24'",
      Seq(
        row(
          localDate("1984-07-12"),
          localTime("14:34:24"),
          localDateTime("1984-07-12 14:34:24"),
          localDate("1984-07-12"),
          localTime("14:34:24"),
          localDateTime("1984-07-12 14:34:24"))))
    checkResult(
      "SELECT a, b, c, DATE '1984-07-12', TIME '14:34:24', " +
        "TIMESTAMP '1984-07-12 14:34:24' FROM MyTable " +
        "WHERE '1984-07-12' = a and '14:34:24' = b and '1984-07-12 14:34:24' = c",
      Seq(
        row(
          localDate("1984-07-12"),
          localTime("14:34:24"),
          localDateTime("1984-07-12 14:34:24"),
          localDate("1984-07-12"),
          localTime("14:34:24"),
          localDateTime("1984-07-12 14:34:24"))))
  }
  // A registered scalar UDF works on both a column reference and a literal.
  @Test
  def testUserDefinedScalarFunction(): Unit = {
    registerFunction("hashCode", MyHashCode)
    val data = Seq(row("a"), row("b"), row("c"))
    registerCollection("MyTable", data, new RowTypeInfo(STRING_TYPE_INFO), "text")
    checkResult(
      "SELECT hashCode(text), hashCode('22') FROM MyTable",
      Seq(row(97,1600), row(98,1600), row(99,1600)
      ))
  }
  // A UDF returning DECIMAL preserves precision/scale of the result.
  @Test
  def testDecimalReturnType(): Unit = {
    registerFunction("myNegative", MyNegative)
    checkResult("SELECT myNegative(5.1)",
      Seq(row(new java.math.BigDecimal("-5.100000000000000000"))
      ))
  }
  // A UDF operating on the planner's internal string representation round-trips values.
  @Test
  def testUDFWithInternalClass(): Unit = {
    registerFunction("func", BinaryStringFunction)
    val data = Seq(row("a"), row("b"), row("c"))
    registerCollection("MyTable", data, new RowTypeInfo(STRING_TYPE_INFO), "text")
    checkResult(
      "SELECT func(text) FROM MyTable",
      Seq(row("a"), row("b"), row("c")
      ))
  }
  // Casting TIMESTAMP vs Instant-backed columns to VARCHAR under a non-default
  // session time zone (Europe/Paris) renders the values differently.
  @Test
  def testTimestampSemantics(): Unit = {
    // If the timestamp literal '1969-07-20 16:17:39' is inserted in Washington D.C.
    // and then queried from Paris, it might be shown in the following ways based
    // on timestamp semantics:
    // TODO: Add ZonedDateTime/OffsetDateTime
    val new_york = ZoneId.of("America/New_York")
    val ldt = localDateTime("1969-07-20 16:17:39")
    val data = Seq(row(
      ldt,
      ldt.toInstant(new_york.getRules.getOffset(ldt))
    ))
    registerCollection("T", data, new RowTypeInfo(LOCAL_DATE_TIME, INSTANT), "a, b")
    val pairs = ZoneId.of("Europe/Paris")
    tEnv.getConfig.setLocalTimeZone(pairs)
    checkResult(
      "SELECT CAST(a AS VARCHAR), b, CAST(b AS VARCHAR) FROM T",
      Seq(row("1969-07-20 16:17:39.000", "1969-07-20T20:17:39Z", "1969-07-20 21:17:39.000"))
    )
  }
  // UDFs with java.sql.*, java.time.* and Instant parameters accept both the
  // legacy and the new time types via implicit conversion.
  @Test
  def testTimeUDF(): Unit = {
    val data = Seq(row(
      localDate("1984-07-12"),
      Date.valueOf("1984-07-12"),
      DateTimeTestUtil.localTime("08:03:09"),
      Time.valueOf("08:03:09"),
      localDateTime("2019-09-19 08:03:09"),
      Timestamp.valueOf("2019-09-19 08:03:09"),
      Timestamp.valueOf("2019-09-19 08:03:09").toInstant))
    registerCollection("MyTable", data,
      new RowTypeInfo(LOCAL_DATE, DATE, LOCAL_TIME, TIME, LOCAL_DATE_TIME, TIMESTAMP, INSTANT),
      "a, b, c, d, e, f, g")
    tEnv.registerFunction("dateFunc", DateFunction)
    tEnv.registerFunction("localDateFunc", LocalDateFunction)
    tEnv.registerFunction("timeFunc", TimeFunction)
    tEnv.registerFunction("localTimeFunc", LocalTimeFunction)
    tEnv.registerFunction("timestampFunc", TimestampFunction)
    tEnv.registerFunction("datetimeFunc", DateTimeFunction)
    tEnv.registerFunction("instantFunc", InstantFunction)
    val v1 = "1984-07-12"
    val v2 = "08:03:09"
    val v3 = "2019-09-19 08:03:09.0"
    val v4 = "2019-09-19T08:03:09"
    checkResult(
      "SELECT" +
        " dateFunc(a), localDateFunc(a), dateFunc(b), localDateFunc(b)," +
        " timeFunc(c), localTimeFunc(c), timeFunc(d), localTimeFunc(d)," +
        " timestampFunc(e), datetimeFunc(e), timestampFunc(f), datetimeFunc(f)," +
        " CAST(instantFunc(g) AS TIMESTAMP), instantFunc(g)" +
        " FROM MyTable",
      Seq(row(
        v1, v1, v1, v1,
        v2, v2, v2, v2,
        v3, v4, v3, v4,
        localDateTime("2019-09-19 08:03:09"),
        Timestamp.valueOf("2019-09-19 08:03:09").toInstant)))
  }
// Checks implicit casts when TIMESTAMP / TIMESTAMP_LTZ columns of various precisions
// are passed to UDFs declared with Timestamp, LocalDateTime and Instant parameters.
@Test
def testTimeUDFParametersImplicitCast(): Unit = {
val data: Seq[Row] = Seq(row(
localDateTime("2019-09-19 08:03:09.123"),
Timestamp.valueOf("2019-09-19 08:03:09").toInstant,
Timestamp.valueOf("2019-09-19 08:03:09.123").toInstant,
Timestamp.valueOf("2019-09-19 08:03:09.123456").toInstant,
Timestamp.valueOf("2019-09-19 08:03:09.123456789").toInstant,
Timestamp.valueOf("2019-09-19 08:03:09.123").toInstant)
)
val dataId = TestValuesTableFactory.registerData(data)
val ddl =
s"""
|CREATE TABLE MyTable (
| ntz TIMESTAMP(3),
| ltz0 TIMESTAMP_LTZ(0),
| ltz3 TIMESTAMP_LTZ(3),
| ltz6 TIMESTAMP_LTZ(6),
| ltz9 TIMESTAMP_LTZ(9),
| ltz_not_null TIMESTAMP_LTZ(3) NOT NULL
|) WITH (
| 'connector' = 'values',
| 'data-id' = '$dataId',
| 'bounded' = 'true'
|)
|""".stripMargin
tEnv.executeSql(ddl)
tEnv.createTemporaryFunction("timestampFunc", TimestampFunction)
tEnv.createTemporaryFunction("datetimeFunc", DateTimeFunction)
tEnv.createTemporaryFunction("instantFunc", InstantFunction)
checkResult(
"SELECT" +
" timestampFunc(ntz), datetimeFunc(ntz), instantFunc(ntz)," +
" timestampFunc(ltz0), datetimeFunc(ltz0), instantFunc(ltz0)," +
" timestampFunc(ltz3), datetimeFunc(ltz3), instantFunc(ltz3)," +
" timestampFunc(ltz6), datetimeFunc(ltz6), instantFunc(ltz6)," +
" timestampFunc(ltz9), datetimeFunc(ltz9), instantFunc(ltz9)," +
" timestampFunc(ltz_not_null), datetimeFunc(ltz_not_null), instantFunc(ltz_not_null)" +
" FROM MyTable",
Seq(row(
// ntz
"2019-09-19 08:03:09.123", "2019-09-19T08:03:09.123",
Timestamp.valueOf("2019-09-19 08:03:09.123").toInstant,
// ltz0
"2019-09-19 08:03:09.0", "2019-09-19T08:03:09",
Timestamp.valueOf("2019-09-19 08:03:09").toInstant,
// ltz3
"2019-09-19 08:03:09.123", "2019-09-19T08:03:09.123",
Timestamp.valueOf("2019-09-19 08:03:09.123").toInstant,
// ltz6
"2019-09-19 08:03:09.123456", "2019-09-19T08:03:09.123456",
Timestamp.valueOf("2019-09-19 08:03:09.123456").toInstant,
// ltz9 (comment fixed: this group is the TIMESTAMP_LTZ(9) column, not ltz6)
"2019-09-19 08:03:09.123456789", "2019-09-19T08:03:09.123456789",
Timestamp.valueOf("2019-09-19 08:03:09.123456789").toInstant,
// ltz_not_null
"2019-09-19 08:03:09.123", "2019-09-19T08:03:09.123",
Timestamp.valueOf("2019-09-19 08:03:09.123").toInstant))
)
}
// Round-trips a VARBINARY column (byte[]) through a simple projection.
@Test
def testBinary(): Unit = {
val data = Seq(row(1, 2, "hehe".getBytes(StandardCharsets.UTF_8)))
registerCollection(
"MyTable",
data,
new RowTypeInfo(INT_TYPE_INFO, INT_TYPE_INFO, BYTE_PRIMITIVE_ARRAY_TYPE_INFO),
"a, b, c")
checkResult(
"SELECT a, b, c FROM MyTable",
data)
}
// Rich UDF reading a job parameter ("string.value") supplied via the execution config.
@Test
def testUserDefinedScalarFunctionWithParameter(): Unit = {
registerFunction("RichFunc2", new RichFunc2)
UserDefinedFunctionTestUtils.setJobParameters(env, Map("string.value" -> "ABC"))
checkResult(
"SELECT c FROM SmallTable3 where RichFunc2(c)='ABC#Hello'",
Seq(row("Hello"))
)
}
// Rich UDF that reads a registered distributed-cache file at runtime.
// NOTE(review): the "\\n" in `words` is a literal backslash-n as written; presumably a
// newline was intended — confirm against the original repository source.
@Test
def testUserDefinedScalarFunctionWithDistributedCache(): Unit = {
val words = "Hello\\nWord"
val filePath = UserDefinedFunctionTestUtils.writeCacheFile("test_words", words)
env.registerCachedFile(filePath, "words")
registerFunction("RichFunc3", new RichFunc3)
checkResult(
"SELECT c FROM SmallTable3 where RichFunc3(c)=true",
Seq(row("Hello"))
)
}
// Two rich UDFs combined in one predicate (OR/AND precedence exercised).
@Test
def testMultipleUserDefinedScalarFunctions(): Unit = {
registerFunction("RichFunc1", new RichFunc1)
registerFunction("RichFunc2", new RichFunc2)
UserDefinedFunctionTestUtils.setJobParameters(env, Map("string.value" -> "Abc"))
checkResult(
"SELECT c FROM SmallTable3 where RichFunc2(c)='Abc#Hello' or RichFunc1(a)=3 and b=2",
Seq(row("Hello"), row("Hello world"))
)
}
// UDFs with external (Row / java.util.List / String) result types in a plain projection.
@Test
def testExternalTypeFunc1(): Unit = {
registerFunction("func1", RowFunc)
registerFunction("rowToStr", RowToStrFunc)
registerFunction("func2", ListFunc)
registerFunction("func3", StringFunc)
val data = Seq(row("a"), row("b"), row("c"))
registerCollection("MyTable", data, new RowTypeInfo(STRING_TYPE_INFO), "text")
checkResult(
"SELECT rowToStr(func1(text)), func2(text), func3(text) FROM MyTable",
Seq(
row("a", util.Arrays.asList("a"), "a"),
row("b", util.Arrays.asList("b"), "b"),
row("c", util.Arrays.asList("c"), "c")
))
}
// Same UDFs, but behind a GROUP BY so the external types must survive serialization
// across the shuffle.
@Test
def testExternalTypeFunc2(): Unit = {
registerFunction("func1", RowFunc)
registerFunction("rowToStr", RowToStrFunc)
registerFunction("func2", ListFunc)
registerFunction("func3", StringFunc)
val data = Seq(row("a"), row("b"), row("c"))
registerCollection("MyTable", data, new RowTypeInfo(STRING_TYPE_INFO), "text")
// go to shuffler to serializer
checkResult(
"SELECT text, count(*), rowToStr(func1(text)), func2(text), func3(text) " +
"FROM MyTable group by text",
Seq(
row("a", 1, "a", util.Arrays.asList("a"), "a"),
row("b", 1, "b", util.Arrays.asList("b"), "b"),
row("c", 1, "c", util.Arrays.asList("c"), "c")
))
}
// POJO-typed column: selecting it should render as a nested row of its fields.
@Test
def testPojoField(): Unit = {
val data = Seq(
row(new MyPojo(5, 105)),
row(new MyPojo(6, 11)),
row(new MyPojo(7, 12)))
registerCollection(
"MyTable",
data,
new RowTypeInfo(TypeExtractor.createTypeInfo(classOf[MyPojo])),
"a")
checkResult(
"SELECT a FROM MyTable",
Seq(
row(row(5, 105)),
row(row(6, 11)),
row(row(7, 12))
))
}
// POJO type flowing both INTO a UDF (parameter) and OUT of a UDF (return value).
@Test
def testPojoFieldUDF(): Unit = {
val data = Seq(
row(new MyPojo(5, 105)),
row(new MyPojo(6, 11)),
row(new MyPojo(7, 12)))
registerCollection(
"MyTable",
data,
new RowTypeInfo(TypeExtractor.createTypeInfo(classOf[MyPojo])),
"a")
//1. external type for udf parameter
registerFunction("pojoFunc", MyPojoFunc)
registerFunction("toPojoFunc", MyToPojoFunc)
checkResult(
"SELECT pojoFunc(a) FROM MyTable",
Seq(row(105), row(11), row(12)))
//2. external type return in udf
checkResult(
"SELECT toPojoFunc(pojoFunc(a)) FROM MyTable",
Seq(
row(row(11, 11)),
row(row(12, 12)),
row(row(105, 105))))
}
// TODO
// @Test
// def testUDFWithGetResultTypeFromLiteral(): Unit = {
// registerFunction("hashCode0", LiteralHashCode)
// registerFunction("hashCode1", LiteralHashCode)
// val data = Seq(row("a"), row("b"), row("c"))
// tEnv.registerCollection("MyTable", data, new RowTypeInfo(STRING_TYPE_INFO), "text")
// checkResult(
// "SELECT hashCode0(text, 'int') FROM MyTable",
// Seq(row(97), row(98), row(99)
// ))
//
// checkResult(
// "SELECT hashCode1(text, 'string') FROM MyTable",
// Seq(row("str97"), row("str98"), row("str99")
// ))
// }
// IN with few values (expected to stay as OR predicates rather than a hash-set lookup).
@Test
def testInSmallValues(): Unit = {
checkResult(
"SELECT a FROM Table3 WHERE a in (1, 2)",
Seq(row(1), row(2)))
checkResult(
"SELECT a FROM Table3 WHERE a in (1, 2) and b = 2",
Seq(row(2)))
}
// IN with enough values to trigger the optimized (set-based) code path.
@Test
def testInLargeValues(): Unit = {
checkResult(
"SELECT a FROM Table3 WHERE a in (1, 2, 3, 4, 5)",
Seq(row(1), row(2), row(3), row(4), row(5)))
checkResult(
"SELECT a FROM Table3 WHERE a in (1, 2, 3, 4, 5) and b = 2",
Seq(row(2), row(3)))
checkResult(
"SELECT c FROM Table3 WHERE c in ('Hi', 'H2', 'H3', 'H4', 'H5')",
Seq(row("Hi")))
}
// IN over a computed expression, nested inside AND/OR, plus the manually expanded
// equivalent — both must return the same result.
@Test
def testComplexInLargeValues(): Unit = {
checkResult(
"SELECT c FROM Table3 WHERE substring(c, 0, 2) in ('Hi', 'H2', 'H3', 'H4', 'H5')",
Seq(row("Hi")))
checkResult(
"SELECT c FROM Table3 WHERE a = 1 and " +
"(b = 1 or (c = 'Hello' and substring(c, 0, 2) in ('Hi', 'H2', 'H3', 'H4', 'H5')))",
Seq(row("Hi")))
checkResult(
"SELECT c FROM Table3 WHERE a = 1 and " +
"(b = 1 or (c = 'Hello' and (" +
"substring(c, 0, 2) = 'Hi' or substring(c, 0, 2) = 'H2' or " +
"substring(c, 0, 2) = 'H3' or substring(c, 0, 2) = 'H4' or " +
"substring(c, 0, 2) = 'H5')))",
Seq(row("Hi")))
}
// NOT IN with many values.
@Test
def testNotInLargeValues(): Unit = {
checkResult(
"SELECT a FROM SmallTable3 WHERE a not in (2, 3, 4, 5)",
Seq(row(1)))
checkResult(
"SELECT a FROM SmallTable3 WHERE a not in (2, 3, 4, 5) or b = 2",
Seq(row(1), row(2), row(3)))
checkResult(
"SELECT c FROM SmallTable3 WHERE c not in ('Hi', 'H2', 'H3', 'H4')",
Seq(row("Hello"), row("Hello world")))
}
// NOT IN over an expression, plus the manually expanded <> chain equivalent.
@Test
def testComplexNotInLargeValues(): Unit = {
checkResult(
"SELECT c FROM SmallTable3 WHERE substring(c, 0, 2) not in ('Hi', 'H2', 'H3', 'H4', 'H5')",
Seq(row("Hello"), row("Hello world")))
checkResult(
"SELECT c FROM SmallTable3 WHERE a = 1 or " +
"(b = 1 and (c = 'Hello' or substring(c, 0, 2) not in ('Hi', 'H2', 'H3', 'H4', 'H5')))",
Seq(row("Hi")))
checkResult(
"SELECT c FROM SmallTable3 WHERE a = 1 or " +
"(b = 1 and (c = 'Hello' or (" +
"substring(c, 0, 2) <> 'Hi' and substring(c, 0, 2) <> 'H2' and " +
"substring(c, 0, 2) <> 'H3' and substring(c, 0, 2) <> 'H4' and " +
"substring(c, 0, 2) <> 'H5')))",
Seq(row("Hi")))
}
// ROW constructor with literals and with column references.
@Test
def testRowType(): Unit = {
// literals
checkResult(
"SELECT ROW(1, 'Hi', true) FROM SmallTable3",
Seq(
row(row(1, "Hi", true)),
row(row(1, "Hi", true)),
row(row(1, "Hi", true))
)
)
// primitive type
checkResult(
"SELECT ROW(1, a, b) FROM SmallTable3",
Seq(
row(row(1, 1, 1L)),
row(row(1, 2, 2L)),
row(row(1, 3, 2L))
)
)
}
// ROW containing a DECIMAL field: the value must keep precision/scale (5, 4).
@Test
def testRowTypeWithDecimal(): Unit = {
val d = DecimalDataUtils.castFrom(2.0002, 5, 4).toBigDecimal
checkResult(
"SELECT ROW(CAST(2.0002 AS DECIMAL(5, 4)), a, c) FROM SmallTable3",
Seq(
row(row(d, 1, "Hi")),
row(row(d, 2, "Hello")),
row(row(d, 3, "Hello world"))
)
)
}
// ARRAY constructor; results are compared via their string rendering.
@Test
def testArrayType(): Unit = {
// literals
checkResult(
"SELECT ARRAY['Hi', 'Hello', 'How are you'] FROM SmallTable3",
Seq(
row("[Hi, Hello, How are you]"),
row("[Hi, Hello, How are you]"),
row("[Hi, Hello, How are you]")
)
)
// primitive type
checkResult(
"SELECT ARRAY[b, 30, 10, a] FROM SmallTable3",
Seq(
row("[1, 30, 10, 1]"),
row("[2, 30, 10, 2]"),
row("[2, 30, 10, 3]")
)
)
// non-primitive type
checkResult(
"SELECT ARRAY['Test', c] FROM SmallTable3",
Seq(
row("[Test, Hi]"),
row("[Test, Hello]"),
row("[Test, Hello world]")
)
)
}
// MAP constructor; results are compared via their string rendering.
@Test
def testMapType(): Unit = {
// literals
checkResult(
"SELECT MAP[1, 'Hello', 2, 'Hi'] FROM SmallTable3",
Seq(
row("{1=Hello, 2=Hi}"),
row("{1=Hello, 2=Hi}"),
row("{1=Hello, 2=Hi}")
)
)
// primitive type
checkResult(
"SELECT MAP[b, 30, 10, a] FROM SmallTable3",
Seq(
row("{1=30, 10=1}"),
row("{2=30, 10=2}"),
row("{2=30, 10=3}")
)
)
// non-primitive type
checkResult(
"SELECT MAP[a, c] FROM SmallTable3",
Seq(
row("{1=Hi}"),
row("{2=Hello}"),
row("{3=Hello world}")
)
)
}
// Grouping by a MAP value is unsupported; the planner must reject the query with a
// descriptive message (captured by the expected-exception rule).
@Test
def testMapTypeGroupBy(): Unit = {
_expectedEx.expectMessage("is not supported as a GROUP_BY/PARTITION_BY/JOIN_EQUAL/UNION field")
checkResult(
"SELECT COUNT(*) FROM SmallTable3 GROUP BY MAP[1, 'Hello', 2, 'Hi']",
Seq()
)
}
// Builds ROW/ARRAY/MAP from columns and inspects the raw result objects (nested Row,
// Integer[], HashMap) rather than their string rendering.
@Test
def testValueConstructor(): Unit = {
val data = Seq(row("foo", 12, localDateTime("1984-07-12 14:34:24.001")))
BatchTableEnvUtil.registerCollection(
tEnv, "MyTable", data,
new RowTypeInfo(Types.STRING, Types.INT, Types.LOCAL_DATE_TIME),
Some(parseFieldNames("a, b, c")), None, None)
val table = parseQuery("SELECT ROW(a, b, c), ARRAY[12, b], MAP[a, c] FROM MyTable " +
"WHERE (a, b, c) = ('foo', 12, TIMESTAMP '1984-07-12 14:34:24.001')")
val result = executeQuery(table)
// ROW(a, b, c) must reproduce the original field values.
val nestedRow = result.head.getField(0).asInstanceOf[Row]
assertEquals(data.head.getField(0), nestedRow.getField(0))
assertEquals(data.head.getField(1), nestedRow.getField(1))
assertEquals(data.head.getField(2), nestedRow.getField(2))
// ARRAY[12, b] comes back as a boxed Integer array.
val arr = result.head.getField(1).asInstanceOf[Array[Integer]]
assertEquals(12, arr(0))
assertEquals(data.head.getField(1), arr(1))
// MAP[a, c] comes back as a java.util.HashMap keyed by the string column.
val hashMap = result.head.getField(2).asInstanceOf[util.HashMap[String, Timestamp]]
assertEquals(data.head.getField(2),
hashMap.get(data.head.getField(0).asInstanceOf[String]))
}
// SELECT * over a table whose first column is itself a tuple (nested row).
@Test
def testSelectStarFromNestedTable(): Unit = {
val table = BatchTableEnvUtil.fromCollection(tEnv, Seq(
((0, 0), "0"),
((1, 1), "1"),
((2, 2), "2")
))
tEnv.registerTable("MyTable", table)
checkResult(
"SELECT * FROM MyTable",
Seq(
row(row(0, 0), "0"),
row(row(1, 1), "1"),
row(row(2, 2), "2")
)
)
}
// SELECT * over subqueries that construct MAP and ROW values.
@Test
def testSelectStarFromNestedValues(): Unit = {
val table = BatchTableEnvUtil.fromCollection(tEnv, Seq(
(0L, "0"),
(1L, "1"),
(2L, "2")
), "a, b")
tEnv.registerTable("MyTable", table)
checkResult(
"select * from (select MAP[a,b], a from MyTable)",
Seq(
row("{0=0}", 0),
row("{1=1}", 1),
row("{2=2}", 2)
)
)
checkResult(
"select * from (select ROW(a, a), b from MyTable)",
Seq(
row(row(0, 0), "0"),
row(row(1, 1), "1"),
row(row(2, 2), "2")
)
)
}
// SELECT * over a subquery constructing an ARRAY with a CAST inside.
@Test
def testSelectStarFromNestedValues2(): Unit = {
val table = BatchTableEnvUtil.fromCollection(tEnv, Seq(
(0L, "0"),
(1L, "1"),
(2L, "2")
), "a, b")
tEnv.registerTable("MyTable", table)
checkResult(
"select * from (select ARRAY[a,cast(b as BIGINT)], a from MyTable)",
Seq(
row("[0, 0]", 0),
row("[1, 1]", 1),
row("[2, 2]", 2)
)
)
}
@Ignore // TODO support Unicode
// Splitting on Unicode/escaped delimiters passed as SQL U& string literals.
// NOTE(review): the doubled backslashes and `\\"` sequences below look like dataset
// escaping residue and would not parse as-is — compare against the repository original
// before re-enabling this @Ignore'd test.
@Test
def testFunctionWithUnicodeParameters(): Unit = {
val data = List(
("a\\u0001b", "c\\"d", "e\\"\\u0004f"), // uses Java/Scala escaping
("x\\u0001y", "y\\"z", "z\\"\\u0004z")
)
val splitUDF0 = new SplitUDF(deterministic = true)
val splitUDF1 = new SplitUDF(deterministic = false)
registerFunction("splitUDF0", splitUDF0)
registerFunction("splitUDF1", splitUDF1)
val t1 = BatchTableEnvUtil.fromCollection(tEnv, data, "a, b, c")
tEnv.registerTable("T1", t1)
// uses SQL escaping (be aware that even Scala multi-line strings parse backslash!)
checkResult(
s"""
|SELECT
| splitUDF0(a, U&'${'\\\\'}0001', 0) AS a0,
| splitUDF1(a, U&'${'\\\\'}0001', 0) AS a1,
| splitUDF0(b, U&'"', 1) AS b0,
| splitUDF1(b, U&'"', 1) AS b1,
| splitUDF0(c, U&'${'\\\\'}${'\\\\'}"${'\\\\'}0004', 0) AS c0,
| splitUDF1(c, U&'${'\\\\'}"#0004' UESCAPE '#', 0) AS c1
|FROM T1
|""".stripMargin,
Seq(
row("a", "a", "d", "d", "e", "e"),
row("x", "x", "z", "z", "z", "z"))
)
}
// CAST appearing both in the projection and the predicate.
@Test
def testCastInWhere(): Unit = {
checkResult(
"SELECT CAST(a AS VARCHAR(10)) FROM Table3 WHERE CAST(a AS VARCHAR(10)) = '1'",
Seq(row(1)))
}
// LIKE patterns (prefix, suffix, infix, NOT LIKE, combined) over a table with NULLs.
@Test
def testLike(): Unit = {
checkResult(
"SELECT a FROM NullTable3 WHERE c LIKE '%llo%'",
Seq(row(2), row(3), row(4)))
checkResult(
"SELECT a FROM NullTable3 WHERE CAST(a as VARCHAR(10)) LIKE CAST(b as VARCHAR(10))",
Seq(row(1), row(2)))
checkResult(
"SELECT a FROM NullTable3 WHERE c NOT LIKE '%Comment%' AND c NOT LIKE '%Hello%'",
Seq(row(1), row(5), row(6), row(null), row(null)))
checkResult(
"SELECT a FROM NullTable3 WHERE c LIKE 'Comment#%' and c LIKE '%2'",
Seq(row(8), row(18)))
checkResult(
"SELECT a FROM NullTable3 WHERE c LIKE 'Comment#12'",
Seq(row(18)))
checkResult(
"SELECT a FROM NullTable3 WHERE c LIKE '%omm%nt#12'",
Seq(row(18)))
}
// LIKE with a custom ESCAPE character ('?') so '_' and '%' match literally.
@Test
def testLikeWithEscape(): Unit = {
val rows = Seq(
(1, "ha_ha"),
(2, "ffhaha_hahaff"),
(3, "aaffhaha_hahaffaa"),
(4, "aaffhaaa_aahaffaa"),
(5, "a%_ha")
)
BatchTableEnvUtil.registerCollection(tEnv, "MyT", rows, "a, b")
checkResult(
"SELECT a FROM MyT WHERE b LIKE '%ha?_ha%' ESCAPE '?'",
Seq(row(1), row(2), row(3)))
checkResult(
"SELECT a FROM MyT WHERE b LIKE '%ha?_ha' ESCAPE '?'",
Seq(row(1)))
checkResult(
"SELECT a FROM MyT WHERE b LIKE 'ha?_ha%' ESCAPE '?'",
Seq(row(1)))
checkResult(
"SELECT a FROM MyT WHERE b LIKE 'ha?_ha' ESCAPE '?'",
Seq(row(1)))
checkResult(
"SELECT a FROM MyT WHERE b LIKE '%affh%ha?_ha%' ESCAPE '?'",
Seq(row(3)))
checkResult(
"SELECT a FROM MyT WHERE b LIKE 'a?%?_ha' ESCAPE '?'",
Seq(row(5)))
checkResult(
"SELECT a FROM MyT WHERE b LIKE 'h_?_ha' ESCAPE '?'",
Seq(row(1)))
}
// Patterns with consecutive wildcards exercising the optimized chain-pattern matcher.
@Test
def testChainLike(): Unit = {
// special case to test CHAIN_PATTERN.
checkResult(
"SELECT a FROM NullTable3 WHERE c LIKE '% /sys/kvengine/KVServerRole/kvengine/kv_server%'",
Seq())
// special case to test CHAIN_PATTERN.
checkResult(
"SELECT a FROM NullTable3 WHERE c LIKE '%Tuple%%'",
Seq(row(null), row(null)))
// special case to test CHAIN_PATTERN.
checkResult(
"SELECT a FROM NullTable3 WHERE c LIKE '%/order/inter/touch/backwayprice.do%%'",
Seq())
}
// String equality and inequality predicates.
@Test
def testEqual(): Unit = {
checkResult(
"SELECT a FROM Table3 WHERE c = 'Hi'",
Seq(row(1)))
checkResult(
"SELECT c FROM Table3 WHERE c <> 'Hello' AND b = 2",
Seq(row("Hello world")))
}
// SUBSTRING(start, length).
@Test
def testSubString(): Unit = {
checkResult(
"SELECT SUBSTRING(c, 6, 13) FROM Table3 WHERE a = 6",
Seq(row("Skywalker")))
}
// CONCAT and CONCAT_WS (with separator).
@Test
def testConcat(): Unit = {
checkResult(
"SELECT CONCAT(c, '-haha') FROM Table3 WHERE a = 1",
Seq(row("Hi-haha")))
checkResult(
"SELECT CONCAT_WS('-x-', c, 'haha') FROM Table3 WHERE a = 1",
Seq(row("Hi-x-haha")))
}
// Aggregates over string columns (MIN on strings, grouped SUM).
@Test
def testStringAgg(): Unit = {
checkResult(
"SELECT MIN(c) FROM NullTable3",
Seq(row("Comment#1")))
checkResult(
"SELECT SUM(b) FROM NullTable3 WHERE c = 'NullTuple' OR c LIKE '%Hello world%' GROUP BY c",
Seq(row(1998), row(2), row(3)))
}
// TRUNCATE over DOUBLE, FLOAT, INT (negative precision) and DECIMAL inputs.
@Test
def testTruncate(): Unit = {
checkResult(
"SELECT TRUNCATE(CAST(123.456 AS DOUBLE), 2)",
Seq(row(123.45)))
checkResult(
"SELECT TRUNCATE(CAST(123.456 AS DOUBLE))",
Seq(row(123.0)))
checkResult(
"SELECT TRUNCATE(CAST(123.456 AS FLOAT), 2)",
Seq(row(123.45f)))
checkResult(
"SELECT TRUNCATE(CAST(123.456 AS FLOAT))",
Seq(row(123.0f)))
checkResult(
"SELECT TRUNCATE(123, -1)",
Seq(row(120)))
checkResult(
"SELECT TRUNCATE(123, -2)",
Seq(row(100)))
checkResult(
"SELECT TRUNCATE(CAST(123.456 AS DECIMAL(6, 3)), 2)",
Seq(row(new java.math.BigDecimal("123.45"))))
checkResult(
"SELECT TRUNCATE(CAST(123.456 AS DECIMAL(6, 3)))",
Seq(row(new java.math.BigDecimal("123"))))
}
// Simple string-returning UDF.
@Test
def testStringUdf(): Unit = {
registerFunction("myFunc", MyStringFunc)
checkResult(
"SELECT myFunc(c) FROM Table3 WHERE a = 1",
Seq(row("Hihaha")))
}
// The same UDF nested three levels deep.
@Test
def testNestUdf(): Unit = {
registerFunction("func", MyStringFunc)
checkResult(
"SELECT func(func(func(c))) FROM SmallTable3",
Seq(row("Hello worldhahahahahaha"), row("Hellohahahahahaha"), row("Hihahahahahaha")))
}
// CURRENT_DATE: stable within one query, and close to the wall-clock date
// (allow a 1-day skew for runs straddling midnight).
@Test
def testCurrentDate(): Unit = {
// Execution in on Query should return the same value
checkResult("SELECT CURRENT_DATE = CURRENT_DATE FROM testTable WHERE a = TRUE",
Seq(row(true)))
val d0 = LocalDateConverter.INSTANCE.toInternal(
unixTimestampToLocalDateTime(System.currentTimeMillis()).toLocalDate)
val table = parseQuery("SELECT CURRENT_DATE FROM testTable WHERE a = TRUE")
val result = executeQuery(table)
val d1 = LocalDateConverter.INSTANCE.toInternal(
result.toList.head.getField(0).asInstanceOf[LocalDate])
Assert.assertTrue(d0 <= d1 && d1 - d0 <= 1)
}
// CURRENT_TIMESTAMP: stable within one query, and bracketed by wall-clock readings
// taken before and after execution.
@Test
def testCurrentTimestamp(): Unit = {
// Execution in on Query should return the same value
checkResult("SELECT CURRENT_TIMESTAMP = CURRENT_TIMESTAMP FROM testTable WHERE a = TRUE",
Seq(row(true)))
// CURRENT_TIMESTAMP should return the current timestamp
val ts0 = System.currentTimeMillis()
val table = parseQuery("SELECT CURRENT_TIMESTAMP FROM testTable WHERE a = TRUE")
val result = executeQuery(table)
val ts1 = TimestampData.fromInstant(
result.toList.head.getField(0).asInstanceOf[Instant]).getMillisecond
val ts2 = System.currentTimeMillis()
Assert.assertTrue(ts0 <= ts1 && ts1 <= ts2)
}
// CURRENT_TIME: only stability within one query is asserted.
@Test
def testCurrentTime(): Unit = {
// Execution in on Query should return the same value
checkResult("SELECT CURRENT_TIME = CURRENT_TIME FROM testTable WHERE a = TRUE",
Seq(row(true)))
}
// NOTE(review): this method has no @Test annotation, so it never runs; the expected
// value Seq(row(true)) also looks inconsistent with "SELECT j". Confirm intent before
// enabling — annotating it as-is would likely produce a failing test.
def testTimestampCompareWithDate(): Unit = {
checkResult("SELECT j FROM testTable WHERE j < DATE '2017-11-11'",
Seq(row(true)))
}
/**
* TODO Support below string timestamp format to cast to timestamp:
* yyyy
* yyyy-[m]m
* yyyy-[m]m-[d]d
* yyyy-[m]m-[d]d [h]h:[m]m:[s]s
* yyyy-[m]m-[d]d [h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]
* yyyy-[m]m-[d]d [h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]Z
* yyyy-[m]m-[d]d [h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]-[h]h:[m]m
* yyyy-[m]m-[d]d [h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]+[h]h:[m]m
* yyyy-[m]m-[d]dT[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]
* yyyy-[m]m-[d]dT[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]Z
* yyyy-[m]m-[d]dT[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]-[h]h:[m]m
* yyyy-[m]m-[d]dT[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]+[h]h:[m]m
* [h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]
* [h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]Z
* [h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]-[h]h:[m]m
* [h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]+[h]h:[m]m
* T[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]
* T[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]Z
* T[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]-[h]h:[m]m
* T[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]+[h]h:[m]m
*/
// Comparing a TIMESTAMP column against a date-formatted string literal.
@Test
def testTimestampCompareWithDateString(): Unit = {
//j 2015-05-20 10:00:00.887
checkResult("SELECT j FROM testTable WHERE j < '2017-11-11'",
Seq(row(localDateTime("2015-05-20 10:00:00.887"))))
}
// Comparing a DATE column against a string literal (<=).
@Test
def testDateCompareWithDateString(): Unit = {
checkResult("SELECT h FROM testTable WHERE h <= '2017-12-12'",
Seq(
row(localDate("2017-12-12")),
row(localDate("2017-12-12"))
))
}
// DATE column equality against a string literal.
@Test
def testDateEqualsWithDateString(): Unit = {
checkResult("SELECT h FROM testTable WHERE h = '2017-12-12'",
Seq(
row(localDate("2017-12-12")),
row(localDate("2017-12-12"))
))
}
// DATE_FORMAT applied to both a TIMESTAMP column and a string literal.
@Test
def testDateFormat(): Unit = {
//j 2015-05-20 10:00:00.887
checkResult("SELECT j, " +
" DATE_FORMAT(j, 'yyyy/MM/dd HH:mm:ss')," +
" DATE_FORMAT('2015-05-20 10:00:00.887', 'yyyy/MM/dd HH:mm:ss')" +
" FROM testTable WHERE a = TRUE",
Seq(
row(localDateTime("2015-05-20 10:00:00.887"),
"2015/05/20 10:00:00",
"2015/05/20 10:00:00")
))
}
// Field-extraction functions on the timestamp 2015-05-20 10:00:00.887; expected values
// are asserted via their string rendering.
@Test
def testYear(): Unit = {
checkResult("SELECT j, YEAR(j) FROM testTable WHERE a = TRUE",
Seq(row(localDateTime("2015-05-20 10:00:00.887"), "2015")))
}
@Test
def testQuarter(): Unit = {
checkResult("SELECT j, QUARTER(j) FROM testTable WHERE a = TRUE",
Seq(row(localDateTime("2015-05-20 10:00:00.887"), "2")))
}
@Test
def testMonth(): Unit = {
checkResult("SELECT j, MONTH(j) FROM testTable WHERE a = TRUE",
Seq(row(localDateTime("2015-05-20 10:00:00.887"), "5")))
}
@Test
def testWeek(): Unit = {
checkResult("SELECT j, WEEK(j) FROM testTable WHERE a = TRUE",
Seq(row(localDateTime("2015-05-20 10:00:00.887"), "21")))
}
@Test
def testDayOfYear(): Unit = {
checkResult("SELECT j, DAYOFYEAR(j) FROM testTable WHERE a = TRUE",
Seq(row(localDateTime("2015-05-20 10:00:00.887"), "140")))
}
@Test
def testDayOfMonth(): Unit = {
checkResult("SELECT j, DAYOFMONTH(j) FROM testTable WHERE a = TRUE",
Seq(row(localDateTime("2015-05-20 10:00:00.887"), "20")))
}
@Test
def testDayOfWeek(): Unit = {
checkResult("SELECT j, DAYOFWEEK(j) FROM testTable WHERE a = TRUE",
Seq(row(localDateTime("2015-05-20 10:00:00.887"), "4")))
}
@Test
def testHour(): Unit = {
checkResult("SELECT j, HOUR(j) FROM testTable WHERE a = TRUE",
Seq(row(localDateTime("2015-05-20 10:00:00.887"), "10")))
}
@Test
def testMinute(): Unit = {
checkResult("SELECT j, MINUTE(j) FROM testTable WHERE a = TRUE",
Seq(row(localDateTime("2015-05-20 10:00:00.887"), "0")))
}
@Test
def testSecond(): Unit = {
checkResult("SELECT j, SECOND(j) FROM testTable WHERE a = TRUE",
Seq(row(localDateTime("2015-05-20 10:00:00.887"), "0")))
}
// TO_DATE with NULL input, default format, and an explicit format.
@Test
def testToDate(): Unit = {
checkResult("SELECT" +
" TO_DATE(CAST(null AS VARCHAR))," +
" TO_DATE('2016-12-31')," +
" TO_DATE('2016-12-31', 'yyyy-MM-dd')",
Seq(row(null, localDate("2016-12-31"), localDate("2016-12-31"))))
}
// TO_TIMESTAMP with NULL input, default format, and an explicit (date-only) format.
@Test
def testToTimestamp(): Unit = {
checkResult("SELECT" +
" TO_TIMESTAMP(CAST(null AS VARCHAR))," +
" TO_TIMESTAMP('2016-12-31 00:12:00')," +
" TO_TIMESTAMP('2016-12-31', 'yyyy-MM-dd')",
Seq(row(null, localDateTime("2016-12-31 00:12:00"), localDateTime("2016-12-31 00:00:00"))))
}
}
// Projection and filtering over a byte[] column.
@Test
def testCalcBinary(): Unit = {
registerCollection(
"BinaryT",
nullData3.map((r) => row(r.getField(0), r.getField(1),
r.getField(2).toString.getBytes(StandardCharsets.UTF_8))),
new RowTypeInfo(INT_TYPE_INFO, LONG_TYPE_INFO, BYTE_PRIMITIVE_ARRAY_TYPE_INFO),
"a, b, c",
nullablesOfNullData3)
checkResult(
"select a, b, c from BinaryT where b < 1000",
nullData3.map((r) => row(r.getField(0), r.getField(1),
r.getField(2).toString.getBytes(StandardCharsets.UTF_8)))
)
}
// ORDER BY over a byte[] column is expected to be rejected (range sort enabled,
// parallelism pinned to 1 to force the sort).
@Test(expected = classOf[UnsupportedOperationException])
def testOrderByBinary(): Unit = {
registerCollection(
"BinaryT",
nullData3.map((r) => row(r.getField(0), r.getField(1),
r.getField(2).toString.getBytes(StandardCharsets.UTF_8))),
new RowTypeInfo(INT_TYPE_INFO, LONG_TYPE_INFO, BYTE_PRIMITIVE_ARRAY_TYPE_INFO),
"a, b, c",
nullablesOfNullData3)
conf.getConfiguration.setInteger(
ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 1)
conf.getConfiguration.setBoolean(
BatchPhysicalSortRule.TABLE_EXEC_RANGE_SORT_ENABLED, true)
checkResult(
"select * from BinaryT order by c",
nullData3.sortBy((x : Row) =>
x.getField(2).asInstanceOf[String]).map((r) =>
row(r.getField(0), r.getField(1),
r.getField(2).toString.getBytes(StandardCharsets.UTF_8))),
isSorted = true
)
}
// GROUP BY over a byte[] column (two-level aggregation).
@Test
def testGroupByBinary(): Unit = {
registerCollection(
"BinaryT2",
nullData3.map((r) => row(r.getField(0),
r.getField(1).toString.getBytes(StandardCharsets.UTF_8), r.getField(2))),
new RowTypeInfo(INT_TYPE_INFO, BYTE_PRIMITIVE_ARRAY_TYPE_INFO, STRING_TYPE_INFO),
"a, b, c",
nullablesOfNullData3)
checkResult(
"select sum(sumA) from (select sum(a) as sumA, b, c from BinaryT2 group by c, b) group by b",
Seq(row(1), row(111), row(15), row(34), row(5), row(65), row(null))
)
}
// Column pruning on a simple values-connector table.
@Test
def testSimpleProject(): Unit = {
val myTableDataId = TestValuesTableFactory.registerData(TestData.smallData3)
val ddl =
s"""
|CREATE TABLE SimpleTable (
| a int,
| b bigint,
| c string
|) WITH (
| 'connector' = 'values',
| 'data-id' = '$myTableDataId',
| 'bounded' = 'true'
|)
""".stripMargin
tEnv.executeSql(ddl)
checkResult(
"select a, c from SimpleTable",
Seq(row(1, "Hi"), row(2, "Hello"), row(3, "Hello world"))
)
}
// Accessing deeply nested row fields when the source does NOT support nested projection
// push-down ('nested-projection-supported' = 'false').
@Test
def testNestedProject(): Unit = {
val data = Seq(
row(1, row(row("HI", 11), row(111, true)), row("hi", 1111), "tom"),
row(2, row(row("HELLO", 22), row(222, false)), row("hello", 2222), "mary"),
row(3, row(row("HELLO WORLD", 33), row(333, true)), row("hello world", 3333), "benji")
)
val myTableDataId = TestValuesTableFactory.registerData(data)
val ddl =
s"""
|CREATE TABLE NestedTable (
| id int,
| deepNested row<nested1 row<name string, `value` int>,
| nested2 row<num int, flag boolean>>,
| nested row<name string, `value` int>,
| name string
|) WITH (
| 'connector' = 'values',
| 'nested-projection-supported' = 'false',
| 'data-id' = '$myTableDataId',
| 'bounded' = 'true'
|)
""".stripMargin
tEnv.executeSql(ddl)
checkResult(
"""
|select id,
| deepNested.nested1.name AS nestedName,
| nested.`value` AS nestedValue,
| deepNested.nested2.flag AS nestedFlag,
| deepNested.nested2.num AS nestedNum
|from NestedTable
|""".stripMargin,
Seq(row(1, "HI", 1111, true, 111),
row(2, "HELLO", 2222, false, 222),
row(3, "HELLO WORLD", 3333, true, 333))
)
}
// FLOAT column matched against DECIMAL literals in an IN list.
@Test
def testFloatIn(): Unit = {
val source = tEnv.fromValues(
DataTypes.ROW(
DataTypes.FIELD("f0", DataTypes.FLOAT()),
DataTypes.FIELD("f1", DataTypes.FLOAT()),
DataTypes.FIELD("f2", DataTypes.FLOAT())),
row(1.0f, 11.0f, 12.0f),
row(2.0f, 21.0f, 22.0f),
row(3.0f, 31.0f, 32.0f),
row(4.0f, 41.0f, 42.0f),
row(5.0f, 51.0f, 52.0f)
)
tEnv.createTemporaryView("myTable", source)
val query = """
|select * from myTable where f0 in (1.0, 2.0, 3.0)
|""".stripMargin;
checkResult(
query,
Seq(
row(1.0f, 11.0f, 12.0f),
row(2.0f, 21.0f, 22.0f),
row(3.0f, 31.0f, 32.0f))
)
}
// Filter push-down into a filterable source when the predicate contains TIMESTAMPADD
// with both fixed-length (HOUR) and calendar-dependent (YEAR) intervals.
@Test
def testFilterPushDownWithInterval(): Unit = {
val schema = TableSchema
.builder()
.field("a", DataTypes.TIMESTAMP)
.field("b", DataTypes.TIMESTAMP)
.build()
val data = List(
row(localDateTime("2021-03-30 10:00:00"), localDateTime("2021-03-30 14:59:59")),
row(localDateTime("2021-03-30 10:00:00"), localDateTime("2021-03-30 15:00:00")),
row(localDateTime("2021-03-30 10:00:00"), localDateTime("2021-03-30 15:00:01")),
row(localDateTime("2021-03-30 10:00:00"), localDateTime("2023-03-30 09:59:59")),
row(localDateTime("2021-03-30 10:00:00"), localDateTime("2023-03-30 10:00:00")),
row(localDateTime("2021-03-30 10:00:00"), localDateTime("2023-03-30 10:00:01")))
TestLegacyFilterableTableSource.createTemporaryTable(
tEnv,
schema,
"myTable",
isBounded = true,
data,
Set("a", "b"))
checkResult(
"SELECT * FROM myTable WHERE TIMESTAMPADD(HOUR, 5, a) >= b",
Seq(
row(localDateTime("2021-03-30 10:00:00"), localDateTime("2021-03-30 14:59:59")),
row(localDateTime("2021-03-30 10:00:00"), localDateTime("2021-03-30 15:00:00"))))
checkResult(
"SELECT * FROM myTable WHERE TIMESTAMPADD(YEAR, 2, a) >= b",
Seq(
row(localDateTime("2021-03-30 10:00:00"), localDateTime("2021-03-30 14:59:59")),
row(localDateTime("2021-03-30 10:00:00"), localDateTime("2021-03-30 15:00:00")),
row(localDateTime("2021-03-30 10:00:00"), localDateTime("2021-03-30 15:00:01")),
row(localDateTime("2021-03-30 10:00:00"), localDateTime("2023-03-30 09:59:59")),
row(localDateTime("2021-03-30 10:00:00"), localDateTime("2023-03-30 10:00:00"))))
}
// OR combining equality predicates with IS NULL; NULL rows must be retained.
@Test
def testOrWithIsNullPredicate(): Unit = {
checkResult(
"""
|SELECT * FROM NullTable3 AS T
|WHERE T.a = 1 OR T.a = 3 OR T.a IS NULL
|""".stripMargin,
Seq(
row(1, 1L, "Hi"),
row(3, 2L, "Hello world"),
row(null, 999L, "NullTuple"),
row(null, 999L, "NullTuple")))
}
// IF() with IS NULL conditions: NULL = '' must be three-valued (not true), while
// IS NULL selects the null row explicitly.
@Test
def testOrWithIsNullInIf(): Unit = {
val data = Seq(
row("", "N"),
row("X", "Y"),
row(null, "Y"))
registerCollection(
"MyTable", data, new RowTypeInfo(STRING_TYPE_INFO, STRING_TYPE_INFO), "a, b")
checkResult(
"SELECT IF(a = '', 'a', 'b') FROM MyTable",
Seq(row('a'), row('b'), row('b')))
checkResult(
"SELECT IF(a IS NULL, 'a', 'b') FROM MyTable",
Seq(row('b'), row('b'), row('a')))
checkResult(
"SELECT IF(a = '' OR a IS NULL, 'a', 'b') FROM MyTable",
Seq(row('a'), row('b'), row('a')))
}
// CAST(string AS boolean) used directly as a filter condition: 'invalid' and NULL
// cast to NULL and therefore do not satisfy the WHERE clause.
@Test
def testFilterConditionWithCast(): Unit = {
val dataId = TestValuesTableFactory.registerData(
Seq(
row(1, "true"),
row(2, "false"),
row(3, "invalid"),
row(4, null)))
val ddl =
s"""
|CREATE TABLE MyTable (
| a int,
| b string
|) WITH (
| 'connector' = 'values',
| 'data-id' = '$dataId',
| 'bounded' = 'true'
|)
""".stripMargin
tEnv.executeSql(ddl)
checkResult(
"select a from MyTable where cast(b as boolean)",
Seq(row(1)))
checkResult(
"select cast(b as boolean) from MyTable",
Seq(row(true), row(false), row(null), row(null)))
}
// TIMESTAMPADD over every temporal column type, checking the RESULT TYPE as well as the
// value: TIMESTAMP(p) stays TIMESTAMP, TIMESTAMP_LTZ stays Instant-based, DATE promotes
// to TIMESTAMP for sub-day units, TIME stays TIME.
@Test
def testTimestampAdd(): Unit = {
// we're not adding this test to ScalarFunctionsTest because that test
// directly uses the generated code and does not check for expression types
val dataId = TestValuesTableFactory.registerData(
Seq(row(
LocalDateTime.of(2021, 7, 15, 16, 50, 0, 123000000),
LocalDateTime.of(2021, 7, 15, 16, 50, 0, 123456789),
Instant.ofEpochMilli(1626339000123L),
Instant.ofEpochSecond(1626339000, 123456789),
LocalDate.of(2021, 7, 15),
LocalTime.of(16, 50, 0, 123000000)
)))
val ddl =
s"""
|CREATE TABLE MyTable (
| a TIMESTAMP(3),
| b TIMESTAMP(9),
| c TIMESTAMP_LTZ(3),
| d TIMESTAMP_LTZ(9),
| e DATE,
| f TIME(3)
|) WITH (
| 'connector' = 'values',
| 'data-id' = '$dataId',
| 'bounded' = 'true'
|)
|""".stripMargin
tEnv.executeSql(ddl)
checkResult(
"""
|select
| timestampadd(day, 1, a),
| timestampadd(hour, 1, a),
| timestampadd(minute, 1, a),
| timestampadd(second, 1, a),
| timestampadd(day, 1, b),
| timestampadd(hour, 1, b),
| timestampadd(minute, 1, b),
| timestampadd(second, 1, b),
| timestampadd(day, 1, c),
| timestampadd(hour, 1, c),
| timestampadd(minute, 1, c),
| timestampadd(second, 1, c),
| timestampadd(day, 1, d),
| timestampadd(hour, 1, d),
| timestampadd(minute, 1, d),
| timestampadd(second, 1, d),
| timestampadd(day, 1, e),
| timestampadd(hour, 1, e),
| timestampadd(minute, 1, e),
| timestampadd(second, 1, e),
| timestampadd(day, 1, f),
| timestampadd(hour, 1, f),
| timestampadd(minute, 1, f),
| timestampadd(second, 1, f)
|from MyTable
|""".stripMargin,
Seq(row(
LocalDateTime.of(2021, 7, 16, 16, 50, 0, 123000000),
LocalDateTime.of(2021, 7, 15, 17, 50, 0, 123000000),
LocalDateTime.of(2021, 7, 15, 16, 51, 0, 123000000),
LocalDateTime.of(2021, 7, 15, 16, 50, 1, 123000000),
LocalDateTime.of(2021, 7, 16, 16, 50, 0, 123456789),
LocalDateTime.of(2021, 7, 15, 17, 50, 0, 123456789),
LocalDateTime.of(2021, 7, 15, 16, 51, 0, 123456789),
LocalDateTime.of(2021, 7, 15, 16, 50, 1, 123456789),
Instant.ofEpochMilli(1626339000123L + 24 * 3600 * 1000L),
Instant.ofEpochMilli(1626339000123L + 3600 * 1000L),
Instant.ofEpochMilli(1626339000123L + 60 * 1000L),
Instant.ofEpochMilli(1626339000123L + 1000L),
Instant.ofEpochSecond(1626339000 + 24 * 3600, 123456789),
Instant.ofEpochSecond(1626339000 + 3600, 123456789),
Instant.ofEpochSecond(1626339000 + 60, 123456789),
Instant.ofEpochSecond(1626339000 + 1, 123456789),
LocalDate.of(2021, 7, 16),
LocalDateTime.of(2021, 7, 15, 1, 0, 0),
LocalDateTime.of(2021, 7, 15, 0, 1, 0),
LocalDateTime.of(2021, 7, 15, 0, 0, 1),
LocalTime.of(16, 50, 0, 123000000),
LocalTime.of(17, 50, 0, 123000000),
LocalTime.of(16, 51, 0, 123000000),
LocalTime.of(16, 50, 1, 123000000)
)))
}
}
| StephanEwen/incubator-flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/CalcITCase.scala | Scala | apache-2.0 | 51,901 |
/*
* Tup2_1Expanded.scala
* (FScape)
*
* Copyright (c) 2001-2022 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Affero General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss.lucre.swing.graph.impl
import de.sciss.lucre.{IExpr, ITargets, Txn}
import de.sciss.lucre.expr.graph.impl.MappedIExpr
/** Expression that projects a tuple-valued expression onto its first component.
  *
  * N.B.: disposes the input `tup`!
  */
final class Tup2_1Expanded[T <: Txn[T], A, B](tup: IExpr[T, (A, B)], tx0: T)
                                             (implicit targets: ITargets[T])
  extends MappedIExpr[T, (A, B), A](tup, tx0) {

  // Stateless projection onto the first tuple slot.
  protected def mapValue(v: (A, B))(implicit tx: T): A = v._1

  // Tear down our own event wiring first, then the wrapped tuple expression.
  override def dispose()(implicit tx: T): Unit = {
    super.dispose()
    tup.dispose()
  }
}
| Sciss/FScape-next | views/src/main/scala/de/sciss/lucre/swing/graph/impl/Tup2_1Expanded.scala | Scala | agpl-3.0 | 846 |
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// Licence: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.intg
import org.ensime.api._
import org.ensime.core._
import org.ensime.fixture._
import org.ensime.util.EnsimeSpec
import org.ensime.util.file._
/**
 * End-to-end workflow test driving a broad slice of the ENSIME request protocol
 * (typechecking, symbol/type lookup, search, docs, uses-of-symbol, package
 * inspection, selection expansion, rename refactoring and AST queries) against
 * the bundled simple sample project. All offsets and expected strings below are
 * tied to the exact contents of that sample project.
 */
class BasicWorkflow extends EnsimeSpec
    with IsolatedEnsimeConfigFixture
    with IsolatedTestKitFixture
    with IsolatedProjectFixture
    with RefactoringHandlerTestUtils {

  val original = EnsimeConfigFixture.SimpleTestProject

  "ensime-server" should "open the simple test project" in {
    withEnsimeConfig { implicit config =>
      withTestKit { implicit testkit =>
        withProject { (project, asyncHelper) =>
          import testkit._

          // Handy handles into the sample project's source tree.
          val sourceRoot = scalaMain(config)
          val fooFile = sourceRoot / "org/example/Foo.scala"
          val fooFilePath = fooFile.getAbsolutePath
          val barFile = sourceRoot / "org/example/Bar.scala"

          // typeCheck module
          project ! TypecheckModule("testingSimple")
          expectMsg(VoidResponse)
          asyncHelper.expectMsgType[NewScalaNotesEvent]
          asyncHelper.expectMsgType[FullTypeCheckCompleteEvent.type]
          project ! TypeByNameReq("org.example.Bloo")
          expectMsgType[BasicTypeInfo]
          // After unloading the module its types should no longer resolve.
          project ! UnloadModuleReq("testingSimple")
          expectMsg(VoidResponse)
          project ! TypeByNameReq("org.example.Bloo")
          expectMsg(BasicTypeInfo("<none>", DeclaredAs.Nil, "<none>", Nil, Nil, None))

          // trigger typeCheck
          project ! TypecheckFilesReq(List(Left(fooFile), Left(barFile)))
          expectMsg(VoidResponse)
          asyncHelper.expectMsg(FullTypeCheckCompleteEvent)

          // Asking to typecheck missing file should report an error not kill system
          val missingFile = sourceRoot / "missing.scala"
          val missingFilePath = missingFile.getAbsolutePath
          project ! TypecheckFilesReq(List(Left(missingFile)))
          expectMsg(EnsimeServerError(s"""file(s): "$missingFilePath" do not exist"""))

          //-----------------------------------------------------------------------------------------------
          // semantic highlighting
          project ! SymbolDesignationsReq(Left(fooFile), -1, 299, SourceSymbol.allSymbols)
          val designations = expectMsgType[SymbolDesignations]
          designations.file shouldBe fooFile
          designations.syms should contain(SymbolDesignation(12, 19, PackageSymbol))
          // expected Symbols
          // ((package 12 19) (package 8 11) (trait 40 43) (valField 69 70) (class 100 103) (param 125 126) (class 128 131) (param 133 134) (class 136 142) (operator 156 157) (param 154 155) (functionCall 160 166) (param 158 159) (valField 183 186) (class 193 199) (class 201 204) (valField 214 217) (class 224 227) (functionCall 232 239) (operator 250 251) (valField 256 257) (valField 252 255) (functionCall 261 268) (functionCall 273 283) (valField 269 272)))

          //-----------------------------------------------------------------------------------------------
          // symbolAtPoint
          // (the replies below are only checked for their message type; the
          //  bound values are not inspected further)
          project ! SymbolAtPointReq(Left(fooFile), 128)
          val symbolAtPointOpt: SymbolInfo = expectMsgType[SymbolInfo]

          project ! TypeByNameReq("org.example.Foo")
          val fooClassByNameOpt = expectMsgType[TypeInfo]
          project ! TypeByNameReq("org.example.Foo$")
          val fooObjectByNameOpt = expectMsgType[TypeInfo]

          //-----------------------------------------------------------------------------------------------
          // public symbol search - java.io.File
          project ! PublicSymbolSearchReq(List("java", "io", "File"), 30)
          val javaSearchSymbol = expectMsgType[SymbolSearchResults]
          assert(javaSearchSymbol.syms.exists {
            case TypeSearchResult("java.io.File", "File", DeclaredAs.Class, Some(_)) => true
            case _ => false
          })

          //-----------------------------------------------------------------------------------------------
          // public symbol search - scala.util.Random
          project ! PublicSymbolSearchReq(List("scala", "util", "Random"), 2)
          expectMsgPF() {
            case SymbolSearchResults(List(
              TypeSearchResult("scala.util.Random", "Random", DeclaredAs.Class, Some(_)),
              TypeSearchResult("scala.util.Random$", "Random$", DeclaredAs.Class, Some(_)))) =>
            case SymbolSearchResults(List(
              TypeSearchResult("java.util.Random", "Random", DeclaredAs.Class, Some(_)),
              TypeSearchResult("scala.util.Random", "Random", DeclaredAs.Class, Some(_)))) =>
            // this is a pretty ropey test at the best of times
          }

          //-----------------------------------------------------------------------------------------------
          // documentation for type at point
          val intDocSig = DocSigPair(DocSig(DocFqn("scala", "Int"), None), DocSig(DocFqn("", "int"), None))

          // NOTE these are handled as multi-phase queries in requesthandler
          project ! DocUriAtPointReq(Left(fooFile), OffsetRange(128))
          expectMsg(Some(intDocSig))

          project ! DocUriForSymbolReq("scala.Int", None, None)
          expectMsg(Some(intDocSig))

          project ! intDocSig
          expectMsgType[StringResponse].text should endWith("/index.html#scala.Int")

          //-----------------------------------------------------------------------------------------------
          // uses of symbol at point
          project ! TypecheckFilesReq(List(Left(fooFile)))
          expectMsg(VoidResponse)
          asyncHelper.expectMsg(FullTypeCheckCompleteEvent)

          project ! UsesOfSymbolAtPointReq(Left(fooFile), 119) // point on testMethod
          expectMsgType[ERangePositions].positions should contain theSameElementsAs List(
            ERangePosition(`fooFilePath`, 114, 110, 172), ERangePosition(`fooFilePath`, 273, 269, 283)
          )

          // note that the line numbers appear to have been stripped from the
          // scala library classfiles, so offset/line comes out as zero unless
          // loaded by the pres compiler
          project ! SymbolAtPointReq(Left(fooFile), 276)
          expectMsgPF() {
            case SymbolInfo("testMethod", "testMethod", Some(OffsetSourcePosition(`fooFile`, 114)), ArrowTypeInfo("(Int, String) => Int", "(scala.Int, java.lang.String) => scala.Int", BasicTypeInfo("Int", DeclaredAs.Class, "scala.Int", Nil, Nil, None), List(ParamSectionInfo(List(("i", BasicTypeInfo("Int", DeclaredAs.Class, "scala.Int", Nil, Nil, None)), (s, BasicTypeInfo("String", DeclaredAs.Class, "java.lang.String", Nil, Nil, None))), false))), true) =>
          }

          // M-.  external symbol
          project ! SymbolAtPointReq(Left(fooFile), 190)
          expectMsgPF() {
            case SymbolInfo("Map", "Map", Some(OffsetSourcePosition(_, _)),
              BasicTypeInfo("Map", DeclaredAs.Object, "scala.collection.immutable.Map", Nil, Nil, None),
              false) =>
          }

          project ! SymbolAtPointReq(Left(fooFile), 343)
          expectMsgPF() {
            case SymbolInfo("fn", "fn", Some(OffsetSourcePosition(`fooFile`, 304)),
              BasicTypeInfo("(String) => Int", DeclaredAs.Trait, "(java.lang.String) => scala.Int",
                List(
                  BasicTypeInfo("String", DeclaredAs.Class, "java.lang.String", Nil, Nil, None),
                  BasicTypeInfo("Int", DeclaredAs.Class, "scala.Int", Nil, Nil, None)),
                Nil, None),
              false) =>
          }

          project ! SymbolAtPointReq(Left(barFile), 150)
          expectMsgPF() {
            case SymbolInfo("apply", "apply", Some(OffsetSourcePosition(`barFile`, 59)),
              ArrowTypeInfo("(String, Int) => Foo", "(java.lang.String, scala.Int) => org.example.Bar.Foo",
                BasicTypeInfo("Foo", DeclaredAs.Class, "org.example.Bar.Foo", Nil, Nil, None),
                List(ParamSectionInfo(
                  List(
                    ("bar", BasicTypeInfo("String", DeclaredAs.Class, "java.lang.String", Nil, Nil, None)),
                    ("baz", BasicTypeInfo("Int", DeclaredAs.Class, "scala.Int", Nil, Nil, None))), false))),
              true) =>
          }

          project ! SymbolAtPointReq(Left(barFile), 193)
          expectMsgPF() {
            case SymbolInfo("copy", "copy", Some(OffsetSourcePosition(`barFile`, 59)),
              ArrowTypeInfo("(String, Int) => Foo", "(java.lang.String, scala.Int) => org.example.Bar.Foo",
                BasicTypeInfo("Foo", DeclaredAs.Class, "org.example.Bar.Foo", List(), List(), None),
                List(ParamSectionInfo(
                  List(
                    ("bar", BasicTypeInfo("String", DeclaredAs.Class, "java.lang.String", List(), List(), None)),
                    ("baz", BasicTypeInfo("Int", DeclaredAs.Class, "scala.Int", List(), List(), None))), false))),
              true) =>
          }

          // C-c C-v p Inspect source of current package
          project ! InspectPackageByPathReq("org.example")
          val packageInfo = expectMsgType[PackageInfo]
          packageInfo.name shouldBe "example"
          packageInfo.fullName shouldBe "org.example"
          // Source positions are environment-dependent, so strip them before
          // comparing the package members.
          packageInfo.members.collect {
            case b: BasicTypeInfo => b.copy(pos = None)
          } should contain theSameElementsAs List(
            BasicTypeInfo("Bar", DeclaredAs.Class, "org.example.Bar", Nil, Nil, None),
            BasicTypeInfo("Bar", DeclaredAs.Object, "org.example.Bar", Nil, Nil, None),
            BasicTypeInfo("Bloo", DeclaredAs.Class, "org.example.Bloo", Nil, Nil, None),
            BasicTypeInfo("Bloo", DeclaredAs.Object, "org.example.Bloo", Nil, Nil, None),
            BasicTypeInfo("Blue", DeclaredAs.Class, "org.example.Blue", Nil, Nil, None),
            BasicTypeInfo("Blue", DeclaredAs.Object, "org.example.Blue", Nil, Nil, None),
            BasicTypeInfo("CaseClassWithCamelCaseName", DeclaredAs.Class, "org.example.CaseClassWithCamelCaseName", Nil, Nil, None),
            BasicTypeInfo("CaseClassWithCamelCaseName", DeclaredAs.Object, "org.example.CaseClassWithCamelCaseName", Nil, Nil, None),
            BasicTypeInfo("Foo", DeclaredAs.Class, "org.example.Foo", Nil, Nil, None),
            BasicTypeInfo("Foo", DeclaredAs.Object, "org.example.Foo", Nil, Nil, None),
            BasicTypeInfo("Qux", DeclaredAs.Class, "org.example.Qux", Nil, Nil, None),
            BasicTypeInfo("Test1", DeclaredAs.Class, "org.example.Test1", Nil, Nil, None),
            BasicTypeInfo("Test1", DeclaredAs.Object, "org.example.Test1", Nil, Nil, None),
            BasicTypeInfo("Test2", DeclaredAs.Class, "org.example.Test2", Nil, Nil, None),
            BasicTypeInfo("Test2", DeclaredAs.Object, "org.example.Test2", Nil, Nil, None),
            BasicTypeInfo("package", DeclaredAs.Object, "org.example.package", Nil, Nil, None)
          )

          // expand selection around 'val foo'
          project ! ExpandSelectionReq(fooFile, 215, 215)
          val expandRange1 = expectMsgType[FileRange]
          expandRange1 shouldBe FileRange(fooFilePath, 214, 217)

          project ! ExpandSelectionReq(fooFile, 214, 217)
          val expandRange2 = expectMsgType[FileRange]
          expandRange2 shouldBe FileRange(fooFilePath, 210, 229)

          // Rename of a local val: the effect comes back as a unified diff.
          project ! RefactorReq(1234, RenameRefactorDesc("bar", fooFile, 215, 215), false)
          expectMsgPF() {
            case RefactorDiffEffect(1234, RefactorType.Rename, diff) =>
              val relevantExpectedPart = s"""|@@ -14,5 +14,5 @@
                 |   val map = Map[String, Int]()
                 |-  val foo = new Foo()
                 |-  println("Hello, " + foo.x)
                 |-  println(foo.testMethod(7, "seven"))
                 |+  val bar = new Foo()
                 |+  println("Hello, " + bar.x)
                 |+  println(bar.testMethod(7, "seven"))
                 | \\n""".stripMargin
              val diffContents = diff.canon.readString()
              val expectedContents = expectedDiffContent(fooFilePath, relevantExpectedPart)

              if (diffContents == expectedContents) true
              else fail(s"Different diff content than expected. \\n Actual content: '$diffContents' \\n ExpectedRelevantContent: '$relevantExpectedPart'")
          }

          // AST dumps differ between Scala compiler versions; both the legacy
          // (`oldTree*`, newTypeName/newTermName) and the current (`tree*`,
          // TypeName/TermName) renderings are accepted below.
          val oldTree1 = """Apply[1](Select[2](Select[1](Apply[3](TypeApply[4](Select[5](Select[6](This[7](newTypeName("immutable")), scala.collection.immutable.List#MOD%M1), newTermName("apply")#METH%M1), List(TypeTree[1]())), List(Literal[8](Constant(1)), Literal[9](Constant(2)), Literal[10](Constant(3)))), newTermName("head")#METH%M1), newTermName("$plus")#METH%M1), List(Literal[9](Constant(2))))
            |[1] TypeRef(ThisType(scala#PK%M1), scala.Int#CLS%M1, List())
            |[2] MethodType(List(newTermName("x")#VAL%M1), TypeRef(ThisType(scala#PK%M1), scala.Int#CLS%M1, List()))
            |[3] TypeRef(ThisType(scala.collection.immutable#PK%M1), scala.collection.immutable.List#CLS%M1, List(TypeRef(ThisType(scala#PK%M1), scala.Int#CLS%M1, List())))
            |[4] MethodType(List(newTermName("xs")#VAL%M1), TypeRef(ThisType(scala.collection.immutable#PK%M1), scala.collection.immutable.List#CLS%M1, List(TypeRef(ThisType(scala#PK%M1), scala.Int#CLS%M1, List()))))
            |[5] PolyType(List(newTypeName("A")#TPE%M1), MethodType(List(newTermName("xs")#VAL%M1), TypeRef(ThisType(scala.collection.immutable#PK%M1), scala.collection.immutable.List#CLS%M1, List(TypeRef(NoPrefix, newTypeName("A")#TPE%M1, List())))))
            |[6] SingleType(ThisType(scala.collection.immutable#PK%M1), scala.collection.immutable.List#MOD%M1)
            |[7] ThisType(scala.collection.immutable#PK%M1)
            |[8] ConstantType(Constant(1))
            |[9] ConstantType(Constant(2))
            |[10] ConstantType(Constant(3))
            |[1] compiler mirror""".stripMargin
          val tree1 = """Apply[1](Select[2](Select[1](Apply[3](TypeApply[4](Select[5](Select[6](This[7](TypeName("immutable")), scala.collection.immutable.List#MOD%M1), TermName("apply")#METH%M1), List(TypeTree[1]())), List(Literal[8](Constant(1)), Literal[9](Constant(2)), Literal[10](Constant(3)))), TermName("head")#METH%M1), TermName("$plus")#METH%M1), List(Literal[9](Constant(2))))
            |[1] TypeRef(ThisType(scala#PKC%M1), scala.Int#CLS%M1, List())
            |[2] MethodType(List(TermName("x")#VAL%M1), TypeRef(ThisType(scala#PKC%M1), scala.Int#CLS%M1, List()))
            |[3] TypeRef(ThisType(scala.collection.immutable#PKC%M1), scala.collection.immutable.List#CLS%M1, List(TypeRef(ThisType(scala#PKC%M1), scala.Int#CLS%M1, List())))
            |[4] MethodType(List(TermName("xs")#VAL%M1), TypeRef(ThisType(scala.collection.immutable#PKC%M1), scala.collection.immutable.List#CLS%M1, List(TypeRef(ThisType(scala#PKC%M1), scala.Int#CLS%M1, List()))))
            |[5] PolyType(List(TypeName("A")#TPE%M1), MethodType(List(TermName("xs")#VAL%M1), TypeRef(ThisType(scala.collection.immutable#PKC%M1), scala.collection.immutable.List#CLS%M1, List(TypeRef(NoPrefix, TypeName("A")#TPE%M1, List())))))
            |[6] SingleType(ThisType(scala.collection.immutable#PKC%M1), scala.collection.immutable.List#MOD%M1)
            |[7] ThisType(scala.collection.immutable#PKC%M1)
            |[8] ConstantType(Constant(1))
            |[9] ConstantType(Constant(2))
            |[10] ConstantType(Constant(3))
            |[1] compiler mirror""".stripMargin
          project ! AstAtPointReq(SourceFileInfo(fooFile), OffsetRange(475, 496))
          expectMsgPF() {
            case AstInfo(ast) if {
              val stripped = ast.replaceAll("[\\n\\r]", "")
              stripped == tree1.replaceAll("[\\n\\r]", "") || stripped == oldTree1.replaceAll("[\\n\\r]", "")
            } =>
          }

          val oldTree2 = """Apply[11](TypeApply[12](Select[13](Select[14](Select[15](This[16](newTypeName("scala")), scala.Predef#MOD%M1), newTermName("Map")#GET%M1), newTermName("apply")#METH%M1), List(TypeTree[17]().setOriginal(Select[17](Select[15](This[16](newTypeName("scala")), scala.Predef#MOD%M1), newTypeName("String")#TPE%M1)), TypeTree[1]().setOriginal(Select[1](Ident[18](scala#PK%M1), scala.Int#CLS%M1)))), List())
            |[1] TypeRef(ThisType(scala#PK%M1), scala.Int#CLS%M1, List())
            |[11] TypeRef(ThisType(scala.collection.immutable#PK%M1), scala.collection.immutable.Map#TRT%M1, List(TypeRef(SingleType(ThisType(scala#PK%M1), scala.Predef#MOD%M1), newTypeName("String")#TPE%M1, List()), TypeRef(ThisType(scala#PK%M1), scala.Int#CLS%M1, List())))
            |[12] MethodType(List(newTermName("elems")#VAL%M1), TypeRef(ThisType(scala.collection.immutable#PK%M1), scala.collection.immutable.Map#TRT%M1, List(TypeRef(SingleType(ThisType(scala#PK%M1), scala.Predef#MOD%M1), newTypeName("String")#TPE%M1, List()), TypeRef(ThisType(scala#PK%M1), scala.Int#CLS%M1, List()))))
            |[13] PolyType(List(newTypeName("A")#TPE%M1, newTypeName("B")#TPE%M1), MethodType(List(newTermName("elems")#VAL%M1), TypeRef(ThisType(scala.collection.immutable#PK%M1), scala.collection.immutable.Map#TRT%M1, List(TypeRef(NoPrefix, newTypeName("A")#TPE%M1, List()), TypeRef(NoPrefix, newTypeName("B")#TPE%M1, List())))))
            |[14] SingleType(SingleType(ThisType(scala#PK%M1), scala.Predef#MOD%M1), newTermName("Map")#GET%M1)
            |[15] SingleType(ThisType(scala#PK%M1), scala.Predef#MOD%M1)
            |[16] ThisType(scala#PK%M1)
            |[17] TypeRef(SingleType(ThisType(scala#PK%M1), scala.Predef#MOD%M1), newTypeName("String")#TPE%M1, List())
            |[18] SingleType(ThisType(<root>#PK%M1), scala#PK%M1)
            |[1] compiler mirror""".stripMargin
          val tree2 = """Apply[11](TypeApply[12](Select[13](Select[14](Select[15](This[16](TypeName("scala")), scala.Predef#MOD%M1), TermName("Map")#GET%M1), TermName("apply")#METH%M1), List(TypeTree[17]().setOriginal(Select[17](Select[15](This[16](TypeName("scala")), scala.Predef#MOD%M1), TypeName("String")#TPE%M1)), TypeTree[1]().setOriginal(Select[1](Ident[18](scala#PK%M1), scala.Int#CLS%M1)))), List())
            |[1] TypeRef(ThisType(scala#PKC%M1), scala.Int#CLS%M1, List())
            |[11] TypeRef(ThisType(scala.collection.immutable#PKC%M1), scala.collection.immutable.Map#TRT%M1, List(TypeRef(SingleType(ThisType(scala#PKC%M1), scala.Predef#MOD%M1), TypeName("String")#TPE%M1, List()), TypeRef(ThisType(scala#PKC%M1), scala.Int#CLS%M1, List())))
            |[12] MethodType(List(TermName("elems")#VAL%M1), TypeRef(ThisType(scala.collection.immutable#PKC%M1), scala.collection.immutable.Map#TRT%M1, List(TypeRef(SingleType(ThisType(scala#PKC%M1), scala.Predef#MOD%M1), TypeName("String")#TPE%M1, List()), TypeRef(ThisType(scala#PKC%M1), scala.Int#CLS%M1, List()))))
            |[13] PolyType(List(TypeName("A")#TPE%M1, TypeName("B")#TPE%M1), MethodType(List(TermName("elems")#VAL%M1), TypeRef(ThisType(scala.collection.immutable#PKC%M1), scala.collection.immutable.Map#TRT%M1, List(TypeRef(NoPrefix, TypeName("A")#TPE%M1, List()), TypeRef(NoPrefix, TypeName("B")#TPE%M1, List())))))
            |[14] SingleType(SingleType(ThisType(scala#PKC%M1), scala.Predef#MOD%M1), TermName("Map")#GET%M1)
            |[15] SingleType(ThisType(scala#PKC%M1), scala.Predef#MOD%M1)
            |[16] ThisType(scala#PKC%M1)
            |[17] TypeRef(SingleType(ThisType(scala#PKC%M1), scala.Predef#MOD%M1), TypeName("String")#TPE%M1, List())
            |[18] SingleType(ThisType(<root>#PKC%M1), scala#PK%M1)
            |[1] compiler mirror""".stripMargin
          project ! AstAtPointReq(SourceFileInfo(fooFile), OffsetRange(189, 206))
          expectMsgPF() {
            case AstInfo(ast) if {
              val stripped = ast.replaceAll("[\\n\\r]", "")
              stripped == tree2.replaceAll("[\\n\\r]", "") || stripped == oldTree2.replaceAll("[\\n\\r]", "")
            } =>
          }

          project ! TypecheckFilesReq(List(Left(fooFile), Left(barFile)))
          expectMsg(VoidResponse)
          asyncHelper.fishForMessage() {
            case FullTypeCheckCompleteEvent => true
            case _ => false
          }

          // Renaming the top-level Bar object also renames its file, so the
          // resulting diff touches three files: delete, edit, create.
          project ! RefactorReq(4321, RenameRefactorDesc("Renamed", barFile, 30, 30), false)
          expectMsgPF() {
            case RefactorDiffEffect(4321, RefactorType.Rename, diff) =>
              val renamedFile = new File(barFile.getPath.replace("Bar", "Renamed"))

              val barChanges = s"""|@@ -1,13 +0,0 @@
                                   |-package org.example
                                   |-
                                   |-object Bar extends App {
                                   |-  case class Foo(bar: String, baz: Int)
                                   |-  object Bla {
                                   |-    val foo: Foo = Foo(
                                   |-      bar = "Bar",
                                   |-      baz = 123
                                   |-    )
                                   |-
                                   |-    val fooUpd = foo.copy(bar = foo.bar.reverse)
                                   |-  }
                                   |-}
                                   |""".stripMargin
              val fooChanges = s"""|@@ -30,3 +30,3 @@
                                   |     List(1, 2, 3).head + 2
                                   |-    val x = Bar.Bla
                                   |+    val x = Renamed.Bla
                                   |   }
                                   |""".stripMargin
              val renamedChanges = s"""|@@ -0,0 +1,13 @@
                                       |+package org.example
                                       |+
                                       |+object Renamed extends App {
                                       |+  case class Foo(bar: String, baz: Int)
                                       |+  object Bla {
                                       |+    val foo: Foo = Foo(
                                       |+      bar = "Bar",
                                       |+      baz = 123
                                       |+    )
                                       |+
                                       |+    val fooUpd = foo.copy(bar = foo.bar.reverse)
                                       |+  }
                                       |+}
                                       |""".stripMargin

              val changes = Seq(
                (barFile.getPath, DeleteFile, barChanges),
                (fooFile.getPath, ChangeContents, fooChanges),
                (renamedFile.getPath, CreateFile, renamedChanges)
              )
              val expectedDiff = expectedDiffContent(changes)
              val diffContent = diff.canon.readString()
              diffContent should ===(expectedDiff)
          }
        }
      }
    }
  }
}
| d1egoaz/ensime-sbt | src/sbt-test/sbt-ensime/ensime-server/core/src/it/scala/org/ensime/intg/BasicWorkflow.scala | Scala | apache-2.0 | 23,769 |
package unit
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._
import play.api.test._
import play.api.test.Helpers._
import org.specs2.matcher.Matcher
import play.api.mvc.AnyContent
import play.api.mvc.Action
import Helpers._
@RunWith(classOf[JUnitRunner])
class JobsSpec extends Specification {

  /** Fetches `path` inside the running application and verifies the standard
    * jobs page skeleton ("Jobs", "Location", "Score", "Attributes") together
    * with the criterion-specific `fragments`. */
  private def assertJobsPage(path: String, fragments: String*): Unit = {
    val response = route(FakeRequest(GET, path)).get
    status(response) must equalTo(OK)
    contentType(response) must beSome.which(_ == "text/html")
    val body = contentAsString(response)
    ("Jobs" :: "Location" :: "Score" :: fragments.toList ::: "Attributes" :: Nil) foreach { fragment =>
      body must contain(fragment)
    }
  }

  "Jobs" should {

    "appear on the index page for criterion=priorities" in new WithApplication {
      assertJobsPage("/jobs?criterion=priorities", "9998", "9980", "Priorities")
    }

    "appear on the index page for criterion=skills" in new WithApplication {
      assertJobsPage("/jobs?criterion=skills", "96", "91", "Skills")
    }

    "support text/html content" in {
      "text/html" must beAcceptedBy(controllers.Jobs.index("skills"))
      "text/html" must beAcceptedBy(controllers.Jobs.index("priorities"))
    }

    "support application/json content" in {
      "application/json" must beAcceptedBy(controllers.Jobs.index("skills"))
      "application/json" must beAcceptedBy(controllers.Jobs.index("priorities"))
    }

    "return valid JSON" in {
      controllers.Jobs.index("skills") must returnValidJson
      controllers.Jobs.index("priorities") must returnValidJson
    }
  }
}
| fsteeg/appd | test/unit/JobsSpec.scala | Scala | epl-1.0 | 2,015 |
package services
import utils.Zookeeper
import kafka.utils.ZkUtils
import scala.collection._
import models.topics.Topics
import models.topics.Topic
import models.topics.DetailedTopics
import models.topics.BrokerTopicItem
object TopicService {

  /** Zookeeper path under which Kafka registers its topics. */
  val topicsZPath = "/brokers/topics"

  /**
   * Gets all the topic names registered in zookeeper.
   */
  def getTopics = {
    val zkClient = Zookeeper.getInstance()
    val topics = new Topics
    // The children of /brokers/topics are the topic names themselves.
    ZkUtils.getChildrenParentMayNotExist(zkClient, topicsZPath).foreach(topicName => topics.add(topicName))
    topics
  }

  /**
   * Gets all the topics with its details.
   * It will query zookeeper for the following path: /brokers/topics
   */
  def getTopicsWithDetails = {
    // Fix: the previous version also fetched a Zookeeper client here that was
    // never used; getTopicDetails obtains its own client.
    val detailedTopics = new DetailedTopics
    getTopics.getTopics.foreach(topicName => detailedTopics.add(getTopicDetails(topicName)))
    detailedTopics
  }

  /**
   * Gets the topic information with all the details according to the topic name
   * passed by parameter.
   * It will query zookeeper for the following path: /brokers/topics/{topicName parameter}
   * @param topicName: The topic name we are searching for.
   */
  def getTopicDetails(topicName: String): Topic = {
    val topicInfo = Topic(topicName)
    val zkClient = Zookeeper.getInstance()
    // Each child of the topic node is a broker id; the node's data holds that
    // broker's number of partitions for the topic.
    ZkUtils.getChildrenParentMayNotExist(zkClient, topicsZPath + "/" + topicName).foreach { brokerId =>
      val numOfPartitions = ZkUtils.readData(zkClient, topicsZPath + "/" + topicName + "/" + brokerId)
      topicInfo.add(BrokerTopicItem(brokerId, numOfPartitions))
    }
    topicInfo
  }
}
package org.eigengo.cm.api
import spray.testkit.Specs2RouteTest
import org.specs2.mutable.Specification
import akka.actor.{Props, Actor}
import org.eigengo.cm.core.CoordinatorActor.Begin
class RecogServiceSpec extends Specification with Specs2RouteTest with BasicRecogService {

  /** Session identifier the stub coordinator always hands back. */
  private val sessionId = "a10b2f45-87dd-4fe1-accf-3361763c1553"

  /** Stand-in for the real coordinator: answers every Begin with a fixed id. */
  class TestCoordinatorActor extends Actor {
    def receive: Receive = {
      case Begin(_) => sender ! sessionId
    }
  }

  "Basic recog service" should {
    val stubCoordinator = system.actorOf(Props(new TestCoordinatorActor))

    "return the session ID on post" in {
      Post("/recog") ~> normalRoute(stubCoordinator) ~> check {
        responseAs[String] mustEqual sessionId
      }
    }
  }
}
| janm399/lambdadays2014 | jvm/src/test/scala/org/eigengo/cm/api/RecogServiceSpec.scala | Scala | apache-2.0 | 740 |
package play.api.libs.iteratee
import scala.concurrent._
import scala.concurrent.duration.Duration
import scala.concurrent.ExecutionContext.Implicits.global
import org.specs2.mutable._
/**
 * Behavioural checks for the stock `Enumeratee` (and `Traversable`) adapters:
 * dropping, taking, transforming, filtering, grouping and scanning streamed
 * chunks. Each example pushes a known enumerator through an adapter into a
 * consuming iteratee and awaits the folded result.
 *
 * (Fixes over the previous revision: typos in example descriptions and the
 * duplicated "Enumeratee.grouped" block merged into one.)
 */
object EnumerateesSpec extends Specification {

  "Enumeratee.drop" should {

    "ignore 3 chunks when applied with 3" in {

      val drop3AndConsume = Enumeratee.drop[String](3) &>>  Iteratee.consume[String]()
      val enumerator = Enumerator(Range(1,20).map(_.toString) :_*)
      Await.result(enumerator |>>> drop3AndConsume, Duration.Inf) must equalTo(Range(4,20).map(_.toString).mkString)

    }

  }

  "Enumeratee.dropWhile" should {

    "ignore chunks while predicate is valid" in {

      val drop3AndConsume = Enumeratee.dropWhile[String](_ != "4") &>>  Iteratee.consume[String]()
      val enumerator = Enumerator(Range(1,20).map(_.toString) :_*)
      Await.result(enumerator |>>> drop3AndConsume, Duration.Inf) must equalTo(Range(4,20).map(_.toString).mkString)

    }

  }

  "Enumeratee.take" should {

    "pass only first 3 chunks to Iteratee when applied with 3" in {

      val take3AndConsume = Enumeratee.take[String](3) &>> Iteratee.consume()
      val enumerator = Enumerator(Range(1,20).map(_.toString) :_*)
      Await.result(enumerator |>>> take3AndConsume, Duration.Inf) must equalTo(List(1,2,3).map(_.toString).mkString)

    }

    "passes along what's left of chunks after taking 3" in {

      val take3AndConsume = (Enumeratee.take[String](3) &>> Iteratee.consume()).flatMap(_ => Iteratee.consume())
      val enumerator = Enumerator(Range(1,20).map(_.toString) :_*)
      Await.result(enumerator |>>> take3AndConsume, Duration.Inf) must equalTo(Range(4,20).map(_.toString).mkString)

    }

  }

  "Enumeratee.takeWhile" should {

    "pass chunks until condition is not met" in {

      val take3AndConsume = Enumeratee.takeWhile[String](_ != "4" ) &>> Iteratee.consume()
      val enumerator = Enumerator(Range(1,20).map(_.toString) :_*)
      Await.result(enumerator |>>> take3AndConsume, Duration.Inf) must equalTo(List(1,2,3).map(_.toString).mkString)

    }

    "passes along what's left of chunks after taking" in {

      val take3AndConsume = (Enumeratee.takeWhile[String](_ != "4") &>> Iteratee.consume()).flatMap(_ => Iteratee.consume())
      val enumerator = Enumerator(Range(1,20).map(_.toString) :_*)
      Await.result(enumerator |>>> take3AndConsume, Duration.Inf) must equalTo(Range(4,20).map(_.toString).mkString)

    }

  }

  "Traversable.take" should {

    "pass only first 3 elements to Iteratee when applied with 3" in {

      val take3AndConsume = Traversable.take[String](3) &>> Iteratee.consume()
      val enumerator = Enumerator("he","ybbb","bbb")
      Await.result(enumerator |>>> take3AndConsume, Duration.Inf) must equalTo("hey")

    }

    "pass along what's left after taking 3 elements" in {

      val take3AndConsume = (Traversable.take[String](3) &>> Iteratee.consume()).flatMap(_ => Iteratee.consume())
      val enumerator = Enumerator("he","ybbb","bbb")
      Await.result(enumerator |>>> take3AndConsume, Duration.Inf) must equalTo("bbbbbb")

    }

  }

  "Enumeratee.map" should {

    "add one to each of the ints enumerated" in {

      val add1AndConsume = Enumeratee.map[Int](i => List(i+1)) &>> Iteratee.consume()
      val enumerator = Enumerator(1,2,3,4)
      Await.result(enumerator |>>> add1AndConsume, Duration.Inf) must equalTo(Seq(2,3,4,5))

    }

    "infer its types correctly from previous enumeratee" in {

      val add1AndConsume = Enumeratee.map[Int](i => i+1) ><> Enumeratee.map(i => List(i)) &>> Iteratee.consume()
      add1AndConsume : Iteratee[Int,List[Int]]
      true //this test is about compilation and if it compiles it means we got it right

    }

    "infer its types correctly from the preceeding enumerator" in {

      val addOne = Enumerator(1,2,3,4) &> Enumeratee.map(i => i+1)
      addOne : Enumerator[Int]
      true //this test is about compilation and if it compiles it means we got it right

    }

  }

  "Enumeratee.flatten" should {

    "passAlong a future enumerator" in {

      val passAlongFuture = Enumeratee.flatten {
        concurrent.future {
          Enumeratee.passAlong[Int]
        }
      }
      val sum = Iteratee.fold[Int, Int](0)(_+_)
      val enumerator = Enumerator(1,2,3,4,5,6,7,8,9)
      Await.result(enumerator |>>> passAlongFuture &>> sum, Duration.Inf) must equalTo(45)

    }

  }

  "Enumeratee.filter" should {

    "ignore input that doesn't satisfy the predicate" in {

      val takesOnlyStringsWithLessThan4Chars = Enumeratee.filter[String](_.length < 4) &>> Iteratee.consume()
      val enumerator = Enumerator("One","Two","Three","Four", "Five", "Six")
      Await.result(enumerator |>>> takesOnlyStringsWithLessThan4Chars, Duration.Inf) must equalTo("OneTwoSix")

    }

  }

  "Enumeratee.collect" should {

    "ignore input that doesn't satisfy the predicate and transform the input when it matches" in {

      val takesOnlyStringsWithLessThan4Chars = Enumeratee.collect[String]{ case e@("One" | "Two" | "Six") => e.toUpperCase } &>> Iteratee.consume()
      val enumerator = Enumerator("One","Two","Three","Four", "Five", "Six")
      Await.result(enumerator |>>> takesOnlyStringsWithLessThan4Chars, Duration.Inf) must equalTo("ONETWOSIX")

    }

  }

  "Enumeratee.grouped" should {

    "group input elements according to a folder iteratee" in {

      // The folder consumes until the sentinel "Concat" arrives, which it
      // turns into EOF for the current group.
      val folderIteratee =
        Enumeratee.mapInput[String]{
          case Input.El("Concat") => Input.EOF;
          case other => other } &>>
          Iteratee.fold("")((s,e) => s + e)

      val result =
        Enumerator("He","ll","o","Concat", "Wo", "r", "ld", "Concat","!") &>
        Enumeratee.grouped(folderIteratee) ><>
        Enumeratee.map(List(_)) |>>>
        Iteratee.consume()
      Await.result(result, Duration.Inf) must equalTo(List("Hello","World","!"))

    }

    "pass along what is consumed by the last folder iteratee on EOF" in {

      val upToSpace = Traversable.splitOnceAt[String,Char](c => c != '\\n')  &>> Iteratee.consume()

      val result = (Enumerator("dasdasdas ", "dadadasda\\nshouldb\\neinnext") &> Enumeratee.grouped(upToSpace) ><> Enumeratee.map(_+"|")) |>>> Iteratee.consume[String]()
      Await.result(result, Duration.Inf) must equalTo("dasdasdas dadadasda|shouldb|einnext|")

    }

  }

  "Enumeratee.scanLeft" should {

    "transform elements using a state" in {

      val result =
        Enumerator(1,2,3,4) &>
        Enumeratee.scanLeft[Int](0)(_ + _) ><>
        Enumeratee.map(List(_)) |>>>
        Iteratee.consume()

      Await.result(result, Duration.Inf) must equalTo(List(1,3,6,10))

    }

  }

}
| noel-yap/setter-for-catan | play-2.1.1/framework/src/iteratees/src/test/scala/play/api/libs/iteratee/EnumerateesSpec.scala | Scala | apache-2.0 | 6,813 |
package com.learning.akka.typedactors.remoting
import java.util.concurrent.TimeUnit
import akka.actor._
import akka.util.Timeout
import com.typesafe.config.ConfigFactory
import scala.concurrent.duration._
import scala.collection.concurrent.TrieMap
/** Contract for the message processor that Master exposes as a typed actor. */
trait Processor {
  def process(message: String): String
}

/** Trivial implementation: tags the incoming message and echoes it back. */
class ProcessorImpl extends Processor {
  override def process(message: String): String = "processed message: " + message
}
// Protocol messages exchanged between Master and its workers.
case class RegisterWorker(workerId: String) // worker -> master: announce availability
case class DispatchWork(processorRef: ActorRef) // master -> worker: ref to the shared Processor actor
case object SendDispatchWork // self-scheduled tick telling the master to fan out work
/**
 * Registry actor: workers announce themselves with RegisterWorker, and on a
 * periodic self-sent SendDispatchWork tick the master hands every registered
 * worker a reference to the typed Processor actor.
 */
class Master extends Actor {
  // workerId -> worker ActorRef. TrieMap is thread-safe; all visible mutation
  // happens inside receive, so this appears to be belt-and-braces.
  private val idToWorkers: TrieMap[String, ActorRef] = TrieMap[String, ActorRef]()
  // Used both as the scheduler's initial delay and its repeat interval below.
  val duration: FiniteDuration = 5 seconds
  // Implicit Timeout for ask operations (none occur in the visible code).
  implicit val timeout = Timeout(duration)
  // ExecutionContext required by the scheduler call at the bottom.
  implicit val executionContext = context.system.dispatcher

  // Define typed actor here
  val processorActor = TypedActor(context.system).typedActorOf(TypedProps(classOf[Processor], new ProcessorImpl), "ProcessorImpl")
  // Plain ActorRef view of the typed actor, suitable for sending to workers.
  val processorActorRef = TypedActor(context.system).getActorRefFor(processorActor)

  override def preStart() = {
    println(s"Master started: ${self.path}, ${self.path.address}")
  }

  def receive = {
    case RegisterWorker(workerId) =>
      println(s"register worker: $workerId")
      idToWorkers += workerId -> sender()
    case SendDispatchWork =>
      println("dispatch works to workers")
      for ((workerId, worker) <- idToWorkers) {
        println(s"dispatch work to $workerId")
        worker ! DispatchWork(processorActorRef)
      }
  }

  // Constructor-time schedule: every `duration`, remind ourselves to fan
  // work out to all currently registered workers.
  context.system.scheduler.schedule(duration, duration, self, SendDispatchWork)
}
object Master {
  /** Boots the master actor system from the `master` section of remote.conf. */
  def main(args: Array[String]): Unit = {
    val masterConfig = ConfigFactory.load("remote").getConfig("master")
    val actorSystem = ActorSystem("MasterSystem", masterConfig)
    actorSystem.actorOf(Props[Master], "Master")
  }
}
| lgrcyanny/LearningAkka | src/main/scala/com/learning/akka/typedactors/remoting/Master.scala | Scala | gpl-2.0 | 1,844 |
/**
* Copyright (C) 2010-2012 LShift Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.lshift.diffa.agent.itest
import support.TestConstants.{ agentURL, yesterday }
import net.lshift.diffa.agent.client.ConfigurationRestClient
import net.lshift.diffa.kernel.frontend.EndpointDef
import net.lshift.diffa.adapter.changes.ChangeEvent
import org.junit.Assert.fail
import net.lshift.diffa.client.{RateLimitExceededException, ChangesRestClient}
import net.lshift.diffa.kernel.client.ChangesClient
import org.junit.{Before, Test}
import com.hazelcast.util.Clock
import org.apache.commons.lang3.RandomStringUtils
class ChangeEventRateLimitingTest extends IsolatedDomainTest {

  // Creation time (epoch millis) of the changes client; baseline for the
  // one-second window within which further submissions must be rejected.
  var clientCreateTime: Long = 0L
  var changesClient: ChangesClient = _
  var event: ChangeEvent = _
  // Fresh random endpoint name per run so state from earlier runs cannot interfere.
  val endpoint = RandomStringUtils.randomAlphanumeric(10)
  val lastUpdated = yesterday
  // Expected agent default: at most one change event per second.
  val ratePerSecondLimit = 1

  @Before
  def initializeChangesClient {
    new ConfigurationRestClient(agentURL, isolatedDomain).declareEndpoint(EndpointDef(name = endpoint))
    clientCreateTime = Clock.currentTimeMillis()
    changesClient = new ChangesRestClient(agentURL, isolatedDomain, endpoint)
    event = ChangeEvent.forChange("id", "aaff00001111", lastUpdated)
    // Make sure that no previous change events interfere with the acceptance of
    // the next test.
    Thread.sleep(1000 / ratePerSecondLimit)
  }

  @Test
  def shouldAcceptFirstEvent {
    try {
      changesClient.onChangeEvent(event)
    } catch {
      case x: RateLimitExceededException =>
        fail("First event was rate limited, but should not have been")
    }
  }

  @Test
  def givenDefaultConfigurationAndRateLimitAlreadyReachedWhenSubsequentChangeEventReceivedThenRejectEventSubmission {
    try {
      changesClient.onChangeEvent(event)
      // After a successful first submission, every retry within the same
      // one-second window must raise RateLimitExceededException.
      assertFailUntil(clientCreateTime + 1000L)
    } catch {
      case x: Exception =>
        fail("Unexpected failure of first change event submission: " + x.toString)
    }
  }

  // Repeatedly resubmits the event until sysTimeMillis, asserting that every
  // attempt made before the deadline is rejected by the rate limiter.
  private def assertFailUntil(sysTimeMillis: Long) {
    val retryFrequency = 50 // milliseconds
    while (Clock.currentTimeMillis < sysTimeMillis) {
      try {
        changesClient.onChangeEvent(event)
        // check the time again in case the previous call took a while to execute,
        // in which case it's not necessarily true that the action should have been
        // rate limited.
        if (Clock.currentTimeMillis < sysTimeMillis) {
          fail("Change Event submission was expected to raise an exception due to violating the rate limit, but succeeded")
        }
      } catch {
        case x: RateLimitExceededException => // expected: the limiter rejected the event
      }
      Thread.sleep(retryFrequency)
    }
  }
}
| lshift/diffa | agent/src/test/scala/net/lshift/diffa/agent/itest/ChangeEventRateLimitingTest.scala | Scala | apache-2.0 | 3,218 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.qscript.analysis
import slamdata.Predef.{Map => _, _}
import quasar.fp._
import quasar.fp.ski._
import quasar.qscript._
import matryoshka.{Hole => _, _}
import matryoshka.data.free._
import matryoshka.implicits._
import matryoshka.patterns._
import scalaz._, Scalaz._
import simulacrum._
/** Determines if `IN[_]` preserves the shape of its underlying `ShiftedRead`.
* If it does preserve that shape then it returns the `IdStatus` of that
* `ShiftedRead`. If it doesn't then `none` is returned.
* This can easily be extended to also include `Read`, possibly with extra info
* to indicate whether the underlying read was `Read` or `ShiftedRead`.
*/
@typeclass
trait ShapePreserving[IN[_]] {
  // Algebra folding one layer of IN into the IdStatus of the underlying
  // ShiftedRead, or `none` when that layer does not preserve its shape.
  def shapePreservingƒ: Algebra[IN, Option[IdStatus]]
}
object ShapePreserving {

  /** Folds an entire recursive structure `t`, yielding the preserved `IdStatus`, if any. */
  def shapePreserving[F[_]: Functor, T](t: T)(implicit
    RT: Recursive.Aux[T, F],
    SP: ShapePreserving[F])
      : Option[IdStatus]
    = t.cata(SP.shapePreservingƒ)

  /** As `shapePreserving`, but over a `Free` structure; `f` handles the leaves. */
  def shapePreservingF[F[_]: Functor, A]
    (fa: Free[F, A])
    (f: A => Option[IdStatus])
    (implicit SP: ShapePreserving[F])
      : Option[IdStatus] =
    fa.cata(interpret(f, SP.shapePreservingƒ))

  /** Folds a structure whose pattern functor `G` embeds `F` via the prism `GtoF`. */
  def shapePreservingP[F[_], G[_]: Functor, T](t: T, GtoF: PrismNT[G, F])(implicit
    RT: Recursive.Aux[T, G],
    SP: ShapePreserving[F])
      : Option[IdStatus]
    = t.cata(prismNT(GtoF).shapePreservingƒ)

  /** Derives an instance for `G` from one for `F` via a prism; layers outside
    * the prism's image are treated as not shape-preserving.
    */
  def prismNT[F[_], G[_]](GtoF: PrismNT[G, F])
    (implicit F: ShapePreserving[F])
      : ShapePreserving[G] =
    new ShapePreserving[G] {
      def shapePreservingƒ: Algebra[G, Option[IdStatus]] =
        x => GtoF.get(x).flatMap(F.shapePreservingƒ)
    }

  implicit def coproduct[F[_], G[_]]
    (implicit F: ShapePreserving[F], G: ShapePreserving[G])
      : ShapePreserving[Coproduct[F, G, ?]] =
    new ShapePreserving[Coproduct[F, G, ?]] {
      def shapePreservingƒ: Algebra[Coproduct[F, G, ?], Option[IdStatus]] =
        _.run.fold(F.shapePreservingƒ, G.shapePreservingƒ)
    }

  // A ShiftedRead trivially preserves its own shape, exposing its IdStatus.
  implicit def constShiftedRead[A]: ShapePreserving[Const[ShiftedRead[A], ?]] =
    new ShapePreserving[Const[ShiftedRead[A], ?]] {
      def shapePreservingƒ: Algebra[Const[ShiftedRead[A], ?], Option[IdStatus]] =
        _.getConst.idStatus.some
    }

  implicit def constRead[A]: ShapePreserving[Const[Read[A], ?]] =
    notShapePreserving[Const[Read[A], ?]]

  implicit val constDeadEnd: ShapePreserving[Const[DeadEnd, ?]] =
    notShapePreserving[Const[DeadEnd, ?]]

  @SuppressWarnings(Array("org.wartremover.warts.Recursion"))
  implicit def qscriptCore[T[_[_]]]: ShapePreserving[QScriptCore[T, ?]] =
    new ShapePreserving[QScriptCore[T, ?]] {
      def shapePreservingƒ: Algebra[QScriptCore[T, ?], Option[IdStatus]] = {
        case Map(_, _) => none
        case LeftShift(_, _, _, _, _, _) => none
        case Reduce(_, _, _, _) => none
        case Sort(src, _, _) => src
        case Union(src, l, r) =>
          // Union preserves shape only when both branches agree on the status.
          val lId = shapePreservingF(l)(κ(src))
          val rId = shapePreservingF(r)(κ(src))
          if (lId === rId) lId else None
        case Filter(src, _) => src
        case Subset(src, from, _, _) => shapePreservingF(from)(κ(src))
        case Unreferenced() => none
      }
    }

  implicit def projectBucket[T[_[_]]]: ShapePreserving[ProjectBucket[T, ?]] =
    notShapePreserving[ProjectBucket[T, ?]]

  implicit def thetaJoin[T[_[_]]]: ShapePreserving[ThetaJoin[T, ?]] =
    notShapePreserving[ThetaJoin[T, ?]]

  implicit def equiJoin[T[_[_]]]: ShapePreserving[EquiJoin[T, ?]] =
    notShapePreserving[EquiJoin[T, ?]]

  implicit def coEnv[E, G[_], A](implicit SP: ShapePreserving[G]): ShapePreserving[CoEnv[E, G, ?]] =
    new ShapePreserving[CoEnv[E, G, ?]] {
      def shapePreservingƒ: Algebra[CoEnv[E, G, ?], Option[IdStatus]] =
        _.run.map(SP.shapePreservingƒ).getOrElse(none)
    }

  /** Constant instance for functors that never preserve shape.
    * Explicit result type added: public members should not expose an inferred
    * anonymous-subclass type.
    */
  def notShapePreserving[F[_]]: ShapePreserving[F] =
    new ShapePreserving[F] {
      def shapePreservingƒ: Algebra[F, Option[IdStatus]] = κ(none)
    }
}
| jedesah/Quasar | connector/src/main/scala/quasar/qscript/analysis/ShapePreserving.scala | Scala | apache-2.0 | 4,575 |
package io.drakon.forgeraven.logging
/**
* Processing utilities for wrangling Log4j2 to behave.
*
* @author Arkan
*/
object LogHacks {
  // TODO: Bully log4j2 horribly and without mercy, and insert Appenders into it.
  // NOTE(review): placeholder only — no Log4j2 manipulation is implemented yet.
}
| Emberwalker/ForgeRaven | src/main/scala/io/drakon/forgeraven/logging/LogHacks.scala | Scala | mit | 226 |
package mockws
import akka.NotUsed
import akka.stream.scaladsl.Source
import akka.util.ByteString
import mockws.MockWSHelpers._
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks
import play.api.http.HttpEntity
import play.api.libs.ws.WSClient
import play.api.mvc.MultipartFormData.DataPart
import play.api.mvc.MultipartFormData.FilePart
import play.api.mvc.MultipartFormData.Part
import play.api.mvc.Results._
import play.api.mvc.ResponseHeader
import play.api.mvc.Result
import play.api.test.FakeRequest
import play.api.test.Helpers._
import scala.collection.immutable.Seq
import scala.concurrent.ExecutionContext.Implicits._
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
/**
* Tests that [[MockWS]] simulates a WS client, in particular the methods involving authentication
*/
class StreamingTest extends AnyFunSuite with Matchers with ScalaCheckPropertyChecks {

  test("mock WS simulates a streaming") {
    // Controller under test: proxies the mocked WS response (status, headers, body stream).
    def testedController(ws: WSClient) = Action.async {
      ws.url("/").stream().map { resp =>
        Result(
          // ResponseHeader wants single-valued headers, so keep each header's first value.
          header = ResponseHeader(resp.status, resp.headers.map { case (k, v) => (k, v.head) }),
          body = HttpEntity.Streamed(resp.bodyAsSource, None, None)
        )
      }
    }
    val ws = MockWS { case (GET, "/") =>
      Action {
        val body: Source[ByteString, _] = Source(Seq("first", "second", "third").map(ByteString.apply))
        Result(
          header = ResponseHeader(201, Map("x-header" -> "x-value")),
          body = HttpEntity.Streamed(body, None, None)
        )
      }
    }
    val response = testedController(ws).apply(FakeRequest())
    // Status, concatenated stream chunks and the custom header must all pass through.
    status(response) shouldEqual CREATED
    contentAsString(response) shouldEqual "firstsecondthird"
    header("x-header", response) shouldEqual Some("x-value")
    ws.close()
  }

  test("mock WS supports streaming of MultipartFormData") {
    // The mocked endpoint echoes the sorted data parts of the multipart body.
    val ws = MockWS { case (PUT, "/") =>
      Action { request =>
        request.body.asMultipartFormData match {
          case None => InternalServerError("error")
          case Some(data) => Ok(data.dataParts.toList.sortBy(_._1).mkString(", "))
        }
      }
    }
    val fileData: Source[Part[Source[ByteString, _]], NotUsed] = Source(
      FilePart("file", "", Some(BINARY), Source.single(ByteString("test"))) ::
      DataPart("key 1", "data 1") ::
      DataPart("key 2", "data 2") ::
      Nil
    )
    val response = await(ws.url("/").put(fileData))
    // Only the data parts are echoed; the file part is not included in the output.
    response.body shouldEqual "(key 1,Vector(data 1)), (key 2,Vector(data 2))"
    ws.close()
  }

  test("mock WS supports method in stream") {
    // Same round-trip as the first test, but the HTTP method is set via withMethod.
    def testedController(ws: WSClient) = Action.async {
      ws.url("/").withMethod("POST").stream().map { resp =>
        Result(
          header = ResponseHeader(resp.status, resp.headers.map { case (k, v) => (k, v.head) }),
          body = HttpEntity.Streamed(resp.bodyAsSource, None, None)
        )
      }
    }
    val ws = MockWS { case (POST, "/") =>
      Action {
        val body: Source[ByteString, _] = Source(Seq("first", "second", "third").map(ByteString.apply))
        Result(
          header = ResponseHeader(201, Map("x-header" -> "x-value")),
          body = HttpEntity.Streamed(body, None, None)
        )
      }
    }
    val response = testedController(ws).apply(FakeRequest())
    status(response) shouldEqual CREATED
    contentAsString(response) shouldEqual "firstsecondthird"
    header("x-header", response) shouldEqual Some("x-value")
    ws.close()
  }

  test("should pass through all elements of a Source") {
    val content = Source(Seq("hello, ", "world").map(ByteString(_)))
    val ws = MockWS { case (GET, "/get") =>
      Action {
        Result(
          header = ResponseHeader(200),
          body = HttpEntity.Streamed(content, None, None)
        )
      }
    }
    await(
      ws.url("/get")
        .get()
    ).body shouldEqual "hello, world"
    ws.close()
  }

  // Echo action shared by the POST/PUT/PATCH tests below: streams back the
  // request method followed by every "k1" value found in the multipart body.
  val streamBackAction = Action { req =>
    val inputWords: Seq[String] = Seq() ++ req.body.asMultipartFormData.toSeq.flatMap(_.dataParts("k1"))
    val returnWords = Seq(req.method + ": ") ++ inputWords
    val outputStream: Source[ByteString, _] = Source(returnWords.map(v => ByteString(v)))
    Result(
      header = ResponseHeader(200),
      body = HttpEntity.Streamed(outputStream, None, None)
    )
  }

  test("receive a stream of back what we sent as [POST]") {
    val content = Source(Seq("hello,", " this", " is", " world")).map(v => DataPart("k1", v))
    val ws = MockWS { case (POST, "/post") =>
      streamBackAction
    }
    await(ws.url("/post").post(content)).body shouldEqual "POST: hello, this is world"
    ws.close()
  }

  test("receive a stream of back what we sent as [PUT]") {
    val content = Source(Seq("hello,", " this", " is", " world")).map(v => DataPart("k1", v))
    val ws = MockWS { case (PUT, "/put") =>
      streamBackAction
    }
    await(ws.url("/put").put(content)).body shouldEqual "PUT: hello, this is world"
    ws.close()
  }

  test("receive a stream of back what we sent as [PATCH]") {
    val content = Source(Seq("hello,", " this", " is", " world")).map(v => DataPart("k1", v))
    val ws = MockWS { case (PATCH, "/patch") =>
      streamBackAction
    }
    await(ws.url("/patch").patch(content)).body shouldEqual "PATCH: hello, this is world"
    ws.close()
  }
}
| leanovate/play-mockws | src/test/scala/mockws/StreamingTest.scala | Scala | mit | 5,413 |
/*
* ParticleFilter.scala
* Particle Filtering
*
* Created By: Avi Pfeffer (apfeffer@cra.com)
* Creation Date: Jan 1, 2009
*
* Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc.
* See http://www.cra.com or email figaro@cra.com for information.
*
* See http://www.github.com/p2t2/figaro for a copy of the software license.
*/
package com.cra.figaro.algorithm.filtering
import com.cra.figaro.algorithm.sampling._
import com.cra.figaro.language._
import com.cra.figaro.util._
import sun.swing.AccumulativeRunnable
/**
* An abstract class of particle filters.
* A particle filter is provided with three models:
* a static model, containing a universe defining a distribution over static elements that do not change over time;
* an initial model, containing a universe defining a distribution over the initial state of time-varying elements;
* and a transition model, which is a function from the previous universe to a new universe. defining the way the distribution over the new state
* of the time-varying variables depends on their values in the previous state.
* The fourth argument to the particle filter is the number of particles to use at each time step.
*
* The particle filter works in an online fashion. At each point in time, it maintains its current beliefs about the state of the system as a set of
* representative states. advanceTime is used to move forward one time step. The particle filter updates its beliefs in light
* of the new evidence.
*
* @param static A universe with static elements that do not change over time.
* @param intitial The universe describing the initial distribution of the model.
* @param transition The transition function that returns a new universe from a static and previous universe, respectively.
*/
abstract class ParticleFilter(static: Universe = new Universe(), initial: Universe, transition: (Universe, Universe) => Universe, numParticles: Int)
  extends Filtering(static, initial, transition) {

  /** The belief about the state of the system at the current point in time. */
  val beliefState: ParticleFilter.BeliefState = Array.fill(numParticles)(null)

  // Accumulated log-probability of all evidence asserted so far (one term per time step).
  protected var logProbEvidence: Double = 0.0
  protected var previousUniverse: Universe = _
  protected var currentUniverse = initial

  /**
   * Returns the expectation of the element referred to by the reference
   * under the given function at the current time point.
   */
  def computeCurrentExpectation[T](reference: Reference[T], function: T => Double): Double = {
    val fValues: Seq[Double] = beliefState.map(state => function(state.get(reference)))
    // Plain average over particles. `sum` replaces the deprecated symbolic
    // foldRight (`:\`) and the redundant `.toDouble` on an already-Double total.
    fValues.sum / numParticles
  }

  /**
   * Returns the distribution over the element referred to by the reference at the current time point.
   */
  def computeCurrentDistribution[T](reference: Reference[T]): Stream[(Double, T)] = {
    // Count occurrences of each value across the particles...
    val map = scala.collection.mutable.Map[T, Int]()
    for {
      state <- beliefState
    } {
      val t = state.get(reference)
      val prevCount = map.getOrElse(t, 0)
      map += t -> (prevCount + 1)
    }
    // ...then normalize the counts into probabilities.
    val z = 1.0 / beliefState.size
    val normalized = map.toList.map((pair: (T, Int)) => (pair._2 * z, pair._1))
    normalized.toStream
  }

  /*
   * Careful: makeWeightedParticle overwrites the previous state with the new state. That means we can't use it to generate another new particle from the same previous
   * state. The reason for this design is to avoid creating new snapshots and states to conserve memory.
   */
  protected def makeWeightedParticle(previousState: State): ParticleFilter.WeightedParticle = {
    Forward(currentUniverse)
    // avoiding recursion
    var satisfied = true
    var conditionedElementsRemaining = currentUniverse.conditionedElements
    while (!conditionedElementsRemaining.isEmpty) {
      satisfied &= conditionedElementsRemaining.head.conditionSatisfied
      conditionedElementsRemaining = conditionedElementsRemaining.tail
    }
    // Weight is the product of the constraint factors, or 0 when any condition fails.
    val weight =
      if (satisfied) {
        var w = 1.0
        var constrainedElementsRemaining = currentUniverse.constrainedElements
        while (!constrainedElementsRemaining.isEmpty) {
          w *= math.exp(constrainedElementsRemaining.head.constraintValue)
          constrainedElementsRemaining = constrainedElementsRemaining.tail
        }
        w
      } else 0.0
    val snapshot = new Snapshot
    snapshot.store(currentUniverse)
    val state = new State(snapshot, previousState.static)
    (weight, state)
  }

  // Resamples the weighted particles into the (unweighted) belief state.
  private[figaro] def updateBeliefState(weightedParticles: Seq[ParticleFilter.WeightedParticle]) {
    // If all the particles have weight 1, there is no need to resample
    // If all the particles have weight 0, none of them satisfy the conditions, so the best we can do is produce a uniform distribution over them.
    if (weightedParticles.forall(_._1 == 1.0) || weightedParticles.forall(_._1 == 0.0)) {
      val weightedParticleArray = weightedParticles.toArray
      for { i <- 0 until numParticles } {
        beliefState(i) = weightedParticleArray(i)._2
      }
    } else {
      val resampler = new MapResampler(weightedParticles)
      for { i <- 0 until numParticles } {
        beliefState(i) = resampler.resample()
      }
    }
  }

  private[figaro] def computeProbEvidence(weightedParticles: Seq[ParticleFilter.WeightedParticle]) {
    // compute probability of evidence here by taking the average weight of the weighted particles and store it so you can later return it as a query result
    val weightedParticleArray = weightedParticles.toArray
    val sum = weightedParticleArray.map(_._1).sum
    logProbEvidence = logProbEvidence + scala.math.log(sum / numParticles)
  }

  // Restores the particle at `index` into the previous/static universes, asserts
  // the new evidence, and samples a fresh weighted particle from the current universe.
  protected def addWeightedParticle(evidence: Seq[NamedEvidence[_]], index: Int): ParticleFilter.WeightedParticle = {
    val previousState = beliefState(index)
    previousState.dynamic.restore(previousUniverse)
    previousState.static.restore(static)
    currentUniverse.assertEvidence(evidence)
    val result = makeWeightedParticle(previousState)
    result
  }

  // Samples a particle for the initial time step, first sampling the static universe.
  protected def initialWeightedParticle(): ParticleFilter.WeightedParticle = {
    Forward(static)
    val staticSnapshot = new Snapshot
    staticSnapshot.store(static)
    val state = new State(new Snapshot, staticSnapshot)
    makeWeightedParticle(state)
  }

  /*
   * Advance the universe one time step.
   * The previous universe becomes a copy of the current universe with all named elements replaced by constants.
   * This is done so we don't have to store the previous universe (and the universes previous to it), and we can release the memory.
   */
  protected def advanceUniverse() {
    previousUniverse = Universe.createNew()
    for { element <- currentUniverse.activeElements.filter(!_.name.isEmpty) } {
      new Settable(element.name.string, element.value, previousUniverse)
    }
    currentUniverse = transition(static, previousUniverse)
  }

  /** Returns the accumulated log-probability of all evidence seen so far. */
  def getlogProbEvidence(): Double = {
    logProbEvidence
  }

  /** Returns the probability of all evidence seen so far (exp of the log-probability). */
  def probEvidence(): Double = {
    val probEvidence = scala.math.exp(logProbEvidence)
    probEvidence
  }
}
/**
* A one-time particle filter.
*
* @param static The universe of elements whose values do not change over time
* @param initial The universe describing the distribution over the initial state of the system
* @param transition The transition model describing how the current state of the system depends on the previous
* @param numParticles The number of particles to use at each time step
*/
class OneTimeParticleFilter(static: Universe = new Universe(), initial: Universe, transition: (Universe, Universe) => Universe, numParticles: Int)
  extends ParticleFilter(static, initial, transition, numParticles) with OneTimeFiltering {

  // Generates one weighted particle per slot, then folds them into the
  // probability-of-evidence estimate and the resampled belief state.
  private def step(makeParticle: Int => ParticleFilter.WeightedParticle): Unit = {
    val weightedParticles = (0 until numParticles).map(makeParticle)
    // compute probability of evidence here by taking the average weight of the weighted particles and store it so you can later return it as a query result
    computeProbEvidence(weightedParticles)
    updateBeliefState(weightedParticles)
  }

  /**
   * Begin the particle filter, determining the initial distribution.
   */
  def run(): Unit = step(_ => initialWeightedParticle())

  /**
   * Advance the filtering one time step, conditioning on the given evidence at the new time point.
   */
  def advanceTime(evidence: Seq[NamedEvidence[_]] = List()): Unit = {
    advanceUniverse()
    step(addWeightedParticle(evidence, _))
  }
}
object ParticleFilter {
  /**
   * A one-time particle filter.
   *
   * @param static The universe of elements whose values do not change over time
   * @param initial The universe describing the distribution over the initial state of the system
   * @param transition The transition model describing how the current state of the system depends on the static and previous, respectively
   * @param numParticles Number of particles to use at each time step
   */
  def apply(static: Universe, initial: Universe, transition: (Universe, Universe) => Universe, numParticles: Int): OneTimeParticleFilter =
    new OneTimeParticleFilter(static, initial, transition, numParticles)
  /**
   * A one-time particle filter.
   *
   * @param static The universe of elements whose values do not change over time
   * @param initial The universe describing the distribution over the initial state of the system
   * @param transition The transition model describing how the current state of the system depends on the previous
   * @param numParticles Number of particles to use at each time step
   */
  @deprecated("If the static universe is defined, use the constructor where the transition function takes two universes", "2.3.0.0")
  def apply(static: Universe, initial: Universe, transition: Universe => Universe, numParticles: Int): OneTimeParticleFilter =
    // Adapts the one-universe transition to the two-universe form, ignoring `static`.
    new OneTimeParticleFilter(static, initial, (static: Universe, previous: Universe) => transition(previous), numParticles)
  /**
   * A one-time particle filter in which the static universe is empty.
   *
   * @param initial The universe describing the distribution over the initial state of the system
   * @param transition The transition model describing how the current state of the system depends on the previous
   * @param numParticles Number of particles to use at each time step
   */
  def apply(initial: Universe, transition: Universe => Universe, numParticles: Int): OneTimeParticleFilter =
    apply(new Universe(), initial, (static: Universe, previous: Universe) => transition(previous), numParticles)
  /**
   * A representation of the current beliefs of the particle filter.
   * A BeliefState should not be confused with a State, which is a particular configuration of the system.
   * A BeliefState represents a distribution over States, and in a particle filter, it is implemented as a collection of representative States.
   */
  type BeliefState = Array[State] // dynamic and static
  /** Weighted particles, consisting of a weight and a state. */
  type WeightedParticle = (Double, State)
}
| bruttenberg/figaro | Figaro/src/main/scala/com/cra/figaro/algorithm/filtering/ParticleFilter.scala | Scala | bsd-3-clause | 11,221 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.keras.layers
import com.intel.analytics.bigdl.dllib.nn.abstractnn.AbstractModule
import com.intel.analytics.bigdl.dllib.nn.internal.KerasLayer
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.utils.Shape
import com.intel.analytics.bigdl.dllib.keras.layers.utils.KerasUtils
import scala.reflect.ClassTag
/**
 * Layer that outputs the shape of its input tensor as a 1-D tensor whose
 * length equals the input's rank (see `InternalGetShape.updateOutput`, which
 * returns `input.size()` — at runtime this includes the batch dimension).
 *
 * @param inputShape shape of a single sample; `KerasUtils.addBatch` prepends the batch dim.
 */
class GetShape[T: ClassTag](
  val inputShape: Shape = null)(implicit ev: TensorNumeric[T])
  extends KerasLayer[Tensor[T], Tensor[T], T](KerasUtils.addBatch(inputShape)) {

  // The output is a vector with one entry per input dimension, hence shape (rank,).
  override def computeOutputShape(inputShape: Shape): Shape = {
    Shape.apply(Array(inputShape.toSingle().toArray.length))
  }

  override def doBuild(inputShape: Shape): AbstractModule[Tensor[T], Tensor[T], T] = {
    new InternalGetShape[T]()
  }
}
private class InternalGetShape[T: ClassTag](implicit ev: TensorNumeric[T])
  extends AbstractModule[Tensor[T], Tensor[T], T] {

  // Forward pass: emit the input's dimensions as a 1-D tensor of numeric values.
  override def updateOutput(input: Tensor[T]): Tensor[T] = {
    val dims = input.toTensor[T].size().map(d => ev.fromType[Int](d))
    Tensor(data = dims, shape = Array(dims.length))
  }

  // Backward pass: shape extraction carries no gradient, so return all zeros.
  override def updateGradInput(input: Tensor[T], gradOutput: Tensor[T]): Tensor[T] = {
    val zeroGrad = input.toTensor.clone()
    zeroGrad.fill(ev.zero)
  }
}
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/keras/layers/GetShape.scala | Scala | apache-2.0 | 1,953 |
package chapter4
import org.scalatest.{FlatSpec, Matchers}
import scala.{Option => _, Some => _, Either => _, _}
class OptionSpec
  extends FlatSpec
  with Matchers {

  behavior of "Chapter4 Option"

  "Options Some.map" should "convert Some" in {
    Some(3).map(_ * 2) shouldBe Some(6)
  }

  "Options None.map" should "do nothing" in {
    val input: Option[Int] = None
    input.map(_ * 2) shouldBe None
  }

  // Helper used by the flatMap specs: doubles even numbers, rejects odd ones.
  def doubleIfEven(n: Int): Option[Int] = if (n % 2 == 0) Some(n * 2) else None

  "Options Some.flatMap" should "convert Some" in {
    Some(4).flatMap(doubleIfEven) shouldBe Some(8)
    Some(3).flatMap(doubleIfEven) shouldBe None
  }

  "Options None.flatMap" should "do nothing" in {
    val input: Option[Int] = None
    input.flatMap(doubleIfEven) shouldBe None
  }

  "Options Some.getOrElse" should "Return Some's value" in {
    Some(6).getOrElse(55) shouldBe 6
  }

  "Options None.getOrElse" should "Return default value" in {
    val input: Option[Int] = None
    input.getOrElse(55) shouldBe 55
  }

  "Options Some.orElse" should "Return Some's value" in {
    Some(6).orElse(Some(55)) shouldBe Some(6)
  }

  "Options None.orElse" should "Return default value" in {
    val input: Option[Int] = None
    input.orElse(Some(55)) shouldBe Some(55)
  }

  "Options Some.filter" should "Return Some's value" in {
    Some(6).filter(_ == 6) shouldBe Some(6)
    Some(6).filter(_ != 6) shouldBe None
  }

  "Options None.filter" should "Return None" in {
    val input: Option[Int] = None
    input.filter(_ => true) shouldBe None
  }
}
} | chelck/fpis | src/test/scala/chapter4/OptionSpec.scala | Scala | mit | 1,599 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.SparkException
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.types.DataType
/**
* User-defined function.
* @param function The user defined scala function to run.
* Note that if you use primitive parameters, you are not able to check if it is
* null or not, and the UDF will return null for you if the primitive input is
* null. Use boxed type or [[Option]] if you wanna do the null-handling yourself.
* @param dataType Return type of function.
* @param children The input expressions of this UDF.
* @param inputTypes The expected input types of this UDF, used to perform type coercion. If we do
* not want to perform coercion, simply use "Nil". Note that it would've been
* better to use Option of Seq[DataType] so we can use "None" as the case for no
* type coercion. However, that would require more refactoring of the codebase.
* @param udfName The user-specified name of this UDF.
* @param nullable True if the UDF can return null value.
* @param udfDeterministic True if the UDF is deterministic. Deterministic UDF returns same result
* each time it is invoked with a particular input.
*/
case class ScalaUDF(
function: AnyRef,
dataType: DataType,
children: Seq[Expression],
inputTypes: Seq[DataType] = Nil,
udfName: Option[String] = None,
nullable: Boolean = true,
udfDeterministic: Boolean = true)
extends Expression with ImplicitCastInputTypes with NonSQLExpression with UserDefinedExpression {
override def deterministic: Boolean = udfDeterministic && children.forall(_.deterministic)
override def toString: String =
s"${udfName.map(name => s"UDF:$name").getOrElse("UDF")}(${children.mkString(", ")})"
// scalastyle:off line.size.limit
/** This method has been generated by this script
(1 to 22).map { x =>
val anys = (1 to x).map(x => "Any").reduce(_ + ", " + _)
val childs = (0 to x - 1).map(x => s"val child$x = children($x)").reduce(_ + "\n " + _)
val converters = (0 to x - 1).map(x => s"lazy val converter$x = CatalystTypeConverters.createToScalaConverter(child$x.dataType)").reduce(_ + "\n " + _)
val evals = (0 to x - 1).map(x => s"converter$x(child$x.eval(input))").reduce(_ + ",\n " + _)
s"""case $x =>
val func = function.asInstanceOf[($anys) => Any]
$childs
$converters
(input: InternalRow) => {
func(
$evals)
}
"""
}.foreach(println)
*/
  // Explicit accessors used by the code-generation path (genCode); presumably the
  // generated Java source invokes these by name rather than reading the case-class
  // fields directly — confirm against doGenCode.
  def userDefinedFunc(): AnyRef = function
  def getChildren(): Seq[Expression] = children
private[this] val f = children.size match {
case 0 =>
val func = function.asInstanceOf[() => Any]
(input: InternalRow) => {
func()
}
case 1 =>
val func = function.asInstanceOf[(Any) => Any]
val child0 = children(0)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)))
}
case 2 =>
val func = function.asInstanceOf[(Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)))
}
case 3 =>
val func = function.asInstanceOf[(Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)))
}
case 4 =>
val func = function.asInstanceOf[(Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)))
}
case 5 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)))
}
case 6 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)))
}
case 7 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)))
}
case 8 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)))
}
case 9 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)))
}
case 10 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)))
}
case 11 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)))
}
case 12 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)))
}
case 13 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)))
}
case 14 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)))
}
case 15 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)))
}
case 16 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
val child15 = children(15)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)),
converter15(child15.eval(input)))
}
case 17 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
val child15 = children(15)
val child16 = children(16)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
lazy val converter16 = CatalystTypeConverters.createToScalaConverter(child16.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)),
converter15(child15.eval(input)),
converter16(child16.eval(input)))
}
case 18 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
val child15 = children(15)
val child16 = children(16)
val child17 = children(17)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
lazy val converter16 = CatalystTypeConverters.createToScalaConverter(child16.dataType)
lazy val converter17 = CatalystTypeConverters.createToScalaConverter(child17.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)),
converter15(child15.eval(input)),
converter16(child16.eval(input)),
converter17(child17.eval(input)))
}
case 19 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
val child15 = children(15)
val child16 = children(16)
val child17 = children(17)
val child18 = children(18)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
lazy val converter16 = CatalystTypeConverters.createToScalaConverter(child16.dataType)
lazy val converter17 = CatalystTypeConverters.createToScalaConverter(child17.dataType)
lazy val converter18 = CatalystTypeConverters.createToScalaConverter(child18.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)),
converter15(child15.eval(input)),
converter16(child16.eval(input)),
converter17(child17.eval(input)),
converter18(child18.eval(input)))
}
case 20 =>
val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
val child0 = children(0)
val child1 = children(1)
val child2 = children(2)
val child3 = children(3)
val child4 = children(4)
val child5 = children(5)
val child6 = children(6)
val child7 = children(7)
val child8 = children(8)
val child9 = children(9)
val child10 = children(10)
val child11 = children(11)
val child12 = children(12)
val child13 = children(13)
val child14 = children(14)
val child15 = children(15)
val child16 = children(16)
val child17 = children(17)
val child18 = children(18)
val child19 = children(19)
lazy val converter0 = CatalystTypeConverters.createToScalaConverter(child0.dataType)
lazy val converter1 = CatalystTypeConverters.createToScalaConverter(child1.dataType)
lazy val converter2 = CatalystTypeConverters.createToScalaConverter(child2.dataType)
lazy val converter3 = CatalystTypeConverters.createToScalaConverter(child3.dataType)
lazy val converter4 = CatalystTypeConverters.createToScalaConverter(child4.dataType)
lazy val converter5 = CatalystTypeConverters.createToScalaConverter(child5.dataType)
lazy val converter6 = CatalystTypeConverters.createToScalaConverter(child6.dataType)
lazy val converter7 = CatalystTypeConverters.createToScalaConverter(child7.dataType)
lazy val converter8 = CatalystTypeConverters.createToScalaConverter(child8.dataType)
lazy val converter9 = CatalystTypeConverters.createToScalaConverter(child9.dataType)
lazy val converter10 = CatalystTypeConverters.createToScalaConverter(child10.dataType)
lazy val converter11 = CatalystTypeConverters.createToScalaConverter(child11.dataType)
lazy val converter12 = CatalystTypeConverters.createToScalaConverter(child12.dataType)
lazy val converter13 = CatalystTypeConverters.createToScalaConverter(child13.dataType)
lazy val converter14 = CatalystTypeConverters.createToScalaConverter(child14.dataType)
lazy val converter15 = CatalystTypeConverters.createToScalaConverter(child15.dataType)
lazy val converter16 = CatalystTypeConverters.createToScalaConverter(child16.dataType)
lazy val converter17 = CatalystTypeConverters.createToScalaConverter(child17.dataType)
lazy val converter18 = CatalystTypeConverters.createToScalaConverter(child18.dataType)
lazy val converter19 = CatalystTypeConverters.createToScalaConverter(child19.dataType)
(input: InternalRow) => {
func(
converter0(child0.eval(input)),
converter1(child1.eval(input)),
converter2(child2.eval(input)),
converter3(child3.eval(input)),
converter4(child4.eval(input)),
converter5(child5.eval(input)),
converter6(child6.eval(input)),
converter7(child7.eval(input)),
converter8(child8.eval(input)),
converter9(child9.eval(input)),
converter10(child10.eval(input)),
converter11(child11.eval(input)),
converter12(child12.eval(input)),
converter13(child13.eval(input)),
converter14(child14.eval(input)),
converter15(child15.eval(input)),
converter16(child16.eval(input)),
converter17(child17.eval(input)),
converter18(child18.eval(input)),
converter19(child19.eval(input)))
}
case 21 =>
  val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
  // Capture the 21 child expressions once; build the Catalyst-to-Scala
  // converters lazily, exactly as the per-child `lazy val converterN`
  // declarations in the generated form did (they are only needed at
  // evaluation time).
  val cs = children.toIndexedSeq
  lazy val convs = cs.map(c => CatalystTypeConverters.createToScalaConverter(c.dataType))
  (input: InternalRow) => {
    func(
      convs(0)(cs(0).eval(input)),
      convs(1)(cs(1).eval(input)),
      convs(2)(cs(2).eval(input)),
      convs(3)(cs(3).eval(input)),
      convs(4)(cs(4).eval(input)),
      convs(5)(cs(5).eval(input)),
      convs(6)(cs(6).eval(input)),
      convs(7)(cs(7).eval(input)),
      convs(8)(cs(8).eval(input)),
      convs(9)(cs(9).eval(input)),
      convs(10)(cs(10).eval(input)),
      convs(11)(cs(11).eval(input)),
      convs(12)(cs(12).eval(input)),
      convs(13)(cs(13).eval(input)),
      convs(14)(cs(14).eval(input)),
      convs(15)(cs(15).eval(input)),
      convs(16)(cs(16).eval(input)),
      convs(17)(cs(17).eval(input)),
      convs(18)(cs(18).eval(input)),
      convs(19)(cs(19).eval(input)),
      convs(20)(cs(20).eval(input)))
  }
case 22 =>
  val func = function.asInstanceOf[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) => Any]
  // Capture the 22 child expressions once; build the Catalyst-to-Scala
  // converters lazily, exactly as the per-child `lazy val converterN`
  // declarations in the generated form did (they are only needed at
  // evaluation time).
  val cs = children.toIndexedSeq
  lazy val convs = cs.map(c => CatalystTypeConverters.createToScalaConverter(c.dataType))
  (input: InternalRow) => {
    func(
      convs(0)(cs(0).eval(input)),
      convs(1)(cs(1).eval(input)),
      convs(2)(cs(2).eval(input)),
      convs(3)(cs(3).eval(input)),
      convs(4)(cs(4).eval(input)),
      convs(5)(cs(5).eval(input)),
      convs(6)(cs(6).eval(input)),
      convs(7)(cs(7).eval(input)),
      convs(8)(cs(8).eval(input)),
      convs(9)(cs(9).eval(input)),
      convs(10)(cs(10).eval(input)),
      convs(11)(cs(11).eval(input)),
      convs(12)(cs(12).eval(input)),
      convs(13)(cs(13).eval(input)),
      convs(14)(cs(14).eval(input)),
      convs(15)(cs(15).eval(input)),
      convs(16)(cs(16).eval(input)),
      convs(17)(cs(17).eval(input)),
      convs(18)(cs(18).eval(input)),
      convs(19)(cs(19).eval(input)),
      convs(20)(cs(20).eval(input)),
      convs(21)(cs(21).eval(input)))
  }
}
// scalastyle:on line.size.limit
/**
 * Generates Java source that instantiates a Catalyst-to-Scala converter for
 * the child expression at `index`, registers it as mutable state on the
 * generated class, and returns the name of the field holding the converter.
 *
 * NOTE: this assumes the enclosing ScalaUDF has already been registered via
 * ctx.addReferenceObj (done at the top of doGenCode), so that
 * `ctx.references.size - 1` is the index of this ScalaUDF in the generated
 * `references` array. Calling this before that registration would produce
 * code reading the wrong reference slot.
 */
private[this] def genCodeForConverter(ctx: CodegenContext, index: Int): String = {
  val converterClassName = classOf[Any => Any].getName
  val typeConvertersClassName = CatalystTypeConverters.getClass.getName + ".MODULE$"
  val expressionClassName = classOf[Expression].getName
  val scalaUDFClassName = classOf[ScalaUDF].getName
  // Fresh field name for this converter in the generated class.
  val converterTerm = ctx.freshName("converter")
  val expressionIdx = ctx.references.size - 1
  ctx.addMutableState(converterClassName, converterTerm,
    s"$converterTerm = ($converterClassName)$typeConvertersClassName" +
      s".createToScalaConverter(((${expressionClassName})((($scalaUDFClassName)" +
      s"references[$expressionIdx]).getChildren().apply($index))).dataType());")
  converterTerm
}
/**
 * Code generation for this UDF call. Emits Java that:
 *  1. evaluates and converts each child's Catalyst value to its Scala form,
 *  2. applies the user function (cast to the matching scala.FunctionN), and
 *  3. converts the result back to the Catalyst representation,
 * wrapping any exception thrown by the user function in a SparkException
 * that carries udfErrorMessage.
 */
override def doGenCode(
    ctx: CodegenContext,
    ev: ExprCode): ExprCode = {
  // Register this ScalaUDF instance in the generated `references` array.
  // genCodeForConverter (called below) relies on this happening first.
  val scalaUDF = ctx.addReferenceObj("scalaUDF", this)
  val converterClassName = classOf[Any => Any].getName
  val typeConvertersClassName = CatalystTypeConverters.getClass.getName + ".MODULE$"
  // Generate codes used to convert the returned value of user-defined functions to Catalyst type
  val catalystConverterTerm = ctx.freshName("catalystConverter")
  ctx.addMutableState(converterClassName, catalystConverterTerm,
    s"$catalystConverterTerm = ($converterClassName)$typeConvertersClassName" +
      s".createToCatalystConverter($scalaUDF.dataType());")
  val resultTerm = ctx.freshName("result")
  // This must be called before children expressions' codegen
  // because ctx.references is used in genCodeForConverter
  val converterTerms = children.indices.map(genCodeForConverter(ctx, _))
  // Initialize user-defined function
  val funcClassName = s"scala.Function${children.size}"
  val funcTerm = ctx.freshName("udf")
  ctx.addMutableState(funcClassName, funcTerm,
    s"$funcTerm = ($funcClassName)$scalaUDF.userDefinedFunc();")
  // codegen for children expressions
  val evals = children.map(_.genCode(ctx))
  // Generate the codes for expressions and calling user-defined function
  // We need to get the boxedType of dataType's javaType here. Because for the dataType
  // such as IntegerType, its javaType is `int` and the returned type of user-defined
  // function is Object. Trying to convert an Object to `int` will cause casting exception.
  val evalCode = evals.map(_.code).mkString
  // One null-guarded conversion statement plus one argument term per child.
  val (converters, funcArguments) = converterTerms.zipWithIndex.map { case (converter, i) =>
    val eval = evals(i)
    val argTerm = ctx.freshName("arg")
    val convert = s"Object $argTerm = ${eval.isNull} ? null : $converter.apply(${eval.value});"
    (convert, argTerm)
  }.unzip
  val getFuncResult = s"$funcTerm.apply(${funcArguments.mkString(", ")})"
  val callFunc =
    s"""
${ctx.boxedType(dataType)} $resultTerm = null;
try {
$resultTerm = (${ctx.boxedType(dataType)})$catalystConverterTerm.apply($getFuncResult);
} catch (Exception e) {
throw new org.apache.spark.SparkException($scalaUDF.udfErrorMessage(), e);
}
"""
  ev.copy(code = s"""
$evalCode
${converters.mkString("\n")}
$callFunc
boolean ${ev.isNull} = $resultTerm == null;
${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
if (!${ev.isNull}) {
${ev.value} = $resultTerm;
}""")
}
// Converts the Scala value returned by the UDF back into Catalyst's internal
// representation, based on this expression's declared dataType (interpreted path).
private[this] val converter = CatalystTypeConverters.createToCatalystConverter(dataType)

// Human-readable description of this UDF, used when wrapping failures from the
// user function. Lazy because it walks all children's data types.
lazy val udfErrorMessage = {
  val funcCls = function.getClass.getSimpleName
  val inputTypes = children.map(_.dataType.simpleString).mkString(", ")
  s"Failed to execute user defined function($funcCls: ($inputTypes) => ${dataType.simpleString})"
}
/**
 * Interpreted evaluation path: applies the prepared function `f` to the row
 * and converts its result back to the Catalyst representation. Any exception
 * thrown by the user function is wrapped in a SparkException carrying
 * `udfErrorMessage` so the failing UDF can be identified.
 */
override def eval(input: InternalRow): Any = {
  val rawResult =
    try f(input)
    catch {
      case e: Exception =>
        throw new SparkException(udfErrorMessage, e)
    }
  converter(rawResult)
}
}
| minixalpha/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDF.scala | Scala | apache-2.0 | 53,794 |
package entity

/**
 * Samsara Aquarius
 * Article share case class.
 *
 * @author sczyh30
 * @since 0.2.22
 *
 * @param sid   share id (auto-increment primary key)
 * @param title article title
 * @param url   article url
 * @param user  id of the sharing user; defaults to Some(0) for shares made
 *              while not logged in
 */
case class Share(sid: Int, title: String, url: String, user: Option[Int] = Some(0))
| sczyh30/samsara-aquarius | app/entity/Share.scala | Scala | mit | 360 |
package composition.webserviceclients.vehicleandkeeperlookup
import com.tzavellas.sse.guice.ScalaModule
import composition.webserviceclients.vehicleandkeeperlookup.TestVehicleAndKeeperLookupWebService.createResponse
import org.mockito.Matchers.any
import org.mockito.Mockito.when
import org.scalatest.mock.MockitoSugar
import scala.concurrent.Future
import uk.gov.dvla.vehicles.presentation.common
import common.clientsidesession.TrackingId
import common.webserviceclients.vehicleandkeeperlookup.VehicleAndKeeperLookupRequest
import common.webserviceclients.vehicleandkeeperlookup.VehicleAndKeeperLookupWebService
import webserviceclients.fakes.VehicleAndKeeperLookupWebServiceConstants.vehicleAndKeeperDetailsServerDown
/**
 * Guice test module that binds a stubbed vehicle-and-keeper lookup web
 * service whose every invocation yields the canned "server down" response.
 */
final class VehicleAndKeeperDetailsCallServerDown extends ScalaModule with MockitoSugar {

  // Mocked web service: any (request, tracking id) pair returns the
  // "server down" payload wrapped in an already-successful Future.
  val stub = {
    val lookupService = mock[VehicleAndKeeperLookupWebService]
    val cannedResponse = Future.successful(createResponse(vehicleAndKeeperDetailsServerDown))
    when(lookupService.invoke(any[VehicleAndKeeperLookupRequest], any[TrackingId]))
      .thenReturn(cannedResponse)
    lookupService
  }

  def configure() = bind[VehicleAndKeeperLookupWebService].toInstance(stub)
}
package chapter25
/**
* 25.2 공통 연산 한데 묶기
*
* 컬렉션 라이브러리 재설계의 주 목적은 타입을 자연스럽게 만드는 동시에 구현 코드를
* 최대한 공유하는 것이었다. 특히 스칼라의 컬렉션은 동일 결과타입(same result type) 원칙을
* 따른다. 가능하면, 어떤 컬렉션에 대해 실행한 변환 연산의 결과가 같은 타입의 컬렉션이어야 한다는 것이다.
*
* 이 절의 내용은 다른 부분보다 조금 심오하기 때문에 흡수하는데 시간이 걸릴 것이다.
*
* 구현 트레이트(implementation trait)라 불리는 제네릭 빌더와 순회를 사용해 코드 중복을 줄이고
* 동일 결과 타입 원칙을 달성한다. 이런 트레이트에는 Like라는 접미사가 붙는다.
*
* IndexedSeqLike는 IndexedSeq의 구현 트레이트이고
* Traversable의 구현 트레이트는 TraversableLike이다.
*
* 일반 컬렉션에는 하나의 타입 파라미터가 있는 반면, 구현 트레이트에는 2개가 있다. 이는 컬렉션 원소의 타입과
* 컬렉션이 표현하는 타입을 지정한다.
*
* trait TraversableLike[+Elem, +Repr] { ... }
* Repr에는 제약이 없는데, Traversable의 서브타입이 아닌 타입으로 인스턴스화도 가능하여,
* String이나 Array 같이 컬렉션 계층구조에 참여하지 않는 클래스들까지도 컬렉션 구현 트레이트가 정의하는
* 모든 연산을 사용할 수 있다.
*
* filter는 TraversableLike 트레이트 안에 모든 컬렉션 클래스를 위해 정의되어 있다.
* 트레이트는 두 가지 추상 메소드 newBuilder와 foreach를 정의한다.
*
* newBuilder를 상둉해 Repr에 대한 빌더를 새로 만들고, 모든 원소를 foreach를 사용해 차례로 방문하며
* 원소 x가 술어를 만족하면 빌더에 추가하고, 마지막으로 result 메소드를 호출해서 빌더에 모은 원소들을
* Repr 컬렉션 타입의 인스턴스로 반환한다.
*
* map의 경우는 복잡하다.
*
*/
/** Worksheet-style examples accompanying section 25.2 of the chapter notes. */
object c25_i02 extends App {
  import scala.collection.immutable.BitSet
  val bits = BitSet(5,2,3) //> bits : scala.collection.immutable.BitSet = BitSet(2, 3, 5)
  bits map (_ * 2) //> res0: scala.collection.immutable.BitSet = BitSet(4, 6, 10)
  bits map (_.toFloat) //> res1: scala.collection.immutable.SortedSet[Float] = TreeSet(2.0, 3.0, 5.0)
  val f:Set[Float] = bits map (_.toFloat) //> f : Set[Float] = TreeSet(2.0, 3.0, 5.0)
  /*
   * Mapping with *2 yields another bit set, but mapping with toFloat yields the
   * more general SortedSet. A bit set can only hold Ints, so the result set,
   * which must hold Floats, can never be a bit set. How can Scala handle this
   * so flexibly?
   */
  Map("a" -> 1, "b" -> 2) map { case (x, y) => (y, x) }
  //> res0: scala.collection.immutable.Map[Int,String] = Map(1 -> a, 2 -> b)
  Map("a" -> 1, "b" -> 2) map { case (x, y) => y }//> res1: scala.collection.immutable.Iterable[Int] = List(1, 2)
  /*
   * In the second Map example a Map cannot be produced as-is, but an Iterable --
   * Map's supertrait -- still can be.
   *
   * One might ask why map isn't simply restricted to always return the same
   * kind of collection. Such a restriction would be undesirable from an
   * object-oriented modelling standpoint and also incorrect, because it would
   * violate the Liskov substitution principle: a Map is an Iterable, so
   * anything you can do with an Iterable must also work on a Map.
   *
   * Scala solves this with overloading -- not the simple (inflexible)
   * overloading inherited from Java, but the more systematic mechanism
   * provided by implicit parameters.
   *
   * TraversableLike's map implementation:
   * def map[B, That](p: Elem => B)(implicit bf: CanBuildFrom[This, B, That]): That = {
   *   val b = bf(this)
   *   for (x <- this) b += f(x)
   *   b.result
   * }
   *
   * This closely resembles filter's implementation; the big difference is that
   * instead of the abstract method newBuilder it uses a builder factory of
   * type CanBuildFrom, passed as an implicit parameter.
   *
   * trait CanBuildFrom[-From, -Elem, +To] {
   *   // Creates a new builder.
   *   def apply(from: From): Builder[Elem, To]
   * }
   *
   * From - the type the builder factory applies to
   * Elem - the element type of the collection being built
   * To   - the type of collection to build
   *
   * BitSet's companion object contains a CanBuildFrom[BitSet, Int, BitSet]:
   * when applying map, if the result collection's element type is Int, a new
   * BitSet can be built. If that cannot be satisfied, the compiler looks for
   * another value, found in mutable.Set's companion object:
   *
   * CanBuildFrom[Set[_], A, Set[A]]
   * meaning that for an operation on an arbitrary set (Set[_]), a Set can be
   * built again regardless of the element type A. So even tricky collection
   * operations can be given correct static types -- but what about the
   * dynamic case?
   */
  val xs2 = List(1,2,3) //> xs2 : List[Int] = List(1, 2, 3)
  val ys2 = xs2 map (x => x * x) //> ys2 : List[Int] = List(1, 4, 9)
  val xs: Iterable[Int] = List(1,2,3) //> xs : Iterable[Int] = List(1, 2, 3)
  val ys = xs map (x => x * x) //> ys : Iterable[Int] = List(1, 4, 9)
  /*
   * Mapping over a List statically typed as Iterable still produces a List,
   * assigned back to ys as an Iterable: ys's static type is Iterable while its
   * dynamic type is List. CanBuildFrom's apply method receives the original
   * collection as its argument; most builder factories for generic
   * traversables forward this call to the collection's genericBuilder method,
   * which in turn invokes the builder of the collection that defined it --
   * thereby obtaining the best dynamic type that satisfies the constraints.
   */
}
def f(a: Int)(b: Int) = {} // curried method the reference below should resolve to
println(/* */f {1} {2}) // resolve-test marker comment precedes the reference under test
package spatial.nodes
import argon.core._
import spatial.aliases._
/** Staged-type evidence for [[CounterChain]]: wraps and unwraps the IR symbol. */
object CounterChainType extends Type[CounterChain] {
  override def wrapped(x: Exp[CounterChain]) = CounterChain(x)
  override def unwrapped(x: CounterChain) = x.s
  override def typeArguments = Nil
  // A counter chain is a compound staged structure, not a primitive value.
  override def isPrimitive = false
  override def stagedClass = classOf[CounterChain]
}
/**
 * IR node allocating a counter chain from the given counters.
 *
 * @param counters the staged counters composing the chain
 *                 (presumably ordered outermost first -- TODO confirm)
 */
case class CounterChainNew(counters: Seq[Exp[Counter]]) extends DynamicAlloc[CounterChain] {
  // Mirroring rebuilds the node from the transformed counter symbols.
  def mirror(f:Tx) = CounterChain.fromseq(f(counters))
}
| stanford-ppl/spatial-lang | spatial/core/src/spatial/nodes/CounterChain.scala | Scala | mit | 507 |
package com.twitter.finagle.netty3.transport
import com.twitter.concurrent.AsyncQueue
import com.twitter.finagle.transport.{Transport, TransportContext, LegacyContext}
import com.twitter.finagle.{ChannelClosedException, ChannelException, Status}
import com.twitter.util.{Future, Promise, Return, Time}
import java.net.SocketAddress
import java.security.cert.Certificate
import java.util.concurrent.atomic.AtomicBoolean
import org.jboss.netty.channel._
import org.jboss.netty.handler.ssl.SslHandler
import scala.util.control.NonFatal
/**
 * A finagle Transport backed by a Netty 3 [[Channel]]. The transport installs
 * itself as the last upstream handler in the channel's pipeline and bridges
 * Netty events into reads, writes and lifecycle signals.
 *
 * Read flow control: each pending read() increments a demand counter and each
 * received message decrements it; the channel's readability is toggled so
 * that at most one message is buffered ahead of demand.
 */
class ChannelTransport[In, Out](ch: Channel)
    extends Transport[In, Out]
    with ChannelUpstreamHandler {

  type Context = TransportContext

  // Outstanding read demand: incremented by read(), decremented per received
  // message. Guarded by this object's monitor (see need()).
  private[this] var nneed = 0

  // Adjust read demand by `n` and reconcile the channel's readable flag.
  private[this] def need(n: Int): Unit = synchronized {
    nneed += n
    // Note: we buffer 1 message here so that we receive socket
    // closes proactively.
    val r = nneed >= 0
    if (ch.isReadable != r && ch.isOpen)
      ch.setReadable(r)
  }

  // Register this transport as the terminal upstream handler.
  ch.getPipeline.addLast("finagleTransportBridge", this)

  // Messages received from the channel, awaiting read() calls.
  private[this] val readq = new AsyncQueue[Out]
  // Ensures fail() runs its failure logic at most once.
  private[this] val failed = new AtomicBoolean(false)
  private[this] val readInterruptHandler: PartialFunction[Throwable, Unit] = {
    case e => fail(e)
  }

  // Fail the transport at most once: fail the read queue, close the channel,
  // and satisfy onClose with `exc`.
  private[this] def fail(exc: Throwable) {
    if (!failed.compareAndSet(false, true))
      return
    // Do not discard existing queue items. Doing so causes a race
    // between reading off of the transport and a peer closing it.
    // For example, in HTTP, a remote server may send its content in
    // many chunks and then promptly close its connection.
    readq.fail(exc, false)
    // Note: we have to fail the readq before fail, otherwise control is
    // returned to netty potentially allowing subsequent offers to the readq,
    // which should be illegal after failure.
    close()
    closep.updateIfEmpty(Return(exc))
  }

  // Terminal upstream handler: enqueues messages, translates channel-state
  // and exception events into transport failure/demand adjustments.
  override def handleUpstream(ctx: ChannelHandlerContext, e: ChannelEvent) {
    e match {
      case msg: MessageEvent =>
        readq.offer(msg.getMessage.asInstanceOf[Out])
        need(-1)
      case e: ChannelStateEvent
          if e.getState == ChannelState.OPEN && e.getValue != java.lang.Boolean.TRUE =>
        fail(new ChannelClosedException(ch.getRemoteAddress))
      case e: ChannelStateEvent if e.getState == ChannelState.INTEREST_OPS =>
        // Make sure we have the right interest ops. This allows us to fix
        // up any races that may occur when setting interest ops without
        // having to explicitly serialize them -- it guarantees convergence
        // of interest ops.
        //
        // This can't deadlock, because:
        // #1 Updates from other threads are enqueued onto a pending
        // operations queue for the owner thread, and they never wait
        // for completion.
        // #2 Within the context of this thread, Channel.isReadable cannot
        // change while we're invoking setReadable(): subsequent channel
        // state events will be terminated early by need()'s check.
        need(0)
      case e: ChannelStateEvent
          if e.getState == ChannelState.CONNECTED
            && e.getValue == java.lang.Boolean.TRUE =>
        need(0)
      case e: ExceptionEvent =>
        fail(ChannelException(e.getCause, ch.getRemoteAddress))
      case _ => // drop.
    }
    // We terminate the upstream here on purpose: this must always
    // be the last handler.
  }

  // Write one message; the returned Future is satisfied when Netty reports
  // the write complete (it is not interruptible -- see comments below).
  def write(msg: In): Future[Unit] = {
    val p = new Promise[Unit]
    // This is not cancellable because write operations in netty3
    // are not cancellable. That is, there is no way to interrupt or
    // preempt them once the write event has been sent into the pipeline.
    val writeFuture = new DefaultChannelFuture(ch, false /* cancellable */ )
    writeFuture.addListener(new ChannelFutureListener {
      def operationComplete(f: ChannelFuture): Unit = {
        if (f.isSuccess) p.setDone()
        else {
          // since we can't cancel, `f` must be an exception.
          p.setException(ChannelException(f.getCause, ch.getRemoteAddress))
        }
      }
    })
    // Ordering here is important. We want to call `addListener` on
    // `writeFuture` before giving it a chance to be satisfied, since
    // `addListener` will invoke all listeners on the calling thread
    // if the target future is complete. This allows us to present a
    // more consistent threading model where callbacks are invoked
    // on the event loop thread.
    ch.getPipeline().sendDownstream(new DownstreamMessageEvent(ch, writeFuture, msg, null))
    // We avoid setting an interrupt handler on the future exposed
    // because the backing opertion isn't interruptible.
    p
  }

  // Read one message; interruption of the returned Future fails the
  // entire transport (see readInterruptHandler).
  def read(): Future[Out] = {
    need(1)
    // This is fine, but we should consider being a little more fine-grained
    // here. For example, if a read behind another read interrupts, perhaps the
    // transport shouldn’t be failed, only the read dequeued.
    val p = new Promise[Out]
    // Note: We use become instead of proxyTo here even though become is
    // recommended when `p` has interrupt handlers. `become` merges the
    // listeners of two promises, which continue to share state via Linked and
    // is a gain in space-efficiency.
    p.become(readq.poll())
    // Note: We don't raise on readq.poll's future, because it doesn't set an
    // interrupt handler, but perhaps we should; and perhaps we should always
    // raise on the "other" side of the become indiscriminately in all cases.
    p.setInterruptHandler(readInterruptHandler)
    p
  }

  // Open until the transport has failed or the channel has closed.
  def status: Status =
    if (failed.get || !ch.isOpen) Status.Closed
    else Status.Open

  // Close the channel (the deadline is not honoured by netty3's close).
  def close(deadline: Time): Future[Unit] = {
    if (ch.isOpen)
      Channels.close(ch)
    closep.unit
  }

  def localAddress: SocketAddress = ch.getLocalAddress()
  def remoteAddress: SocketAddress = ch.getRemoteAddress()

  // First certificate of the TLS peer, when an SslHandler is present and the
  // handshake has produced one; None otherwise (including handshake errors).
  val peerCertificate: Option[Certificate] =
    ch.getPipeline.get(classOf[SslHandler]) match {
      case null => None
      case handler =>
        try {
          handler.getEngine.getSession.getPeerCertificates.headOption
        } catch {
          case NonFatal(_) => None
        }
    }

  // Satisfied with the terminal exception when the transport fails/closes.
  private[this] val closep = new Promise[Throwable]
  val onClose: Future[Throwable] = closep

  override def toString = s"Transport<channel=$ch, onClose=$closep>"

  val context: TransportContext = new LegacyContext(this)
}
| mkhq/finagle | finagle-netty3/src/main/scala/com/twitter/finagle/netty3/transport/ChannelTransport.scala | Scala | apache-2.0 | 6,444 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.sj.mesos.framework.task
import java.util.{Calendar, Date}
import com.bwsw.sj.common.rest.{Directory, FrameworkTask}
import com.bwsw.sj.mesos.framework.config.FrameworkConfigNames
import com.bwsw.sj.mesos.framework.schedule.FrameworkUtil
import com.typesafe.config.ConfigFactory
import org.apache.mesos.Protos.Resource
import scala.util.Try
import scala.collection.mutable
/**
 * Task data model.
 *
 * Tracks the lifecycle of a single Mesos task: its current state, the slave
 * it runs on, and links into the Mesos sandbox kept for debugging.
 *
 * @param taskId unique id of the task
 */
class Task(taskId: String) {
  private val config = FrameworkUtil.config.get

  val id: String = taskId
  var state: String = "TASK_STAGING"
  // Epoch millis of the most recent state transition.
  var stateChanged: Long = Calendar.getInstance().getTime.getTime
  var reason: String = ""
  var node: String = ""
  var lastNode: String = ""
  // var description: InstanceTask = _
  // Maximum number of sandbox directory links kept per task; falls back to 7
  // when the setting is absent or unparsable.
  var maxDirectories = Try(config.getInt(FrameworkConfigNames.maxSandboxView)).getOrElse(7)
  var directories: mutable.ListBuffer[Directory] = mutable.ListBuffer()
  var host: Option[String] = None
  var ports: Resource = _

  /**
   * Update task with concrete parameters.
   *
   * @param state        current task state
   * @param stateChanged time (epoch millis) when the state changed
   * @param reason       reason why the state changed
   * @param node         current slave
   * @param lastNode     previous slave
   * @param directory    link to the task sandbox; ignored when empty or
   *                     already recorded
   * @param host         slave host
   * @param ports        occupied ports
   */
  def update(state: String = state,
             stateChanged: Long = stateChanged,
             reason: String = reason,
             node: String = node,
             lastNode: String = lastNode,
             directory: String = "",
             host: String = this.host.orNull,
             ports: Resource = ports): Unit = {
    this.state = state
    this.stateChanged = stateChanged
    this.reason = reason
    this.node = node
    this.lastNode = lastNode
    this.host = Option(host)
    this.ports = ports

    if (!directories.exists(_.path == directory) && directory.nonEmpty)
      directories.append(
        Directory(new Date(stateChanged).toString, directory))
    // Cap the history length. (Previously `directories.toList.length`, which
    // materialised a throwaway List just to count elements.)
    // NOTE(review): when the cap is exceeded, dropRight(1) discards the entry
    // that was just appended, so the oldest maxDirectories links are the ones
    // retained -- confirm this eviction order is intended.
    if (directories.length > maxDirectories) directories = directories.dropRight(1)
  }

  /**
   * Transform to the [[FrameworkTask]] REST representation.
   */
  def toFrameworkTask: FrameworkTask = {
    FrameworkTask(id, state, new Date(stateChanged).toString, reason, node, lastNode, directories)
  }
}
| bwsw/sj-platform | core/sj-mesos-framework/src/main/scala/com/bwsw/sj/mesos/framework/task/Task.scala | Scala | apache-2.0 | 3,137 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cogx.compiler.codegenerator.opencl.hyperkernels
import cogx.compiler.codegenerator.opencl.fragments.{TensorElementAddressing, SmallTensorAddressing, AddressingMode, HyperKernel}
import cogx.compiler.codegenerator.opencl.fragments.HyperKernel._
import cogx.platform.types.{VirtualFieldRegister, Opcode, FieldType}
import cogx.compiler.codegenerator.common.FieldPolicies._
import cogx.compiler.parser.op.SupersampleOp
/** 2X supersampling (each pixel is replicated 2 times in each dimension
  * in the output image). The approach taken here seems on the surface
  * to be wasteful: multiple threads load the same value. But, with
  * GPU L1/L2 cacheing, the performance impact should be minimal.
  *
  * @author Dick Carter
  *
  * @param in The virtual field register of the input to be supersampled.
  * @param operation The binary opcode for this operation.
  * @param resultType The FieldType of the result of this kernel.
  * @param addressMode The addressing mode of this kernel.
  */
private[cogx]
class SupersampleHyperKernel private (in: Array[VirtualFieldRegister],
    operation: Opcode,
    resultType: FieldType,
    addressMode: AddressingMode)
  extends HyperKernel(operation, in, resultType, addressMode){
  val inType = in(0).fieldType
  val code = new StringBuffer
  // Each output thread reads the input element at halved coordinates, so each
  // input element is replicated 2x along every dimension of the output.
  code append setLayerRowColumn(inType, "_layer/2", "_row/2", "_column/2")
  code append "@out0 = readNonlocal(@in0);"
  addCode(code.toString)
}
/** Factory object for creating kernels of this type.
  */
private[cogx]
object SupersampleHyperKernel {
  /** Create a hyperkernel that supersamples a 1D, 2D or 3D tensor field.
    *
    * @param in The virtual field register of the input to be supersampled.
    * @param operation The binary opcode for this operation (must be SupersampleOp).
    * @param resultType The FieldType of the result of this kernel; must equal
    *                   the input type with a supersampled field shape.
    * @return The synthesized hyperkernel.
    */
  def apply(in: Array[VirtualFieldRegister], operation: Opcode, resultType: FieldType):
    HyperKernel =
  {
    require(in.length == 1)
    val inType = in(0).fieldType
    require(inType.dimensions >= 1 && inType.dimensions <= 3)
    // Small tensor fields can be addressed whole-tensor at a time; larger
    // tensors are addressed per element.
    val addressing =
      if (isSmallTensorField(inType))
        SmallTensorAddressing
      else
        TensorElementAddressing
    val outputShape = inType.fieldShape.supersample
    val expectedOutputType = new FieldType(outputShape, inType.tensorShape, inType.elementType)
    require(expectedOutputType == resultType)
    require(operation == SupersampleOp)
    new SupersampleHyperKernel(in, operation, resultType, addressing)
  }
}
| hpe-cct/cct-core | src/main/scala/cogx/compiler/codegenerator/opencl/hyperkernels/SupersampleHyperKernel.scala | Scala | apache-2.0 | 3,285 |
package com.meteorcode.pathway.test
import me.hawkweisman.util.random._
import org.scalacheck.Gen
import org.scalatest.mock.MockitoSugar
import org.scalatest.prop.PropertyChecks
import org.scalatest.{Matchers, WordSpec, BeforeAndAfterEach}
/**
 * General-purpose base class for a Pathway ScalaTest spec: WordSpec style
 * with matchers, Mockito sugar and property-based checks mixed in.
 *
 * Created by hawk on 5/30/15.
 */
abstract class PathwaySpec
  extends WordSpec
  with Matchers
  with MockitoSugar
  with PropertyChecks
  with BeforeAndAfterEach{

  override def beforeEach() {
    super.beforeEach()
    // Request a GC before every test; presumably to reduce cross-test memory
    // pressure/interference -- TODO confirm why this is required.
    System.gc()
  }
}
/** ScalaCheck generators for valid and invalid Java identifiers. */
trait IdentGenerators {
  val random = new scala.util.Random

  // generates random Java identifiers
  val ident: Gen[String] = for {
    len ← Gen.choose(1,500) // 500 seems reasonable
    name ← randomJavaIdent(len)(random)
  } yield name

  // Identifiers prefixed with a character that is illegal in start position
  // (digits and assorted punctuation).
  val invalidIdentStart: Gen[String] = for {
    start ← Gen.oneOf('1','2','3','4','5','6','7','8','9','-','+','*','?','\'','{','}',';',',')
    len ← Gen.choose(1,500)
  } yield s"$start${randomJavaIdent(len)(random)}"

  // (position, identifier) pairs where the character at `position` has been
  // replaced with one that is illegal anywhere in a Java identifier.
  // NOTE(review): `pos` may equal `len`, in which case patch() appends the
  // invalid character rather than replacing one -- confirm this is intended.
  val invalidAt: Gen[(Int,String)] = for {
    len ← Gen.choose(1,500)
    pos ← Gen.choose(0,len)
    invalid ← Gen.oneOf('-','+','*','?','\'','{','}',';',',', '/', '[',']','"','\\','|')
  } yield (pos, randomJavaIdent(len)(random).patch(pos, s"$invalid", 1))

  // (position, identifier) pairs where the character at `position` has been
  // replaced with a space.
  val spaceAt: Gen[(Int,String)] = for {
    len ← Gen.choose(1,500)
    pos ← Gen.choose(0,len)
  } yield (pos, randomJavaIdent(len)(random).patch(pos, " ", 1))

  // Java reserved words, which are never valid identifiers.
  val reservedWords: Gen[String] = Gen.oneOf("abstract", "assert", "boolean",
    "break", "byte", "case", "catch", "char", "class", "const",
    "continue", "do", "double", "else", "enum", "extends", "final",
    "finally", "float", "for", "if", "goto", "implements",
    "import", "instanceof", "int", "interface", "long", "native",
    "new", "package", "private", "protected", "public", "return",
    "short", "static", "strictfp", "super", "switch",
    "synchronized", "this", "throw", "throws", "transient", "try",
    "void", "volatile", "while")
}
| MeteorCode/Pathway | src/test/scala/com/meteorcode/pathway/test/PathwaySpec.scala | Scala | mit | 2,018 |
package de.frosner.broccoli.websocket
import de.frosner.broccoli.models.Template.templateApiWrites
import de.frosner.broccoli.models._
import enumeratum._
import play.api.libs.json._
import scala.collection.immutable
/** A message pushed from the server to a client over the web socket. */
sealed trait OutgoingMessage

object OutgoingMessage {

  /**
   * The type of an outgoing message on the web socket.
   *
   * Entry names are uncapitalised, i.e., start with a lowercase letter, for compatibility with the previous Scala Enum
   * declaration and thus the webui frontend.
   */
  sealed trait Type extends EnumEntry with EnumEntry.Uncapitalised

  object Type extends Enum[Type] with PlayJsonEnum[Type] {
    override val values: immutable.IndexedSeq[Type] = findValues

    case object ListTemplates extends Type
    case object ListInstances extends Type
    case object AboutInfo extends Type
    case object Error extends Type
    case object Notification extends Type
    case object AddInstanceSuccess extends Type
    case object AddInstanceError extends Type
    case object DeleteInstanceSuccess extends Type
    case object DeleteInstanceError extends Type
    case object UpdateInstanceSuccess extends Type
    case object UpdateInstanceError extends Type
    case object GetInstanceTasksSuccess extends Type
    case object GetInstanceTasksError extends Type
  }

  // One case class per Type entry; the payload type determines the JSON shape.
  final case class ListTemplates(templates: Seq[Template]) extends OutgoingMessage
  final case class ListInstances(instances: Seq[InstanceWithStatus]) extends OutgoingMessage
  final case class AboutInfoMsg(info: AboutInfo) extends OutgoingMessage
  final case class Error(error: String) extends OutgoingMessage
  final case class Notification(message: String) extends OutgoingMessage
  final case class AddInstanceSuccess(result: InstanceCreated) extends OutgoingMessage
  final case class AddInstanceError(error: InstanceError) extends OutgoingMessage
  final case class DeleteInstanceSuccess(result: InstanceDeleted) extends OutgoingMessage
  final case class DeleteInstanceError(error: InstanceError) extends OutgoingMessage
  final case class UpdateInstanceSuccess(result: InstanceUpdated) extends OutgoingMessage
  final case class UpdateInstanceError(error: InstanceError) extends OutgoingMessage
  final case class GetInstanceTasksSuccess(tasks: InstanceTasks) extends OutgoingMessage
  final case class GetInstanceTasksError(instanceId: String, error: InstanceError) extends OutgoingMessage

  /**
   * JSON writes for a message outgoing to a websocket.
   *
   * The JSON structure is not particularly straight-forward and deviates from what a generated Reads instance would
   * deserialize. However, it maintains compatibility with the earlier implementation of OutgoingWsMessage that used
   * a dedicated "type" enum and an unsafe any-typed payload.
   */
  implicit val outgoingMessageWrites: Writes[OutgoingMessage] =
    Writes {
      case ListTemplates(templates) => write(Type.ListTemplates, templates)
      case ListInstances(instances) => write(Type.ListInstances, instances)
      case AboutInfoMsg(info) => write(Type.AboutInfo, info)
      case Error(error) => write(Type.Error, error)
      case Notification(message) => write(Type.Notification, message)
      case AddInstanceSuccess(result) => write(Type.AddInstanceSuccess, result)
      case AddInstanceError(error) => write(Type.AddInstanceError, error)
      case DeleteInstanceSuccess(result) => write(Type.DeleteInstanceSuccess, result)
      case DeleteInstanceError(error) => write(Type.DeleteInstanceError, error)
      case UpdateInstanceSuccess(result) => write(Type.UpdateInstanceSuccess, result)
      case UpdateInstanceError(error) => write(Type.UpdateInstanceError, error)
      case GetInstanceTasksSuccess(result) => write(Type.GetInstanceTasksSuccess, result)
      // GetInstanceTasksError is the only whole-message payload, so it needs a
      // locally generated Writes instance rather than one for a single field.
      case error: GetInstanceTasksError =>
        write(Type.GetInstanceTasksError, error)(Json.writes[GetInstanceTasksError])
    }

  // Wraps any payload in the envelope { "messageType": ..., "payload": ... }.
  private def write[P](`type`: Type, payload: P)(implicit writesP: Writes[P]): JsObject =
    Json.obj("messageType" -> `type`, "payload" -> payload)
}
| FRosner/cluster-broccoli | server/src/main/scala/de/frosner/broccoli/websocket/OutgoingMessage.scala | Scala | apache-2.0 | 4,152 |
package au.id.cxd.math.model.evaluation
/**
 * Computes the peak percentage deviation (PPD) between an observed and a
 * simulated series: the relative difference of their maxima, expressed as a
 * percentage of the observed peak.
 *
 * @param obs observed values; must be non-empty
 * @param sim simulated values; must be non-empty
 */
class PeakPercentDeviation(val obs: Seq[Double], val sim: Seq[Double]) {

  /**
   * Evaluates 100 * (max(sim) - max(obs)) / max(obs).
   *
   * Positive when the simulation over-predicts the observed peak, negative
   * when it under-predicts. If the observed peak is exactly 0.0 the result
   * is an IEEE Infinity/NaN, as with any double division by zero.
   *
   * @return the peak percentage deviation
   * @throws IllegalArgumentException if either sequence is empty (previously
   *                                  this surfaced as an opaque
   *                                  UnsupportedOperationException from `max`)
   */
  def op(): Double = {
    require(obs.nonEmpty, "obs must be non-empty")
    require(sim.nonEmpty, "sim must be non-empty")
    val maxObs = obs.max
    val maxSim = sim.max
    100.0 * (maxSim - maxObs) / maxObs
  }
}
/** Convenience factory: builds the metric and evaluates it in a single call. */
object PeakPercentDeviation {
  def apply(obs: Seq[Double], sim: Seq[Double]): Double = {
    val metric = new PeakPercentDeviation(obs, sim)
    metric.op()
  }
}
| cxd/scala-au.id.cxd.math | math/src/main/scala/au/id/cxd/math/model/evaluation/PeakPercentDeviation.scala | Scala | mit | 463 |
package org.juanitodread.pitayafinch.nlp.tools.sentences
import org.juanitodread.pitayafinch.UnitSpec
import org.juanitodread.pitayafinch.formatters.NumberFormatter
import org.juanitodread.pitayafinch.model.nlp.sentences.FinderResult
class FinderSpec extends UnitSpec {
  // Classpath location of the OpenNLP English sentence model shared by the tests.
  val trainingModel: String = "/nlp/models/sentences/en-sent.bin"

  "A Finder" should "fail when provided training model does not exist" in {
    assertThrows[IllegalArgumentException] {
      new Finder("invalid-path")
    }
  }

  it should "get an empty list of sentences when empty text" in {
    val sut: Finder = new Finder(trainingModel)
    assert(sut.find("") === List.empty[FinderResult])
  }

  it should "get a list of sentences" in {
    val sut: Finder = new Finder(trainingModel)
    val input: String = "When determining the end of sentences we need to consider several factors. " +
      "Sentences may end with exclamation marks! Or possibly questions marks? " +
      "Within sentences we may find numbers like 3.14159, abbreviations such as found in Mr. Smith, " +
      "and possibly ellipses either within a sentence ..., or at the end of a sentence..."
    val expected: List[String] = List(
      "When determining the end of sentences we need to consider several factors.",
      "Sentences may end with exclamation marks!",
      "Or possibly questions marks?",
      "Within sentences we may find numbers like 3.14159, abbreviations such as found in Mr. Smith, " +
        "and possibly ellipses either within a sentence ..., or at the end of a sentence...")
    val actual: List[String] = sut.find(input).map(_.sentence) // ignoring confidence
    assert(actual === expected)
  }

  "A Finder object" should "extends NumberFormatter trait" in {
    assert(Finder.isInstanceOf[NumberFormatter] === true)
  }
}
| juanitodread/pitaya-finch | src/test/scala/org/juanitodread/pitayafinch/nlp/tools/sentences/FinderSpec.scala | Scala | apache-2.0 | 1,800 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.view.testutil
/**
 * DDL for the TPC-DS 1.4 benchmark schema plus a handful of project-specific
 * tables (IoT SDR counters, trade-flow and generic fact tables) used by the
 * materialized-view test utilities.
 *
 * Oddities such as the trailing space in `s_country ` and the misspelled
 * `s_tax_precentage` column come from the TPC-DS specification itself and are
 * preserved deliberately — do not "fix" them, queries reference these names.
 */
object Tpcds_1_4_Tables {
  // One CREATE TABLE statement per element; callers execute them in order.
  val tpcds1_4Tables = Seq[String](
    // ---- TPC-DS fact tables ----
    s"""
       |CREATE TABLE IF NOT EXISTS catalog_sales (
       |  `cs_sold_date_sk` int,
       |  `cs_sold_time_sk` int,
       |  `cs_ship_date_sk` int,
       |  `cs_bill_customer_sk` int,
       |  `cs_bill_cdemo_sk` int,
       |  `cs_bill_hdemo_sk` int,
       |  `cs_bill_addr_sk` int,
       |  `cs_ship_customer_sk` int,
       |  `cs_ship_cdemo_sk` int,
       |  `cs_ship_hdemo_sk` int,
       |  `cs_ship_addr_sk` int,
       |  `cs_call_center_sk` int,
       |  `cs_catalog_page_sk` int,
       |  `cs_ship_mode_sk` int,
       |  `cs_warehouse_sk` int,
       |  `cs_item_sk` int,
       |  `cs_promo_sk` int,
       |  `cs_order_number` bigint,
       |  `cs_quantity` int,
       |  `cs_wholesale_cost` decimal(7,2),
       |  `cs_list_price` decimal(7,2),
       |  `cs_sales_price` decimal(7,2),
       |  `cs_ext_discount_amt` decimal(7,2),
       |  `cs_ext_sales_price` decimal(7,2),
       |  `cs_ext_wholesale_cost` decimal(7,2),
       |  `cs_ext_list_price` decimal(7,2),
       |  `cs_ext_tax` decimal(7,2),
       |  `cs_coupon_amt` decimal(7,2),
       |  `cs_ext_ship_cost` decimal(7,2),
       |  `cs_net_paid` decimal(7,2),
       |  `cs_net_paid_inc_tax` decimal(7,2),
       |  `cs_net_paid_inc_ship` decimal(7,2),
       |  `cs_net_paid_inc_ship_tax` decimal(7,2),
       |  `cs_net_profit` decimal(7,2)
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS catalog_returns (
       |  `cr_returned_date_sk` int,
       |  `cr_returned_time_sk` int,
       |  `cr_item_sk` int,
       |  `cr_refunded_customer_sk` int,
       |  `cr_refunded_cdemo_sk` int,
       |  `cr_refunded_hdemo_sk` int,
       |  `cr_refunded_addr_sk` int,
       |  `cr_returning_customer_sk` int,
       |  `cr_returning_cdemo_sk` int,
       |  `cr_returning_hdemo_sk` int,
       |  `cr_returning_addr_sk` int,
       |  `cr_call_center_sk` int,
       |  `cr_catalog_page_sk` int,
       |  `cr_ship_mode_sk` int,
       |  `cr_warehouse_sk` int,
       |  `cr_reason_sk` int,
       |  `cr_order_number` bigint,
       |  `cr_return_quantity` int,
       |  `cr_return_amount` decimal(7,2),
       |  `cr_return_tax` decimal(7,2),
       |  `cr_return_amt_inc_tax` decimal(7,2),
       |  `cr_fee` decimal(7,2),
       |  `cr_return_ship_cost` decimal(7,2),
       |  `cr_refunded_cash` decimal(7,2),
       |  `cr_reversed_charge` decimal(7,2),
       |  `cr_store_credit` decimal(7,2),
       |  `cr_net_loss` decimal(7,2)
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS inventory (
       |  `inv_date_sk` int,
       |  `inv_item_sk` int,
       |  `inv_warehouse_sk` int,
       |  `inv_quantity_on_hand` int
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS store_sales (
       |  `ss_sold_date_sk` int,
       |  `ss_sold_time_sk` int,
       |  `ss_item_sk` int,
       |  `ss_customer_sk` int,
       |  `ss_cdemo_sk` int,
       |  `ss_hdemo_sk` int,
       |  `ss_addr_sk` int,
       |  `ss_store_sk` int,
       |  `ss_promo_sk` int,
       |  `ss_ticket_number` bigint,
       |  `ss_quantity` int,
       |  `ss_wholesale_cost` decimal(7,2),
       |  `ss_list_price` decimal(7,2),
       |  `ss_sales_price` decimal(7,2),
       |  `ss_ext_discount_amt` decimal(7,2),
       |  `ss_ext_sales_price` decimal(7,2),
       |  `ss_ext_wholesale_cost` decimal(7,2),
       |  `ss_ext_list_price` decimal(7,2),
       |  `ss_ext_tax` decimal(7,2),
       |  `ss_coupon_amt` decimal(7,2),
       |  `ss_net_paid` decimal(7,2),
       |  `ss_net_paid_inc_tax` decimal(7,2),
       |  `ss_net_profit` decimal(7,2)
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS store_returns (
       |  `sr_returned_date_sk` int,
       |  `sr_return_time_sk` int,
       |  `sr_item_sk` int,
       |  `sr_customer_sk` int,
       |  `sr_cdemo_sk` int,
       |  `sr_hdemo_sk` int,
       |  `sr_addr_sk` int,
       |  `sr_store_sk` int,
       |  `sr_reason_sk` int,
       |  `sr_ticket_number` bigint,
       |  `sr_return_quantity` int,
       |  `sr_return_amt` decimal(7,2),
       |  `sr_return_tax` decimal(7,2),
       |  `sr_return_amt_inc_tax` decimal(7,2),
       |  `sr_fee` decimal(7,2),
       |  `sr_return_ship_cost` decimal(7,2),
       |  `sr_refunded_cash` decimal(7,2),
       |  `sr_reversed_charge` decimal(7,2),
       |  `sr_store_credit` decimal(7,2),
       |  `sr_net_loss` decimal(7,2)
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS web_sales (
       |  `ws_sold_date_sk` int,
       |  `ws_sold_time_sk` int,
       |  `ws_ship_date_sk` int,
       |  `ws_item_sk` int,
       |  `ws_bill_customer_sk` int,
       |  `ws_bill_cdemo_sk` int,
       |  `ws_bill_hdemo_sk` int,
       |  `ws_bill_addr_sk` int,
       |  `ws_ship_customer_sk` int,
       |  `ws_ship_cdemo_sk` int,
       |  `ws_ship_hdemo_sk` int,
       |  `ws_ship_addr_sk` int,
       |  `ws_web_page_sk` int,
       |  `ws_web_site_sk` int,
       |  `ws_ship_mode_sk` int,
       |  `ws_warehouse_sk` int,
       |  `ws_promo_sk` int,
       |  `ws_order_number` bigint,
       |  `ws_quantity` int,
       |  `ws_wholesale_cost` decimal(7,2),
       |  `ws_list_price` decimal(7,2),
       |  `ws_sales_price` decimal(7,2),
       |  `ws_ext_discount_amt` decimal(7,2),
       |  `ws_ext_sales_price` decimal(7,2),
       |  `ws_ext_wholesale_cost` decimal(7,2),
       |  `ws_ext_list_price` decimal(7,2),
       |  `ws_ext_tax` decimal(7,2),
       |  `ws_coupon_amt` decimal(7,2),
       |  `ws_ext_ship_cost` decimal(7,2),
       |  `ws_net_paid` decimal(7,2),
       |  `ws_net_paid_inc_tax` decimal(7,2),
       |  `ws_net_paid_inc_ship` decimal(7,2),
       |  `ws_net_paid_inc_ship_tax` decimal(7,2),
       |  `ws_net_profit` decimal(7,2)
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS web_returns (
       |  `wr_returned_date_sk` int,
       |  `wr_returned_time_sk` int,
       |  `wr_item_sk` int,
       |  `wr_refunded_customer_sk` int,
       |  `wr_refunded_cdemo_sk` int,
       |  `wr_refunded_hdemo_sk` int,
       |  `wr_refunded_addr_sk` int,
       |  `wr_returning_customer_sk` int,
       |  `wr_returning_cdemo_sk` int,
       |  `wr_returning_hdemo_sk` int,
       |  `wr_returning_addr_sk` int,
       |  `wr_web_page_sk` int,
       |  `wr_reason_sk` int,
       |  `wr_order_number` bigint,
       |  `wr_return_quantity` int,
       |  `wr_return_amt` decimal(7,2),
       |  `wr_return_tax` decimal(7,2),
       |  `wr_return_amt_inc_tax` decimal(7,2),
       |  `wr_fee` decimal(7,2),
       |  `wr_return_ship_cost` decimal(7,2),
       |  `wr_refunded_cash` decimal(7,2),
       |  `wr_reversed_charge` decimal(7,2),
       |  `wr_account_credit` decimal(7,2),
       |  `wr_net_loss` decimal(7,2)
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    // ---- TPC-DS dimension tables ----
    s"""
       |CREATE TABLE IF NOT EXISTS call_center (
       |  `cc_call_center_sk` int,
       |  `cc_call_center_id` string,
       |  `cc_rec_start_date` date,
       |  `cc_rec_end_date` date,
       |  `cc_closed_date_sk` int,
       |  `cc_open_date_sk` int,
       |  `cc_name` string,
       |  `cc_class` string,
       |  `cc_employees` int,
       |  `cc_sq_ft` int,
       |  `cc_hours` string,
       |  `cc_manager` string,
       |  `cc_mkt_id` int,
       |  `cc_mkt_class` string,
       |  `cc_mkt_desc` string,
       |  `cc_market_manager` string,
       |  `cc_division` int,
       |  `cc_division_name` string,
       |  `cc_company` int,
       |  `cc_company_name` string,
       |  `cc_street_number` string,
       |  `cc_street_name` string,
       |  `cc_street_type` string,
       |  `cc_suite_number` string,
       |  `cc_city` string,
       |  `cc_county` string,
       |  `cc_state` string,
       |  `cc_zip` string,
       |  `cc_country` string,
       |  `cc_gmt_offset` decimal(5,2),
       |  `cc_tax_percentage` decimal(5,2)
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS catalog_page (
       |  `cp_catalog_page_sk` int,
       |  `cp_catalog_page_id` string,
       |  `cp_start_date_sk` int,
       |  `cp_end_date_sk` int,
       |  `cp_department` string,
       |  `cp_catalog_number` int,
       |  `cp_catalog_page_number` int,
       |  `cp_description` string,
       |  `cp_type` string
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS customer (
       |  `c_customer_sk` int,
       |  `c_customer_id` string,
       |  `c_current_cdemo_sk` int,
       |  `c_current_hdemo_sk` int,
       |  `c_current_addr_sk` int,
       |  `c_first_shipto_date_sk` int,
       |  `c_first_sales_date_sk` int,
       |  `c_salutation` string,
       |  `c_first_name` string,
       |  `c_last_name` string,
       |  `c_preferred_cust_flag` string,
       |  `c_birth_day` int,
       |  `c_birth_month` int,
       |  `c_birth_year` int,
       |  `c_birth_country` string,
       |  `c_login` string,
       |  `c_email_address` string,
       |  `c_last_review_date` string
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS customer_address (
       |  `ca_address_sk` int,
       |  `ca_address_id` string,
       |  `ca_street_number` string,
       |  `ca_street_name` string,
       |  `ca_street_type` string,
       |  `ca_suite_number` string,
       |  `ca_city` string,
       |  `ca_county` string,
       |  `ca_state` string,
       |  `ca_zip` string,
       |  `ca_country` string,
       |  `ca_gmt_offset` decimal(5,2),
       |  `ca_location_type` string
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS customer_demographics (
       |  `cd_demo_sk` int,
       |  `cd_gender` string,
       |  `cd_marital_status` string,
       |  `cd_education_status` string,
       |  `cd_purchase_estimate` int,
       |  `cd_credit_rating` string,
       |  `cd_dep_count` int,
       |  `cd_dep_employed_count` int,
       |  `cd_dep_college_count` int
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS date_dim (
       |  `d_date_sk` int,
       |  `d_date_id` string,
       |  `d_date` date,
       |  `d_month_seq` int,
       |  `d_week_seq` int,
       |  `d_quarter_seq` int,
       |  `d_year` int,
       |  `d_dow` int,
       |  `d_moy` int,
       |  `d_dom` int,
       |  `d_qoy` int,
       |  `d_fy_year` int,
       |  `d_fy_quarter_seq` int,
       |  `d_fy_week_seq` int,
       |  `d_day_name` string,
       |  `d_quarter_name` string,
       |  `d_holiday` string,
       |  `d_weekend` string,
       |  `d_following_holiday` string,
       |  `d_first_dom` int,
       |  `d_last_dom` int,
       |  `d_same_day_ly` int,
       |  `d_same_day_lq` int,
       |  `d_current_day` string,
       |  `d_current_week` string,
       |  `d_current_month` string,
       |  `d_current_quarter` string,
       |  `d_current_year` string
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS household_demographics (
       |  `hd_demo_sk` int,
       |  `hd_income_band_sk` int,
       |  `hd_buy_potential` string,
       |  `hd_dep_count` int,
       |  `hd_vehicle_count` int
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS income_band (
       |  `ib_income_band_sk` int,
       |  `ib_lower_bound` int,
       |  `ib_upper_bound` int
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS item (
       |  `i_item_sk` int,
       |  `i_item_id` string,
       |  `i_rec_start_date` date,
       |  `i_rec_end_date` date,
       |  `i_item_desc` string,
       |  `i_current_price` decimal(7,2),
       |  `i_wholesale_cost` decimal(7,2),
       |  `i_brand_id` int,
       |  `i_brand` string,
       |  `i_class_id` int,
       |  `i_class` string,
       |  `i_category_id` int,
       |  `i_category` string,
       |  `i_manufact_id` int,
       |  `i_manufact` string,
       |  `i_size` string,
       |  `i_formulation` string,
       |  `i_color` string,
       |  `i_units` string,
       |  `i_container` string,
       |  `i_manager_id` int,
       |  `i_product_name` string
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS promotion (
       |  `p_promo_sk` int,
       |  `p_promo_id` string,
       |  `p_start_date_sk` int,
       |  `p_end_date_sk` int,
       |  `p_item_sk` int,
       |  `p_cost` decimal(15,2),
       |  `p_response_target` int,
       |  `p_promo_name` string,
       |  `p_channel_dmail` string,
       |  `p_channel_email` string,
       |  `p_channel_catalog` string,
       |  `p_channel_tv` string,
       |  `p_channel_radio` string,
       |  `p_channel_press` string,
       |  `p_channel_event` string,
       |  `p_channel_demo` string,
       |  `p_channel_details` string,
       |  `p_purpose` string,
       |  `p_discount_active` string
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS reason (
       |  `r_reason_sk` int,
       |  `r_reason_id` string,
       |  `r_reason_desc` string
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS ship_mode (
       |  `sm_ship_mode_sk` int,
       |  `sm_ship_mode_id` string,
       |  `sm_type` string,
       |  `sm_code` string,
       |  `sm_carrier` string,
       |  `sm_contract` string
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    // NOTE: `s_country ` (trailing space) and `s_tax_precentage` (misspelling)
    // match the TPC-DS spec; queries depend on these exact names.
    s"""
       |CREATE TABLE IF NOT EXISTS store (
       |  `s_store_sk` int,
       |  `s_store_id` string,
       |  `s_rec_start_date` date,
       |  `s_rec_end_date` date,
       |  `s_closed_date_sk` int,
       |  `s_store_name` string,
       |  `s_number_employees` int,
       |  `s_floor_space` int,
       |  `s_hours` string,
       |  `s_manager` string,
       |  `s_market_id` int,
       |  `s_geography_class` string,
       |  `s_market_desc` string,
       |  `s_market_manager` string,
       |  `s_division_id` int,
       |  `s_division_name` string,
       |  `s_company_id` int,
       |  `s_company_name` string,
       |  `s_street_number` string,
       |  `s_street_name` string,
       |  `s_street_type` string,
       |  `s_suite_number` string,
       |  `s_city` string,
       |  `s_county` string,
       |  `s_state` string,
       |  `s_zip` string,
       |  `s_country ` string,
       |  `s_gmt_offset` decimal(5,2),
       |  `s_tax_precentage` decimal(5,2)
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS time_dim (
       |  `t_time_sk` int,
       |  `t_time_id` string,
       |  `t_time` int,
       |  `t_hour` int,
       |  `t_minute` int,
       |  `t_second` int,
       |  `t_am_pm` string,
       |  `t_shift` string,
       |  `t_sub_shift` string,
       |  `t_meal_time` string
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS warehouse (
       |  `w_warehouse_sk` int,
       |  `w_warehouse_id` string,
       |  `w_warehouse_name` string,
       |  `w_warehouse_sq_ft` int,
       |  `w_street_number` string,
       |  `w_street_name` string,
       |  `w_street_type` string,
       |  `w_suite_number` string,
       |  `w_city` string,
       |  `w_county` string,
       |  `w_state` string,
       |  `w_zip` string,
       |  `w_country` string,
       |  `w_gmt_offset` decimal(5,2)
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS web_page (
       |  `wp_web_page_sk` int,
       |  `wp_web_page_id` string,
       |  `wp_rec_start_date` date,
       |  `wp_rec_end_date` date,
       |  `wp_creation_date_sk` int,
       |  `wp_access_date_sk` int,
       |  `wp_autogen_flag` string,
       |  `wp_customer_sk` int,
       |  `wp_url` string,
       |  `wp_type` string,
       |  `wp_char_count` int,
       |  `wp_link_count` int,
       |  `wp_image_count` int,
       |  `wp_max_ad_count` int
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS web_site (
       |  `web_site_sk` int,
       |  `web_site_id` string,
       |  `web_rec_start_date` date,
       |  `web_rec_end_date` date,
       |  `web_name` string,
       |  `web_open_date_sk` int,
       |  `web_close_date_sk` int,
       |  `web_class` string,
       |  `web_manager` string,
       |  `web_mkt_id` int,
       |  `web_mkt_class` string,
       |  `web_mkt_desc` string,
       |  `web_market_manager` string,
       |  `web_company_id` int,
       |  `web_company_name` string,
       |  `web_street_number` string,
       |  `web_street_name` string,
       |  `web_street_type` string,
       |  `web_suite_number` string,
       |  `web_city` string,
       |  `web_county` string,
       |  `web_state` string,
       |  `web_zip` string,
       |  `web_country` string,
       |  `web_gmt_offset` decimal(5,2),
       |  `web_tax_percentage` decimal(5,2)
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    // ---- Project-specific tables (not part of TPC-DS) ----
    // IoT SDR table: 100 generic string dimensions + 100 double counters,
    // pipe-delimited rather than comma-delimited.
    s"""
       |CREATE TABLE IF NOT EXISTS sdr_dyn_seq_custer_iot_all_hour_60min
       |(
       |  `dim_1` String,
       |  `dim_51` String,
       |  `starttime` String,
       |  `dim_2` String,
       |  `dim_3` String,
       |  `dim_4` String,
       |  `dim_5` String,
       |  `dim_6` String,
       |  `dim_7` String,
       |  `dim_8` String,
       |  `dim_9` String,
       |  `dim_10` String,
       |  `dim_11` String,
       |  `dim_12` String,
       |  `dim_13` String,
       |  `dim_14` String,
       |  `dim_15` String,
       |  `dim_16` String,
       |  `dim_17` String,
       |  `dim_18` String,
       |  `dim_19` String,
       |  `dim_20` String,
       |  `dim_21` String,
       |  `dim_22` String,
       |  `dim_23` String,
       |  `dim_24` String,
       |  `dim_25` String,
       |  `dim_26` String,
       |  `dim_27` String,
       |  `dim_28` String,
       |  `dim_29` String,
       |  `dim_30` String,
       |  `dim_31` String,
       |  `dim_32` String,
       |  `dim_33` String,
       |  `dim_34` String,
       |  `dim_35` String,
       |  `dim_36` String,
       |  `dim_37` String,
       |  `dim_38` String,
       |  `dim_39` String,
       |  `dim_40` String,
       |  `dim_41` String,
       |  `dim_42` String,
       |  `dim_43` String,
       |  `dim_44` String,
       |  `dim_45` String,
       |  `dim_46` String,
       |  `dim_47` String,
       |  `dim_48` String,
       |  `dim_49` String,
       |  `dim_50` String,
       |  `dim_52` String,
       |  `dim_53` String,
       |  `dim_54` String,
       |  `dim_55` String,
       |  `dim_56` String,
       |  `dim_57` String,
       |  `dim_58` String,
       |  `dim_59` String,
       |  `dim_60` String,
       |  `dim_61` String,
       |  `dim_62` String,
       |  `dim_63` String,
       |  `dim_64` String,
       |  `dim_65` String,
       |  `dim_66` String,
       |  `dim_67` String,
       |  `dim_68` String,
       |  `dim_69` String,
       |  `dim_70` String,
       |  `dim_71` String,
       |  `dim_72` String,
       |  `dim_73` String,
       |  `dim_74` String,
       |  `dim_75` String,
       |  `dim_76` String,
       |  `dim_77` String,
       |  `dim_78` String,
       |  `dim_79` String,
       |  `dim_80` String,
       |  `dim_81` String,
       |  `dim_82` String,
       |  `dim_83` String,
       |  `dim_84` String,
       |  `dim_85` String,
       |  `dim_86` String,
       |  `dim_87` String,
       |  `dim_88` String,
       |  `dim_89` String,
       |  `dim_90` String,
       |  `dim_91` String,
       |  `dim_92` String,
       |  `dim_93` String,
       |  `dim_94` String,
       |  `dim_95` String,
       |  `dim_96` String,
       |  `dim_97` String,
       |  `dim_98` String,
       |  `dim_99` String,
       |  `dim_100` String,
       |  `counter_1` double,
       |  `counter_2` double,
       |  `counter_3` double,
       |  `counter_4` double,
       |  `counter_5` double,
       |  `counter_6` double,
       |  `counter_7` double,
       |  `counter_8` double,
       |  `counter_9` double,
       |  `counter_10` double,
       |  `counter_11` double,
       |  `counter_12` double,
       |  `counter_13` double,
       |  `counter_14` double,
       |  `counter_15` double,
       |  `counter_16` double,
       |  `counter_17` double,
       |  `counter_18` double,
       |  `counter_19` double,
       |  `counter_20` double,
       |  `counter_21` double,
       |  `counter_22` double,
       |  `counter_23` double,
       |  `counter_24` double,
       |  `counter_25` double,
       |  `counter_26` double,
       |  `counter_27` double,
       |  `counter_28` double,
       |  `counter_29` double,
       |  `counter_30` double,
       |  `counter_31` double,
       |  `counter_32` double,
       |  `counter_33` double,
       |  `counter_34` double,
       |  `counter_35` double,
       |  `counter_36` double,
       |  `counter_37` double,
       |  `counter_38` double,
       |  `counter_39` double,
       |  `counter_40` double,
       |  `counter_41` double,
       |  `counter_42` double,
       |  `counter_43` double,
       |  `counter_44` double,
       |  `counter_45` double,
       |  `counter_46` double,
       |  `counter_47` double,
       |  `counter_48` double,
       |  `counter_49` double,
       |  `counter_50` double,
       |  `counter_51` double,
       |  `counter_52` double,
       |  `counter_53` double,
       |  `counter_54` double,
       |  `counter_55` double,
       |  `counter_56` double,
       |  `counter_57` double,
       |  `counter_58` double,
       |  `counter_59` double,
       |  `counter_60` double,
       |  `counter_61` double,
       |  `counter_62` double,
       |  `counter_63` double,
       |  `counter_64` double,
       |  `counter_65` double,
       |  `counter_66` double,
       |  `counter_67` double,
       |  `counter_68` double,
       |  `counter_69` double,
       |  `counter_70` double,
       |  `counter_71` double,
       |  `counter_72` double,
       |  `counter_73` double,
       |  `counter_74` double,
       |  `counter_75` double,
       |  `counter_76` double,
       |  `counter_77` double,
       |  `counter_78` double,
       |  `counter_79` double,
       |  `counter_80` double,
       |  `counter_81` double,
       |  `counter_82` double,
       |  `counter_83` double,
       |  `counter_84` double,
       |  `counter_85` double,
       |  `counter_86` double,
       |  `counter_87` double,
       |  `counter_88` double,
       |  `counter_89` double,
       |  `counter_90` double,
       |  `counter_91` double,
       |  `counter_92` double,
       |  `counter_93` double,
       |  `counter_94` double,
       |  `counter_95` double,
       |  `counter_96` double,
       |  `counter_97` double,
       |  `counter_98` double,
       |  `counter_99` double,
       |  `counter_100` double,
       |  `batchno` double
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS dim_apn_iot
       |(
       |  `city_ascription` String,
       |  `industry` String,
       |  `apn_name` String,
       |  `service_level` String,
       |  `customer_name` String,
       |  `id` bigint
       |)
       |ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
       |STORED AS TEXTFILE
     """.stripMargin.trim,
    // Trade-flow tables stored as Parquet (no row-format clause).
    s"""
       |CREATE TABLE IF NOT EXISTS tradeflow_all (
       | m_month smallint,
       | hs_code string ,
       | country smallint,
       | dollar_value double ,
       | quantity double ,
       | unit smallint,
       | b_country smallint,
       | imex smallint,
       | y_year smallint)
       |STORED AS parquet
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS country (
       | countryid smallint ,
       | country_en string ,
       | country_cn string )
       |STORED AS parquet
     """.stripMargin.trim,
    s"""
       |CREATE TABLE IF NOT EXISTS updatetime (
       | countryid smallint ,
       | imex smallint ,
       | hs_len smallint ,
       | minstartdate string ,
       | startdate string ,
       | newdate string ,
       | minnewdate string )
       |STORED AS parquet
     """.stripMargin.trim,
    // Two identically-shaped employee/project fact tables used by MV tests.
    s"""
       | CREATE TABLE IF NOT EXISTS fact_table1 (empname String, designation String, doj Timestamp,
       |  workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
       |  projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,
       |  utilization int,salary int)
       |STORED AS parquet
     """.stripMargin.trim,
    s"""
       | CREATE TABLE IF NOT EXISTS fact_table2 (empname String, designation String, doj Timestamp,
       |  workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
       |  projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,
       |  utilization int,salary int)
       |STORED AS parquet
     """.stripMargin.trim
  )
}
| zzcclp/carbondata | integration/spark/src/test/scala/org/apache/carbondata/view/testutil/Tpcds_1_4_Tables.scala | Scala | apache-2.0 | 29,063 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.nn.abstractnn.TensorModule
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import scala.reflect.ClassTag
/**
 * A simple layer that, for each element of the input tensor, applies the
 * TanhShrink function during the forward process:
 * [f(x) = x - tanh(x)]
 *
 * (The implementation computes output = input - tanh(input); the gradient
 * below is consistent with that definition.)
 */
@SerialVersionUID(7783278258985544682L)
class TanhShrink[T: ClassTag](
  implicit ev: TensorNumeric[T]) extends TensorModule[T] {
  // Inner Tanh module reused for both the forward value and its gradient.
  private val tanh = new Tanh[T]()

  // Forward pass: output = input - tanh(input).
  override def updateOutput(input: Tensor[T]): Tensor[T] = {
    val th = tanh.updateOutput(input)
    output.resizeAs(input).copy(input)
    output.add(ev.fromType[Int](-1), th)
    output
  }

  // Backward pass: gradInput = gradOutput - gradOutput*(1 - tanh(x)^2)
  //              = gradOutput * tanh(x)^2, i.e. d/dx (x - tanh x).
  override def updateGradInput(input: Tensor[T], gradOutput: Tensor[T]): Tensor[T] = {
    val dth = tanh.updateGradInput(input, gradOutput)
    gradInput.resizeAs(input).copy(gradOutput)
    gradInput.add(ev.fromType[Int](-1), dth)
    gradInput
  }

  override def toString: String = s"nn.TanhShrink"
}

/** Factory for [[TanhShrink]] layers. */
object TanhShrink {
  def apply[@specialized(Float, Double) T: ClassTag]()
    (implicit ev: TensorNumeric[T]) : TanhShrink[T] = {
    new TanhShrink[T]()
  }
}
| luchy0120/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/TanhShrink.scala | Scala | apache-2.0 | 1,831 |
package com.hyenawarrior.oldnorsedictionary.new_word.new_pos_helpers
import android.view.View
import com.hyenawarrior.OldNorseGrammar.grammar.enums.{Case, GNumber}
import com.hyenawarrior.oldnorsedictionary.new_word.pages.WordData
/**
* Created by HyenaWarrior on 2017.04.17..
*/
object AddNewPosHelper
{
  /** Every conventional grammatical number paired with every case, in order. */
  val DECLENSIONS: Vector[(GNumber, Case)] =
    (for {
      number <- GNumber.conventionalValues
      grammaticalCase <- Case.values
    } yield (number, grammaticalCase)).toVector
}
/**
 * Contract for UI helpers that manage the form for adding a new part of speech.
 *
 * NOTE(review): the semantics of each hook are not visible from this chunk;
 * descriptions below are inferred from names — confirm against implementors.
 */
trait AddNewPosHelper
{
  // Called when this helper becomes / stops being the active one.
  def activate(): Unit
  def deactivate(): Unit

  // Removes an overridden word-form row from the form table.
  def onRemoveOverride(tableRow: View)

  // Views for the primary form and for additional override rows.
  def primaryFromSetter(): View
  def createOverrideFormSetter(isPrimary: Boolean = false) : View

  // UI event callbacks.
  def onStemClassSelected(index: Int): Unit
  def onTextFormOverride(overridingView: View)(str: String): Unit

  // Default returns null — callers must null-check; consider Option[WordData]
  // in a future interface revision.
  def getWordFormsBy(view: View): WordData = null
}
| HyenaSoftware/IG-Dictionary | app/src/main/scala/com/hyenawarrior/oldnorsedictionary/new_word/new_pos_helpers/AddNewPosHelper.scala | Scala | lgpl-3.0 | 804 |
package com.excilys.computerdatabase.gatling.process
import com.typesafe.config.ConfigFactory
import io.gatling.core.Predef._
import io.gatling.http.Predef._
/**
 * Created by Cédric Cousseran on 29/03/16.
 * Process to add a computer when Spring Security is enabled.
 *
 * Gatling scenario chain: GET the add-computer page, scrape the CSRF token,
 * then POST the form with randomly generated data.
 */
object AddSecurity {
  // URLs, CSS selectors and form-field names come from application.conf.
  val config = ConfigFactory.load()
  val random = new util.Random

  // Generates a fresh pseudo-random computer name per virtual user
  // (concatenation of three random ints; may include '-' signs).
  val feederName = Iterator.continually(Map("addComputerName" -> (random.nextInt.toString() + random.nextInt.toString() + random.nextInt.toString()))
  )
  // Remaining form fields (dates, company) drawn randomly from a CSV fixture.
  val feederAdd = csv("data/addComputer.csv").random

  // NOTE(review): some `config.getString(...)` results are followed by `.get`
  // and others are not — Typesafe Config's getString returns String, not
  // Option; verify this mix compiles/behaves as intended in this project.
  val add = exec(http("AddSecurity: Add page")
    .get(config.getString("application.urls.addPage")).check(status.is(200))
    .check(
      // Capture the Spring Security CSRF token from the page for the POST.
      css(config.getString("application.urls.idElement.add.csrf").get, "value").saveAs("csrf_token")
    )
    .resources(http("AddSecurity: Add js")
      .get(config.getString("application.urls.static.js.add"))))
    .pause(3, 10) // think time between page load and submit
    .feed(feederName)
    .feed(feederAdd)
    .exec(http("AddSecurity: Add post")
      .post(config.getString("application.urls.addPost").get)
      .formParam(config.getString("application.urls.form.add.name").get, "${addComputerName}")
      .formParam(config.getString("application.urls.form.add.introduced").get, "${addComputerIntroduced}")
      .formParam(config.getString("application.urls.form.add.discontinued").get, "${addComputerDiscontinued}")
      .formParam(config.getString("application.urls.form.add.companyId").get, "${addComputerCompany}")
      .formParam(config.getString("application.urls.form.add.csrf").get, "${csrf_token}"))
    .pause(3, 10)
}
| nadiacom/training-java | gatling-test/src/test/scala/com/excilys/computerdatabase/gatling/process/AddSecurity.scala | Scala | apache-2.0 | 1,617 |
package is.hail.expr.ir.functions
import is.hail.annotations._
import is.hail.asm4s._
import is.hail.expr.ir._
import is.hail.types._
import is.hail.utils._
import is.hail.asm4s.coerce
import is.hail.experimental.ExperimentalFunctions
import is.hail.types.physical._
import is.hail.types.physical.stypes.{EmitType, SCode, SType, SValue}
import is.hail.types.physical.stypes.concrete._
import is.hail.types.physical.stypes.interfaces._
import is.hail.types.physical.stypes.primitives._
import is.hail.types.virtual._
import is.hail.variant.Locus
import org.apache.spark.sql.Row
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.reflect._
/**
 * Global registry of functions callable from Hail IR.
 *
 * Two backing stores are kept: `irRegistry` holds functions implemented by
 * IR-to-IR rewriting, and `jvmRegistry` holds functions implemented by staged
 * JVM code generation. The `lookup*` helpers resolve a (name, types) query
 * against one or both stores; ambiguity is always fatal.
 */
object IRFunctionRegistry {
  // Signatures registered from Python via pyRegisterIR, remembered so
  // clearUserFunctions can remove exactly those again.
  private val userAddedFunctions: mutable.Set[(String, (Type, Seq[Type], Seq[Type]))] = mutable.HashSet.empty

  /** Unregisters every function previously added through [[pyRegisterIR]]. */
  def clearUserFunctions() {
    userAddedFunctions.foreach { case (name, (rt, typeParameters, valueParameterTypes)) =>
      removeIRFunction(name, rt, typeParameters, valueParameterTypes) }
    userAddedFunctions.clear()
  }

  // (type parameters, value parameter types, return type, alwaysInline)
  type IRFunctionSignature = (Seq[Type], Seq[Type], Type, Boolean)
  // (type arguments, value argument IRs, error id) => rewritten IR
  type IRFunctionImplementation = (Seq[Type], Seq[IR], Int) => IR

  // Functions implemented as IR rewrites, keyed by name then full signature.
  val irRegistry: mutable.Map[String, mutable.Map[IRFunctionSignature, IRFunctionImplementation]] = new mutable.HashMap()

  // Staged (code-generating) functions; one name may carry several overloads.
  val jvmRegistry: mutable.MultiMap[String, JVMFunction] =
    new mutable.HashMap[String, mutable.Set[JVMFunction]] with mutable.MultiMap[String, JVMFunction]

  // Names must be valid Java identifiers so they can appear in generated code.
  private[this] def requireJavaIdentifier(name: String): Unit = {
    if (!isJavaIdentifier(name))
      throw new IllegalArgumentException(s"Illegal function name, not Java identifier: ${ name }")
  }

  /** Adds a staged JVM implementation under its own name. */
  def addJVMFunction(f: JVMFunction): Unit = {
    requireJavaIdentifier(f.name)
    jvmRegistry.addBinding(f.name, f)
  }

  /**
   * Registers an IR-rewrite implementation. A later registration with an
   * identical signature silently replaces the earlier one (Map.update).
   */
  def addIR(
    name: String,
    typeParameters: Seq[Type],
    valueParameterTypes: Seq[Type],
    returnType: Type,
    alwaysInline: Boolean,
    f: IRFunctionImplementation
  ): Unit = {
    requireJavaIdentifier(name)
    val m = irRegistry.getOrElseUpdate(name, new mutable.HashMap())
    m.update((typeParameters, valueParameterTypes, returnType, alwaysInline), f)
  }

  /**
   * Entry point used from Python: registers `body` as a function of the named
   * arguments, parsing the type strings, and records the signature in
   * `userAddedFunctions` so it can be removed later.
   */
  def pyRegisterIR(
    name: String,
    typeParamStrs: java.util.ArrayList[String],
    argNames: java.util.ArrayList[String],
    argTypeStrs: java.util.ArrayList[String],
    returnType: String,
    body: IR
  ): Unit = {
    requireJavaIdentifier(name)
    val typeParameters = typeParamStrs.asScala.map(IRParser.parseType).toFastIndexedSeq
    val valueParameterTypes = argTypeStrs.asScala.map(IRParser.parseType).toFastIndexedSeq
    userAddedFunctions += ((name, (body.typ, typeParameters, valueParameterTypes)))
    addIR(name,
      typeParameters,
      valueParameterTypes, IRParser.parseType(returnType), false, { (_, args, _) =>
        // Substitute the call's argument IRs for the named parameters in the body.
        Subst(body,
          BindingEnv(Env[IR](argNames.asScala.zip(args): _*)))
      })
  }

  /**
   * Removes a non-inline IR-rewrite function with this exact signature.
   * Throws NoSuchElementException if `name` was never registered.
   */
  def removeIRFunction(
    name: String,
    returnType: Type,
    typeParameters: Seq[Type],
    valueParameterTypes: Seq[Type]
  ): Unit = {
    val m = irRegistry(name)
    m.remove((typeParameters, valueParameterTypes, returnType, false))
  }

  /**
   * Finds the unique JVM overload unifying with the query; None when no
   * overload matches, fatal when more than one does.
   */
  def lookupFunction(
    name: String,
    returnType: Type,
    typeParameters: Seq[Type],
    valueParameterTypes: Seq[Type]
  ): Option[JVMFunction] = {
    jvmRegistry.lift(name).map { fs => fs.filter(t => t.unify(typeParameters, valueParameterTypes, returnType)).toSeq }.getOrElse(FastSeq()) match {
      case Seq() => None
      case Seq(f) => Some(f)
      case _ => fatal(s"Multiple functions found that satisfy $name(${ valueParameterTypes.mkString(",") }).")
    }
  }

  /**
   * Like [[lookupFunction]], but fatal with a diagnostic listing the
   * same-named candidates instead of returning None.
   */
  def lookupFunctionOrFail(
    name: String,
    returnType: Type,
    typeParameters: Seq[Type],
    valueParameterTypes: Seq[Type]
  ): JVMFunction = {
    jvmRegistry.lift(name) match {
      case None =>
        fatal(s"no functions found with the name ${name}")
      case Some(functions) =>
        functions.filter(t => t.unify(typeParameters, valueParameterTypes, returnType)).toSeq match {
          case Seq() =>
            val prettyFunctionSignature = s"$name[${ typeParameters.mkString(", ") }](${ valueParameterTypes.mkString(", ") }): $returnType"
            val prettyMismatchedFunctionSignatures = functions.map(x => s"  $x").mkString("\\n")
            fatal(
              s"No function found with the signature $prettyFunctionSignature.\\n" +
              s"However, there are other functions with that name:\\n$prettyMismatchedFunctionSignatures")
          case Seq(f) => f
          case _ => fatal(s"Multiple functions found that satisfy $name(${ valueParameterTypes.mkString(", ") }).")
        }
    }
  }

  /**
   * Finds the unique IR-rewrite entry unifying with the query; None when no
   * entry matches, fatal when ambiguous. Unification mutates type variables,
   * hence the `clear()` calls before each attempt.
   */
  def lookupIR(
    name: String,
    returnType: Type,
    typeParameters: Seq[Type],
    valueParameterTypes: Seq[Type]
  ): Option[(IRFunctionSignature, IRFunctionImplementation)] = {
    irRegistry.getOrElse(name, Map.empty).filter { case ((typeParametersFound: Seq[Type], valueParameterTypesFound: Seq[Type], _, _), _) =>
      typeParametersFound.length == typeParameters.length && {
        typeParametersFound.foreach(_.clear())
        (typeParametersFound, typeParameters).zipped.forall(_.unify(_))
      } && valueParameterTypesFound.length == valueParameterTypes.length && {
        valueParameterTypesFound.foreach(_.clear())
        (valueParameterTypesFound, valueParameterTypes).zipped.forall(_.unify(_))
      }
    }.toSeq match {
      case Seq() => None
      case Seq(kv) => Some(kv)
      case _ => fatal(s"Multiple functions found that satisfy $name(${valueParameterTypes.mkString(",")}).")
    }
  }

  /**
   * Resolves a seeded JVM function and closes over `seed`, yielding a builder
   * from argument IRs to an ApplySeeded node.
   */
  def lookupSeeded(name: String, seed: Long, returnType: Type, arguments: Seq[Type]): Option[(Seq[IR]) => IR] = {
    lookupFunction(name, returnType, Array.empty[Type], arguments)
      .filter(_.isInstanceOf[SeededJVMFunction])
      .map { case f: SeededJVMFunction =>
        (irArguments: Seq[IR]) => ApplySeeded(name, irArguments, seed, f.returnType.subst())
      }
  }

  /** Overload without explicit type parameters. */
  def lookupUnseeded(name: String, returnType: Type, arguments: Seq[Type]): Option[IRFunctionImplementation] =
    lookupUnseeded(name, returnType, Array.empty[Type], arguments)

  /**
   * Resolves an unseeded function from either store, wrapping the result as an
   * IR-building closure. Fatal if both an IR rewrite and a JVM overload match.
   */
  def lookupUnseeded(name: String, returnType: Type, typeParameters: Seq[Type], arguments: Seq[Type]): Option[IRFunctionImplementation] = {
    val validIR: Option[IRFunctionImplementation] = lookupIR(name, returnType, typeParameters, arguments).map {
      case ((_, _, _, inline), conversion) => (typeParametersPassed, args, errorID) =>
        val x = ApplyIR(name, typeParametersPassed, args, errorID)
        x.conversion = conversion
        x.inline = inline
        x
    }

    val validMethods = lookupFunction(name, returnType, typeParameters, arguments)
      .filter(!_.isInstanceOf[SeededJVMFunction]).map { f =>
        { (irValueParametersTypes: Seq[Type], irArguments: Seq[IR], errorID: Int) =>
          f match {
            // Missingness-oblivious functions become Apply, aware ones ApplySpecial.
            case _: UnseededMissingnessObliviousJVMFunction =>
              Apply(name, irValueParametersTypes, irArguments, f.returnType.subst(), errorID)
            case _: UnseededMissingnessAwareJVMFunction =>
              ApplySpecial(name, irValueParametersTypes, irArguments, f.returnType.subst(), errorID)
          }
        }
      }

    (validIR, validMethods) match {
      case (None   , None)    => None
      case (None   , Some(x)) => Some(x)
      case (Some(x), None)    => Some(x)
      case _ => fatal(s"Multiple methods found that satisfy $name(${ arguments.mkString(",") }).")
    }
  }

  // Install every built-in function collection at object initialization.
  Seq(
    ArrayFunctions,
    NDArrayFunctions,
    CallFunctions,
    DictFunctions,
    GenotypeFunctions,
    IntervalFunctions,
    LocusFunctions,
    MathFunctions,
    RandomSeededFunctions,
    SetFunctions,
    StringFunctions,
    UtilFunctions,
    ExperimentalFunctions,
    ReferenceGenomeFunctions
  ).foreach(_.registerAll())

  /**
   * Prints every registered function as a `register_function(...)` /
   * `register_seeded_function(...)` line (debug/introspection aid).
   */
  def dumpFunctions(): Unit = {
    def dtype(t: Type): String = s"""dtype("${ StringEscapeUtils.escapeString(t.toString) }\\")"""

    irRegistry.foreach { case (name, fns) =>
      fns.foreach { case ((typeParameters, valueParameterTypes, returnType, _), f) =>
        println(s"""register_function("${ StringEscapeUtils.escapeString(name) }", (${ typeParameters.map(dtype).mkString(",") }), (${ valueParameterTypes.map(dtype).mkString(",") }), ${ dtype(returnType) })""")
      }
    }

    jvmRegistry.foreach { case (name, fns) =>
      fns.foreach { f =>
        println(s"""${
          if (f.isInstanceOf[SeededJVMFunction])
            "register_seeded_function"
          else
            "register_function"
        }("${ StringEscapeUtils.escapeString(name) }", (${ f.typeParameters.map(dtype).mkString(",") }), (${ f.valueParameterTypes.map(dtype).mkString(",") }), ${ dtype(f.returnType) })""")
      }
    }
  }
}
object RegistryHelpers {
  /**
   * Stores a non-null JVM `Row` into region `r` as an unstaged value of type
   * `ptype`, returning the address of the stored value.
   */
  def stupidUnwrapStruct(r: Region, value: Row, ptype: PType): Long = {
    assert(value != null)
    val address = ptype.unstagedStoreJavaObject(value, r)
    address
  }
}
/**
 * Base class for the built-in function collections (MathFunctions,
 * StringFunctions, ...). Provides conversion helpers between staged SValues
 * and boxed JVM objects, plus a family of `registerX` methods that install
 * implementations into [[IRFunctionRegistry]].
 */
abstract class RegistryFunctions {

  /** Called once at startup to install every function in this collection. */
  def registerAll(): Unit

  // NOTE(review): not referenced anywhere in this class — possibly leftover;
  // confirm before relying on it.
  private val boxes = mutable.Map[String, Box[Type]]()

  /** Fresh unconstrained type variable. */
  def tv(name: String): TVariable =
    TVariable(name)

  /** Fresh type variable constrained by `cond` (e.g. "numeric"). */
  def tv(name: String, cond: String): TVariable =
    TVariable(name, cond)

  /** Fresh numeric type variable. */
  def tnum(name: String): TVariable =
    tv(name, "numeric")

  /** Boxed JVM class corresponding to a virtual type, for reflective calls. */
  def boxedTypeInfo(t: Type): TypeInfo[_ >: Null] = t match {
    case TBoolean => classInfo[java.lang.Boolean]
    case TInt32 => classInfo[java.lang.Integer]
    case TInt64 => classInfo[java.lang.Long]
    case TFloat32 => classInfo[java.lang.Float]
    case TFloat64 => classInfo[java.lang.Double]
    case TCall => classInfo[java.lang.Integer]
    case TString => classInfo[java.lang.String]
    case _: TLocus => classInfo[Locus]
    case _ => classInfo[AnyRef]
  }

  /**
   * Emits code converting a staged SValue into a boxed JVM object. Primitives
   * are boxed directly; anything else is stored into `r` and read back through
   * UnsafeRow.readAnyRef.
   */
  def svalueToJavaValue(cb: EmitCodeBuilder, r: Value[Region], sc: SValue): Value[AnyRef] = {
    sc.st match {
      case SInt32 => cb.memoize(Code.boxInt(sc.asInt32.value))
      case SInt64 => cb.memoize(Code.boxLong(sc.asInt64.value))
      case SFloat32 => cb.memoize(Code.boxFloat(sc.asFloat32.value))
      case SFloat64 => cb.memoize(Code.boxDouble(sc.asFloat64.value))
      case SBoolean => cb.memoize(Code.boxBoolean(sc.asBoolean.value))
      case _: SCall => cb.memoize(Code.boxInt(sc.asCall.canonicalCall(cb)))
      case _: SString => sc.asString.loadString(cb)
      case _: SLocus => sc.asLocus.getLocusObj(cb)
      case t =>
        // Fallback: write the value into the region, then reflectively load it
        // as an AnyRef through the runtime UnsafeRow machinery.
        val pt = PType.canonical(t.storageType())
        val addr = pt.store(cb, r, sc, deepCopy = false)
        cb.memoize(Code.invokeScalaObject3[PType, Region, Long, AnyRef](
          UnsafeRow.getClass, "readAnyRef",
          cb.emb.getPType(pt),
          r, addr))
    }
  }

  /**
   * Emits code converting a JVM value (typically returned from a reflective
   * call) back into an SValue of type `st`, dispatching on the virtual type.
   * Only the virtual types used by registered wrappers are supported; other
   * types fall through to a MatchError.
   */
  def unwrapReturn(cb: EmitCodeBuilder, r: Value[Region], st: SType, value: Code[_]): SValue = st.virtualType match {
    case TBoolean => primitive(cb.memoize(coerce[Boolean](value)))
    case TInt32 => primitive(cb.memoize(coerce[Int](value)))
    case TInt64 => primitive(cb.memoize(coerce[Long](value)))
    case TFloat32 => primitive(cb.memoize(coerce[Float](value)))
    case TFloat64 => primitive(cb.memoize(coerce[Double](value)))
    case TString =>
      val sst = st.asInstanceOf[SJavaString.type]
      sst.constructFromString(cb, r, coerce[String](value))
    case TCall =>
      assert(st == SCanonicalCall)
      new SCanonicalCallValue(cb.memoize(coerce[Int](value)))
    case TArray(TInt32) =>
      // Rebuild a canonical array element-by-element; null boxed elements
      // become missing entries.
      val ast = st.asInstanceOf[SIndexablePointer]
      val pca = ast.pType.asInstanceOf[PCanonicalArray]
      val arr = cb.newLocal[IndexedSeq[Int]]("unrwrap_return_array_int32_arr", coerce[IndexedSeq[Int]](value))
      val len = cb.newLocal[Int]("unwrap_return_array_int32_len", arr.invoke[Int]("length"))
      pca.constructFromElements(cb, r, len, deepCopy = false) { (cb, idx) =>
        val elt = cb.newLocal[java.lang.Integer]("unwrap_return_array_int32_elt",
          Code.checkcast[java.lang.Integer](arr.invoke[Int, java.lang.Object]("apply", idx)))
        IEmitCode(cb, elt.isNull, primitive(cb.memoize(elt.invoke[Int]("intValue"))))
      }
    case TArray(TFloat64) =>
      val ast = st.asInstanceOf[SIndexablePointer]
      val pca = ast.pType.asInstanceOf[PCanonicalArray]
      val arr = cb.newLocal[IndexedSeq[Double]]("unrwrap_return_array_float64_arr", coerce[IndexedSeq[Double]](value))
      val len = cb.newLocal[Int]("unwrap_return_array_float64_len", arr.invoke[Int]("length"))
      pca.constructFromElements(cb, r, len, deepCopy = false) { (cb, idx) =>
        val elt = cb.newLocal[java.lang.Double]("unwrap_return_array_float64_elt",
          Code.checkcast[java.lang.Double](arr.invoke[Int, java.lang.Object]("apply", idx)))
        IEmitCode(cb, elt.isNull, primitive(cb.memoize(elt.invoke[Double]("doubleValue"))))
      }
    case TArray(TString) =>
      val ast = st.asInstanceOf[SJavaArrayString]
      ast.construct(cb, coerce[Array[String]](value))
    case t: TBaseStruct =>
      // Structs come back as a Row; copy into the region via RegistryHelpers.
      val sst = st.asInstanceOf[SBaseStructPointer]
      val pt = sst.pType.asInstanceOf[PCanonicalBaseStruct]
      val addr = cb.memoize(Code.invokeScalaObject3[Region, Row, PType, Long](
        RegistryHelpers.getClass, "stupidUnwrapStruct", r.region, coerce[Row](value), cb.emb.ecb.getPType(pt)))
      new SBaseStructPointerValue(SBaseStructPointer(pt.setRequired(false).asInstanceOf[PBaseStruct]), addr)
  }

  /**
   * Registers a missingness-oblivious staged function whose implementation
   * works directly on SValues.
   */
  def registerSCode(
    name: String,
    valueParameterTypes: Array[Type],
    returnType: Type,
    calculateReturnType: (Type, Seq[SType]) => SType,
    typeParameters: Array[Type] = Array.empty
  )(
    impl: (EmitRegion, EmitCodeBuilder, Seq[Type], SType, Array[SValue], Value[Int]) => SValue
  ) {
    IRFunctionRegistry.addJVMFunction(
      new UnseededMissingnessObliviousJVMFunction(name, typeParameters, valueParameterTypes, returnType, calculateReturnType) {
        override def apply(r: EmitRegion, cb: EmitCodeBuilder, returnSType: SType, typeParameters: Seq[Type], errorID: Value[Int], args: SValue*): SValue =
          impl(r, cb, typeParameters, returnSType, args.toArray, errorID)
      })
  }

  /**
   * Registers a missingness-oblivious staged function whose implementation
   * returns a raw Value, which is wrapped into the return SType.
   */
  def registerCode(
    name: String,
    valueParameterTypes: Array[Type],
    returnType: Type,
    calculateReturnType: (Type, Seq[SType]) => SType,
    typeParameters: Array[Type] = Array.empty
  )(
    impl: (EmitRegion, EmitCodeBuilder, SType, Array[Type], Array[SValue]) => Value[_]
  ) {
    IRFunctionRegistry.addJVMFunction(
      new UnseededMissingnessObliviousJVMFunction(name, typeParameters, valueParameterTypes, returnType, calculateReturnType) {
        override def apply(r: EmitRegion, cb: EmitCodeBuilder, returnSType: SType, typeParameters: Seq[Type], errorID: Value[Int], args: SValue*): SValue = {
          assert(unify(typeParameters, args.map(_.st.virtualType), returnSType.virtualType))
          val returnValue = impl(r, cb, returnSType, typeParameters.toArray, args.toArray)
          returnSType.fromValues(FastIndexedSeq(returnValue))
        }
      })
  }

  /**
   * Registers a missingness-aware function whose implementation consumes and
   * produces EmitCode (so it can observe/produce missingness itself).
   */
  def registerEmitCode(
    name: String,
    valueParameterTypes: Array[Type],
    returnType: Type,
    calculateReturnType: (Type, Seq[EmitType]) => EmitType,
    typeParameters: Array[Type] = Array.empty
  )(
    impl: (EmitRegion, SType, Value[Int], Array[EmitCode]) => EmitCode
  ) {
    IRFunctionRegistry.addJVMFunction(
      new UnseededMissingnessAwareJVMFunction(name, typeParameters, valueParameterTypes, returnType, calculateReturnType) {
        override def apply(r: EmitRegion, rpt: SType, typeParameters: Seq[Type], errorID: Value[Int], args: EmitCode*): EmitCode = {
          assert(unify(typeParameters, args.map(_.st.virtualType), rpt.virtualType))
          impl(r, rpt, errorID, args.toArray)
        }
      })
  }

  /**
   * Registers a missingness-aware function implemented against IEmitCode.
   * Verifies at registration time that the implementation's emit type matches
   * the declared return-type calculation.
   */
  def registerIEmitCode(
    name: String,
    valueParameterTypes: Array[Type],
    returnType: Type,
    calculateReturnType: (Type, Seq[EmitType]) => EmitType,
    typeParameters: Array[Type] = Array.empty
  )(
    impl: (EmitCodeBuilder, Value[Region], SType , Value[Int], Array[EmitCode]) => IEmitCode
  ) {
    IRFunctionRegistry.addJVMFunction(
      new UnseededMissingnessAwareJVMFunction(name, typeParameters, valueParameterTypes, returnType, calculateReturnType) {
        override def apply(
          cb: EmitCodeBuilder,
          r: Value[Region],
          rpt: SType,
          typeParameters: Seq[Type],
          errorID: Value[Int],
          args: EmitCode*
        ): IEmitCode = {
          val res = impl(cb, r, rpt, errorID, args.toArray)
          // Sanity check: the produced emit type must agree with the declared one.
          if (res.emitType != calculateReturnType(rpt.virtualType, args.map(_.emitType)))
            throw new RuntimeException(s"type mismatch while registering $name" +
              s"\\n  got ${ res.emitType }, got ${ calculateReturnType(rpt.virtualType, args.map(_.emitType)) }")
          res
        }

        override def apply(r: EmitRegion, rpt: SType, typeParameters: Seq[Type], errorID: Value[Int], args: EmitCode*): EmitCode = {
          EmitCode.fromI(r.mb) { cb =>
            apply(cb, r.region, rpt, typeParameters, errorID, args: _*)
          }
        }
      })
  }

  /**
   * Registers a function backed by a Scala object's method taking and
   * returning primitives (no boxing/unboxing of containers).
   */
  def registerScalaFunction(
    name: String,
    valueParameterTypes: Array[Type],
    returnType: Type,
    calculateReturnType: (Type, Seq[SType]) => SType
  )(
    cls: Class[_],
    method: String
  ) {
    registerSCode(name, valueParameterTypes, returnType, calculateReturnType) { case (r, cb, _, rt, args, _) =>
      val cts = valueParameterTypes.map(PrimitiveTypeToIRIntermediateClassTag(_).runtimeClass)
      val returnValue = cb.memoizeAny(
        Code.invokeScalaObject(cls, method, cts, args.map { a => SType.extractPrimValue(cb, a).get })(PrimitiveTypeToIRIntermediateClassTag(returnType)),
        rt.settableTupleTypes()(0))
      rt.fromValues(FastIndexedSeq(returnValue))
    }
  }

  /**
   * Registers a function backed by a Scala object's method, converting staged
   * values to/from JVM objects (strings, collections, structs) around the call.
   */
  def registerWrappedScalaFunction(
    name: String,
    valueParameterTypes: Array[Type],
    returnType: Type,
    calculateReturnType: (Type, Seq[SType]) => SType
  )(
    cls: Class[_],
    method: String
  ) {
    // ClassTag used for reflective method resolution per virtual type.
    def ct(typ: Type): ClassTag[_] = typ match {
      case TString => classTag[String]
      case TArray(TInt32) => classTag[IndexedSeq[Int]]
      case TArray(TFloat64) => classTag[IndexedSeq[Double]]
      case TArray(TString) => classTag[Array[String]]
      case TSet(TString) => classTag[Set[String]]
      case TDict(TString, TString) => classTag[Map[String, String]]
      case TCall => classTag[Int]
      case t => PrimitiveTypeToIRIntermediateClassTag(t)
    }

    // Converts a staged argument to the JVM value the reflective call expects.
    def wrap(cb: EmitCodeBuilder, r: Value[Region], code: SValue): Value[_] = code.st.virtualType match {
      case t if t.isPrimitive => SType.extractPrimValue(cb, code)
      case TCall => code.asCall.canonicalCall(cb)
      case TArray(TString) => code.st match {
        case _: SJavaArrayString => cb.memoize(code.asInstanceOf[SJavaArrayStringValue].array)
        case _ =>
          // Materialize a String[] from the indexable; missing elements stay null.
          val sv = code.asIndexable
          val arr = cb.newLocal[Array[String]]("scode_array_string", Code.newArray[String](sv.loadLength()))
          sv.forEachDefined(cb) { case (cb, idx, elt) =>
            cb += (arr(idx) = elt.asString.loadString(cb))
          }
          arr
      }
      case _ => svalueToJavaValue(cb, r, code)
    }

    registerSCode(name, valueParameterTypes, returnType, calculateReturnType) { case (r, cb, _, rt, args, _) =>
      val cts = valueParameterTypes.map(ct(_).runtimeClass)
      try {
        unwrapReturn(cb, r.region, rt,
          Code.invokeScalaObject(cls, method, cts, args.map { a => wrap(cb, r.region, a).get })(ct(returnType)))
      } catch {
        case e: Throwable => throw new RuntimeException(s"error while registering function $name", e)
      }
    }
  }

  // Fixed-arity conveniences over registerWrappedScalaFunction.
  def registerWrappedScalaFunction1(name: String, a1: Type, returnType: Type, pt: (Type, SType) => SType)(cls: Class[_], method: String): Unit =
    registerWrappedScalaFunction(name, Array(a1), returnType, unwrappedApply(pt))(cls, method)

  def registerWrappedScalaFunction2(name: String, a1: Type, a2: Type, returnType: Type, pt: (Type, SType, SType) => SType)(cls: Class[_], method: String): Unit =
    registerWrappedScalaFunction(name, Array(a1, a2), returnType, unwrappedApply(pt))(cls, method)

  def registerWrappedScalaFunction3(name: String, a1: Type, a2: Type, a3: Type, returnType: Type,
    pt: (Type, SType, SType, SType) => SType)(cls: Class[_], method: String): Unit =
    registerWrappedScalaFunction(name, Array(a1, a2, a3), returnType, unwrappedApply(pt))(cls, method)

  /** Registers a function backed by a static Java method over primitives. */
  def registerJavaStaticFunction(name: String, valueParameterTypes: Array[Type], returnType: Type, pt: (Type, Seq[SType]) => SType)(cls: Class[_], method: String) {
    registerCode(name, valueParameterTypes, returnType, pt) { case (r, cb, rt, _, args) =>
      val cts = valueParameterTypes.map(PrimitiveTypeToIRIntermediateClassTag(_).runtimeClass)
      val ct = PrimitiveTypeToIRIntermediateClassTag(returnType)
      cb.memoizeAny(
        Code.invokeStatic(cls, method, cts, args.map(a => SType.extractPrimValue(cb, a).get))(ct),
        typeInfoFromClassTag(ct))
    }
  }

  /** Registers an IR-rewrite implementation (delegates to the registry). */
  def registerIR(name: String, valueParameterTypes: Array[Type], returnType: Type, inline: Boolean = false, typeParameters: Array[Type] = Array.empty)(f: (Seq[Type], Seq[IR], Int) => IR): Unit =
    IRFunctionRegistry.addIR(name, typeParameters, valueParameterTypes, returnType, inline, f)

  // Fixed-arity conveniences over registerSCode; the `t` variants also take
  // explicit type parameters.
  def registerSCode1(name: String, mt1: Type, rt: Type, pt: (Type, SType) => SType)(impl: (EmitRegion, EmitCodeBuilder, SType, SValue, Value[Int]) => SValue): Unit =
    registerSCode(name, Array(mt1), rt, unwrappedApply(pt)) {
      case (r, cb, _, rt, Array(a1), errorID) => impl(r, cb, rt, a1, errorID)
    }

  def registerSCode1t(name: String, typeParams: Array[Type], mt1: Type, rt: Type, pt: (Type, SType) => SType)(impl: (EmitRegion, EmitCodeBuilder, Seq[Type], SType, SValue, Value[Int]) => SValue): Unit =
    registerSCode(name, Array(mt1), rt, unwrappedApply(pt), typeParameters = typeParams) {
      case (r, cb, typeParams, rt, Array(a1), errorID) => impl(r, cb, typeParams, rt, a1, errorID)
    }

  def registerSCode2(name: String, mt1: Type, mt2: Type, rt: Type, pt: (Type, SType, SType) => SType)
    (impl: (EmitRegion, EmitCodeBuilder, SType, SValue, SValue, Value[Int]) => SValue): Unit =
    registerSCode(name, Array(mt1, mt2), rt, unwrappedApply(pt)) {
      case (r, cb, _, rt, Array(a1, a2) , errorID) => impl(r, cb, rt, a1, a2, errorID)
    }

  def registerSCode2t(name: String, typeParams: Array[Type], mt1: Type, mt2: Type, rt: Type, pt: (Type, SType, SType) => SType)
    (impl: (EmitRegion, EmitCodeBuilder, Seq[Type], SType, SValue, SValue, Value[Int]) => SValue): Unit =
    registerSCode(name, Array(mt1, mt2), rt, unwrappedApply(pt), typeParameters = typeParams) {
      case (r, cb, typeParams, rt, Array(a1, a2), errorID) => impl(r, cb, typeParams, rt, a1, a2, errorID)
    }

  def registerSCode3(name: String, mt1: Type, mt2: Type, mt3: Type, rt: Type, pt: (Type, SType, SType, SType) => SType)
    (impl: (EmitRegion, EmitCodeBuilder, SType, SValue, SValue, SValue, Value[Int]) => SValue): Unit =
    registerSCode(name, Array(mt1, mt2, mt3), rt, unwrappedApply(pt)) {
      case (r, cb, _, rt, Array(a1, a2, a3), errorID) => impl(r, cb, rt, a1, a2, a3, errorID)
    }

  def registerSCode4(name: String, mt1: Type, mt2: Type, mt3: Type, mt4: Type, rt: Type, pt: (Type, SType, SType, SType, SType) => SType)
    (impl: (EmitRegion, EmitCodeBuilder, SType, SValue, SValue, SValue, SValue, Value[Int]) => SValue): Unit =
    registerSCode(name, Array(mt1, mt2, mt3, mt4), rt, unwrappedApply(pt)) {
      case (r, cb, _, rt, Array(a1, a2, a3, a4), errorID) => impl(r, cb, rt, a1, a2, a3, a4, errorID)
    }

  def registerSCode4t(name: String, typeParams: Array[Type], mt1: Type, mt2: Type, mt3: Type, mt4: Type, rt: Type,
    pt: (Type, SType, SType, SType, SType) => SType)
    (impl: (EmitRegion, EmitCodeBuilder, Seq[Type], SType, SValue, SValue, SValue, SValue, Value[Int]) => SValue): Unit =
    registerSCode(name, Array(mt1, mt2, mt3, mt4), rt, unwrappedApply(pt), typeParams) {
      case (r, cb, typeParams, rt, Array(a1, a2, a3, a4), errorID) => impl(r, cb, typeParams, rt, a1, a2, a3, a4, errorID)
    }

  def registerSCode5(name: String, mt1: Type, mt2: Type, mt3: Type, mt4: Type, mt5: Type, rt: Type, pt: (Type, SType, SType, SType, SType, SType) => SType)
    (impl: (EmitRegion, EmitCodeBuilder, SType, SValue, SValue, SValue, SValue, SValue, Value[Int]) => SValue): Unit =
    registerSCode(name, Array(mt1, mt2, mt3, mt4, mt5), rt, unwrappedApply(pt)) {
      case (r, cb, _, rt, Array(a1, a2, a3, a4, a5), errorID) => impl(r, cb, rt, a1, a2, a3, a4, a5, errorID)
    }

  // Fixed-arity conveniences over registerCode.
  def registerCode1(name: String, mt1: Type, rt: Type, pt: (Type, SType) => SType)(impl: (EmitCodeBuilder, EmitRegion, SType, SValue) => Value[_]): Unit =
    registerCode(name, Array(mt1), rt, unwrappedApply(pt)) {
      case (r, cb, rt, _, Array(a1)) => impl(cb, r, rt, a1)
    }

  def registerCode2(name: String, mt1: Type, mt2: Type, rt: Type, pt: (Type, SType, SType) => SType)
    (impl: (EmitCodeBuilder, EmitRegion, SType, SValue, SValue) => Value[_]): Unit =
    registerCode(name, Array(mt1, mt2), rt, unwrappedApply(pt)) {
      case (r, cb, rt, _, Array(a1, a2)) => impl(cb, r, rt, a1, a2)
    }

  // Fixed-arity conveniences over registerIEmitCode / registerEmitCode.
  def registerIEmitCode1(name: String, mt1: Type, rt: Type, pt: (Type, EmitType) => EmitType)
    (impl: (EmitCodeBuilder, Value[Region], SType, Value[Int], EmitCode) => IEmitCode): Unit =
    registerIEmitCode(name, Array(mt1), rt, unwrappedApply(pt)) { case (cb, r, rt, errorID, Array(a1)) =>
      impl(cb, r, rt, errorID, a1)
    }

  def registerIEmitCode2(name: String, mt1: Type, mt2: Type, rt: Type, pt: (Type, EmitType, EmitType) => EmitType)
    (impl: (EmitCodeBuilder, Value[Region], SType, Value[Int], EmitCode, EmitCode) => IEmitCode): Unit =
    registerIEmitCode(name, Array(mt1, mt2), rt, unwrappedApply(pt)) { case (cb, r, rt, errorID, Array(a1, a2)) =>
      impl(cb, r, rt, errorID, a1, a2)
    }

  def registerIEmitCode4(name: String, mt1: Type, mt2: Type, mt3: Type, mt4: Type, rt: Type, pt: (Type, EmitType, EmitType, EmitType, EmitType) => EmitType)
    (impl: (EmitCodeBuilder, Value[Region], SType, Value[Int], EmitCode, EmitCode, EmitCode, EmitCode) => IEmitCode): Unit =
    registerIEmitCode(name, Array(mt1, mt2, mt3, mt4), rt, unwrappedApply(pt)) { case (cb, r, rt, errorID, Array(a1, a2, a3, a4)) =>
      impl(cb, r, rt, errorID, a1, a2, a3, a4)
    }

  def registerIEmitCode6(name: String, mt1: Type, mt2: Type, mt3: Type, mt4: Type, mt5: Type, mt6: Type, rt: Type, pt: (Type, EmitType, EmitType, EmitType, EmitType, EmitType, EmitType) => EmitType)
    (impl: (EmitCodeBuilder, Value[Region], SType, Value[Int], EmitCode, EmitCode, EmitCode, EmitCode, EmitCode, EmitCode) => IEmitCode): Unit =
    registerIEmitCode(name, Array(mt1, mt2, mt3, mt4, mt5, mt6), rt, unwrappedApply(pt)) { case (cb, r, rt, errorID, Array(a1, a2, a3, a4, a5, a6)) =>
      impl(cb, r, rt, errorID, a1, a2, a3, a4, a5, a6)
    }

  def registerEmitCode2(name: String, mt1: Type, mt2: Type, rt: Type, pt: (Type, EmitType, EmitType) => EmitType)
    (impl: (EmitRegion, SType, Value[Int], EmitCode, EmitCode) => EmitCode): Unit =
    registerEmitCode(name, Array(mt1, mt2), rt, unwrappedApply(pt)) { case (r, rt, errorID, Array(a1, a2)) => impl(r, rt, errorID, a1, a2) }

  // Fixed-arity conveniences over registerIR.
  def registerIR1(name: String, mt1: Type, returnType: Type, typeParameters: Array[Type] = Array.empty)(f: (Seq[Type], IR, Int) => IR): Unit =
    registerIR(name, Array(mt1), returnType, typeParameters = typeParameters) { case (t, Seq(a1), errorID) => f(t, a1, errorID) }

  def registerIR2(name: String, mt1: Type, mt2: Type, returnType: Type, typeParameters: Array[Type] = Array.empty)(f: (Seq[Type], IR, IR, Int) => IR): Unit =
    registerIR(name, Array(mt1, mt2), returnType, typeParameters = typeParameters) { case (t, Seq(a1, a2), errorID) => f(t, a1, a2, errorID) }

  def registerIR3(name: String, mt1: Type, mt2: Type, mt3: Type, returnType: Type, typeParameters: Array[Type] = Array.empty)(f: (Seq[Type], IR, IR, IR, Int) => IR): Unit =
    registerIR(name, Array(mt1, mt2, mt3), returnType, typeParameters = typeParameters) { case (t, Seq(a1, a2, a3), errorID) => f(t, a1, a2, a3, errorID) }

  def registerIR4(name: String, mt1: Type, mt2: Type, mt3: Type, mt4: Type, returnType: Type, typeParameters: Array[Type] = Array.empty)(f: (Seq[Type], IR, IR, IR, IR, Int) => IR): Unit =
    registerIR(name, Array(mt1, mt2, mt3, mt4), returnType, typeParameters = typeParameters) { case (t, Seq(a1, a2, a3, a4), errorID) => f(t, a1, a2, a3, a4, errorID) }

  /**
   * Registers a seeded (random) function; marked strict and non-deterministic.
   * The seed reaches the implementation through applySeededI.
   */
  def registerSeeded(
    name: String,
    valueParameterTypes: Array[Type],
    returnType: Type,
    computeReturnType: (Type, Seq[SType]) => SType
  )(
    impl: (EmitCodeBuilder, Value[Region], SType, Long, Array[SValue]) => SValue
  ) {
    IRFunctionRegistry.addJVMFunction(
      new SeededMissingnessObliviousJVMFunction(name, valueParameterTypes, returnType, computeReturnType) {
        val isDeterministic: Boolean = false

        def applySeeded(cb: EmitCodeBuilder, seed: Long, r: Value[Region], rpt: SType, args: SValue*): SValue = {
          assert(unify(Array.empty[Type], args.map(_.st.virtualType), rpt.virtualType))
          impl(cb, r, rpt, seed, args.toArray)
        }

        def applySeededI(seed: Long, cb: EmitCodeBuilder, r: Value[Region], rpt: SType, args: EmitCode*): IEmitCode = {
          IEmitCode.multiMapEmitCodes(cb, args.toFastIndexedSeq) {
            argPCs => applySeeded(cb, seed, r, rpt, argPCs: _*)
          }
        }

        override val isStrict: Boolean = true
      })
  }

  // Fixed-arity conveniences over registerSeeded.
  def registerSeeded0(name: String, returnType: Type, pt: SType)(impl: (EmitCodeBuilder, Value[Region], SType, Long) => SValue): Unit =
    registerSeeded(name, Array[Type](), returnType, if (pt == null) null else (_: Type, _: Seq[SType]) => pt) { case (cb, r, rt, seed, _) => impl(cb, r, rt, seed) }

  def registerSeeded1(name: String, arg1: Type, returnType: Type, pt: (Type, SType) => SType)(impl: (EmitCodeBuilder, Value[Region], SType, Long, SValue) => SValue): Unit =
    registerSeeded(name, Array(arg1), returnType, unwrappedApply(pt)) {
      case (cb, r, rt, seed, Array(a1)) => impl(cb, r, rt, seed, a1)
    }

  def registerSeeded2(name: String, arg1: Type, arg2: Type, returnType: Type, pt: (Type, SType, SType) => SType)
    (impl: (EmitCodeBuilder, Value[Region], SType, Long, SValue, SValue) => SValue): Unit =
    registerSeeded(name, Array(arg1, arg2), returnType, unwrappedApply(pt)) { case
      (cb, r, rt, seed, Array(a1, a2)) =>
      impl(cb, r, rt, seed, a1, a2)
    }

  def registerSeeded3(name: String, arg1: Type, arg2: Type, arg3: Type, returnType: Type, pt: (Type, SType, SType, SType) => SType)
    (impl: (EmitCodeBuilder, Value[Region], SType, Long, SValue, SValue, SValue) => SValue): Unit =
    registerSeeded(name, Array(arg1, arg2, arg3), returnType, unwrappedApply(pt)) {
      case (cb, r, rt, seed, Array(a1, a2, a3)) => impl(cb, r, rt, seed, a1, a2, a3)
    }

  def registerSeeded4(name: String, arg1: Type, arg2: Type, arg3: Type, arg4: Type, returnType: Type, pt: (Type, SType, SType, SType, SType) => SType)
    (impl: (EmitCodeBuilder, Value[Region], SType, Long, SValue, SValue, SValue, SValue) => SValue): Unit =
    registerSeeded(name, Array(arg1, arg2, arg3, arg4), returnType, unwrappedApply(pt)) {
      case (cb, r, rt, seed, Array(a1, a2, a3, a4)) => impl(cb, r, rt, seed, a1, a2, a3, a4)
    }
}
/**
 * A function implemented by staged JVM code generation. Overload resolution
 * happens structurally, by unifying declared parameter/return types against a
 * query ([[unify]]).
 */
sealed abstract class JVMFunction {
  def name: String

  def typeParameters: Seq[Type]

  def valueParameterTypes: Seq[Type]

  def returnType: Type

  def computeReturnEmitType(returnType: Type, valueParameterTypes: Seq[EmitType]): EmitType

  def apply(mb: EmitRegion, returnType: SType, typeParameters: Seq[Type], errorID: Value[Int], args: EmitCode*): EmitCode

  override def toString: String = s"$name[${ typeParameters.mkString(", ") }](${ valueParameterTypes.mkString(", ") }): $returnType"

  /**
   * True when the supplied type/value-argument types and return type unify
   * with this function's declared signature.
   */
  def unify(typeArguments: Seq[Type], valueArgumentTypes: Seq[Type], returnTypeIn: Type): Boolean = {
    val declared = (typeParameters ++ valueParameterTypes) :+ returnType
    val supplied = (typeArguments ++ valueArgumentTypes) :+ returnTypeIn
    declared.length == supplied.length && {
      // Unification mutates type variables, so reset them before matching.
      declared.foreach(_.clear())
      declared.iterator.zip(supplied.iterator).forall { case (d, s) => d.unify(s) }
    }
  }
}
object MissingnessObliviousJVMFunction {
  /**
   * Resolves the return SType: uses the supplied computation when given,
   * otherwise falls back to the canonical SType of the virtual return type.
   */
  def returnSType(computeStrictReturnEmitType: (Type, Seq[SType]) => SType)(returnType: Type, valueParameterTypes: Seq[SType]): SType =
    if (computeStrictReturnEmitType != null)
      computeStrictReturnEmitType(returnType, valueParameterTypes)
    else
      SType.canonical(returnType)
}
/**
 * An unseeded staged function that cannot observe missingness: the result is
 * required exactly when every argument is required, and missing arguments
 * short-circuit to a missing result.
 */
abstract class UnseededMissingnessObliviousJVMFunction (
  override val name: String,
  override val typeParameters: Seq[Type],
  override val valueParameterTypes: Seq[Type],
  override val returnType: Type,
  missingnessObliviousComputeReturnType: (Type, Seq[SType]) => SType
) extends JVMFunction {

  override def computeReturnEmitType(returnType: Type, valueParameterTypes: Seq[EmitType]): EmitType = {
    val strictSType = computeStrictReturnEmitType(returnType, valueParameterTypes.map(_.st))
    val allRequired = valueParameterTypes.forall(_.required)
    EmitType(strictSType, allRequired)
  }

  def computeStrictReturnEmitType(returnType: Type, valueParameterTypes: Seq[SType]): SType =
    MissingnessObliviousJVMFunction.returnSType(missingnessObliviousComputeReturnType)(returnType, valueParameterTypes)

  /** Core implementation over present SValues; supplied by each registration. */
  def apply(r: EmitRegion, cb: EmitCodeBuilder, returnSType: SType, typeParameters: Seq[Type], errorID: Value[Int], args: SValue*): SValue

  /** EmitCode adapter: unwraps all arguments, propagating missingness. */
  def apply(r: EmitRegion, returnType: SType, typeParameters: Seq[Type], errorID: Value[Int], args: EmitCode*): EmitCode =
    EmitCode.fromI(r.mb) { cb =>
      IEmitCode.multiMapEmitCodes(cb, args.toFastIndexedSeq) { unwrapped =>
        apply(r, cb, returnType, typeParameters, errorID, unwrapped: _*)
      }
    }

  /**
   * Generates (and returns) a dedicated method wrapping this function, with
   * parameters (Region, errorID: Int, args...).
   */
  def getAsMethod[C](cb: EmitClassBuilder[C], rpt: SType, typeParameters: Seq[Type], args: SType*): EmitMethodBuilder[C] = {
    assert(unify(typeParameters, args.map(_.virtualType), rpt.virtualType), name)
    val paramTypes = FastIndexedSeq[ParamType](typeInfo[Region], typeInfo[Int]) ++ args.map(_.paramType)
    val mb = cb.genEmitMethod(name, paramTypes, rpt.paramType)
    mb.emitSCode { bodyCb =>
      // Parameter 1 is the Region, 2 the error id; SCode args start at 3.
      val sArgs = args.indices.map(i => mb.getSCodeParam(i + 3))
      apply(EmitRegion.default(mb), bodyCb, rpt, typeParameters, mb.getCodeParam[Int](2), sArgs: _*)
    }
    mb
  }
}
object MissingnessAwareJVMFunction {
  /**
   * Resolves the return EmitType: uses the supplied computation when given,
   * otherwise the canonical SType of the virtual return type, marked optional.
   */
  def returnSType(calculateReturnType: (Type, Seq[EmitType]) => EmitType)(returnType: Type, valueParameterTypes: Seq[EmitType]): EmitType =
    calculateReturnType match {
      case null => EmitType(SType.canonical(returnType), false)
      case compute => compute(returnType, valueParameterTypes)
    }
}
/**
 * An unseeded staged function that can observe and produce missingness
 * itself (emitted as ApplySpecial rather than Apply).
 */
abstract class UnseededMissingnessAwareJVMFunction (
  override val name: String,
  override val typeParameters: Seq[Type],
  override val valueParameterTypes: Seq[Type],
  override val returnType: Type,
  missingnessAwareComputeReturnSType: (Type, Seq[EmitType]) => EmitType
) extends JVMFunction {

  override def computeReturnEmitType(returnType: Type, valueParameterTypes: Seq[EmitType]): EmitType =
    MissingnessAwareJVMFunction.returnSType(missingnessAwareComputeReturnSType)(returnType, valueParameterTypes)

  // Default implementation is unimplemented (NotImplementedError at runtime);
  // registrations (e.g. via registerIEmitCode) override this.
  def apply(cb: EmitCodeBuilder,
    r: Value[Region],
    rpt: SType,
    typeParameters: Seq[Type],
    errorID: Value[Int],
    args: EmitCode*
  ): IEmitCode = {
    ???
  }
}
/** Base class for seeded (pseudo-random) functions. A mutable seed is injected via
  * [[setSeed]] before code generation; invocation must go through the IEmitCode path
  * ([[applySeededI]]) — the plain `apply` overloads always fail fast. */
abstract class SeededJVMFunction (
  override val name: String,
  override val valueParameterTypes: Seq[Type],
  override val returnType: Type
) extends JVMFunction {
  // Seeded functions take no type parameters.
  def typeParameters: Seq[Type] = Seq.empty[Type]

  // Mutable state: default-initialized, assigned through setSeed before use.
  private[this] var seed: Long = _

  def setSeed(s: Long): Unit = { seed = s }

  /** Seeded code-generation entry point implemented by subclasses. */
  def applySeededI(seed: Long, cb: EmitCodeBuilder, region: Value[Region], rpt: SType, args: EmitCode*): IEmitCode

  // Unsupported invocation paths: both overloads abort with a fatal error.
  def apply(region: EmitRegion, rpt: SType, typeParameters: Seq[Type], errorID: Value[Int], args: EmitCode*): EmitCode =
    fatal("seeded functions must go through IEmitCode path")

  def apply(region: EmitRegion, rpt: SType, args: EmitCode*): EmitCode =
    fatal("seeded functions must go through IEmitCode path")

  // Seeded functions are not strict: they may produce results even for missing inputs.
  def isStrict: Boolean = false
}
/** Seeded function with strict missingness semantics: the result is required exactly
  * when every argument is required, independent of the return-type rule. */
abstract class SeededMissingnessObliviousJVMFunction (
  override val name: String,
  override val valueParameterTypes: Seq[Type],
  override val returnType: Type,
  missingnessObliviousreturnSType: (Type, Seq[SType]) => SType
) extends SeededJVMFunction(name, valueParameterTypes, returnType) {
  override def computeReturnEmitType(returnType: Type, valueParameterTypes: Seq[EmitType]): EmitType = {
    // Requiredness: all arguments required => result required (strict semantics).
    EmitType(computeStrictReturnEmitType(returnType, valueParameterTypes.map(_.st)), valueParameterTypes.forall(_.required))
  }
  /** Physical return type, ignoring missingness; null rule falls back to the canonical SType. */
  def computeStrictReturnEmitType(returnType: Type, valueParameterTypes: Seq[SType]): SType =
    MissingnessObliviousJVMFunction.returnSType(missingnessObliviousreturnSType)(returnType, valueParameterTypes)
}
/** Seeded function whose result missingness may depend on argument missingness; the
  * supplied rule (or the canonical default when null) computes the result EmitType. */
abstract class SeededMissingnessAwareJVMFunction (
  override val name: String,
  override val valueParameterTypes: Seq[Type],
  override val returnType: Type,
  missingnessAwarereturnSType: (Type, Seq[EmitType]) => EmitType
) extends SeededJVMFunction(name, valueParameterTypes, returnType) {
  override def computeReturnEmitType(returnType: Type, valueParameterTypes: Seq[EmitType]): EmitType =
    MissingnessAwareJVMFunction.returnSType(missingnessAwarereturnSType)(returnType, valueParameterTypes)
}
| hail-is/hail | hail/src/main/scala/is/hail/expr/ir/functions/Functions.scala | Scala | mit | 37,580 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package expr
import com.intellij.lang.ASTNode
import org.jetbrains.plugins.scala.lang.psi.api.expr._
/**
* @author Alexander Podkhalyuzin
* Date: 07.03.2008
*/
/** PSI implementation backing an annotation expression node in the Scala AST. */
class ScAnnotationExprImpl(node: ASTNode) extends ScalaPsiElementImpl (node) with ScAnnotationExpr{
  // Debug/PSI-tree presentation name of this element.
  override def toString: String = "AnnotationExpression"
} | LPTK/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScAnnotationExprImpl.scala | Scala | apache-2.0 | 395 |
package nl.rabobank.oss.rules.dsl.core
import nl.rabobank.oss.rules.dsl.core.TableSelectorGlossary._
import nl.rabobank.oss.rules.dsl.nl.grammar.DslTableSelector.prikken
import nl.rabobank.oss.rules.dsl.nl.grammar._
/** Example calculation ("berekening") exercising the table-selection DSL:
  * ResultString is filled from a single cell of TableFact (waarde at IndexX/IndexY),
  * ResultList from a range of cells (waardes over IndexXRange at IndexY).
  * The empty early-initializer block (`extends { } with`) is the pre-Scala-3 idiom used
  * here to pass the DSL expression to the Berekening constructor. */
class TableSelectorBerekening extends {
} with Berekening (
  Gegeven (altijd)
  Bereken
  ResultString is (prikken in TableFact met waarde(IndexX, IndexY)) en
  ResultList is (prikken in TableFact met waardes(IndexXRange, IndexY))
)
| scala-rules/scala-rules | engine/src/test/scala/nl/rabobank/oss/rules/dsl/core/TableSelectorBerekening.scala | Scala | mit | 459 |
package org.ensime.indexer.lucene
import org.apache.lucene.document.Field._
import org.apache.lucene.document._
import org.scalatest.{ FunSpec, Matchers }
/** Round-trip test for Lucene entity (de)serialisation: entity -> Document -> entity
  * must be the identity. */
class LuceneSerializationSpec extends FunSpec with Matchers {

  /** Serialises `t` to a Lucene Document and recovers it, asserting equality. */
  def thereAndBackAgain[T](t: T)(implicit p: DocumentProvider[T], r: DocumentRecovery[T]): Unit = {
    val doc = p.toDocument(t)
    val back = r.toEntity(doc)
    assert(t === back)
  }

  // Minimal two-field entity used as the round-trip fixture.
  case class SimpleThing(id: String, b: String) extends Entity

  implicit object SimpleThingS extends EntityS[SimpleThing](classOf[SimpleThing]) {
    def addFields(doc: Document, t: SimpleThing): Unit =
      doc.add(new TextField("b", t.b, Store.YES))
    // NOTE(review): recovery reads field "ID" (upper case) — presumably EntityS writes the
    // id under that exact name; confirm the field-name convention in EntityS.
    def toEntity(doc: Document): SimpleThing =
      SimpleThing(doc.get("ID"), doc.get("b"))
  }

  describe("Lucene Entity Serialisation") {
    it("should serialise and deserialise a simple type") {
      val t = SimpleThing("hello", "world")
      thereAndBackAgain(t)
    }
  }
}
| eddsteel/ensime | core/src/test/scala/org/ensime/indexer/lucene/LuceneSerializationSpec.scala | Scala | gpl-3.0 | 949 |
package org.jetbrains.sbt.shell.sbt_latest
import org.jetbrains.sbt.shell.UseSbtTestRunTest
import org.junit.Ignore
/**
* Created by Roman.Shein on 13.04.2017.
*/
@Ignore
class UseSbtTestRunTest_latest extends UseSbtTestRunTest {
  /** Location of the sbt test project exercised by this run. */
  override def getPath: String = {
    "sbt/shell/sbtTestRunTest"
  }
}
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/sbt/shell/sbt_latest/UseSbtTestRunTest_latest.scala | Scala | apache-2.0 | 298 |
// Regression fixture: an abstract re-declaration of toString() alongside an overloaded
// toString(StringBuilder). Both members are deliberate ("crucial") — the bug under test
// was the overload masking the inherited abstract toString().
abstract class A {
  override def toString(): String // crucial
  def toString(sb: StringBuilder): StringBuilder // crucial
}
case class B() extends A {
  // Only the overloaded version is implemented here; the case-class-synthesized toString()
  // must still satisfy the abstract toString() — the historical bug made it appear unimplemented.
  def toString(sb: StringBuilder): StringBuilder = sys.error("")
}
object Test extends dotty.runtime.LegacyApp {
  // Printing B exercises the companion/module toString path of the fixture above.
  Console.println(B)
}
| folone/dotty | tests/pending/run/t1042.scala | Scala | bsd-3-clause | 373 |
/*
* Copyright: Copyright (C) 2016, ATS Advanced Telematic Systems GmbH
* License: MPL-2.0
*/
package org.genivi.sota.core
import java.io.File
import java.net.URI
import org.genivi.sota.data.PackageId._
import org.genivi.sota.marshalling.CirceMarshallingSupport._
import io.circe.generic.auto._
import org.genivi.sota.core.data.{Package => DataPackage}
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.Uri.Path
import akka.http.scaladsl.testkit.ScalatestRouteTest
import akka.stream.scaladsl.FileIO
import akka.testkit.TestKitBase
import akka.util.ByteString
import cats.data.Xor
import io.circe.Json
import io.circe.generic.auto._
import org.genivi.sota.DefaultPatience
import org.genivi.sota.core.db.{BlacklistedPackages, Packages}
import org.genivi.sota.core.resolver.DefaultConnectivity
import org.genivi.sota.core.storage.PackageStorage.PackageStorageOp
import org.genivi.sota.core.storage.LocalPackageStore
import org.genivi.sota.core.transfer.DefaultUpdateNotifier
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{FunSuite, ShouldMatchers}
import org.genivi.sota.data.{Namespace, Namespaces, PackageId}
import org.genivi.sota.messaging.LocalMessageBus
import org.genivi.sota.messaging.Messages.PackageStorageUsage
import scala.concurrent.duration._
import scala.concurrent.Future
/** Route-level tests for PackagesResource: uploading a package to local storage,
  * namespace filtering of listings, blacklist flags in listings and lookups, and
  * publication of storage-usage messages on the bus. */
class PackagesResourceSpec extends FunSuite
  with ScalatestRouteTest
  with DatabaseSpec
  with ShouldMatchers
  with ScalaFutures
  with LongRequestTimeout
  with DefaultPatience
  with Generators
  with TestKitBase {

  import org.genivi.sota.http.NamespaceDirectives._

  val deviceRegistry = new FakeDeviceRegistry(Namespaces.defaultNs)

  implicit val connectivity = DefaultConnectivity

  lazy val updateService = new UpdateService(DefaultUpdateNotifier, deviceRegistry)

  // Resource under test, with package storage redirected to the local file system.
  val service = new PackagesResource(updateService, db, LocalMessageBus.publisher(system), defaultNamespaceExtractor) {
    override val packageStorageOp: PackageStorageOp = new LocalPackageStore().store _
  }

  val BasePath = Path("/packages")

  val entity = HttpEntity(ByteString("Some Text"))

  // Multipart upload body with a single "file" part named linux-lts.rpm.
  val multipartForm =
    Multipart.FormData(Multipart.FormData.BodyPart.Strict(
      "file",
      entity,
      Map("filename" -> "linux-lts.rpm")))

  /** Reads back the stored package file referenced by `uri` as a single ByteString. */
  def readFile(uri: Uri): Future[ByteString] = {
    FileIO.fromPath(new File(new URI(uri.toString())).toPath)
      .runFold(ByteString.empty)(_ ++ _)
  }

  // Upload then list: the package must appear and its stored bytes must round-trip.
  test("save packet to local file system") {
    val url = Uri.Empty.withPath(BasePath / "linux-lts" / "4.5.0")

    Put(url, multipartForm) ~> service.route ~> check {
      status shouldBe StatusCodes.NoContent

      Get("/packages") ~> service.route ~> check {
        val dataPackage = responseAs[List[DataPackage]].headOption
        dataPackage.map(_.id.name.get) should contain("linux-lts")
        whenReady(readFile(dataPackage.get.uri)) { contents =>
          contents shouldBe ByteString("Some Text")
        }
      }
    }
  }

  // A package created under a foreign namespace must not leak into the default listing.
  test("returns packages for the request namespace only") {
    val pkg = PackageGen.sample.get.copy(namespace = Namespace("not-the-default-ns"))
    val dbF = db.run(Packages.create(pkg))

    whenReady(dbF) { pkg =>
      Get("/packages") ~> service.route ~> check {
        status shouldBe StatusCodes.OK
        val packages = responseAs[List[DataPackage]]
        packages.map(_.id) shouldNot contain(pkg.id)
      }
    }
  }

  // Blacklisted package: isBlackListed must be true in the listing JSON.
  test("returns package blacklist info when searching blacklisted package") {
    val pkg = PackageGen.sample.get
    val dbF = for {
      _ <- db.run(Packages.create(pkg))
      _ <- BlacklistedPackages.create(pkg.namespace, pkg.id)
    } yield pkg

    whenReady(dbF) { pkg =>
      Get("/packages") ~> service.route ~> check {
        status shouldBe StatusCodes.OK

        val responseP = responseAs[List[Json]]
          .find { j =>
            j.cursor.downField("id").get.as[PackageId] === Xor.right(pkg.id)
          }
          .map { pp =>
            pp.cursor.downField("isBlackListed").get.as[Boolean].toOption.get
          }

        responseP should contain(true)
      }
    }
  }

  // Non-blacklisted package: isBlackListed must be false in the listing JSON.
  test("returns package blacklist info when searching non blacklisted package") {
    val pkg = PackageGen.sample.get
    val dbF = db.run(Packages.create(pkg))

    whenReady(dbF) { pkg =>
      Get("/packages") ~> service.route ~> check {
        status shouldBe StatusCodes.OK

        val responseP = responseAs[List[Json]]
          .find { j =>
            j.cursor.downField("id").get.as[PackageId] === Xor.right(pkg.id)
          }
          .map { pp =>
            pp.cursor.downField("isBlackListed").get.as[Boolean].toOption.get
          }

        responseP should contain(false)
      }
    }
  }

  // Single-package lookup must also carry the blacklist flag.
  test("returns package blacklist info when returning a package") {
    val pkg = PackageGen.sample.get
    val dbF = for {
      _ <- db.run(Packages.create(pkg))
      _ <- BlacklistedPackages.create(pkg.namespace, pkg.id)
    } yield pkg

    whenReady(dbF) { pkg =>
      Get(s"/packages/${pkg.id.name.get}/${pkg.id.version.get}") ~> service.route ~> check {
        status shouldBe StatusCodes.OK

        val responseP = responseAs[Json]
          .cursor.downField("isBlackListed").get.as[Boolean].toOption.get

        responseP shouldBe true
      }
    }
  }

  // Uploading a package must publish a PackageStorageUsage message with usage > 0.
  test("publishes message to bus on create") {
    val url = Uri.Empty.withPath(BasePath / "linux-lts" / "4.5.0")
    system.eventStream.subscribe(testActor, classOf[PackageStorageUsage])

    Put(url, multipartForm) ~> service.route ~> check {
      status shouldBe StatusCodes.NoContent
      expectMsgPF(10.seconds, "package usage greater than 0") {
        case m @ PackageStorageUsage(_, _, usage) if usage > 0L => m
      }
    }
  }
}
| PDXostc/rvi_sota_server | core/src/test/scala/org/genivi/sota/core/PackagesResourceSpec.scala | Scala | mpl-2.0 | 5,724 |
/*
* Copyright © 2014 Teo Klestrup, Carl Dybdahl
*
* This file is part of Republix.
*
* Republix is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Republix is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Republix. If not, see <http://www.gnu.org/licenses/>.
*/
package republix.ui
import republix.game._
import republix.io._
import republix.sim._
import javax.swing._
import javax.swing.event._
/** Lobby phase UI: shows the current parties and a ready checkbox, and keeps the party
  * list in sync with PhaseUpdate messages from the server. */
object Lobby extends UIPhase {

  def open(model: GameModel, player: (In[PhaseUpdate], Out[Command]),
    party: Party, startParties: Vector[Party], state: GameState,
    nav: UINav): JComponent = new JPanel {

    // Local mutable view state; mutated from the network listener below.
    // NOTE(review): Swing models should only be touched on the EDT — confirm that
    // In#listen delivers callbacks there.
    var parties = party +: startParties
    var listeners = Vector[ListDataListener]()

    // Read-only Swing ListModel view over `parties`.
    val partyModel = new ListModel[Party] {
      def addListDataListener(l: ListDataListener): Unit = {
        listeners :+= l
      }
      def removeListDataListener(l: ListDataListener): Unit = {
        listeners = listeners.filterNot(_ == l)
      }
      def getElementAt(i: Int) = parties(i)
      def getSize = parties.size
    }
    object PartyList extends JList[Party](partyModel) {
    }
    object ReadyButton extends JCheckBox("Ready?") {
      // Forward the checkbox state to the server as a SetReady command.
      addActionListener (on {
        player._2.send(SetReady(isSelected))
      })
    }
    player._1.listen {
      case NewParty(party) =>
        parties :+= party
        for (l <- listeners) {
          // NOTE(review): ListDataEvent expects 0-based indices; parties.size points one
          // past the appended element — parties.size - 1 may be intended. Verify.
          l.intervalAdded(new ListDataEvent(partyModel, ListDataEvent.INTERVAL_ADDED, parties.size, parties.size))
        }
      case CountryIs(country) =>
        // todo: inform player of current country
      case _ => sys.exit(0)
    }
    add(new JLabel("Lobby"))
    add(PartyList)
    add(ReadyButton)
  }

  // todo: place this somewhere sane
  /** Connects to the server and adapts the raw channels to typed Update/Command streams. */
  def join(address: String, port: Int): In[(In[Update], Out[Command])] = {
    val conn = Net.connect(address, port)
    conn.map { case (in, out) =>
      (Net.read[Update](in), Net.write[Command](out))
    }
  }
} | teozkr/republix-online | republix/src/main/scala/republix/ui/Lobby.scala | Scala | agpl-3.0 | 2,318 |
package core
import org.json4s.jackson.Serialization.{ read, writePretty }
import org.json4s.{ DefaultFormats, Formats, jackson, Serialization }
import org.joda.time.DateTime
import akka.http.scaladsl.marshalling._
import akka.http.scaladsl.unmarshalling._
import akka.http.scaladsl.model.{ ContentType, ContentTypeRange, HttpEntity, MediaType, MediaTypes }
import akka.http.scaladsl.marshalling.{ Marshaller, ToEntityMarshaller }
import akka.http.scaladsl.model.{ HttpCharsets, MediaTypes }
import akka.http.scaladsl.unmarshalling.{ FromEntityUnmarshaller, Unmarshaller }
object BaseFormats extends BaseFormats {
  /** Two-valued flag selecting compact vs. pretty-printed JSON output. */
  sealed abstract class ShouldWritePretty

  object ShouldWritePretty {
    object True extends ShouldWritePretty
    object False extends ShouldWritePretty
  }
}
/** json4s <-> akka-http (un)marshalling glue: provides implicit marshallers and
  * unmarshallers for JSON (and custom media types such as collection+json). */
trait BaseFormats {
  import BaseFormats._

  implicit val serialization = jackson.Serialization
  implicit val formats = DefaultFormats ++ org.json4s.ext.JodaTimeSerializers.all

  lazy val `application/collection+json` = customMediaTypeUTF8("collection+json")

  /** Builds an application/<name> media type fixed to UTF-8. */
  def customMediaTypeUTF8(name: String): MediaType.WithFixedCharset =
    MediaType.customWithFixedCharset(
      "application",
      name,
      HttpCharsets.`UTF-8`
    )

  // NOTE(review): the second parameter list here is NOT implicit, so callers must pass
  // serialization/formats explicitly despite the `implicit def` — possibly unintended;
  // compare with json4sUnmarshallerConverter below.
  implicit def json4sUnmarshallerMediaType[A: Manifest](mediaType: MediaType)
    (serialization: Serialization, formats: Formats): FromEntityUnmarshaller[A] =
    unmarshaller(mediaType)(manifest, serialization, formats)

  // Default unmarshaller for application/json.
  implicit def json4sUnmarshallerConverter[A: Manifest]
    (implicit serialization: Serialization, formats: Formats): FromEntityUnmarshaller[A] =
    unmarshaller(MediaTypes.`application/json`)(manifest, serialization, formats)

  /**
   * HTTP entity => `A`
   *
   * Restricted to the given media type; decodes the entity bytes with the response
   * charset (UTF-8 fast path) before handing them to json4s.
   *
   * @tparam A type to decode
   * @return unmarshaller for `A`
   */
  implicit def unmarshaller[A: Manifest](mediaType: MediaType)
    (implicit serialization: Serialization, formats: Formats): FromEntityUnmarshaller[A] =
    Unmarshaller
      .byteStringUnmarshaller
      .forContentTypes(mediaType)
      .mapWithCharset { (data, charset) =>
        val input = if (charset == HttpCharsets.`UTF-8`)
          data.utf8String
        else
          data.decodeString(charset.nioCharset.name)
        serialization.read(input)
      }

  // NOTE(review): as above, the second parameter list is not implicit here either.
  implicit def json4sMarshallMediaType[A <: AnyRef](mediaType: MediaType)
    (serialization: Serialization, formats: Formats,
      shouldWritePretty: ShouldWritePretty = ShouldWritePretty.False): ToEntityMarshaller[A] =
    marshaller(mediaType)(serialization, formats, shouldWritePretty)

  //implicit def json4sMarshallerConverter[A <: AnyRef]
  //(implicit serialization: Serialization, formats: Formats, shouldWritePretty: ShouldWritePretty = ShouldWritePretty.False): ToEntityMarshaller[A] =
  //marshaller(MediaTypes.`application/json`)(serialization, formats, shouldWritePretty)

  /**
   * `A` => HTTP entity
   *
   * Serialises with json4s, compact by default, pretty when ShouldWritePretty.True.
   *
   * @tparam A type to encode, must be upper bounded by `AnyRef`
   * @return marshaller for any `A` value
   */
  implicit def marshaller[A <: AnyRef](mediaType: MediaType)
    (implicit serialization: Serialization, formats: Formats,
      shouldWritePretty: ShouldWritePretty = ShouldWritePretty.False): ToEntityMarshaller[A] =
    shouldWritePretty match {
      case ShouldWritePretty.False =>
        Marshaller.StringMarshaller.wrap(mediaType)(serialization.write[A])
      case _ =>
        Marshaller.StringMarshaller.wrap(mediaType)(serialization.writePretty[A])
    }
}
| enpassant/wooster | src/main/scala/core/BaseFormats.scala | Scala | apache-2.0 | 3,457 |
/*
* La Trobe University - Distributed Deep Learning System
* Copyright 2016 Matthias Langer (t3l@threelights.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package edu.latrobe.blaze.modules.jvm
import edu.latrobe._
import edu.latrobe.blaze.{Mode, PredictContext}
import edu.latrobe.blaze.modules._
/** JVM backend base for the AddBias module. Concrete subclasses implement the in-place
  * kernels per granularity (value / unit / channel / sample / batch); this class handles
  * tensor conversion, temporary management and dispatch — previously copy-pasted 15 times,
  * now factored into the private helpers below with identical behavior. */
abstract class AddBias_JVM
  extends AddBias
    with MapLayer_JVM[AddBiasBuilder] {

  /** Bias weights tensor plus the buffer reference that was newly registered for it.
    * If the weight buffer already holds a tensor under the builder's reference it is
    * reused (layout checked); otherwise a zero-initialized tensor is registered and the
    * new reference is kept so doClose knows this instance owns the tensor. */
  final override val (bias, biasReference) = {
    val ref = builder.biasReference
    val tmp = weightBufferBuilder.get(ref)
    if (tmp.isDefined) {
      val result = tmp.get.asInstanceOf[RealArrayTensor]
      assume(result.layout == biasLayout)
      (result, None)
    }
    else {
      val result = RealArrayTensor.zeros(biasLayout)
      val newRef = weightBufferBuilder.register(ref, result)
      (result, Some(newRef))
    }
  }

  override protected def doClose()
  : Unit = {
    // Only close the bias if this instance registered (owns) it.
    if (biasReference.isDefined) {
      bias.close()
    }
    super.doClose()
  }

  final override def refresh(): Unit = {}

  // ---------------------------------------------------------------------------
  // Shared conversion/dispatch helpers.
  // ---------------------------------------------------------------------------
  /** Returns `input` as a RealArrayTensor, reusing it in place when allowed, copying otherwise. */
  private def forwardInput(inPlaceAllowed: Boolean, input: Tensor)
  : RealArrayTensor = {
    if (inPlaceAllowed) {
      input.asOrToRealArrayTensor
    }
    else {
      input.toRealArrayTensor
    }
  }

  /** Converts the input, applies the in-place forward kernel, returns the result tensor. */
  private def predictVia(kernel: RealArrayTensor => Unit)
                        (inPlaceAllowed: Boolean, input: Tensor)
  : RealArrayTensor = {
    val out = forwardInput(inPlaceAllowed, input)
    kernel(out)
    out
  }

  /** Copies `output`, applies the in-place inverse kernel, returns the copy. */
  private def predictInvVia(kernel: RealArrayTensor => Unit)(output: Tensor)
  : RealArrayTensor = {
    val inp = output.toRealArrayTensor
    kernel(inp)
    inp
  }

  /** Runs a gradient kernel on JVM tensors, copying results back into the sink and
    * releasing any temporaries that were created by the conversions. */
  private def deriveGradientsVia(kernel: (RealArrayTensor, RealArrayTensor) => Unit)
                                (error: Tensor, sink: ValueTensor)
  : Unit = {
    val err = error.asOrToRealArrayTensor
    val dst = sink.asOrToRealArrayTensor
    kernel(err, dst)
    // Deallocate temporaries.
    if (dst ne sink) {
      sink := dst
      dst.close()
    }
    if (err ne error) {
      err.close()
    }
  }

  // ---------------------------------------------------------------------------
  // Forward propagation related.
  // ---------------------------------------------------------------------------
  final override protected def doPredictPerValue(inPlaceAllowed: Boolean,
                                                 input: Tensor)
  : RealArrayTensor = predictVia(t => doPredictPerValue(t))(inPlaceAllowed, input)

  protected def doPredictPerValue(output: RealArrayTensor)
  : Unit

  final override protected def doPredictPerUnit(inPlaceAllowed: Boolean,
                                                input: Tensor)
  : RealArrayTensor = predictVia(t => doPredictPerUnit(t))(inPlaceAllowed, input)

  protected def doPredictPerUnit(output: RealArrayTensor)
  : Unit

  final override protected def doPredictPerChannel(inPlaceAllowed: Boolean,
                                                   input: Tensor)
  : RealArrayTensor = predictVia(t => doPredictPerChannel(t))(inPlaceAllowed, input)

  protected def doPredictPerChannel(output: RealArrayTensor)
  : Unit

  final override protected def doPredictPerSample(inPlaceAllowed: Boolean,
                                                  input: Tensor)
  : RealArrayTensor = predictVia(t => doPredictPerSample(t))(inPlaceAllowed, input)

  protected def doPredictPerSample(output: RealArrayTensor)
  : Unit

  final override protected def doPredictPerBatch(inPlaceAllowed: Boolean,
                                                 input: Tensor)
  : RealArrayTensor = predictVia(t => doPredictPerBatch(t))(inPlaceAllowed, input)

  protected def doPredictPerBatch(output: RealArrayTensor)
  : Unit

  final override protected def doPredictInvPerValue(output: Tensor)
  : RealArrayTensor = predictInvVia(t => doPredictInvPerValue(t))(output)

  protected def doPredictInvPerValue(input: RealArrayTensor)
  : Unit

  final override protected def doPredictInvPerUnit(output: Tensor)
  : RealArrayTensor = predictInvVia(t => doPredictInvPerUnit(t))(output)

  protected def doPredictInvPerUnit(input: RealArrayTensor)
  : Unit

  final override protected def doPredictInvPerChannel(output: Tensor)
  : Tensor = predictInvVia(t => doPredictInvPerChannel(t))(output)

  protected def doPredictInvPerChannel(input: RealArrayTensor)
  : Unit

  final override protected def doPredictInvPerSample(output: Tensor)
  : Tensor = predictInvVia(t => doPredictInvPerSample(t))(output)

  protected def doPredictInvPerSample(input: RealArrayTensor)
  : Unit

  final override protected def doPredictInvPerBatch(output: Tensor)
  : Tensor = predictInvVia(t => doPredictInvPerBatch(t))(output)

  protected def doPredictInvPerBatch(input: RealArrayTensor)
  : Unit

  // ---------------------------------------------------------------------------
  // Back propagation related.
  // ---------------------------------------------------------------------------
  final override protected def doDeriveWeightGradientsPerValue(error: Tensor,
                                                               sink: ValueTensor)
  : Unit = deriveGradientsVia((e, d) => doDeriveWeightGradientsPerValue(e, d))(error, sink)

  protected def doDeriveWeightGradientsPerValue(error: RealArrayTensor,
                                                sink: RealArrayTensor)
  : Unit

  final override protected def doDeriveWeightGradientsPerUnit(error: Tensor,
                                                              sink: ValueTensor)
  : Unit = deriveGradientsVia((e, d) => doDeriveWeightGradientsPerUnit(e, d))(error, sink)

  protected def doDeriveWeightGradientsPerUnit(error: RealArrayTensor,
                                               sink: RealArrayTensor)
  : Unit

  final override protected def doDeriveWeightGradientsPerChannel(error: Tensor,
                                                                 sink: ValueTensor)
  : Unit = deriveGradientsVia((e, d) => doDeriveWeightGradientsPerChannel(e, d))(error, sink)

  protected def doDeriveWeightGradientsPerChannel(error: RealArrayTensor,
                                                  sink: RealArrayTensor)
  : Unit

  final override protected def doDeriveWeightGradientsPerSample(error: Tensor,
                                                                sink: ValueTensor)
  : Unit = deriveGradientsVia((e, d) => doDeriveWeightGradientsPerSample(e, d))(error, sink)

  protected def doDeriveWeightGradientsPerSample(error: RealArrayTensor,
                                                 sink: RealArrayTensor)
  : Unit

  final override protected def doDeriveWeightGradientsPerBatch(error: Tensor,
                                                               sink: ValueTensor)
  : Unit = deriveGradientsVia((e, d) => doDeriveWeightGradientsPerBatch(e, d))(error, sink)

  protected def doDeriveWeightGradientsPerBatch(error: RealArrayTensor,
                                                sink: RealArrayTensor)
  : Unit
}
| bashimao/ltudl | blaze/src/main/scala/edu/latrobe/blaze/modules/jvm/AddBias_JVM.scala | Scala | apache-2.0 | 8,325 |
package scala.slick.test.jdbc
import scala.slick.jdbc.{PositionedResult, PositionedResultIterator}
import com.typesafe.slick.testkit.util.DelegateResultSet
import org.junit.Test
import org.junit.Assert._
/** Tests that PositionedResultIterator honors its maxRows limit. */
class PositionedResultTest {

  // length must be min(len, limit), with limit 0 meaning "unlimited".
  @Test def testMaxRows {
    assertEquals(5, createFakePR(5, 0).length)
    assertEquals(1, createFakePR(5, 1).length)
    assertEquals(4, createFakePR(5, 4).length)
    assertEquals(5, createFakePR(5, 5).length)
    assertEquals(5, createFakePR(5, 6).length)
  }

  /** Builds an iterator over a fake ResultSet yielding `len` rows, capped at `limit`
    * (0 = unlimited). Column reads simply echo the column index; nulls never occur. */
  def createFakePR(len: Int, limit: Int): PositionedResultIterator[Int] = {
    val fakeRS = new DelegateResultSet(null) {
      var count: Int = 0 // rows consumed so far
      override def next() = {
        count += 1
        count <= len
      }
      override def getInt(columnIndex: Int): Int = columnIndex
      override def wasNull(): Boolean = false
    }
    val pr = new PositionedResult(fakeRS) { def close() {} }
    new PositionedResultIterator[Int](pr, limit) {
      def extractValue(pr: PositionedResult) = pr.nextInt()
    }
  }
}
| dvinokurov/slick | slick-testkit/src/test/scala/scala/slick/test/jdbc/PositionedResultTest.scala | Scala | bsd-2-clause | 1,028 |
/*!
* Copyright 2013-2014 Dennis Hörsch.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.dennishoersch.dropwizard.blog.domain
/** Immutable blog author; `id` satisfies the Identifiable contract. */
case class Author(id: Long, name: String, avatarUrl: String) extends Identifiable {
} | dhs3000/dropwizard-scala | src/main/scala/de/dennishoersch/dropwizard/blog/domain/Author.scala | Scala | apache-2.0 | 741 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.createTable
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll
/** CREATE TABLE tests for compaction-related TBLPROPERTIES: valid options must be stored
  * and shown by DESCRIBE FORMATTED; each invalid value must be rejected with a specific
  * error message. */
class TestCreateTableWithCompactionOptions extends QueryTest with BeforeAndAfterAll {

  // Table names used by the individual tests below.
  val tableWithCompactionOptions = "tableWithCompactionOptions"
  val tableWithInvalidMajorCompactionSize = "tableWithInvalidMajorCompactionSize"
  val tableWithInvalidAutoLoadMerge = "tableWithInvalidAutoLoadMerge"
  val tableWithInvalidLevelThreshold = "tableWithInvalidLevelThreshold"
  val tableWithInvalidPreserveSegments = "tableWithInvalidPreserveSegments"
  val tableWithInvalidAllowedDays = "tableWithInvalidAllowedDays"
  val tableWithoutCompactionOptions = "tableWithoutCompactionOptions"

  override def beforeAll: Unit = {
    cleanTables()
  }

  override def afterAll: Unit = {
    cleanTables()
  }

  // Drops every table this suite may create (idempotent).
  private def cleanTables(): Unit = {
    sql("use default")
    sql(s"DROP TABLE IF EXISTS $tableWithCompactionOptions")
    sql(s"DROP TABLE IF EXISTS $tableWithInvalidMajorCompactionSize")
    sql(s"DROP TABLE IF EXISTS $tableWithInvalidAutoLoadMerge")
    sql(s"DROP TABLE IF EXISTS $tableWithInvalidLevelThreshold")
    sql(s"DROP TABLE IF EXISTS $tableWithInvalidPreserveSegments")
    sql(s"DROP TABLE IF EXISTS $tableWithInvalidAllowedDays")
    sql(s"DROP TABLE IF EXISTS $tableWithoutCompactionOptions")
  }

  // Happy path: all five options must round-trip through DESCRIBE FORMATTED.
  test("test create table with compaction options") {
    sql(
      s"""
         | CREATE TABLE $tableWithCompactionOptions(
         | intField INT,
         | stringField STRING
         | )
         | STORED AS carbondata
         | TBLPROPERTIES('MAJOR_COMPACTION_SIZE'='10240',
         | 'AUTO_LOAD_MERGE'='true',
         | 'COMPACTION_LEVEL_THRESHOLD'='5,6',
         | 'COMPACTION_PRESERVE_SEGMENTS'='10',
         | 'ALLOWED_COMPACTION_DAYS'='5')
       """.stripMargin)

    val tableOptions = sql(s"DESCRIBE FORMATTED $tableWithCompactionOptions")
      .collect().map(r => (r.getString(0).trim, r.getString(1).trim)).toMap

    assert(tableOptions.contains("MAJOR_COMPACTION_SIZE"))
    assert(tableOptions.getOrElse("MAJOR_COMPACTION_SIZE", "").equals("10240"))
    assert(tableOptions.contains("AUTO_LOAD_MERGE"))
    assert(tableOptions.getOrElse("AUTO_LOAD_MERGE", "").equals("true"))
    assert(tableOptions.contains("COMPACTION_LEVEL_THRESHOLD"))
    assert(tableOptions.getOrElse("COMPACTION_LEVEL_THRESHOLD", "").equals("5,6"))
    assert(tableOptions.contains("COMPACTION_PRESERVE_SEGMENTS"))
    assert(tableOptions.getOrElse("COMPACTION_PRESERVE_SEGMENTS", "").equals("10"))
    assert(tableOptions.contains("ALLOWED_COMPACTION_DAYS"))
    assert(tableOptions.getOrElse("ALLOWED_COMPACTION_DAYS", "").equals("5"))
  }

  // Non-numeric major_compaction_size must be rejected.
  test("test create table with invalid major compaction size") {
    val exception: Exception = intercept[Exception] {
      sql(
        s"""
           |CREATE TABLE $tableWithInvalidMajorCompactionSize
           |(
           |intField INT,
           |stringField STRING
           |)
           |STORED AS carbondata
           |TBLPROPERTIES('MAJOR_COMPACTION_SIZE'='abc')
       """.stripMargin)
    }

    assert(exception.getMessage.contains(
      "Invalid major_compaction_size value found: abc, " +
        "only int value greater than 0 is supported."))
  }

  // Non-boolean auto_load_merge must be rejected.
  test("test create table with invalid auto load merge") {
    val exception: Exception = intercept[Exception] {
      sql(
        s"""
           |CREATE TABLE $tableWithInvalidAutoLoadMerge
           |(
           |intField INT,
           |stringField STRING
           |)
           |STORED AS carbondata
           |TBLPROPERTIES('AUTO_LOAD_MERGE'='123')
       """.stripMargin)
    }

    assert(exception.getMessage.contains(
      "Invalid auto_load_merge value found: 123, only true|false is supported."))
  }

  // Non-numeric entries in compaction_level_threshold must be rejected.
  test("test create table with invalid level threshold") {
    val exception: Exception = intercept[Exception] {
      sql(
        s"""
           |CREATE TABLE $tableWithInvalidLevelThreshold
           |(
           |intField INT,
           |stringField STRING
           |)
           |STORED AS carbondata
           |TBLPROPERTIES(
           |'AUTO_LOAD_MERGE'='true',
           |'COMPACTION_LEVEL_THRESHOLD'='x,6')
       """.stripMargin)
    }

    assert(exception.getMessage.contains(
      "Invalid compaction_level_threshold value found: x,6, " +
        "only int values separated by comma and between 0 and 100 are supported."))
  }

  // Non-numeric compaction_preserve_segments must be rejected.
  test("test create table with invalid preserve segments number") {
    val exception: Exception = intercept[Exception] {
      sql(
        s"""
           |CREATE TABLE $tableWithInvalidPreserveSegments
           |(
           |intField INT,
           |stringField STRING
           |)
           |STORED AS carbondata
           |TBLPROPERTIES(
           |'AUTO_LOAD_MERGE'='true',
           |'COMPACTION_LEVEL_THRESHOLD'='4,6',
           |'COMPACTION_PRESERVE_SEGMENTS'='abc')
       """.stripMargin)
    }

    assert(exception.getMessage.contains(
      "Invalid compaction_preserve_segments value found: abc, " +
        "only int value between 0 and 100 is supported."))
  }

  // Non-numeric allowed_compaction_days must be rejected.
  test("test create table with invalid allowed days") {
    val exception: Exception = intercept[Exception] {
      sql(
        s"""
           |CREATE TABLE $tableWithInvalidAllowedDays
           |(
           |intField INT,
           |stringField STRING
           |)
           |STORED AS carbondata
           |TBLPROPERTIES(
           |'AUTO_LOAD_MERGE'='true',
           |'ALLOWED_COMPACTION_DAYS'='abc')
       """.stripMargin)
    }

    assert(exception.getMessage.contains(
      "Invalid allowed_compaction_days value found: abc, " +
        "only int value between 0 and 100 is supported."))
  }
}
| zzcclp/carbondata | integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableWithCompactionOptions.scala | Scala | apache-2.0 | 6,562 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui.jobs
import java.util.Properties
import org.scalatest.Matchers
import org.apache.spark._
import org.apache.spark.{LocalSparkContext, SparkConf, Success}
import org.apache.spark.executor._
import org.apache.spark.scheduler._
import org.apache.spark.ui.jobs.UIData.TaskUIData
import org.apache.spark.util.{AccumulatorContext, Utils}
class JobProgressListenerSuite extends SparkFunSuite with LocalSparkContext with Matchers {
val jobSubmissionTime = 1421191042750L
val jobCompletionTime = 1421191296660L
/** Builds a stage-submitted event; only the stage id (and a name derived from it) matter. */
private def createStageStartEvent(stageId: Int) = {
  SparkListenerStageSubmitted(new StageInfo(stageId, 0, stageId.toString, 0, null, null, ""))
}
/** Builds a stage-completed event; when `failed`, a failure reason is attached so the
  * listener records the stage as failed rather than succeeded. */
private def createStageEndEvent(stageId: Int, failed: Boolean = false) = {
  val stageInfo = new StageInfo(stageId, 0, stageId.toString, 0, null, null, "")
  if (failed) {
    stageInfo.failureReason = Some("Failed!")
  }
  SparkListenerStageCompleted(stageInfo)
}
private def createJobStartEvent(
jobId: Int,
stageIds: Seq[Int],
jobGroup: Option[String] = None): SparkListenerJobStart = {
val stageInfos = stageIds.map { stageId =>
new StageInfo(stageId, 0, stageId.toString, 0, null, null, "")
}
val properties: Option[Properties] = jobGroup.map { groupId =>
val props = new Properties()
props.setProperty(SparkContext.SPARK_JOB_GROUP_ID, groupId)
props
}
SparkListenerJobStart(jobId, jobSubmissionTime, stageInfos, properties.orNull)
}
private def createJobEndEvent(jobId: Int, failed: Boolean = false) = {
val result = if (failed) JobFailed(new Exception("dummy failure")) else JobSucceeded
SparkListenerJobEnd(jobId, jobCompletionTime, result)
}
private def runJob(listener: SparkListener, jobId: Int, shouldFail: Boolean = false) {
val stagesThatWontBeRun = jobId * 200 to jobId * 200 + 10
val stageIds = jobId * 100 to jobId * 100 + 50
listener.onJobStart(createJobStartEvent(jobId, stageIds ++ stagesThatWontBeRun))
for (stageId <- stageIds) {
listener.onStageSubmitted(createStageStartEvent(stageId))
listener.onStageCompleted(createStageEndEvent(stageId, failed = stageId % 2 == 0))
}
listener.onJobEnd(createJobEndEvent(jobId, shouldFail))
}
private def assertActiveJobsStateIsEmpty(listener: JobProgressListener) {
listener.getSizesOfActiveStateTrackingCollections.foreach { case (fieldName, size) =>
assert(size === 0, s"$fieldName was not empty")
}
}
test("test LRU eviction of stages") {
def runWithListener(listener: JobProgressListener) : Unit = {
for (i <- 1 to 50) {
listener.onStageSubmitted(createStageStartEvent(i))
listener.onStageCompleted(createStageEndEvent(i))
}
assertActiveJobsStateIsEmpty(listener)
}
val conf = new SparkConf()
conf.set("spark.ui.retainedStages", 5.toString)
var listener = new JobProgressListener(conf)
// Test with 5 retainedStages
runWithListener(listener)
listener.completedStages.size should be (5)
listener.completedStages.map(_.stageId).toSet should be (Set(50, 49, 48, 47, 46))
// Test with 0 retainedStages
conf.set("spark.ui.retainedStages", 0.toString)
listener = new JobProgressListener(conf)
runWithListener(listener)
listener.completedStages.size should be (0)
}
test("test clearing of stageIdToActiveJobs") {
val conf = new SparkConf()
conf.set("spark.ui.retainedStages", 5.toString)
val listener = new JobProgressListener(conf)
val jobId = 0
val stageIds = 1 to 50
// Start a job with 50 stages
listener.onJobStart(createJobStartEvent(jobId, stageIds))
for (stageId <- stageIds) {
listener.onStageSubmitted(createStageStartEvent(stageId))
}
listener.stageIdToActiveJobIds.size should be > 0
// Complete the stages and job
for (stageId <- stageIds) {
listener.onStageCompleted(createStageEndEvent(stageId, failed = false))
}
listener.onJobEnd(createJobEndEvent(jobId, false))
assertActiveJobsStateIsEmpty(listener)
listener.stageIdToActiveJobIds.size should be (0)
}
test("test clearing of jobGroupToJobIds") {
def runWithListener(listener: JobProgressListener): Unit = {
// Run 50 jobs, each with one stage
for (jobId <- 0 to 50) {
listener.onJobStart(createJobStartEvent(jobId, Seq(0), jobGroup = Some(jobId.toString)))
listener.onStageSubmitted(createStageStartEvent(0))
listener.onStageCompleted(createStageEndEvent(0, failed = false))
listener.onJobEnd(createJobEndEvent(jobId, false))
}
assertActiveJobsStateIsEmpty(listener)
}
val conf = new SparkConf()
conf.set("spark.ui.retainedJobs", 5.toString)
var listener = new JobProgressListener(conf)
runWithListener(listener)
// This collection won't become empty, but it should be bounded by spark.ui.retainedJobs
listener.jobGroupToJobIds.size should be (5)
// Test with 0 jobs
conf.set("spark.ui.retainedJobs", 0.toString)
listener = new JobProgressListener(conf)
runWithListener(listener)
listener.jobGroupToJobIds.size should be (0)
}
test("test LRU eviction of jobs") {
val conf = new SparkConf()
conf.set("spark.ui.retainedStages", 5.toString)
conf.set("spark.ui.retainedJobs", 5.toString)
val listener = new JobProgressListener(conf)
// Run a bunch of jobs to get the listener into a state where we've exceeded both the
// job and stage retention limits:
for (jobId <- 1 to 10) {
runJob(listener, jobId, shouldFail = false)
}
for (jobId <- 200 to 210) {
runJob(listener, jobId, shouldFail = true)
}
assertActiveJobsStateIsEmpty(listener)
// Snapshot the sizes of various soft- and hard-size-limited collections:
val softLimitSizes = listener.getSizesOfSoftSizeLimitedCollections
val hardLimitSizes = listener.getSizesOfHardSizeLimitedCollections
// Run some more jobs:
for (jobId <- 11 to 50) {
runJob(listener, jobId, shouldFail = false)
// We shouldn't exceed the hard / soft limit sizes after the jobs have finished:
listener.getSizesOfSoftSizeLimitedCollections should be (softLimitSizes)
listener.getSizesOfHardSizeLimitedCollections should be (hardLimitSizes)
}
listener.completedJobs.size should be (5)
listener.completedJobs.map(_.jobId).toSet should be (Set(50, 49, 48, 47, 46))
for (jobId <- 51 to 100) {
runJob(listener, jobId, shouldFail = true)
// We shouldn't exceed the hard / soft limit sizes after the jobs have finished:
listener.getSizesOfSoftSizeLimitedCollections should be (softLimitSizes)
listener.getSizesOfHardSizeLimitedCollections should be (hardLimitSizes)
}
assertActiveJobsStateIsEmpty(listener)
// Completed and failed jobs each their own size limits, so this should still be the same:
listener.completedJobs.size should be (5)
listener.completedJobs.map(_.jobId).toSet should be (Set(50, 49, 48, 47, 46))
listener.failedJobs.size should be (5)
listener.failedJobs.map(_.jobId).toSet should be (Set(100, 99, 98, 97, 96))
}
test("test executor id to summary") {
val conf = new SparkConf()
val listener = new JobProgressListener(conf)
val taskMetrics = TaskMetrics.empty
val shuffleReadMetrics = taskMetrics.createTempShuffleReadMetrics()
assert(listener.stageIdToData.size === 0)
// finish this task, should get updated shuffleRead
shuffleReadMetrics.incRemoteBytesRead(1000)
taskMetrics.mergeShuffleReadMetrics()
var taskInfo = new TaskInfo(1234L, 0, 1, 0L, "exe-1", "host1", TaskLocality.NODE_LOCAL, false)
taskInfo.finishTime = 1
var task = new ShuffleMapTask(0)
val taskType = Utils.getFormattedClassName(task)
listener.onTaskEnd(
SparkListenerTaskEnd(task.stageId, 0, taskType, Success, taskInfo, taskMetrics))
assert(listener.stageIdToData.getOrElse((0, 0), fail())
.executorSummary.getOrElse("exe-1", fail()).shuffleRead === 1000)
// finish a task with unknown executor-id, nothing should happen
taskInfo =
new TaskInfo(1234L, 0, 1, 1000L, "exe-unknown", "host1", TaskLocality.NODE_LOCAL, true)
taskInfo.finishTime = 1
task = new ShuffleMapTask(0)
listener.onTaskEnd(
SparkListenerTaskEnd(task.stageId, 0, taskType, Success, taskInfo, taskMetrics))
assert(listener.stageIdToData.size === 1)
// finish this task, should get updated duration
taskInfo = new TaskInfo(1235L, 0, 1, 0L, "exe-1", "host1", TaskLocality.NODE_LOCAL, false)
taskInfo.finishTime = 1
task = new ShuffleMapTask(0)
listener.onTaskEnd(
SparkListenerTaskEnd(task.stageId, 0, taskType, Success, taskInfo, taskMetrics))
assert(listener.stageIdToData.getOrElse((0, 0), fail())
.executorSummary.getOrElse("exe-1", fail()).shuffleRead === 2000)
// finish this task, should get updated duration
taskInfo = new TaskInfo(1236L, 0, 2, 0L, "exe-2", "host1", TaskLocality.NODE_LOCAL, false)
taskInfo.finishTime = 1
task = new ShuffleMapTask(0)
listener.onTaskEnd(
SparkListenerTaskEnd(task.stageId, 0, taskType, Success, taskInfo, taskMetrics))
assert(listener.stageIdToData.getOrElse((0, 0), fail())
.executorSummary.getOrElse("exe-2", fail()).shuffleRead === 1000)
}
test("test task success vs failure counting for different task end reasons") {
val conf = new SparkConf()
val listener = new JobProgressListener(conf)
val metrics = TaskMetrics.empty
val taskInfo = new TaskInfo(1234L, 0, 3, 0L, "exe-1", "host1", TaskLocality.NODE_LOCAL, false)
taskInfo.finishTime = 1
val task = new ShuffleMapTask(0)
val taskType = Utils.getFormattedClassName(task)
// Go through all the failure cases to make sure we are counting them as failures.
val taskFailedReasons = Seq(
Resubmitted,
new FetchFailed(null, 0, 0, 0, "ignored"),
ExceptionFailure("Exception", "description", null, null, None),
TaskResultLost,
ExecutorLostFailure("0", true, Some("Induced failure")),
UnknownReason)
var failCount = 0
for (reason <- taskFailedReasons) {
listener.onTaskEnd(
SparkListenerTaskEnd(task.stageId, 0, taskType, reason, taskInfo, metrics))
failCount += 1
assert(listener.stageIdToData((task.stageId, 0)).numCompleteTasks === 0)
assert(listener.stageIdToData((task.stageId, 0)).numFailedTasks === failCount)
}
// Make sure killed tasks are accounted for correctly.
listener.onTaskEnd(
SparkListenerTaskEnd(task.stageId, 0, taskType, TaskKilled, taskInfo, metrics))
assert(listener.stageIdToData((task.stageId, 0)).numKilledTasks === 1)
// Make sure we count success as success.
listener.onTaskEnd(
SparkListenerTaskEnd(task.stageId, 1, taskType, Success, taskInfo, metrics))
assert(listener.stageIdToData((task.stageId, 1)).numCompleteTasks === 1)
assert(listener.stageIdToData((task.stageId, 0)).numFailedTasks === failCount)
}
test("test update metrics") {
val conf = new SparkConf()
val listener = new JobProgressListener(conf)
val taskType = Utils.getFormattedClassName(new ShuffleMapTask(0))
val execId = "exe-1"
def makeTaskMetrics(base: Int): TaskMetrics = {
val taskMetrics = TaskMetrics.empty
val shuffleReadMetrics = taskMetrics.createTempShuffleReadMetrics()
val shuffleWriteMetrics = taskMetrics.shuffleWriteMetrics
val inputMetrics = taskMetrics.inputMetrics
val outputMetrics = taskMetrics.outputMetrics
shuffleReadMetrics.incRemoteBytesRead(base + 1)
shuffleReadMetrics.incLocalBytesRead(base + 9)
shuffleReadMetrics.incRemoteBlocksFetched(base + 2)
taskMetrics.mergeShuffleReadMetrics()
shuffleWriteMetrics.incBytesWritten(base + 3)
taskMetrics.setExecutorRunTime(base + 4)
taskMetrics.incDiskBytesSpilled(base + 5)
taskMetrics.incMemoryBytesSpilled(base + 6)
inputMetrics.setBytesRead(base + 7)
outputMetrics.setBytesWritten(base + 8)
taskMetrics
}
def makeTaskInfo(taskId: Long, finishTime: Int = 0): TaskInfo = {
val taskInfo = new TaskInfo(taskId, 0, 1, 0L, execId, "host1", TaskLocality.NODE_LOCAL,
false)
taskInfo.finishTime = finishTime
taskInfo
}
listener.onTaskStart(SparkListenerTaskStart(0, 0, makeTaskInfo(1234L)))
listener.onTaskStart(SparkListenerTaskStart(0, 0, makeTaskInfo(1235L)))
listener.onTaskStart(SparkListenerTaskStart(1, 0, makeTaskInfo(1236L)))
listener.onTaskStart(SparkListenerTaskStart(1, 0, makeTaskInfo(1237L)))
listener.onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate(execId, Array(
(1234L, 0, 0, makeTaskMetrics(0).accumulators().map(AccumulatorSuite.makeInfo)),
(1235L, 0, 0, makeTaskMetrics(100).accumulators().map(AccumulatorSuite.makeInfo)),
(1236L, 1, 0, makeTaskMetrics(200).accumulators().map(AccumulatorSuite.makeInfo)))))
var stage0Data = listener.stageIdToData.get((0, 0)).get
var stage1Data = listener.stageIdToData.get((1, 0)).get
assert(stage0Data.shuffleReadTotalBytes == 220)
assert(stage1Data.shuffleReadTotalBytes == 410)
assert(stage0Data.shuffleWriteBytes == 106)
assert(stage1Data.shuffleWriteBytes == 203)
assert(stage0Data.executorRunTime == 108)
assert(stage1Data.executorRunTime == 204)
assert(stage0Data.diskBytesSpilled == 110)
assert(stage1Data.diskBytesSpilled == 205)
assert(stage0Data.memoryBytesSpilled == 112)
assert(stage1Data.memoryBytesSpilled == 206)
assert(stage0Data.inputBytes == 114)
assert(stage1Data.inputBytes == 207)
assert(stage0Data.outputBytes == 116)
assert(stage1Data.outputBytes == 208)
assert(
stage0Data.taskData.get(1234L).get.metrics.get.shuffleReadMetrics.totalBlocksFetched == 2)
assert(
stage0Data.taskData.get(1235L).get.metrics.get.shuffleReadMetrics.totalBlocksFetched == 102)
assert(
stage1Data.taskData.get(1236L).get.metrics.get.shuffleReadMetrics.totalBlocksFetched == 202)
// task that was included in a heartbeat
listener.onTaskEnd(SparkListenerTaskEnd(0, 0, taskType, Success, makeTaskInfo(1234L, 1),
makeTaskMetrics(300)))
// task that wasn't included in a heartbeat
listener.onTaskEnd(SparkListenerTaskEnd(1, 0, taskType, Success, makeTaskInfo(1237L, 1),
makeTaskMetrics(400)))
stage0Data = listener.stageIdToData.get((0, 0)).get
stage1Data = listener.stageIdToData.get((1, 0)).get
// Task 1235 contributed (100+1)+(100+9) = 210 shuffle bytes, and task 1234 contributed
// (300+1)+(300+9) = 610 total shuffle bytes, so the total for the stage is 820.
assert(stage0Data.shuffleReadTotalBytes == 820)
// Task 1236 contributed 410 shuffle bytes, and task 1237 contributed 810 shuffle bytes.
assert(stage1Data.shuffleReadTotalBytes == 1220)
assert(stage0Data.shuffleWriteBytes == 406)
assert(stage1Data.shuffleWriteBytes == 606)
assert(stage0Data.executorRunTime == 408)
assert(stage1Data.executorRunTime == 608)
assert(stage0Data.diskBytesSpilled == 410)
assert(stage1Data.diskBytesSpilled == 610)
assert(stage0Data.memoryBytesSpilled == 412)
assert(stage1Data.memoryBytesSpilled == 612)
assert(stage0Data.inputBytes == 414)
assert(stage1Data.inputBytes == 614)
assert(stage0Data.outputBytes == 416)
assert(stage1Data.outputBytes == 616)
assert(
stage0Data.taskData.get(1234L).get.metrics.get.shuffleReadMetrics.totalBlocksFetched == 302)
assert(
stage1Data.taskData.get(1237L).get.metrics.get.shuffleReadMetrics.totalBlocksFetched == 402)
}
test("drop internal and sql accumulators") {
val taskInfo = new TaskInfo(0, 0, 0, 0, "", "", TaskLocality.ANY, false)
val internalAccum =
AccumulableInfo(id = 1, name = Some("internal"), None, None, true, false, None)
val sqlAccum = AccumulableInfo(
id = 2,
name = Some("sql"),
update = None,
value = None,
internal = false,
countFailedValues = false,
metadata = Some(AccumulatorContext.SQL_ACCUM_IDENTIFIER))
val userAccum = AccumulableInfo(
id = 3,
name = Some("user"),
update = None,
value = None,
internal = false,
countFailedValues = false,
metadata = None)
taskInfo.accumulables ++= Seq(internalAccum, sqlAccum, userAccum)
val newTaskInfo = TaskUIData.dropInternalAndSQLAccumulables(taskInfo)
assert(newTaskInfo.accumulables === Seq(userAccum))
}
}
| Panos-Bletsos/spark-cost-model-optimizer | core/src/test/scala/org/apache/spark/ui/jobs/JobProgressListenerSuite.scala | Scala | apache-2.0 | 17,468 |
package org.http4s
package dsl
package impl
import cats.{Applicative, Monad}
import org.http4s.headers._
import ResponseGenerator.addEntityLength
import cats.arrow.FunctionK
/** Base trait for all response generators: ties a generator to one HTTP status. */
trait ResponseGenerator extends Any {
  /** The HTTP status that responses built by this generator carry. */
  def status: Status
}
private[impl] object ResponseGenerator {
  /**
   * Adds a `Content-Length` header for the entity when its length is known
   * and representable; otherwise returns the headers unchanged.
   */
  def addEntityLength[G[_]](entity: Entity[G], headers: Headers): Headers =
    entity.length.fold(headers) { len =>
      `Content-Length`.fromLong(len).fold(_ => headers, clen => headers.put(clen))
    }
}
/**
* Helper for the generation of a [[org.http4s.Response]] which will not contain a body
*
* While it is possible to for the [[org.http4s.Response]] manually, the EntityResponseGenerators
* offer shortcut syntax to make intention clear and concise.
*
* @example {{{
* val resp: F[Response] = Status.Continue()
* }}}
*/
trait EmptyResponseGenerator[F[_], G[_]] extends Any with ResponseGenerator {
  /** Builds a body-less response with this generator's status and the given headers. */
  def apply(headers: Header*)(implicit F: Applicative[F]): F[Response[G]] = {
    val hs = Headers(headers.toList)
    F.pure(Response(status, headers = hs))
  }
}
/** Helper for the generation of a [[org.http4s.Response]] which may contain a body
*
* While it is possible to construct the [[org.http4s.Response]]
* manually, the EntityResponseGenerators offer shortcut syntax to
* make intention clear and concise.
*
* @example {{{
* val resp: IO[Response] = Ok("Hello world!")
* }}}
*/
trait EntityResponseGenerator[F[_], G[_]] extends Any with ResponseGenerator {
  /** Natural transformation lifting effects in `G` into the outer effect `F`. */
  def liftG: FunctionK[G, F]

  /** Builds a response with an empty body (`Content-Length: 0`) and the given headers. */
  def apply(headers: Header*)(implicit F: Applicative[F]): F[Response[G]] = {
    val hs = Headers(`Content-Length`.zero :: headers.toList)
    F.pure(Response[G](status, headers = hs))
  }

  /** Evaluates an effectful body in `G`, then encodes it as the response entity. */
  def apply[A](body: G[A])(implicit F: Monad[F], w: EntityEncoder[G, A]): F[Response[G]] =
    F.flatMap(liftG(body))(a => apply[A](a))

  /** Encodes `body` with the implicit encoder and sets `Content-Length` when known. */
  def apply[A](body: A, headers: Header*)(
      implicit F: Applicative[F],
      w: EntityEncoder[G, A]): F[Response[G]] = {
    val entity = w.toEntity(body)
    val hs = addEntityLength(entity, w.headers ++ Headers(headers.toList))
    F.pure(Response[G](status = status, headers = hs, body = entity.body))
  }
}
/** Helper for the generation of a [[org.http4s.Response]] which may contain
* a Location header and may contain a body.
*
* A 300, 301, 302, 303, 307 and 308 status SHOULD contain a Location header, which
* distinguishes this from other `EntityResponseGenerator`s.
*/
trait LocationResponseGenerator[F[_], G[_]] extends Any with EntityResponseGenerator[F, G] {
  @deprecated("Use `apply(Location(location))` instead", "0.18.0-M2")
  def apply(location: Uri)(implicit F: Applicative[F]): F[Response[G]] = {
    val hs = Headers(List(`Content-Length`.zero, Location(location)))
    F.pure(Response[G](status = status, headers = hs))
  }

  /** Builds a body-less redirect-style response carrying the `Location` header. */
  def apply(location: Location, headers: Header*)(implicit F: Applicative[F]): F[Response[G]] = {
    val hs = Headers(`Content-Length`.zero :: location :: headers.toList)
    F.pure(Response[G](status, headers = hs))
  }

  /** Builds a response with a `Location` header and an encoded entity body. */
  def apply[A](location: Location, body: A, headers: Header*)(
      implicit F: Applicative[F],
      w: EntityEncoder[G, A]): F[Response[G]] = {
    val entity = w.toEntity(body)
    val hs = addEntityLength(entity, w.headers ++ Headers(location +: headers.toList))
    F.pure(Response[G](status = status, headers = hs, body = entity.body))
  }
}
/** Helper for the generation of a [[org.http4s.Response]] which must contain
* a WWW-Authenticate header and may contain a body.
*
* A 401 status MUST contain a `WWW-Authenticate` header, which
* distinguishes this from other `ResponseGenerator`s.
*/
trait WwwAuthenticateResponseGenerator[F[_], G[_]] extends Any with ResponseGenerator {
  @deprecated("Use ``apply(`WWW-Authenticate`(challenge, challenges)`` instead", "0.18.0-M2")
  def apply(challenge: Challenge, challenges: Challenge*)(
      implicit F: Applicative[F]): F[Response[G]] = {
    val hs =
      Headers(List(`Content-Length`.zero, `WWW-Authenticate`(challenge, challenges: _*)))
    F.pure(Response[G](status = status, headers = hs))
  }

  /** Builds a body-less response carrying the mandatory `WWW-Authenticate` header. */
  def apply(authenticate: `WWW-Authenticate`, headers: Header*)(
      implicit F: Applicative[F]): F[Response[G]] = {
    val hs = Headers(`Content-Length`.zero :: authenticate :: headers.toList)
    F.pure(Response[G](status, headers = hs))
  }

  /** Builds a response with the `WWW-Authenticate` header and an encoded entity body. */
  def apply[A](authenticate: `WWW-Authenticate`, body: A, headers: Header*)(
      implicit F: Applicative[F],
      w: EntityEncoder[G, A]): F[Response[G]] = {
    val entity = w.toEntity(body)
    val hs = addEntityLength(entity, w.headers ++ Headers(authenticate +: headers.toList))
    F.pure(Response[G](status = status, headers = hs, body = entity.body))
  }
}
/** Helper for the generation of a [[org.http4s.Response]] which must contain
* an Allow header and may contain a body.
*
* A 405 status MUST contain an `Allow` header, which
* distinguishes this from other `ResponseGenerator`s.
*/
trait AllowResponseGenerator[F[_], G[_]] extends Any with ResponseGenerator {
  /** Builds a body-less response carrying the mandatory `Allow` header. */
  def apply(allow: Allow, headers: Header*)(implicit F: Applicative[F]): F[Response[G]] = {
    val hs = Headers(`Content-Length`.zero +: allow +: headers.toList)
    F.pure(Response[G](status, headers = hs))
  }

  /** Builds a response with the `Allow` header and an encoded entity body. */
  def apply[A](allow: Allow, body: A, headers: Header*)(
      implicit F: Applicative[F],
      w: EntityEncoder[G, A]): F[Response[G]] = {
    val entity = w.toEntity(body)
    val hs = addEntityLength(entity, w.headers ++ Headers(allow +: headers.toList))
    F.pure(Response[G](status = status, headers = hs, body = entity.body))
  }
}
/** Helper for the generation of a [[org.http4s.Response]] which must contain
* a Proxy-Authenticate header and may contain a body.
*
* A 407 status MUST contain a `Proxy-Authenticate` header, which
* distinguishes this from other `EntityResponseGenerator`s.
*/
trait ProxyAuthenticateResponseGenerator[F[_], G[_]] extends Any with ResponseGenerator {
  @deprecated("Use ``apply(`Proxy-Authenticate`(challenge, challenges)`` instead", "0.18.0-M2")
  def apply(challenge: Challenge, challenges: Challenge*)(
      implicit F: Applicative[F]): F[Response[G]] = {
    val hs =
      Headers(List(`Content-Length`.zero, `Proxy-Authenticate`(challenge, challenges: _*)))
    F.pure(Response[G](status = status, headers = hs))
  }

  /** Builds a body-less response carrying the mandatory `Proxy-Authenticate` header. */
  def apply(authenticate: `Proxy-Authenticate`, headers: Header*)(
      implicit F: Applicative[F]): F[Response[G]] = {
    val hs = Headers(`Content-Length`.zero +: authenticate +: headers.toList)
    F.pure(Response[G](status, headers = hs))
  }

  /** Builds a response with the `Proxy-Authenticate` header and an encoded entity body. */
  def apply[A](authenticate: `Proxy-Authenticate`, body: A, headers: Header*)(
      implicit F: Applicative[F],
      w: EntityEncoder[G, A]): F[Response[G]] = {
    val entity = w.toEntity(body)
    val hs = addEntityLength(entity, w.headers ++ Headers(authenticate +: headers.toList))
    F.pure(Response[G](status = status, headers = hs, body = entity.body))
  }
}
| ChristopherDavenport/http4s | dsl/src/main/scala/org/http4s/dsl/impl/ResponseGenerator.scala | Scala | apache-2.0 | 6,921 |
/*
* Copyright 2012-2013 Stephane Godbillon (@sgodbillon) and Zenexity
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package reactivemongo.api.collections
import reactivemongo.api.{ ReadPreference, SerializationPack }
private[reactivemongo] object QueryCodecs {
  /** Convenience overload that derives the builder from the pack. */
  @inline def writeReadPref[P <: SerializationPack with Singleton](pack: P): ReadPreference => pack.Document = writeReadPref[pack.type](pack.newBuilder)

  /**
   * Returns a function serializing a [[ReadPreference]] into a driver document
   * of the form `{ mode: <string>, tags: [ ... ] }` (the `tags` array is only
   * present for taggable preferences with more than one tag set).
   */
  def writeReadPref[P <: SerializationPack with Singleton](builder: SerializationPack.Builder[P]): ReadPreference => builder.pack.Document =
    { readPreference: ReadPreference =>
      import builder.{ elementProducer => element, document, string }

      // Map each preference variant to its wire-protocol mode name.
      val mode = readPreference match {
        case ReadPreference.Primary => "primary"
        case ReadPreference.PrimaryPreferred(_) => "primaryPreferred"
        case ReadPreference.Secondary(_) => "secondary"
        case ReadPreference.SecondaryPreferred(_) => "secondaryPreferred"
        case ReadPreference.Nearest(_) => "nearest"
      }
      val elements = Seq.newBuilder[builder.pack.ElementProducer]

      elements += element("mode", string(mode))

      readPreference match {
        // NOTE(review): the guard requires the *tail* to be non-empty, so a
        // preference with exactly one tag set emits no "tags" element at all —
        // confirm this is intended and not an off-by-one in the guard.
        case ReadPreference.Taggable(first :: tagSet) if tagSet.nonEmpty => {
          // Each tag set becomes a sub-document of key/value string pairs.
          val head = document(first.toSeq.map {
            case (k, v) => element(k, string(v))
          })

          elements += element("tags", builder.array(
            head,
            tagSet.toSeq.map { tags =>
              document(tags.toSeq.map {
                case (k, v) => element(k, string(v))
              })
            }))
        }

        case _ => ()
      }

      document(elements.result())
    }
}
| ornicar/ReactiveMongo | driver/src/main/scala/api/collections/QueryCodecs.scala | Scala | apache-2.0 | 2,223 |
package stefanholzmueller.pp2.check
import stefanholzmueller.pp2.util.Dice
import stefanholzmueller.pp2.util.IntTriple
/**
 * Adapter bridging the object-oriented `OutcomeExaminer` interface to the
 * functional `OutcomeCalculator` implementation.
 */
class OutcomeCalculatorAdapter extends OutcomeExaminer {
  // Converts the Java-style check and dice triple into the calculator's
  // input types, then delegates the actual examination.
  def examine(check: Check, diceTriple: IntTriple): Outcome = {
    val (options, attributes, points, difficulty) = OutcomeCalculator.javaCheckToScalaTuple(check)
    val dice = new Dice(diceTriple.first, diceTriple.second, diceTriple.third)
    OutcomeCalculator.examine(options, attributes, points, difficulty)(dice)
  }
}
/**
 * Pure functions evaluating a three-die check against three attributes.
 * Appears to implement a 3d20 skill/spell check in the style of the DSA
 * ("Das Schwarze Auge") rules — NOTE(review): confirm against the rulebook.
 */
object OutcomeCalculator {
  /** Unpacks the Java-facing `Check` into the (options, attributes, points, difficulty) tuple. */
  def javaCheckToScalaTuple(check: Check): (Options, List[Int], Int, Int) = {
    val options = new Options(check.hasMinimumQuality, check.hasFesteMatrix, check.hasWildeMagie, check.hasSpruchhemmung)
    val attributes = List(check.getAttribute1, check.getAttribute2, check.getAttribute3)
    val points = check.getValue
    val difficulty = check.getDifficulty
    (options, attributes, points, difficulty)
  }

  /**
   * Evaluates a check: special dice combinations (triple 1s/20s etc.) take
   * precedence; otherwise the outcome is computed from points vs. difficulty.
   */
  def examine(options: Options, attributes: List[Int], points: Int, difficulty: Int)(dice: Dice): Outcome = {
    specialOutcome(options, points, dice) match {
      case Some(special) => special
      case None => successOrFailure(options, attributes, points, difficulty, dice)
    }
  }

  // Detects automatic/spectacular outcomes that bypass the normal comparison.
  // The order of the tests matters: more specific combinations are checked first.
  private def specialOutcome(options: Options, points: Int, dice: Dice): Option[Outcome] = {
    if (dice.allEqualTo(1))
      Some(SpectacularSuccess(applyMinimumQuality(options, points)))
    else if (dice.twoEqualTo(1))
      Some(AutomaticSuccess(applyMinimumQuality(options, points)))
    else if (dice.allEqualTo(20))
      Some(SpectacularFailure())
    else if (options.wildeMagie && dice.twoGreaterThan(18))
      Some(AutomaticFailure())
    else if (options.festeMatrix && dice.twoEqualTo(20) && dice.sum > 57)
      Some(AutomaticFailure())
    else if (!options.festeMatrix && dice.twoEqualTo(20))
      Some(AutomaticFailure())
    else if (options.spruchhemmung && dice.twoSameValues())
      Some(Spruchhemmung())
    else None
  }

  // Normal (non-special) resolution: derive effective points/attributes from
  // the difficulty, then compare the dice against the attributes.
  private def successOrFailure(options: Options, attributes: List[Int], points: Int, difficulty: Int, dice: Dice) = {
    val (ease, effectivePoints, effectiveAttributes) = diceIndependentPart(attributes, points, difficulty)
    successOrFailureInternal(options, points, dice, ease, effectivePoints, effectiveAttributes)
  }

  // A negative ease (difficulty exceeds points) lowers every attribute;
  // otherwise the surplus becomes the points available to spend on dice.
  private def diceIndependentPart(attributes: List[Int], points: Int, difficulty: Int): (Int, Int, List[Int]) = {
    val ease = points - difficulty
    val effectivePoints = ease max 0
    val effectiveAttributes = if (ease < 0) attributes.map(_ + ease) else attributes
    (ease, effectivePoints, effectiveAttributes)
  }

  // Spends points to compensate dice that exceeded their attribute; leftover
  // points (capped at the original skill value) determine the quality.
  private def successOrFailureInternal(options: Options, points: Int, dice: Dice,
    ease: Int, effectivePoints: Int, effectiveAttributes: List[Int]) = {
    val comparisons = dice.compareWithAttributes(effectiveAttributes)
    val usedPoints = comparisons.filter(_ > 0).sum
    if (usedPoints > effectivePoints) {
      Failure(usedPoints - effectivePoints)
    } else {
      val leftoverPoints = effectivePoints - usedPoints
      val cappedQuality = leftoverPoints min points
      val quality = applyMinimumQuality(options, cappedQuality)
      if (usedPoints == 0) {
        val worstDie = comparisons.reduce(_ max _) // is <= 0 in this case
        Success(quality, leftoverPoints - worstDie)
      } else {
        Success(quality, leftoverPoints)
      }
    }
  }

  // Quality floor: 1 when the minimum-quality option is set, otherwise 0.
  private def applyMinimumQuality(options: Options, rawQuality: Int): Int = {
    if (options.minimumQuality) rawQuality max 1 else rawQuality max 0
  }
}
/**
* Copyright (C) 2010-2011 LShift Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.lshift.diffa.kernel.participants
import scala.util.matching.Regex
import scala.collection.JavaConversions._
import net.lshift.diffa.adapter.scanning._
/**
* Category function for partitioning on prefixes of strings.
*
*/
case class StringPrefixCategoryFunction(attrName:String,
                                        prefixLength: Int,
                                        maxLength: Int,
                                        step: Int)
    extends StringPrefixAggregation(attrName, prefixLength)
    with CategoryFunction {

  def name = "prefix(%d,%d,%d)".format(prefixLength, maxLength, step)

  /** Next, finer partitioning level: grow the prefix by `step`, clamped to `maxLength`. */
  def descend =
    if (prefixLength == maxLength) None
    else {
      val nextLength = math.min(prefixLength + step, maxLength)
      Some(StringPrefixCategoryFunction(attrName, nextLength, maxLength, step))
    }

  /** Builds the scan constraint for one partition value of this level. */
  def constrain(parent:Option[ScanConstraint], partition: String) =
    if (partition.length < prefixLength)
      new SetConstraint(attrName, Set(partition))
    else if (partition.length == prefixLength)
      new StringPrefixConstraint(attrName, partition)
    else
      throw new InvalidAttributeValueException(
        "Partition value must be %d characters in length".format(prefixLength))

  val shouldBucket = true
}
package org.web3scala.json
import org.scalatest.{FlatSpec, Matchers}
/** Verifies that `JacksonJsonMapper` serializes a value to the expected JSON bytes. */
class JacksonJsonMapperSpec extends FlatSpec with Matchers {
  "JacksonJsonMapper" should "serialize AnyRef value into a byte array" in {
    // "test" serializes as the JSON string "test": byte 34 is the double
    // quote, followed by the ASCII codes of t, e, s, t.
    val result = new JacksonJsonMapper().writeAsBytes("test")
    result.mkString(", ") shouldBe "34, 116, 101, 115, 116, 34"
  }
}
package bio
package attribute
/**
* Access functions for the attribute list
*/
trait AttributeAccess {
  /**
   * Filters the attributes down to those that answer `message` with an `Ok`
   * status.
   *
   * @return list of attributes matching message
   */
  def attribList(message: Message, attributes: List[Attribute]): List[Attribute] = {
    attributes.filter { a =>
      a.send(message) match {
        case (Ok, _) => true
        case _ => false
      }
    }
  }

  /**
   * Sends `message` to every matching attribute and collects the payloads.
   *
   * @return the values of messages as a list
   */
  def attribValues(message: Message, attributes: List[Attribute]): List[Any] = {
    attribList(message, attributes) map { a =>
      // Safe: attribList only returns attributes that respond with Ok.
      val (Ok, value) = a.send(message)
      value
    }
  }

  /**
   * @return the first attribute value matching message, or None when no
   *         attribute matches
   */
  def attribFirst(message: Message, attributes: List[Attribute]): Option[Any] = {
    // headOption replaces the previous `size > 0` + `head` combination:
    // List.size is O(n) and the explicit head access was partial.
    attribList(message, attributes).headOption.map { a =>
      val (Ok, msg) = a.send(message)
      msg
    }
  }
}
| bioscala/bioscala | src/main/scala/bio/attributes/access.scala | Scala | bsd-2-clause | 961 |
package spark.scheduler.cluster
import java.io.{File, FileInputStream, FileOutputStream}
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashMap
import scala.collection.mutable.HashSet
import spark._
import spark.TaskState.TaskState
import spark.scheduler._
import java.nio.ByteBuffer
import java.util.concurrent.atomic.AtomicLong
/**
* The main TaskScheduler implementation, for running tasks on a cluster. Clients should first call
* start(), then submit task sets through the runTasks method.
*/
private[spark] class ClusterScheduler(val sc: SparkContext)
extends TaskScheduler
with Logging {
  // How often to check for speculative tasks (milliseconds).
  val SPECULATION_INTERVAL = System.getProperty("spark.speculation.interval", "100").toLong
  // Bookkeeping for in-flight task sets and their task IDs.
  val activeTaskSets = new HashMap[String, TaskSetManager]
  var activeTaskSetsQueue = new ArrayBuffer[TaskSetManager]
  val taskIdToTaskSetId = new HashMap[Long, String]
  val taskIdToSlaveId = new HashMap[Long, String]
  val taskSetTaskIds = new HashMap[String, HashSet[Long]]
  // Incrementing Mesos task IDs
  val nextTaskId = new AtomicLong(0)
  // Which hosts in the cluster are alive (contains hostnames)
  val hostsAlive = new HashSet[String]
  // Which slave IDs we have executors on
  val slaveIdsWithExecutors = new HashSet[String]
  val slaveIdToHost = new HashMap[String, String]
  // JAR server, if any JARs were added by the user to the SparkContext
  var jarServer: HttpServer = null
  // URIs of JARs to pass to executor
  var jarUris: String = ""
  // Listener object to pass upcalls into
  var listener: TaskSchedulerListener = null
  var backend: SchedulerBackend = null
  val mapOutputTracker = SparkEnv.get.mapOutputTracker
  // Registers the upcall target that receives task status/result notifications.
  override def setListener(listener: TaskSchedulerListener) {
    this.listener = listener
  }
  // Wires in the pluggable backend that actually launches tasks on the cluster.
  def initialize(context: SchedulerBackend) {
    backend = context
  }
def newTaskId(): Long = nextTaskId.getAndIncrement()
  // Starts the backend and, when spark.speculation is enabled, a daemon thread
  // that periodically checks for straggler tasks to launch speculatively.
  override def start() {
    backend.start()
    if (System.getProperty("spark.speculation", "false") == "true") {
      new Thread("ClusterScheduler speculation check") {
        setDaemon(true)
        override def run() {
          while (true) {
            try {
              Thread.sleep(SPECULATION_INTERVAL)
            } catch {
              // NOTE(review): the interrupt is swallowed, so this daemon thread
              // cannot be stopped via interruption — confirm that is intended.
              case e: InterruptedException => {}
            }
            checkSpeculatableTasks()
          }
        }
      }.start()
    }
  }
  // Accepts a new TaskSet: registers a manager for it under the scheduler lock,
  // then asks the backend to revive offers so its tasks get resources.
  def submitTasks(taskSet: TaskSet) {
    val tasks = taskSet.tasks
    logInfo("Adding task set " + taskSet.id + " with " + tasks.length + " tasks")
    this.synchronized {
      val manager = new TaskSetManager(this, taskSet)
      activeTaskSets(taskSet.id) = manager
      activeTaskSetsQueue += manager
      taskSetTaskIds(taskSet.id) = new HashSet[Long]()
    }
    backend.reviveOffers()
  }
def taskSetFinished(manager: TaskSetManager) {
this.synchronized {
activeTaskSets -= manager.taskSet.id
activeTaskSetsQueue -= manager
taskIdToTaskSetId --= taskSetTaskIds(manager.taskSet.id)
taskIdToSlaveId --= taskSetTaskIds(manager.taskSet.id)
taskSetTaskIds.remove(manager.taskSet.id)
}
}
/**
* Called by cluster manager to offer resources on slaves. We respond by asking our active task
* sets for tasks in order of priority. We fill each node with tasks in a round-robin manner so
* that tasks are balanced across the cluster.
*/
def resourceOffers(offers: Seq[WorkerOffer]): Seq[Seq[TaskDescription]] = {
synchronized {
SparkEnv.set(sc.env)
// Mark each slave as alive and remember its hostname
for (o <- offers) {
slaveIdToHost(o.slaveId) = o.hostname
hostsAlive += o.hostname
}
// Build a list of tasks to assign to each slave
val tasks = offers.map(o => new ArrayBuffer[TaskDescription](o.cores))
val availableCpus = offers.map(o => o.cores).toArray
var launchedTask = false
for (manager <- activeTaskSetsQueue.sortBy(m => (m.taskSet.priority, m.taskSet.stageId))) {
do {
launchedTask = false
for (i <- 0 until offers.size) {
val sid = offers(i).slaveId
val host = offers(i).hostname
manager.slaveOffer(sid, host, availableCpus(i)) match {
case Some(task) =>
tasks(i) += task
val tid = task.taskId
taskIdToTaskSetId(tid) = manager.taskSet.id
taskSetTaskIds(manager.taskSet.id) += tid
taskIdToSlaveId(tid) = sid
slaveIdsWithExecutors += sid
availableCpus(i) -= 1
launchedTask = true
case None => {}
}
}
} while (launchedTask)
}
return tasks
}
}
def statusUpdate(tid: Long, state: TaskState, serializedData: ByteBuffer) {
var taskSetToUpdate: Option[TaskSetManager] = None
var failedHost: Option[String] = None
var taskFailed = false
synchronized {
try {
if (state == TaskState.LOST && taskIdToSlaveId.contains(tid)) {
// We lost the executor on this slave, so remember that it's gone
val slaveId = taskIdToSlaveId(tid)
val host = slaveIdToHost(slaveId)
if (hostsAlive.contains(host)) {
slaveIdsWithExecutors -= slaveId
hostsAlive -= host
activeTaskSetsQueue.foreach(_.hostLost(host))
failedHost = Some(host)
}
}
taskIdToTaskSetId.get(tid) match {
case Some(taskSetId) =>
if (activeTaskSets.contains(taskSetId)) {
//activeTaskSets(taskSetId).statusUpdate(status)
taskSetToUpdate = Some(activeTaskSets(taskSetId))
}
if (TaskState.isFinished(state)) {
taskIdToTaskSetId.remove(tid)
if (taskSetTaskIds.contains(taskSetId)) {
taskSetTaskIds(taskSetId) -= tid
}
taskIdToSlaveId.remove(tid)
}
if (state == TaskState.FAILED) {
taskFailed = true
}
case None =>
logInfo("Ignoring update from TID " + tid + " because its task set is gone")
}
} catch {
case e: Exception => logError("Exception in statusUpdate", e)
}
}
// Update the task set and DAGScheduler without holding a lock on this, because that can deadlock
if (taskSetToUpdate != None) {
taskSetToUpdate.get.statusUpdate(tid, state, serializedData)
}
if (failedHost != None) {
listener.hostLost(failedHost.get)
backend.reviveOffers()
}
if (taskFailed) {
// Also revive offers if a task had failed for some reason other than host lost
backend.reviveOffers()
}
}
def error(message: String) {
synchronized {
if (activeTaskSets.size > 0) {
// Have each task set throw a SparkException with the error
for ((taskSetId, manager) <- activeTaskSets) {
try {
manager.error(message)
} catch {
case e: Exception => logError("Exception in error callback", e)
}
}
} else {
// No task sets are active but we still got an error. Just exit since this
// must mean the error is during registration.
// It might be good to do something smarter here in the future.
logError("Exiting due to error from cluster scheduler: " + message)
System.exit(1)
}
}
}
override def stop() {
if (backend != null) {
backend.stop()
}
if (jarServer != null) {
jarServer.stop()
}
}
override def defaultParallelism() = backend.defaultParallelism()
// Check for speculatable tasks in all our active jobs.
def checkSpeculatableTasks() {
var shouldRevive = false
synchronized {
for (ts <- activeTaskSetsQueue) {
shouldRevive |= ts.checkSpeculatableTasks()
}
}
if (shouldRevive) {
backend.reviveOffers()
}
}
def slaveLost(slaveId: String) {
var failedHost: Option[String] = None
synchronized {
val host = slaveIdToHost(slaveId)
if (hostsAlive.contains(host)) {
slaveIdsWithExecutors -= slaveId
hostsAlive -= host
activeTaskSetsQueue.foreach(_.hostLost(host))
failedHost = Some(host)
}
}
if (failedHost != None) {
listener.hostLost(failedHost.get)
backend.reviveOffers()
}
}
}
| joeywen/spark_cpp_api | core/src/main/scala/spark/scheduler/cluster/ClusterScheduler.scala | Scala | bsd-3-clause | 8,522 |
package io.eels
import java.nio.file.Files
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.scalatest.{Matchers, WordSpec}
/**
 * Tests for FilePattern path resolution: literal hdfs/file URIs, wildcard
 * expansion against a temp directory, and user-supplied filters.
 */
class FilePatternTest extends WordSpec with Matchers {

  implicit val fs = FileSystem.get(new Configuration())

  /**
   * Shared body for the two wildcard-expansion tests below: creates a temp
   * directory containing files a/b/c, expands "<dir>/*", checks the resulting
   * paths, then cleans up.
   */
  private def checkWildcardExpansion(): Unit = {
    val dir = Files.createTempDirectory("filepatterntest")
    val files = List("a", "b", "c").map { it =>
      dir.resolve(it)
    }
    val hdfsPaths = files.map { it =>
      new Path(it.toUri)
    }
    files.foreach(file => Files.createFile(file))
    FilePattern(dir.toUri.toString() + "/*").toPaths().toSet shouldBe hdfsPaths.toSet
    // NOTE(review): cleanup is skipped if the assertion above fails; a
    // try/finally would avoid leaking temp files.
    files.foreach(Files.deleteIfExists)
    Files.deleteIfExists(dir)
  }

  "FilePattern" should {
    "detect single hdfs path without name server" ignore {
      FilePattern("hdfs:///mypath").toPaths() shouldBe List(new Path("hdfs:///mypath"))
    }
    "detect single hdfs path with name server" ignore {
      FilePattern("hdfs://nameserver/mypath").toPaths() shouldBe List(new Path("hdfs://nameserver/mypath"))
    }
    "detect absolute local file" in {
      FilePattern("file:///absolute/file").toPaths() shouldBe List(new Path("file:///absolute/file"))
    }
    "detect relative local file" in {
      // NOTE(review): despite its name this test uses an absolute file:// URI,
      // duplicating the "absolute" case above — confirm intended coverage.
      FilePattern("file:///local/file").toPaths() shouldBe List(new Path("file:///local/file"))
    }
    "detect relative local file expansion" in {
      checkWildcardExpansion()
    }
    //not working on windows
    "detect relative local file expansion with schema" in {
      // NOTE(review): this body was byte-identical to the test above, so the
      // duplication is now a shared helper. A genuinely schema-specific
      // variant would make this test meaningful.
      checkWildcardExpansion()
    }
    "use filter if supplied" in {
      val dir = Files.createTempDirectory("filepatterntest")
      val files = List("a", "b", "c").map { it => dir.resolve(it) }
      files.foreach { it => Files.createFile(it) }
      val a = FilePattern(dir.toAbsolutePath().toString() + "/*")
        .withFilter(_.toString().endsWith("a"))
        .toPaths.toSet
      a shouldBe Set(new Path("file:///" + dir.resolve("a")))
      files.foreach { it => Files.deleteIfExists(it) }
      Files.deleteIfExists(dir)
    }
  }
} | sksamuel/eel-sdk | eel-core/src/test/scala/io/eels/FilePatternTest.scala | Scala | apache-2.0 | 2,452 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.BindReferences.bindReferences
import org.apache.spark.sql.catalyst.expressions.codegen.GenerateOrdering
import org.apache.spark.sql.types._
/**
 * A base class for generated/interpreted row ordering.
 *
 * Subclasses are expected to override [[compare]]; this base implementation
 * always throws.
 */
class BaseOrdering extends Ordering[InternalRow] {
  def compare(a: InternalRow, b: InternalRow): Int =
    throw new UnsupportedOperationException
}
/**
 * An interpreted (non-codegen) row comparator. Evaluates each [[SortOrder]]
 * key in sequence until one discriminates between the two rows; rows equal on
 * every key compare as 0.
 */
class InterpretedOrdering(ordering: Seq[SortOrder]) extends BaseOrdering {

  def this(ordering: Seq[SortOrder], inputSchema: Seq[Attribute]) =
    this(bindReferences(ordering, inputSchema))

  override def compare(a: InternalRow, b: InternalRow): Int = {
    var keyIndex = 0
    val numKeys = ordering.size
    while (keyIndex < numKeys) {
      val sortKey = ordering(keyIndex)
      val leftValue = sortKey.child.eval(a)
      val rightValue = sortKey.child.eval(b)
      if (leftValue == null || rightValue == null) {
        if (leftValue == null && rightValue == null) {
          // Both null: this key ties, fall through to the next sort key.
        } else if (leftValue == null) {
          return if (sortKey.nullOrdering == NullsFirst) -1 else 1
        } else {
          return if (sortKey.nullOrdering == NullsFirst) 1 else -1
        }
      } else {
        // Pick the natural ordering for the key's data type, then flip it for
        // descending keys via Ordering.reverse.
        val natural: Ordering[Any] = sortKey.dataType match {
          case dt: AtomicType => dt.ordering.asInstanceOf[Ordering[Any]]
          case arr: ArrayType => arr.interpretedOrdering.asInstanceOf[Ordering[Any]]
          case st: StructType => st.interpretedOrdering.asInstanceOf[Ordering[Any]]
          case other =>
            throw new IllegalArgumentException(s"Type $other does not support ordered operations")
        }
        val effective = if (sortKey.direction == Descending) natural.reverse else natural
        val comparison = effective.compare(leftValue, rightValue)
        if (comparison != 0) {
          return comparison
        }
      }
      keyIndex += 1
    }
    0
  }
}
object InterpretedOrdering {

  /**
   * Creates a [[InterpretedOrdering]] for the given schema, in natural ascending order.
   */
  def forSchema(dataTypes: Seq[DataType]): InterpretedOrdering = {
    val ascendingOrders = dataTypes.zipWithIndex.map { case (dt, index) =>
      SortOrder(BoundReference(index, dt, nullable = true), Ascending)
    }
    new InterpretedOrdering(ascendingOrders)
  }
}
/**
 * Factory for row orderings: prefers a code-generated comparator and falls
 * back to [[InterpretedOrdering]] when codegen fails.
 */
object RowOrdering extends CodeGeneratorWithInterpretedFallback[Seq[SortOrder], BaseOrdering] {

  /**
   * Returns true iff the data type can be ordered (i.e. can be sorted).
   */
  def isOrderable(dataType: DataType): Boolean = dataType match {
    case NullType | CalendarIntervalType => true
    case _: AtomicType => true
    case struct: StructType => struct.fields.forall(field => isOrderable(field.dataType))
    case array: ArrayType => isOrderable(array.elementType)
    case udt: UserDefinedType[_] => isOrderable(udt.sqlType)
    case _ => false
  }

  /**
   * Returns true iff outputs from the expressions can be ordered.
   */
  def isOrderable(exprs: Seq[Expression]): Boolean =
    exprs.forall(expr => isOrderable(expr.dataType))

  override protected def createCodeGeneratedObject(in: Seq[SortOrder]): BaseOrdering =
    GenerateOrdering.generate(in)

  override protected def createInterpretedObject(in: Seq[SortOrder]): BaseOrdering =
    new InterpretedOrdering(in)

  /** Builds an ordering for `order` after binding its references to `inputSchema`. */
  def create(order: Seq[SortOrder], inputSchema: Seq[Attribute]): BaseOrdering =
    createObject(bindReferences(order, inputSchema))

  /**
   * Creates a row ordering for the given schema, in natural ascending order.
   */
  def createNaturalAscendingOrdering(dataTypes: Seq[DataType]): BaseOrdering = {
    val ascendingOrders = dataTypes.zipWithIndex.map { case (dt, index) =>
      SortOrder(BoundReference(index, dt, nullable = true), Ascending)
    }
    create(ascendingOrders, Seq.empty)
  }
}
| jkbradley/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ordering.scala | Scala | apache-2.0 | 5,129 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of Scala code snippets matching specific criteria, giving a quick overview of the dataset's contents rather than a deep analysis.