code
stringlengths
5
1M
repo_name
stringlengths
5
109
path
stringlengths
6
208
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
5
1M
package quizleague.web.site.team

import scalajs.js

import quizleague.web.core.RouteComponent
import quizleague.web.core.GridSizeComponentConfig
import quizleague.web.site.SideMenu
import quizleague.web.site.login.LoginService

/**
 * Landing route for the teams section. Renders the "teams-header" named text
 * and, once a non-null user profile is emitted, redirects to that user's team page.
 */
object TeamsComponent extends RouteComponent with GridSizeComponentConfig {

  val template = """
  <v-container v-bind="gridSize" fluid>
    <v-layout>
      <v-flex><ql-text-box><ql-named-text name="teams-header"></ql-named-text></ql-text-box></v-flex>
    </v-layout>
  </v-container>"""

  // Redirect a logged-in user straight to their own team page on mount.
  override val mounted = ({ (c: facade) =>
    {
      //super.mounted.call(c)
      LoginService.userProfile
        .filter(_ != null)
        .subscribe(u => c.$router.push(s"/team/${u.team.id}"))
    }
  }: js.ThisFunction)
}

/** Toolbar title shown for the teams route. */
object TeamsTitleComponent extends RouteComponent {
  val template = """
  <v-toolbar color="amber lighten-3" dense class="subtitle-background" >
    <ql-title>Teams</ql-title>
    <v-toolbar-title>
      Teams
    </v-toolbar-title>
  </v-toolbar>"""
}

/** Static page rendering the "start-team" named text. */
object StartTeamPage extends RouteComponent with GridSizeComponentConfig {
  val template = """
  <v-container v-bind="gridSize" fluid>
    <v-layout>
      <v-flex><ql-text-box><ql-named-text name="start-team"></ql-named-text></ql-text-box></v-flex>
    </v-layout>
  </v-container>"""
}

/** Toolbar title shown for the start-a-team route. */
object StartTeamTitleComponent extends RouteComponent {
  val template = """
  <v-toolbar color="amber lighten-3" dense class="subtitle-background" >
    <ql-title>Starting a Team</ql-title>
    <v-toolbar-title>
      Starting a Team
    </v-toolbar-title>
  </v-toolbar>"""
}
gumdrop/quizleague-maintain
js/src/main/scala/quizleague/web/site/team/TeamsComponent.scala
Scala
mit
1,677
package quisp.enums

/**
 * Vertical alignment enumeration: top, middle or bottom.
 *
 * The lower-case case objects are the actual values; the upper-case
 * aliases are provided for callers preferring constant-style names.
 *
 * @author rodneykinney
 */
object VAlign {
  case object top extends VAlign
  case object middle extends VAlign
  case object bottom extends VAlign

  // Constant-style aliases for the three values above.
  val TOP = top
  val MIDDLE = middle
  val BOTTOM = bottom
}

sealed trait VAlign extends EnumTrait
rodneykinney/quisp
src/main/scala/quisp/enums/VAlign.scala
Scala
apache-2.0
278
package org.jetbrains.plugins.scala
package lang
package parameterInfo
package patternParameterInfo

import com.intellij.lang.parameterInfo.ParameterInfoHandlerWithTabActionSupport
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScPatternArgumentList

/**
 * Base class for parameter-info tests that operate on pattern argument lists
 * (e.g. arguments inside `case Foo(a, b)` patterns).
 *
 * @author Aleksander Podkhalyuzin
 * @since 25.04.2009
 */
abstract class PatternParameterInfoTestBase extends ParameterInfoTestBase[ScPatternArgumentList] {

  // Test data lives in the patternParameterInfo/ subdirectory of the parent's path.
  override def getTestDataPath: String =
    s"${super.getTestDataPath}patternParameterInfo/"

  // Handler under test: the Scala pattern parameter-info implementation.
  override protected def createHandler: ParameterInfoHandlerWithTabActionSupport[ScPatternArgumentList, Any, _ <: PsiElement] =
    new ScalaPatternParameterInfoHandler
}
ilinum/intellij-scala
test/org/jetbrains/plugins/scala/lang/parameterInfo/patternParameterInfo/PatternParameterInfoTestBase.scala
Scala
apache-2.0
733
/*
 * Copyright 2013 http4s.org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.http4s
package blaze

import cats.effect._

/** Runs the SSL example server with HTTP/2 support switched on. */
object BlazeHttp2Example extends IOApp {

  override def run(args: List[String]): IO[ExitCode] =
    for {
      builder <- BlazeSslExampleApp.builder[IO]
      exitCode <- builder.enableHttp2(true).serve.compile.lastOrError
    } yield exitCode
}
rossabaker/http4s
examples/blaze/src/main/scala/com/example/http4s/blaze/BlazeHttp2Example.scala
Scala
apache-2.0
847
package com.github.mrpowers.spark.daria.sql

import utest._

import com.github.mrpowers.spark.fast.tests.DataFrameComparer
import com.github.mrpowers.spark.daria.sql.SparkSessionExt._
import org.apache.spark.sql.types.{IntegerType, StringType}

/**
 * Tests for EtlDefinition: construction with and without metadata,
 * running the full ETL process, and organising ETL definitions in a map.
 */
object EtlDefinitionTest extends TestSuite with SparkSessionTestWrapper with DataFrameComparer {

  val tests = Tests {

    'new - {

      "creates a new EtlDefinition object with a metadata property" - {

        // Small two-row source DataFrame built with spark-daria's createDF helper.
        val sourceDF = spark.createDF(
          List(
            ("bob", 14),
            ("liz", 20)
          ),
          List(
            ("name", StringType, true),
            ("age", IntegerType, true)
          )
        )

        val etlDefinition = new EtlDefinition(
          sourceDF = sourceDF,
          transform = EtlHelpers.someTransform(),
          write = EtlHelpers.someWriter(),
          metadata = scala.collection.mutable.Map("hidden" -> true)
        )

        assert(etlDefinition.metadata("hidden") == true)
      }

      "allows objects to be created without setting metadata" - {

        val sourceDF = spark.createDF(
          List(
            ("bob", 14),
            ("liz", 20)
          ),
          List(
            ("name", StringType, true),
            ("age", IntegerType, true)
          )
        )

        // metadata is omitted here; construction alone is the assertion.
        val etlDefinition = new EtlDefinition(
          sourceDF = sourceDF,
          transform = EtlHelpers.someTransform(),
          write = EtlHelpers.someWriter()
        )
      }

    }

    'process - {

      "runs a full ETL process and writes out data to a folder" - {

        val sourceDF = spark.createDF(
          List(
            ("bob", 14),
            ("liz", 20)
          ),
          List(
            ("name", StringType, true),
            ("age", IntegerType, true)
          )
        )

        val etlDefinition = new EtlDefinition(
          sourceDF = sourceDF,
          transform = EtlHelpers.someTransform(),
          write = EtlHelpers.someWriter()
        )

        // Executes transform + write end-to-end; no exception means success.
        etlDefinition.process()
      }

    }

    'etlCollection - {

      "can run etls that are organized in a map" - {

        val sourceDF = spark.createDF(
          List(
            ("bob", 14),
            ("liz", 20)
          ),
          List(
            ("name", StringType, true),
            ("age", IntegerType, true)
          )
        )

        val etlDefinition = new EtlDefinition(
          sourceDF = sourceDF,
          transform = EtlHelpers.someTransform(),
          write = EtlHelpers.someWriter()
        )

        // ETLs keyed by name in a mutable map; run one by key.
        val etls = scala.collection.mutable
          .Map[String, EtlDefinition]("example" -> etlDefinition)

        etls += ("ex2" -> etlDefinition)

        etls("example").process()
      }

    }

  }

}
MrPowers/spark-daria
src/test/scala/com/github/mrpowers/spark/daria/sql/EtlDefinitionTest.scala
Scala
mit
2,781
package cn.changhong.zipkin

import java.net.InetSocketAddress
import java.util.Random

import cn.changhong.core.{NewsModel, IndexNewsOperatorServices}
import com.twitter.finagle.builder.ServerBuilder
import com.twitter.finagle.stats.DefaultStatsReceiver
import com.twitter.finagle.thrift.ThriftServerFramedCodec
import com.twitter.finagle.zipkin.thrift.ZipkinTracer
import com.twitter.util.Future
import org.apache.thrift.protocol.TBinaryProtocol

/**
 * Starts a Finagle Thrift server for IndexNewsOperatorServices with Zipkin tracing.
 *
 * Created by yangguo on 15-1-8.
 */
object StartEndPointThriftServer {

  // Stub service implementation: indexNews always succeeds, delete returns a random int.
  object IndexNewsOperationImp extends IndexNewsOperatorServices.FutureIface{
    override def indexNews(indexNews: NewsModel): Future[Boolean] = Future.value{
      true
    }

    override def deleteArtificaillyNes(id: Int): Future[Int] = Future.value{
      new Random().nextInt()
    }
  }

  // args(0) = bind host, args(1) = bind port, args(2) = service name,
  // args(3) = Zipkin collector host, args(4) = Zipkin collector port.
  def main(args:Array[String]): Unit ={
    require(args!=null && args.length>4)
    val service=new IndexNewsOperatorServices.FinagledService(IndexNewsOperationImp,new TBinaryProtocol.Factory())
    // Sample rate 1 = trace every request.
    val tracer=ZipkinTracer.mk(args(3),args(4).toInt,DefaultStatsReceiver,1)
    ServerBuilder()
      .codec(ThriftServerFramedCodec())
      .bindTo(new InetSocketAddress(args(0),args(1).toInt))
      .name(args(2))
      .tracer(tracer)
      .build(service)
  }
}
guoyang2011/myfinagle
ThriftDemo/src/main/scala/cn/changhong/zipkin/StartEndPointThriftServer.scala
Scala
apache-2.0
1,283
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.api.stream.table.stringexpr

import org.apache.flink.api.scala._
import org.apache.flink.table.api.scala._
import org.apache.flink.table.expressions.utils.Func0
import org.apache.flink.table.utils.{TableTestBase, Top3WithMapView}
import org.junit.Test

/**
 * Verifies that the Scala expression API and the Java string API produce
 * equivalent plans for table aggregates — grouped/non-grouped, with and
 * without column aliases.
 */
class TableAggregateStringExpressionTest extends TableTestBase {

  @Test
  def testNonGroupedTableAggregate(): Unit = {
    val util = streamTestUtil()
    val t = util.addTable[(Int, Long, String)]('a, 'b, 'c)

    val top3 = new Top3WithMapView
    util.tableEnv.registerFunction("top3", top3)
    util.tableEnv.registerFunction("Func0", Func0)

    // Expression / Scala API
    val resScala = t
      .flatAggregate(top3('a))
      .select(Func0('f0) as 'a, 'f1 as 'b)

    // String / Java API
    val resJava = t
      .flatAggregate("top3(a)")
      .select("Func0(f0) as a, f1 as b")

    verifyTableEquals(resJava, resScala)
  }

  @Test
  def testGroupedTableAggregate(): Unit = {
    val util = streamTestUtil()
    val t = util.addTable[(Int, Long, String)]('a, 'b, 'c)

    val top3 = new Top3WithMapView
    util.tableEnv.registerFunction("top3", top3)
    util.tableEnv.registerFunction("Func0", Func0)

    // Expression / Scala API
    val resScala = t
      .groupBy('b % 5)
      .flatAggregate(top3('a))
      .select(Func0('f0) as 'a, 'f1 as 'b)

    // String / Java API
    val resJava = t
      .groupBy("b % 5")
      .flatAggregate("top3(a)")
      .select("Func0(f0) as a, f1 as b")

    verifyTableEquals(resJava, resScala)
  }

  @Test
  def testAliasNonGroupedTableAggregate(): Unit = {
    val util = streamTestUtil()
    val t = util.addTable[(Int, Long, String)]('a, 'b, 'c)

    val top3 = new Top3WithMapView
    util.tableEnv.registerFunction("top3", top3)
    util.tableEnv.registerFunction("Func0", Func0)

    // Expression / Scala API
    val resScala = t
      .flatAggregate(top3('a) as ('d, 'e))
      .select('*)

    // String / Java API
    val resJava = t
      .flatAggregate("top3(a) as (d, e)")
      .select("*")

    verifyTableEquals(resJava, resScala)
  }

  @Test
  def testAliasGroupedTableAggregate(): Unit = {
    val util = streamTestUtil()
    val t = util.addTable[(Int, Long, String)]('a, 'b, 'c)

    val top3 = new Top3WithMapView
    util.tableEnv.registerFunction("top3", top3)
    util.tableEnv.registerFunction("Func0", Func0)

    // Expression / Scala API
    val resScala = t
      .groupBy('b)
      .flatAggregate(top3('a) as ('d, 'e))
      .select('*)

    // String / Java API
    val resJava = t
      .groupBy("b")
      .flatAggregate("top3(a) as (d, e)")
      .select("*")

    verifyTableEquals(resJava, resScala)
  }
}
fhueske/flink
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/stringexpr/TableAggregateStringExpressionTest.scala
Scala
apache-2.0
3,494
package chapter3

/** Binary tree ADT: a Leaf holding a value, an Empty tree, or a Branch of two subtrees. */
sealed trait Tree[+A]
case class Leaf[A](value: A) extends Tree[A]
case class Empty[A]() extends Tree[A]
case class Branch[A](left: Tree[A], right: Tree[A]) extends Tree[A]

object Tree {

  /** Wraps a single value in a Leaf. */
  def leaf[A](value: A): Tree[A] = Leaf(value)

  /** The empty tree. */
  def empty[A]: Tree[A] = Empty[A]()

  /** Joins two subtrees under a Branch. */
  def branch[A](left: Tree[A], right: Tree[A]): Tree[A] = Branch(left, right)

  /**
   * Builds a roughly balanced tree from the given values:
   * no values -> Empty, one value -> Leaf, otherwise split
   * the sequence in half and branch recursively.
   */
  def apply[A](values: A*): Tree[A] =
    if (values.isEmpty) Empty[A]()
    else if (values.size == 1) Leaf(values.head)
    else {
      val (l, r) = values.splitAt(values.size / 2)
      Branch(apply(l: _*), apply(r: _*))
    }
}
amolnayak311/functional-programming-in-scala
src/chapter3/Tree.scala
Scala
unlicense
258
/***********************************************************************
 * Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License, Version 2.0
 * which accompanies this distribution and is available at
 * http://www.opensource.org/licenses/apache2.0.php.
 ***********************************************************************/

package org.locationtech.geomesa.hbase.tools.data

import com.beust.jcommander.Parameters
import org.locationtech.geomesa.hbase.data.HBaseDataStore
import org.locationtech.geomesa.hbase.tools.HBaseDataStoreCommand
import org.locationtech.geomesa.hbase.tools.HBaseDataStoreCommand.{HBaseParams, RemoteFilterNotUsedParam}
import org.locationtech.geomesa.hbase.tools.data.HBaseDeleteCatalogCommand.HBaseDeleteCatalogParams
import org.locationtech.geomesa.tools.data.{DeleteCatalogCommand, DeleteCatalogParams}

/**
 * CLI command that deletes an entire GeoMesa catalog (and all features in it)
 * from an HBase data store. Behavior comes from DeleteCatalogCommand; this class
 * only binds the HBase data store and parameter types.
 */
class HBaseDeleteCatalogCommand extends DeleteCatalogCommand[HBaseDataStore] with HBaseDataStoreCommand {
  override val params = new HBaseDeleteCatalogParams
}

object HBaseDeleteCatalogCommand {
  // JCommander parameter bag: generic delete-catalog params plus HBase connection options.
  @Parameters(commandDescription = "Delete a GeoMesa catalog completely (and all features in it)")
  class HBaseDeleteCatalogParams extends DeleteCatalogParams with HBaseParams with RemoteFilterNotUsedParam
}
ddseapy/geomesa
geomesa-hbase/geomesa-hbase-tools/src/main/scala/org/locationtech/geomesa/hbase/tools/data/HBaseDeleteCatalogCommand.scala
Scala
apache-2.0
1,379
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package ai.h2o.sparkling.ml.params

import org.apache.spark.ml.param.{Param, ParamPair, Params}
import org.json4s.JsonAST.JArray
import org.json4s.jackson.JsonMethods.{compact, parse, render}
import org.json4s.{JDouble, JInt, JNull, JString}

import scala.collection.JavaConverters._

/**
 * Spark ML Param holding a nullable Array[Float].
 *
 * JSON encoding: null -> JSON null; otherwise an array where NaN/-Inf/Inf are
 * written as the strings "NaN"/"-Inf"/"Inf" and finite values as numbers.
 */
class NullableFloatArrayParam(parent: Params, name: String, doc: String, isValid: Array[Float] => Boolean)
  extends Param[Array[Float]](parent, name, doc, isValid) {

  def this(parent: Params, name: String, doc: String) = this(parent, name, doc, _ => true)

  /** Creates a param pair with a `java.util.List` of values (for Java and Python). */
  def w(value: java.util.List[java.lang.Double]): ParamPair[Array[Float]] =
    w(value.asScala.map(_.doubleValue().toFloat).toArray)

  override def jsonEncode(value: Array[Float]): String = {
    if (value == null) {
      compact(render(JNull))
    } else {
      import org.json4s.JsonDSL._
      compact(render(value.toSeq.map {
        // Non-finite floats cannot be represented as JSON numbers; use marker strings.
        case v if v.isNaN => JString("NaN")
        case Float.NegativeInfinity => JString("-Inf")
        case Float.PositiveInfinity => JString("Inf")
        case v => JDouble(v)
      }))
    }
  }

  override def jsonDecode(json: String): Array[Float] = {
    parse(json) match {
      case JNull =>
        null
      case JArray(values) =>
        values.map {
          case JString("NaN") => Float.NaN
          case JString("-Inf") => Float.NegativeInfinity
          case JString("Inf") => Float.PositiveInfinity
          case JDouble(x) => x.toFloat
          // json4s parses whole-number literals (e.g. [1, 2]) as JInt, not JDouble;
          // without this case such arrays failed to decode.
          case JInt(x) => x.toFloat
          case jValue =>
            throw new IllegalArgumentException(s"Cannot decode $jValue to Float.")
        }.toArray
      case _ =>
        throw new IllegalArgumentException(s"Cannot decode $json to Array[Float].")
    }
  }
}
h2oai/sparkling-water
scoring/src/main/scala/ai/h2o/sparkling/ml/params/NullableFloatArrayParam.scala
Scala
apache-2.0
2,654
package glint.models.client.async

import akka.actor.ActorRef
import breeze.linalg.{DenseVector, Vector}
import com.typesafe.config.Config
import glint.messages.server.request.PushMatrixLong
import glint.messages.server.response.ResponseLong
import glint.partitioning.Partitioner
import spire.implicits.cfor

/**
 * Asynchronous implementation of a BigMatrix for longs
 */
private[glint] class AsyncBigMatrixLong(partitioner: Partitioner,
                                        matrices: Array[ActorRef],
                                        config: Config,
                                        rows: Long,
                                        cols: Int)
  extends AsyncBigMatrix[Long, ResponseLong, PushMatrixLong](partitioner, matrices, config, rows, cols) {

  /**
   * Converts the values in given response starting at index start to index end to a vector
   *
   * @param response The response containing the values
   * @param start The start index
   * @param end The end index
   * @return A vector for the range [start, end)
   */
  @inline
  override protected def toVector(response: ResponseLong, start: Int, end: Int): Vector[Long] = {
    val result = DenseVector.zeros[Long](end - start)
    // cfor: spire's zero-overhead while-loop macro; copies the slice element by element.
    cfor(0)(_ < end - start, _ + 1)(i => {
      result(i) = response.values(start + i)
    })
    result
  }

  /**
   * Creates a push message from given sequence of rows, columns and values
   *
   * @param id The identifier
   * @param rows The rows
   * @param cols The columns
   * @param values The values
   * @return A PushMatrix message for type V
   */
  @inline
  override protected def toPushMessage(id: Int, rows: Array[Long], cols: Array[Int], values: Array[Long]): PushMatrixLong = {
    PushMatrixLong(id, rows, cols, values)
  }

  /**
   * Extracts a value from a given response at given index
   *
   * @param response The response
   * @param index The index
   * @return The value
   */
  @inline
  override protected def toValue(response: ResponseLong, index: Int): Long = response.values(index)

}
rjagerman/glint
src/main/scala/glint/models/client/async/AsyncBigMatrixLong.scala
Scala
mit
2,057
/*
 * Copyright 2022 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package repositories.schemepreferences

import javax.inject.{ Inject, Singleton }

import model.Exceptions.{ CannotUpdateSchemePreferences, SchemePreferencesNotFound }
import model.{ SchemeId, SelectedSchemes }
import play.modules.reactivemongo.ReactiveMongoComponent
import reactivemongo.bson.{ BSONDocument, BSONObjectID, _ }
import reactivemongo.play.json.ImplicitBSONHandlers._
import repositories.{ CollectionNames, ReactiveRepositoryHelpers }
import uk.gov.hmrc.mongo.ReactiveRepository
import uk.gov.hmrc.mongo.json.ReactiveMongoFormats

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

/** Access to the "scheme-preferences" sub-document of application records. */
trait SchemePreferencesRepository {
  def find(applicationId: String): Future[SelectedSchemes]

  def save(applicationId: String, schemePreferences: SelectedSchemes): Future[Unit]

  def add(applicationId: String, newScheme: SchemeId): Future[Unit]
}

@Singleton
class SchemePreferencesMongoRepository @Inject() (mongoComponent: ReactiveMongoComponent)
  extends ReactiveRepository[SelectedSchemes, BSONObjectID](
    CollectionNames.APPLICATION,
    mongoComponent.mongoConnector.db,
    SelectedSchemes.selectedSchemesFormat,
    ReactiveMongoFormats.objectIdFormats
  ) with SchemePreferencesRepository with ReactiveRepositoryHelpers {

  private val SchemePreferencesDocumentKey = "scheme-preferences"

  // Projects only the scheme-preferences key; fails the future with
  // SchemePreferencesNotFound when the document or key is missing.
  def find(applicationId: String): Future[SelectedSchemes] = {
    val query = BSONDocument("applicationId" -> applicationId)
    val projection = BSONDocument(SchemePreferencesDocumentKey -> 1, "_id" -> 0)

    collection.find(query, Some(projection)).one[BSONDocument] map {
      case Some(document) if document.getAs[BSONDocument](SchemePreferencesDocumentKey).isDefined =>
        document.getAs[SelectedSchemes](SchemePreferencesDocumentKey).get
      case _ => throw SchemePreferencesNotFound(applicationId)
    }
  }

  // Overwrites the preferences and marks the matching progress-status flag true.
  def save(applicationId: String, schemePreference: SelectedSchemes): Future[Unit] = {
    val query = BSONDocument("applicationId" -> applicationId)
    val preferencesBSON = BSONDocument("$set" -> BSONDocument(
      SchemePreferencesDocumentKey -> schemePreference,
      "progress-status." + SchemePreferencesDocumentKey -> true
    ))

    val validator = singleUpdateValidator(applicationId, actionDesc = "saving scheme preferences",
      CannotUpdateSchemePreferences(applicationId))

    collection.update(ordered = false).one(query, preferencesBSON) map validator
  }

  // $addToSet keeps the schemes list duplicate-free when appending a new scheme.
  def add(applicationId: String, newScheme: SchemeId): Future[Unit] = {
    val query = BSONDocument("applicationId" -> applicationId)
    val update = BSONDocument(
      "$addToSet" -> BSONDocument(
        s"scheme-preferences.schemes" -> newScheme
      )
    )

    val validator = singleUpdateValidator(applicationId, actionDesc = s"inserting sdip scheme")

    collection.update(ordered = false).one(query, update) map validator
  }
}
hmrc/fset-faststream
app/repositories/schemepreferences/SchemePreferencesRepository.scala
Scala
apache-2.0
3,493
/*
 * Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
 */
package detailedtopics.configuration.securityheaders

// Sample code for the Play documentation on the SecurityHeadersFilter.
// The //#... markers delimit snippets that are included verbatim in the docs — keep them intact.
object SecurityHeaders {
  //#filters
  import javax.inject.Inject

  import play.api.http.DefaultHttpFilters
  import play.filters.headers.SecurityHeadersFilter
  import play.api.mvc.Action
  //#filters

  // Registers the security-headers filter in the application's filter chain.
  class Filters @Inject() (securityHeadersFilter: SecurityHeadersFilter)
    extends DefaultHttpFilters(securityHeadersFilter)

  import play.api.mvc.Results.Ok

  // Shows how a single action can override the filter's CSP header.
  def index = Action {
    //#allowActionSpecificHeaders
    Ok("Index").withHeaders(SecurityHeadersFilter.CONTENT_SECURITY_POLICY_HEADER -> "my page-specific header")
    //#allowActionSpecificHeaders
  }
}
wsargent/playframework
documentation/manual/working/commonGuide/filters/code/SecurityHeaders.scala
Scala
apache-2.0
699
package com.naughtyzombie.sparkle.recipesearch.model

/**
 * Immutable model of a recipe document used by the recipe search.
 *
 * Created by pram on 08/11/2015.
 */
case class Recipe(id: String,
                  name: String,
                  source: String,
                  recipeYield: String,      // servings produced, kept as free text
                  ingredients: List[String],
                  prepTime: String,         // NOTE(review): times are raw strings — presumably ISO-8601 durations; confirm against data source
                  cookTime: String,
                  datePublished: String,
                  description: String
                 )
pram/sparkle
src/main/scala/com/naughtyzombie/sparkle/recipesearch/model/Recipe.scala
Scala
mit
448
package formation.warp10

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.stream.ActorMaterializer
import formation.warp10.data.{MagasinStorage, ProductStorage}
import formation.warp10.mock.MockApi
import formation.warp10.warp10.{MagasinWarp10, ProductWarp10}
import kneelnrise.warp10scala.model.Warp10Configuration

/**
 * Application entry point: wires the file-backed storages and Warp10 clients,
 * pre-loads all stored data into Warp10, then serves HTTP on localhost:9000.
 */
object Boot extends App {
  val configuration = new Configuration()
  implicit val actorSystem = ActorSystem()
  implicit val actorMaterializer = ActorMaterializer()
  // NOTE(review): first argument (host/base URL?) is an empty string — presumably
  // defaulted elsewhere; confirm against Warp10Configuration.
  implicit val warp10Configuration = Warp10Configuration("", configuration.readToken, configuration.writeToken, Warp10Configuration.ApiVersion.ZERO)

  val warp10Api = new Warp10Api(configuration)
  val router = new Router(new MockApi)
  val magasinStorage = new MagasinStorage(configuration.magasinStorageDirectory)
  val productStorage = new ProductStorage(configuration.productsStorageDirectory)
  val magasinWarp10 = new MagasinWarp10(warp10Api, configuration)
  val productWarp10 = new ProductWarp10(magasinStorage, warp10Api, configuration)

  // Push all stored data into Warp10 before the server starts accepting requests.
  loadIntoWarp10()

  val bindingFuture = Http().bindAndHandle(router.route, "localhost", 9000)

  // Registers every stored magasin and product with Warp10.
  def loadIntoWarp10(): Unit = {
    magasinWarp10.registerAll(magasinStorage.findAll())
    productWarp10.registerAll(productStorage.findAll())
  }
}
kneelnrise/formation-warp10
src/main/scala/formation/warp10/Boot.scala
Scala
mit
1,306
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.bwsw.sj.common.dal.model.instance

import java.util
import java.util.Date

import com.bwsw.sj.common.dal.model.service.ZKServiceDomain
import com.bwsw.sj.common.dal.morphia.MorphiaAnnotations.PropertyField
import com.bwsw.sj.common.utils.StreamUtils._
import com.bwsw.sj.common.utils.{EngineLiterals, RestLiterals}
import org.mongodb.morphia.annotations._

/**
 * Domain entity for [[com.bwsw.sj.common.utils.EngineLiterals.regularStreamingType]] instance
 *
 * @author Kseniya Tomskikh
 */
class RegularInstanceDomain(override val name: String,
                            override val moduleType: String,
                            override val moduleName: String,
                            override val moduleVersion: String,
                            override val engine: String,
                            override val coordinationService: ZKServiceDomain,
                            override val status: String = EngineLiterals.ready,
                            override val restAddress: String = "",
                            override val description: String = RestLiterals.defaultDescription,
                            override val parallelism: Int = 1,
                            override val options: String = "{}",
                            override val perTaskCores: Double = 0.1,
                            override val perTaskRam: Int = 32,
                            override val jvmOptions: java.util.Map[String, String] = new util.HashMap[String, String](),
                            override val nodeAttributes: java.util.Map[String, String] = new util.HashMap[String, String](),
                            override val environmentVariables: java.util.Map[String, String] = new util.HashMap[String, String](),
                            override val stage: FrameworkStage = FrameworkStage(),
                            override val performanceReportingInterval: Long = 60000,
                            override val frameworkId: String = System.currentTimeMillis().toString,
                            var inputs: Array[String] = Array(),
                            override val outputs: Array[String] = Array(),
                            // NOTE(review): uses the project's custom @PropertyField while the
                            // sibling fields use Morphia's @Property — presumably intentional
                            // for checkpoint-mode mapping; confirm.
                            @PropertyField("checkpoint-mode") val checkpointMode: String,
                            @Property("checkpoint-interval") var checkpointInterval: Long = 0,
                            @Embedded("execution-plan") var executionPlan: ExecutionPlan = new ExecutionPlan(),
                            @Property("start-from") var startFrom: String = EngineLiterals.newestStartMode,
                            @Property("state-management") var stateManagement: String = EngineLiterals.noneStateMode,
                            @Property("state-full-checkpoint") var stateFullCheckpoint: Int = 100,
                            @Property("event-wait-idle-time") var eventWaitIdleTime: Long = 1000,
                            creationDate: Date)
  extends InstanceDomain(
    name,
    moduleType,
    moduleName,
    moduleVersion,
    engine,
    coordinationService,
    status,
    restAddress,
    description,
    outputs,
    parallelism,
    options,
    perTaskCores,
    perTaskRam,
    jvmOptions,
    nodeAttributes,
    environmentVariables,
    stage,
    performanceReportingInterval,
    frameworkId,
    creationDate) {

  // Strips the stream-mode suffix from each input stream name.
  override def getInputsWithoutStreamMode: Array[String] = inputs.map(clearStreamFromMode)
}
bwsw/sj-platform
core/sj-common/src/main/scala/com/bwsw/sj/common/dal/model/instance/RegularInstanceDomain.scala
Scala
apache-2.0
4,233
package gv
package isi
package std.io

import java.nio.{ ByteBuffer }

import isi.io.{ ByteSink }

/**
 * Decoration ops for values that can act as byte sinks: adds a `write`
 * method that delegates to the implicitly available ByteSink type class.
 */
trait ByteSinkDecorationOps[T] extends Any {

  /** The decorated value. */
  def self: T

  /** Drains `from` into this sink via the ByteSink instance; returns the byte count. */
  @inline
  final def write(from: ByteBuffer)(implicit sink: ByteSink[T]): Int =
    sink.writeFrom(self, from)
}
mouchtaris/jleon
src/main/scala-2.12/gv/isi/std/io/ByteSinkDecorationOps.scala
Scala
mit
276
package org.jetbrains.plugins.scala
package refactoring.changeSignature

import java.io.File

import com.intellij.openapi.util.io.FileUtilRt
import com.intellij.openapi.vfs.CharsetToolkit
import com.intellij.psi._
import com.intellij.psi.impl.source.PostprocessReformattingAspect
import com.intellij.refactoring.changeSignature._
import org.jetbrains.plugins.scala.base.ScalaLightPlatformCodeInsightTestCaseAdapter
import org.jetbrains.plugins.scala.lang.formatting.settings.ScalaCodeStyleSettings
import org.jetbrains.plugins.scala.lang.psi.api.base.ScMethodLike
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.createTypeFromText
import org.jetbrains.plugins.scala.lang.psi.types.api._
import org.jetbrains.plugins.scala.lang.refactoring.changeSignature.changeInfo.ScalaChangeInfo
import org.jetbrains.plugins.scala.lang.refactoring.changeSignature.{ScalaChangeSignatureProcessor, ScalaParameterInfo}
import org.jetbrains.plugins.scala.project.ProjectContext
import org.jetbrains.plugins.scala.settings.annotations._
import org.jetbrains.plugins.scala.util._
import org.junit.Assert._

/**
 * Base class for change-signature refactoring tests: loads a main (and optional
 * second) file, runs a change-signature processor against a target method, and
 * compares the result against "after" files from test data.
 *
 * Nikolay.Tropin
 * 2014-08-14
 */
abstract class ChangeSignatureTestBase extends ScalaLightPlatformCodeInsightTestCaseAdapter {
  // The member whose signature is changed; assigned by doTest before the processor runs.
  var targetMethod: PsiMember = null
  protected var isAddDefaultValue = false

  implicit def projectContext: ProjectContext = getProjectAdapter

  override def getTestDataPath = folderPath

  // Hooks for subclasses: test-data location and file-name conventions.
  def folderPath: String

  def mainFileName(testName: String): String
  def mainFileAfterName(testName: String): String
  // May return null when the test has no second file.
  def secondFileName(testName: String): String
  def secondFileAfterName(testName: String): String

  // Builds the processor (Java or Scala flavour) that performs the refactoring.
  def processor(newVisibility: String,
                newName: String,
                newReturnType: String,
                newParams: => Seq[Seq[ParameterInfo]]): ChangeSignatureProcessorBase

  def findTargetElement: PsiMember

  protected def doTest(newVisibility: String,
                       newName: String,
                       newReturnType: String,
                       newParams: => Seq[Seq[ParameterInfo]],
                       settings: ScalaCodeStyleSettings = TypeAnnotationSettings.alwaysAddType(ScalaCodeStyleSettings.getInstance(getProjectAdapter))) {
    val testName = getTestName(false)
    // Save the code-style settings so they can be restored after the refactoring.
    val oldSettings = ScalaCodeStyleSettings.getInstance(getProjectAdapter).clone()
    TypeAnnotationSettings.set(getProjectAdapter, settings)

    val secondName = secondFileName(testName)
    val checkSecond = secondName != null

    val secondFile = if (checkSecond) {
      val secondFileText = getTextFromTestData(secondName)
      addFileToProject(secondName, secondFileText)
    } else null

    val fileName = mainFileName(testName)
    configureByFile(fileName)
    targetMethod = findTargetElement

    processor(newVisibility, newName, newReturnType, newParams).run()

    // Flush pending reformatting before comparing text.
    PostprocessReformattingAspect.getInstance(getProjectAdapter).doPostponedFormatting()
    val mainAfterText = getTextFromTestData(mainFileAfterName(testName))

    TypeAnnotationSettings.set(getProjectAdapter, oldSettings.asInstanceOf[ScalaCodeStyleSettings])
    assertEquals(mainAfterText, getFileAdapter.getText)

    if (checkSecond) {
      val secondAfterText = getTextFromTestData(secondFileAfterName(testName))
      assertEquals(secondAfterText, secondFile.getText)
    }
  }

  protected def addFileToProject(fileName: String, text: String): PsiFile =
    PsiFileTestUtil.addFileToProject(fileName, text, getProjectAdapter)

  // Reads a test-data file as UTF-8 text.
  protected def getTextFromTestData(fileName: String) = {
    val file = new File(getTestDataPath + fileName)
    FileUtilRt.loadFile(file, CharsetToolkit.UTF8, true)
  }

  // Parses a Java type from text in the given context.
  protected def getPsiTypeFromText(typeText: String, context: PsiElement): PsiType = {
    val factory: JavaCodeFragmentFactory = JavaCodeFragmentFactory.getInstance(getProjectAdapter)
    factory.createTypeCodeFragment(typeText, context, false).getType
  }

  protected def javaProcessor(newVisibility: String,
                              newName: String,
                              newReturnType: String,
                              newParams: => Seq[Seq[ParameterInfo]]): ChangeSignatureProcessorBase = {
    val psiMethod = targetMethod.asInstanceOf[PsiMethod]
    // null newReturnType means "keep the method's current return type".
    val retType =
      if (newReturnType != null) getPsiTypeFromText(newReturnType, psiMethod) else psiMethod.getReturnType
    val params = newParams.flatten.map(_.asInstanceOf[ParameterInfoImpl]).toArray

    new ChangeSignatureProcessor(getProjectAdapter, psiMethod, /*generateDelegate = */ false,
      newVisibility, newName, retType, params, Array.empty)
  }

  protected def scalaProcessor(newVisibility: String,
                               newName: String,
                               newReturnType: String,
                               newParams: => Seq[Seq[ParameterInfo]],
                               isAddDefaultValue: Boolean): ChangeSignatureProcessorBase = {
    // Resolve the new return type, falling back to the function's current one.
    val maybeReturnType = targetMethod match {
      case fun: ScFunction =>
        Option(newReturnType).flatMap {
          createTypeFromText(_, fun, fun)
        }.orElse {
          fun.returnType.toOption
        }
      case _ => None
    }

    val params = newParams.map(_.map(_.asInstanceOf[ScalaParameterInfo]))

    // TODO Having this repeated separately somehow defies the purpose of testing
    val annotationNeeded = ScalaTypeAnnotationSettings(targetMethod.getProject).isTypeAnnotationRequiredFor(
      Declaration(targetMethod, Visibility(newVisibility)),
      Location(targetMethod),
      Some(Definition(targetMethod)))

    val changeInfo =
      new ScalaChangeInfo(newVisibility, targetMethod.asInstanceOf[ScMethodLike], newName,
        maybeReturnType.getOrElse(Any), params, isAddDefaultValue, Some(annotationNeeded))

    new ScalaChangeSignatureProcessor(getProjectAdapter, changeInfo)
  }
}
jastice/intellij-scala
scala/scala-impl/test/org/jetbrains/plugins/scala/refactoring/changeSignature/ChangeSignatureTestBase.scala
Scala
apache-2.0
5,915
/**
 * Copyright 2015 Yahoo Inc. Licensed under the Apache License, Version 2.0
 * See accompanying LICENSE file.
 */
package controller.api

import com.typesafe.config.{ ConfigFactory, Config }
import controllers.KafkaManagerContext
import controllers.api.KafkaStateCheck
import features.ApplicationFeatures
import kafka.manager.utils.{ CuratorAwareTest, KafkaServerInTest }
import kafka.test.SeededBroker
import models.navigation.Menus
import org.scalatest.mock.MockitoSugar
import play.api.i18n.MessagesApi
import play.api.{ Configuration, Play }
import play.api.inject.ApplicationLifecycle
import play.api.libs.json.{ JsDefined, Json }
import play.api.test.Helpers._
import play.api.test.{ FakeApplication, FakeRequest }
import play.mvc.Http.Status.{ BAD_REQUEST, OK }
import org.scalatest.Matchers._

import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.Try

/**
 * Integration test for the KafkaStateCheck API controller.
 *
 * Boots an in-process seeded Kafka broker, registers it with KafkaManager as a
 * cluster, creates one topic, and then exercises each JSON endpoint of the
 * controller through Play's test helpers, asserting on HTTP status and body.
 */
class TestKafkaStateCheck extends CuratorAwareTest with KafkaServerInTest with MockitoSugar {

  private[this] val broker = new SeededBroker("controller-api-test", 4)
  override val kafkaServerZkPath = broker.getZookeeperConnectionString
  // Generous timeout: cluster/topic operations against ZooKeeper can be slow.
  private[this] val duration = FiniteDuration(10, SECONDS)
  private[this] val testClusterName = "kafka-sc-test-cluster"
  private[this] val testTopicName = "kafka-sc-test-topic"
  private[this] var kafkaManagerContext: Option[KafkaManagerContext] = None
  private[this] var kafkaStateCheck: Option[KafkaStateCheck] = None

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    //lazy val app : FakeApplication = {
    //  FakeApplication(additionalConfiguration = Map("kafka-manager.zkhosts" -> kafkaServerZkPath))
    //}
    //Play.start(app)
    import scala.collection.JavaConverters._
    val config = ConfigFactory.parseMap(
      Map(
        "pinned-dispatcher.type" -> "PinnedDispatcher",
        "pinned-dispatcher.executor" -> "thread-pool-executor",
        "kafka-manager.zkhosts" -> kafkaServerZkPath
      ).asJava
    )
    val playConfiguration = new Configuration(config)
    val managerContext = new KafkaManagerContext(mock[ApplicationLifecycle], playConfiguration)
    // Implicits required by the KafkaStateCheck constructor.
    implicit val af = ApplicationFeatures.getApplicationFeatures(config)
    implicit val menus = new Menus
    kafkaManagerContext = Option(managerContext)
    val stateCheck = new KafkaStateCheck(mock[MessagesApi], managerContext)
    kafkaStateCheck = Option(stateCheck)
    createCluster()
    createTopic()
    // Give KafkaManager time to poll broker/topic state before any test runs.
    Thread.sleep(10000)
  }

  override protected def afterAll(): Unit = {
    disableCluster()
    deleteCluster()
    kafkaManagerContext.foreach(_.getKafkaManager.shutdown())
    //Play.stop(app)
    // Best effort: broker shutdown failures must not mask test results.
    Try(broker.shutdown())
    super.afterAll()
  }

  /** Registers the seeded broker as a managed cluster, failing fast on any API error. */
  private[this] def createCluster() = {
    val addF = kafkaManagerContext.get.getKafkaManager.addCluster(
      testClusterName, "0.8.2.0", kafkaServerZkPath,
      jmxEnabled = false, pollConsumers = true, filterConsumers = true,
      jmxUser = None, jmxPass = None, jmxSsl = false,
      tuning = Option(kafkaManagerContext.get.getKafkaManager.defaultTuning)
    )
    val outcome = Await.result(addF, duration)
    outcome.toEither.left.foreach(apiError => sys.error(apiError.msg))
    Thread.sleep(3000)
  }

  /** Creates the test topic (4 partitions, replication factor 1). */
  private[this] def createTopic() = {
    val createF = kafkaManagerContext.get.getKafkaManager.createTopic(testClusterName, testTopicName, 4, 1)
    val outcome = Await.result(createF, duration)
    outcome.toEither.left.foreach(apiError => sys.error(apiError.msg))
  }

  /** Deletes the test topic; the outcome is deliberately ignored. */
  private[this] def deleteTopic() = {
    val deleteF = kafkaManagerContext.get.getKafkaManager.deleteTopic(testClusterName, testTopicName)
    Await.result(deleteF, duration)
  }

  private[this] def disableCluster() = {
    Await.result(kafkaManagerContext.get.getKafkaManager.disableCluster(testClusterName), duration)
    Thread.sleep(3000)
  }

  private[this] def deleteCluster() = {
    Await.result(kafkaManagerContext.get.getKafkaManager.deleteCluster(testClusterName), duration)
    Thread.sleep(3000)
  }

  test("get brokers") {
    val response = kafkaStateCheck.get.brokers(testClusterName).apply(FakeRequest())
    assert(status(response) === OK)
    assert(contentAsJson(response) === Json.obj("brokers" -> Seq(0)))
  }

  test("get available brokers in non-existing cluster") {
    val response = kafkaStateCheck.get.brokers("non-existent").apply(FakeRequest())
    assert(status(response) === BAD_REQUEST)
  }

  test("get topics") {
    val response = kafkaStateCheck.get.topics(testClusterName).apply(FakeRequest())
    assert(status(response) === OK)
    assert(contentAsJson(response) === Json.obj("topics" -> Seq(testTopicName, "controller-api-test", "__consumer_offsets").sorted))
  }

  test("get topics in non-existing cluster") {
    val response = kafkaStateCheck.get.topics("non-existent").apply(FakeRequest())
    assert(status(response) === BAD_REQUEST)
  }

  test("get under-replicated partitions") {
    val response = kafkaStateCheck.get.underReplicatedPartitions(testClusterName, testTopicName).apply(FakeRequest())
    assert(status(response) === OK)
    assert(contentAsJson(response) === Json.obj("topic" -> testTopicName, "underReplicatedPartitions" -> Seq.empty[Int]))
  }

  test("get under-replicated partitions of non-existing topic in non-existing cluster") {
    val response = kafkaStateCheck.get.underReplicatedPartitions("non-existent", "weird").apply(FakeRequest())
    assert(status(response) === BAD_REQUEST)
  }

  test("get unavailable partitions") {
    val response = kafkaStateCheck.get.unavailablePartitions(testClusterName, testTopicName).apply(FakeRequest())
    assert(status(response) == OK)
    assert(contentAsJson(response) == Json.obj("topic" -> testTopicName, "unavailablePartitions" -> Seq.empty[Int]))
  }

  test("get unavailable partitions of non-existing topic in non-existing cluster") {
    val response = kafkaStateCheck.get.unavailablePartitions("non-existent", "weird").apply(FakeRequest())
    assert(status(response) === BAD_REQUEST)
  }

  test("topic summary") {
    val response = kafkaStateCheck.get.topicSummaryAction(testClusterName, "null", testTopicName, "KF").apply(FakeRequest())
    assert(status(response) === OK)
    val json = Json.parse(contentAsJson(response).toString())
    // Only presence/shape of the summary fields is asserted; values vary per run.
    (json \ "totalLag").asOpt[Int] should not be empty
    (json \ "percentageCovered").asOpt[Int] should not be empty
    (json \ "partitionOffsets").asOpt[Seq[Long]] should not be empty
    (json \ "partitionLatestOffsets").asOpt[Seq[Long]] should not be empty
    (json \ "owners").asOpt[Seq[String]] should not be empty
  }

  test("get unavailable topic summary") {
    val response = kafkaStateCheck.get.topicSummaryAction("non-existent", "null", "weird", "KF").apply(FakeRequest())
    assert(status(response) === BAD_REQUEST)
  }

  test("get unavailable group summary") {
    val response = kafkaStateCheck.get.groupSummaryAction("non-existent", "weird", "KF").apply(FakeRequest())
    assert(status(response) === BAD_REQUEST)
  }

  test("get clusters") {
    val response = kafkaStateCheck.get.clusters.apply(FakeRequest())
    assert(status(response) === OK)
    val json = Json.parse(contentAsJson(response).toString())
    println(Json.prettyPrint(json))
    assert((json \ "clusters").isInstanceOf[JsDefined])
  }

  test("get topic identities") {
    val response = kafkaStateCheck.get.topicIdentities(testClusterName).apply(FakeRequest())
    assert(status(response) === OK)
    val json = Json.parse(contentAsJson(response).toString())
    println(Json.prettyPrint(json))
    assert((json \ "topicIdentities").isInstanceOf[JsDefined])
  }

  test("consumers summary") {
    val response = kafkaStateCheck.get.consumersSummaryAction(testClusterName).apply(FakeRequest())
    assert(status(response) === OK)
    val json = Json.parse(contentAsJson(response).toString())
    (json \ "consumers").asOpt[Seq[Map[String, String]]] should not be empty
  }
}
Knewton/kafka-manager
test/controller/api/TestKafkaStateCheck.scala
Scala
apache-2.0
7,820
package com.github.akiomik.ideaAndroidScala

import com.intellij.ide.fileTemplates._
import com.intellij.openapi.util.IconLoader
import org.jetbrains.plugins.scala.ScalaFileType

/**
 * Registers the plugin's Scala file templates (Activity, Fragment, Application)
 * with IntelliJ's file-template system under a single named group.
 */
class AndroidScalaFileTemplateFactory extends FileTemplateGroupDescriptorFactory {

  // Template file names bundled with the plugin.
  val templates = Seq("Activity.scala", "Fragment.scala", "Application.scala")

  override def getFileTemplatesDescriptor: FileTemplateGroupDescriptor = {
    val groupIcon = IconLoader.getIcon("images/android-scala.png")
    val descriptorGroup = new FileTemplateGroupDescriptor(AndroidScalaBundle("fileTemplate.groupName"), groupIcon)
    templates.foreach { templateName =>
      // Each template entry is shown with the standard Scala file icon.
      descriptorGroup.addTemplate(new FileTemplateDescriptor(templateName, ScalaFileType.SCALA_FILE_TYPE.getIcon))
    }
    descriptorGroup
  }
}
akiomik/idea-android-scala
src/com/github/akiomik/ideaAndroidScala/AndroidScalaFileTemplateFactory.scala
Scala
mit
801
/** * Copyright ยฉ 2015, eXist-db * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the <organization> nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.exist.xquery.modules.expath.file import org.exist.dom.QName import org.exist.util.serializer.XQuerySerializer import org.exist.xdm.Function.{Parameter, ResultType} import org.exist.xdm.Type import org.exist.xdm.XdmImplicits._ import org.exist.xquery.ErrorCodes.ErrorCode import org.exist.xquery.{AbstractInternalModule, BasicFunction, FunctionDef, FunctionSignature, XPathException, XQueryContext} import org.exist.xquery.util.SerializerUtils import org.exist.xquery.value.{Base64BinaryValueType, BinaryValue, BinaryValueFromInputStream, BooleanValue, IntegerValue, Item, NodeValue, Sequence, StringValue, ValueSequence} import org.exquery.expath.module.file.{FileModule, FileModuleErrors, FileModuleException} import FileModule._ import java.io.{ByteArrayInputStream, InputStream, ByteArrayOutputStream => JByteArrayOutputStream, OutputStreamWriter => JOutputStreamWriter} import java.net.URI import java.util.{List => JList, Map => JMap, Properties => JProperties} import fs2.{Stream, io} import cats.effect.{IO, Sync} /** * Implementation of the EXPath File Module for eXist * * @author Adam Retter <adam.retter@googlemail.com> */ class ExpathFileModule(parameters: JMap[String, JList[_]]) extends AbstractInternalModule(ExpathFileModule.functions, parameters) { override def getNamespaceURI: String = FileModule.NAMESPACE override def getDescription: String = "EXPath file module" override def getDefaultPrefix: String = FileModule.PREFIX override def getReleaseVersion: String = "2.3" } object ExpathFileModule { private val pathParam = Parameter("path", Type.string, "A filesystem path") private val itemsParam = Parameter("items", Type.item_*, "The items to write to the file") private val paramsParam = Parameter("params", Type.element, "Controls the serialization") private val fileParam = Parameter("file", Type.string, "A file on the filesystem") private val valueParam = Parameter("value", Type.string, "A string value") private val encodingParam = Parameter("encoding", 
Type.string, "The character encoding to use") private val linesParam = Parameter("lines", Type.string_*, "A lines of text") private val prefixParam = Parameter("prefix", Type.string, "The prefix for the name") private val suffixParam = Parameter("suffix", Type.string, "The suffix for the name") private def dirParam(description: String = "A path to a directory on the filesystem") = Parameter("dir", Type.string, description) private val recursiveParam = Parameter("recursive", Type.boolean, "If the parameter $recursive is set to true(), sub-directories will be incorporated as well") private val offsetParam = Parameter("offset", Type.integer, "The offset in bytes to start at") private val lengthParam = Parameter("length", Type.integer, "The length in bytes") private val binaryValueParam = Parameter("value", Type.base64Binary, "A binary value") private val valuesParam = Parameter("value", Type.string_*, "The string values") private val pathResultType = ResultType(Type.string, "The full path to the resource created on the filesystem") private val binaryResultType = ResultType(Type.base64Binary, "The binary data") private val textResultType = ResultType(Type.string, "The text data") private def signatures(name: String, description: String, multiParameters: Seq[Seq[Parameter]], resultType: Option[ResultType]) : Seq[FunctionSignature] = org.exist.xdm.Function.signatures(new QName(name, FileModule.NAMESPACE, FileModule.PREFIX), description, multiParameters, resultType) private def functionDefs(signatures: Seq[FunctionSignature]) = signatures.map(new FunctionDef(_, classOf[ExpathFileFunctions])) val functions = Seq( functionDefs(signatures("exists", "Tests if the file or directory pointed by $path exists.", Seq(Seq(pathParam)), Some(ResultType(Type.boolean, "true if the file exists.")))), functionDefs(signatures("is-dir", "Tests if $path points to a directory. 
On UNIX-based systems the root and the volume roots are considered directories.", Seq(Seq(pathParam)), Some(ResultType(Type.boolean, "true if the path is a directory.")))), functionDefs(signatures("is-file", "Tests if $path points to a file.", Seq(Seq(pathParam)), Some(ResultType(Type.boolean, "true if the path is a file.")))), functionDefs(signatures("last-modified", "Returns the last modification time of a file or directory.", Seq(Seq(pathParam)), Some(ResultType(Type.dateTime, "The last modification time of the directory or file.")))), functionDefs(signatures("size", "Returns the byte size of a file, or the value 0 for directories.", Seq(Seq(fileParam)), Some(ResultType(Type.integer, "The size in bytes of a file, or 0 if a directory.")))), functionDefs(signatures("append", "Appends a sequence of items to a file. If the file pointed by $file does not exist, a new file will be created.", Seq(Seq(fileParam, itemsParam), Seq(fileParam, itemsParam, paramsParam)), None)), functionDefs(signatures("append-binary", "Appends a Base64 item as binary to a file. If the file pointed by $file does not exist, a new file will be created.", Seq(Seq(fileParam, binaryValueParam)), None)), functionDefs(signatures("append-text", "Appends a string to a file. If the file pointed by $file does not exist, a new file will be created. Encoding is assumed to be UTF-8 if not specified.", Seq(Seq(fileParam, valueParam), Seq(fileParam, valueParam, encodingParam)), None)), functionDefs(signatures("append-text-lines", "Appends a sequence of strings to a file, each followed by the system-dependent newline character. If the file pointed by $file does not exist, a new file will be created. 
Encoding is assumed to be UTF-8 if not specified", Seq(Seq(fileParam, linesParam), Seq(fileParam, linesParam, encodingParam)), None)), functionDefs(signatures("copy", "Copies a file or a directory given a source and a target path/URI.", Seq(Seq(Parameter("source", Type.string, "The path to the file or directory to copy"), Parameter("target", Type.string, "The path to the target file or directory for the copy"))), None)), functionDefs(signatures("create-dir", "Creates a directory, or does nothing if the directory already exists. The operation will create all non-existing parent directories.", Seq(Seq(dirParam())), None)), functionDefs(signatures("create-temp-dir", "Creates a temporary directory and all non-existing parent directories.", Seq(Seq(prefixParam, suffixParam), Seq(prefixParam, suffixParam, dirParam("A directory in which to create the temporary directory"))), Some(pathResultType))), functionDefs(signatures("create-temp-file", "Creates a temporary file and all non-existing parent directories.", Seq(Seq(prefixParam, suffixParam), Seq(prefixParam, suffixParam, dirParam("A directory in which to create the temporary file"))), Some(pathResultType))), functionDefs(signatures("delete", "Deletes a file or a directory from the file system.", Seq(Seq(pathParam), Seq(pathParam, recursiveParam)), None)), functionDefs(signatures("list", """Lists all files and directories in a given directory. The order of the items in the resulting sequence is not defined. The "." and ".." items are never returned. The returned paths are relative to the provided directory $dir.""", Seq(Seq(dirParam()), Seq(dirParam(), recursiveParam), Seq(dirParam(), recursiveParam, Parameter("pattern", Type.string, "Defines a name pattern in the glob syntax. 
Only the paths of the files and directories whose names are matching the pattern will be returned."))), None)), functionDefs(signatures("move", "Moves a file or a directory given a source and a target path/URI.", Seq(Seq(Parameter("source", Type.string, "The path to the file or directory to move"), Parameter("target", Type.string, "The path to the target file or directory for the move"))), None)), functionDefs(signatures("read-binary", "Returns the content of a file in its Base64 representation.", Seq(Seq(fileParam), Seq(fileParam, offsetParam), Seq(fileParam, offsetParam, lengthParam)), Some(binaryResultType))), functionDefs(signatures("read-text", "Returns the content of a file in its string representation. Encoding is assumed to be UTF-8 if not specified.", Seq(Seq(fileParam), Seq(fileParam, encodingParam)), Some(textResultType))), functionDefs(signatures("read-text-lines", "Returns the contents of a file as a sequence of strings, separated at newline boundaries. Encoding is assumed to be UTF-8 if not specified.", Seq(Seq(fileParam), Seq(fileParam, encodingParam)), Some(textResultType))), functionDefs(signatures("write", "Writes a sequence of items to a file. If the file pointed to by $file already exists, it will be overwritten.", Seq(Seq(fileParam, itemsParam), Seq(fileParam, itemsParam, paramsParam)), None)), functionDefs(signatures("write-binary", "Writes a Base64 item as binary to a file. If the file pointed to by $file already exists, it will be overwritten.", Seq(Seq(fileParam, binaryValueParam), Seq(fileParam, binaryValueParam, offsetParam)), None)), functionDefs(signatures("write-text", "Writes a string to a file. If the file pointed to by $file already exists, it will be overwritten. Encoding is assumed to be UTF-8.", Seq(Seq(fileParam, valueParam), Seq(fileParam, valueParam, encodingParam)), None)), functionDefs(signatures("write-text-lines", "Writes a sequence of strings to a file, each followed by the system-dependent newline character. 
If the file pointed to by $file already exists, it will be overwritten. Encoding is assumed to be UTF-8 if bit specified.", Seq(Seq(fileParam, valuesParam), Seq(fileParam, valuesParam, encodingParam)), None)), functionDefs(signatures("name", "Returns the name of a file or directory.", Seq(Seq(pathParam)), Some(ResultType(Type.string, "The name of the directory or file.")))), functionDefs(signatures("parent", "Transforms the given path into an absolute path, as specified by file:resolve-path, and returns the parent directory.", Seq(Seq(pathParam)), Some(ResultType(Type.string_?, "The name of the parent or the empty-sequence if the parent is a filesystem root.")))), functionDefs(signatures("path-to-native", "Transforms a URI, an absolute path, or relative path to a canonical, system-dependent path representation. A canonical path is both absolute and unique and thus contains no redirections such as references to parent directories or symbolic links.", Seq(Seq(pathParam)), Some(ResultType(Type.string, "The resulting native path.")))), functionDefs(signatures("path-to-uri", "Transforms a file system path into a URI with the file:// scheme. If the path is relative, it is first resolved against the current working directory.", Seq(Seq(pathParam)), Some(ResultType(Type.uri, "The resulting path URI.")))), functionDefs(signatures("resolve-path", "Transforms a relative path into an absolute operating system path by resolving it against the current working directory. 
If the resulting path points to a directory, it will be suffixed with the system-specific directory separator.", Seq(Seq(pathParam)), Some(ResultType(Type.string, "The absolute filesystem path.")))), functionDefs(signatures("dir-separator", """Returns the value of the operating system-specific directory separator, which usually is / on UNIX-based systems and \\ on Windows systems.""", Seq.empty, Some(ResultType(Type.string, "The directory separator")))), functionDefs(signatures("line-separator", "Returns the value of the operating system-specific line separator, which usually is &#10; on UNIX-based systems, &#13;&#10; on Windows systems and &#13; on Mac systems.", Seq.empty, Some(ResultType(Type.string, "The line separator")))), functionDefs(signatures("path-separator", "Returns the value of the operating system-specific path separator, which usually is : on UNIX-based systems and ; on Windows systems.", Seq.empty, Some(ResultType(Type.string, "The path separator")))), functionDefs(signatures("temp-dir", "Returns the path to the default temporary-file directory of an operating system.", Seq.empty, Some(ResultType(Type.string, "The path of the temporary directory.")))), functionDefs(signatures("base-dir", "Returns the parent directory of the static base URI. 
If the Base URI property is undefined, the empty sequence is returned.", Seq.empty, Some(ResultType(Type.string_?, "The parent directory of the static base URI.")))), functionDefs(signatures("current-dir", "Returns the current working directory.", Seq.empty, Some(ResultType(Type.string, "The current working directory.")))) ).reduceLeft(_ ++ _).toArray } /** * Implementation of the functions within the EXPath * File Module for eXist */ class ExpathFileFunctions(context: XQueryContext, signature: FunctionSignature) extends BasicFunction(context, signature) { private lazy val fm = new FileModule {} @throws[XPathException] override def eval(args: Array[Sequence], contextSequence: Sequence) : Sequence = { signature.getName.getLocalPart match { case "exists" => getOrThrow[Boolean](valueOrError( fileProperty(args)(fm.exists[IO]).attempt.unsafeRunSync() )) case "is-dir" => getOrThrow[Boolean](valueOrError( fileProperty(args)(fm.isDir[IO]).attempt.unsafeRunSync() )) case "is-file" => getOrThrow[Boolean](valueOrError( fileProperty(args)(fm.isFile[IO]).attempt.unsafeRunSync() )) case "last-modified" => getOrThrow(valueOrError( fileProperty(args)(fm.lastModified[IO]).map(LongToXdmDateTime).attempt.unsafeRunSync() )) case "size" => getOrThrow(valueOrError( fileProperty(args)(fm.fileSize[IO]).map(LongToXdmInteger).attempt.unsafeRunSync() )) case "append" => appendOrWrite(args, append = true) case "append-binary" => appendBinary(args) case "append-text" => appendOrWriteText(args, append = true) case "append-text-lines" => appendOrWriteTextLines(args, append = true) case "copy" => val source = sarg(args)(0) val target = sarg(args)(1) zip(source, target) match { case Some((source, target)) => getOrThrow(valueOrError( fm.copy[IO](source, target) .map(_ => Sequence.EMPTY_SEQUENCE) .attempt.unsafeRunSync() )) case None => invalidArg } case "create-dir" => sarg(args)(0) match { case Some(dir) => getOrThrow(valueOrError( fm.createDir[IO](dir) .map(_ => Sequence.EMPTY_SEQUENCE) 
.attempt.unsafeRunSync() )) case None => invalidArg } case "create-temp-dir" => val prefix = sarg(args)(0) val suffix = sarg(args)(1) val dir = sarg(args)(2) zip(prefix, suffix).map { case (prefix, suffix) => getOrThrow(valueOrError( fm.createTempDir[IO](prefix, suffix, dir).map(StringToXdmString(_)).attempt.unsafeRunSync() )) }.getOrElse(invalidArg) case "create-temp-file" => val prefix = sarg(args)(0) val suffix = sarg(args)(1) val dir = sarg(args)(2) zip(prefix, suffix).map { case (prefix, suffix) => getOrThrow(valueOrError( fm.createTempFile[IO](prefix, suffix, dir).map(StringToXdmString(_)).attempt.unsafeRunSync() )) }.getOrElse(invalidArg) case "delete" => val path = sarg(args)(0) val recursive: Boolean = bv2b(barg(args)(1)).getOrElse(false) path match { case Some(path) => getOrThrow(valueOrError( fm.delete[IO](path, recursive) .map(_ => Sequence.EMPTY_SEQUENCE) .attempt.unsafeRunSync() )) case None => invalidArg } case "list" => val dir = sarg(args)(0) val recursive = bv2b(barg(args)(1)).getOrElse(false) val pattern = sarg(args)(0) dir.map { dir => getOrThrow(valueOrError( fm.list[IO](dir, recursive, pattern) .map { paths => val seq = new ValueSequence() paths.map(path => seq.add(path)) seq }.attempt.unsafeRunSync() )) }.getOrElse(invalidArg) case "move" => val source = sarg(args)(0) val target = sarg(args)(1) zip(source, target) match { case Some((source, target)) => getOrThrow(valueOrError( fm.move[IO](source, target) .map(_ => Sequence.EMPTY_SEQUENCE) .attempt.unsafeRunSync() )) case None => invalidArg } case "read-binary" => val file = sarg(args)(0) val offset = iv2i(iarg(args)(1)).getOrElse(0) val length = iarg(args)(2) file.map { file => // TODO must be a better way to go from fm.readBinary to BinaryValueFromInputStream -- without buffering in memory? 
// what about --> stream.through(io.toInputStream[IO]) getOrThrow(valueOrError( fm.readBinary[IO](file, offset, length).flatMap { stream => stream .runFold(new JByteArrayOutputStream()){ case (buf, b) => buf.write(b); buf } .map(_.toByteArray) .map(inBuf => BinaryValueFromInputStream.getInstance(context, new Base64BinaryValueType(), new ByteArrayInputStream(inBuf))) }.attempt.unsafeRunSync() )) }.getOrElse(invalidArg) case "read-text" => val file = sarg(args)(0) val encoding = sv2s(sarg(args)(1)).getOrElse(FileModule.DEFAULT_CHAR_ENCODING) file.map { file => getOrThrow(valueOrError( fm.readText[IO](file, encoding).flatMap { stream => stream .runFold(new StringBuilder()){ case (buf, str) => buf.append(str)} .map(_.toString()) }.attempt.unsafeRunSync() )) }.getOrElse(invalidArg) case "read-text-lines" => val file = sarg(args)(0) val encoding = sv2s(sarg(args)(1)).getOrElse(FileModule.DEFAULT_CHAR_ENCODING) file.map { file => getOrThrow(valueOrError( fm.readText[IO](file, encoding).flatMap { stream => stream .runFold(new ValueSequence()){ case (buf, str) => buf.add(StringToXdmString(str)); buf} }.attempt.unsafeRunSync() )) }.getOrElse(invalidArg) case "write" => appendOrWrite(args, append = false) case "write-binary" => writeBinary(args) case "write-text" => appendOrWriteText(args, append = false) case "write-text-lines" => appendOrWriteTextLines(args, append = false) case "name" => getOrThrow[String](valueOrError( fileProperty(args)(fm.name[IO]).attempt.unsafeRunSync() )) case "parent" => getOrThrow[Option[String]](valueOrError( fileProperty(args)(fm.parent[IO]).attempt.unsafeRunSync() )) // case "children" => // val path = sarg(args)(0) // path.map { // path => // val seq = new ValueSequence() // valueOrError(fm.children(path).map(_.to(seqSink(seq)).run.attemptRun)).map(_ => seq) // } | invalidArg case "path-to-native" => getOrThrow[String](valueOrError( fileProperty(args)(fm.pathToNative[IO]).attempt.unsafeRunSync() )) case "path-to-uri" => 
getOrThrow[URI](valueOrError( fileProperty(args)(fm.pathToUri[IO]).attempt.unsafeRunSync() )) case "resolve-path" => getOrThrow[String](valueOrError( fileProperty(args)(fm.resolvePath[IO]).attempt.unsafeRunSync() )) case "dir-separator" => fm.dirSeparator case "line-separator" => fm.lineSeparator case "path-separator" => fm.pathSeparator case "temp-dir" => fm.tempDir case "base-dir" => getOrThrow[Option[String]](valueOrError( fm.parent[IO](context.getBaseURI.toString).attempt.unsafeRunSync() )) case "current-dir" => fm.currentDir case _ => throw new XPathException(this, "Unknown function signature") } } /** * Zips two Options together if both are Some */ private def zip[A, B](oa: Option[A], ob: Option[B]) : Option[(A, B)] = oa.flatMap(a => ob.map(b => (a, b))) /** * Zips an Option and a value together if the first is Some */ private def zip[A, B](oa: Option[A], b: B) : Option[(A, B)] = oa.map(a => (a, b)) @throws[XPathException] private def appendOrWrite(args: Array[Sequence], append: Boolean) : Sequence = { val file = sarg(args)(0) val items : Option[Sequence] = args.get(1) val outputProperties = new JProperties() arg[NodeValue](args)(2).map { params => SerializerUtils.getSerializationOptions(this, params, outputProperties) } zip(file, items).map { case (file, items) => getOrThrow(valueOrError( fm.writeBinary[IO](file, append).flatMap { writer => serializeSequence[IO](items, outputProperties) .to(writer) .run }.map(_ => Sequence.EMPTY_SEQUENCE).attempt.unsafeRunSync() )) }.getOrElse(invalidArg) } @throws[XPathException] private def appendBinary(args: Array[Sequence]) : Sequence = appendOrWriteBinary(args, append = true) @throws[XPathException] private def writeBinary(args: Array[Sequence]) : Sequence = { val file = sarg(args)(0) val value = args.get(1).map(_.itemAt(0).asInstanceOf[BinaryValue]) val offset = iarg(args)(2).map(_.getInt).getOrElse(0) zip(file, value).map { case (file, value) => getOrThrow(valueOrError( fm.writeBinary[IO](file, offset).flatMap { 
writer => io.readInputStream(IO(value.getInputStream), DEFAULT_BUF_SIZE, true) .to(writer) .run }.map(_ => Sequence.EMPTY_SEQUENCE).attempt.unsafeRunSync() )) }.getOrElse(invalidArg) } @throws[XPathException] private def appendOrWriteBinary[F[_]](args : Array[Sequence], append: Boolean) : Sequence = { val file = sarg(args)(0) val value = args.get(1).map(_.itemAt(0).asInstanceOf[BinaryValue]) zip(file, value).map { case (file, value) => getOrThrow(valueOrError( fm.writeBinary[IO](file, append).flatMap { writer => io.readInputStream(IO(value.getInputStream), DEFAULT_BUF_SIZE, true) .to(writer) .run }.map(_ => Sequence.EMPTY_SEQUENCE).attempt.unsafeRunSync() )) }.getOrElse(invalidArg) } @throws[XPathException] private def appendOrWriteText(args: Array[Sequence], append: Boolean) : Sequence = { val file = sarg(args)(0) val value = sarg(args)(1) val encoding = arg[StringValue](args)(2).map(_.getStringValue).getOrElse("UTF-8") zip(file, value).map { case (file, value) => getOrThrow(valueOrError( fm.writeText[IO](file, append, encoding).flatMap { writer => val stream: Stream[IO, String] = Stream.emit(value.getStringValue) stream .to(writer) .run }.map(_ => Sequence.EMPTY_SEQUENCE).attempt.unsafeRunSync() )) }.getOrElse(invalidArg) } @throws[XPathException] private def appendOrWriteTextLines(args: Array[Sequence], append: Boolean) : Sequence = { val file = sarg(args)(0) val lines: Option[Sequence] = args.get(1) val encoding = arg[StringValue](args)(2).map(_.getStringValue).getOrElse("UTF-8") zip(file, lines).map { case (file, lines) if (!lines.isEmpty) => getOrThrow(valueOrError( fm.writeText[IO](file, append, encoding).flatMap { writer => val stream: Stream[IO, String] = Stream.unfold(lines.iterate){ it => if(it.hasNext) { Some((it.nextItem(), it)) } else { None } }.map(_.asInstanceOf[StringValue].getStringValue) stream .to(writer) .run }.map(_ => Sequence.EMPTY_SEQUENCE).attempt.unsafeRunSync() )) }.getOrElse(invalidArg) } /** * Helper function for FileModule functions 
that operate on a single * string parameter... typically a path. */ @throws[XPathException] private def fileProperty[T](args: Array[Sequence])(fn: (String) => T) = sarg(args)(0).map(fn(_)).getOrElse(invalidArg) /** * Serializes a sequence * * @param outputProperties Any output properties to set on the serializer * @param seq The sequence of items to serialize * * @return The serialized items */ @throws[RuntimeException] private def serializeSequence[F[_]](seq: Sequence, outputProperties: JProperties)(implicit F: Sync[F]) : Stream[F, Byte] = { val in : F[InputStream] = F.delay { val baos = new JByteArrayOutputStream() try { val writer = new JOutputStreamWriter(baos) try { val xqSerializer = new XQuerySerializer(context.getBroker(), outputProperties, writer) xqSerializer.serialize(seq) new ByteArrayInputStream(baos.toByteArray) } finally { writer.close() } } finally { baos.close() } } fs2.io.readInputStream(in, FileModule.DEFAULT_BUF_SIZE, true) } /** * Throw a standard invalid argument XPathException */ @throws[XPathException] private def invalidArg = throw new XPathException(getLine(), getColumn(), "Missing function argument") /** * Adds a safe get(Int) : Maybe[T] method to any Array[T] object */ private implicit class MyArray[T](val array: Array[T]) { def get(idx: Int): Option[T] = scala.util.Try(array(idx)).toOption } /** * Extract a single argument * * @param idx The index of the argument * @param iidx If the argument is a sequence, then the item index within the sequence. * Defaults to 0 which is suitable for an atomic item */ private def arg[T <: Item](args: Array[Sequence])(idx: Int, iidx: Int = 0) : Option[T] = args.get(idx).map(_.itemAt(iidx).asInstanceOf[T]) /** * Extract a single string value argument * * @param idx The index of the argument * @param iidx If the argument is a sequence, then the item index within the sequence. 
* Defaults to 0 which is suitable for an atomic item */ private def sarg(args: Array[Sequence])(idx: Int, iidx: Int = 0) : Option[StringValue] = arg[StringValue](args)(idx, iidx) /** * Extract a single boolean value argument * * @param idx The index of the argument * @param iidx If the argument is a sequence, then the item index within the sequence. * Defaults to 0 which is suitable for an atomic item */ private def barg(args: Array[Sequence])(idx: Int, iidx: Int = 0) : Option[BooleanValue] = arg[BooleanValue](args)(idx, iidx) /** * Extract a single integer value argument * * @param idx The index of the argument * @param iidx If the argument is a sequence, then the item index within the sequence. * Defaults to 0 which is suitable for an atomic item */ private def iarg(args: Array[Sequence])(idx: Int, iidx: Int = 0) : Option[IntegerValue] = arg[IntegerValue](args)(idx, iidx) /** * Extracts a value from the right or throws * an XPathException for the error on the left */ @throws[XPathException] private implicit def getOrThrow[T](value: Either[FileModuleException, T]): T = { value match { case Right(v) => v case Left(fme) => throw toXPathException(fme) } } /** * Creates an XPathException from a FileModuleException */ private def toXPathException(fme: FileModuleException): XPathException = { val errorCode = new ErrorCode(new QName(fme.fileModuleError.name, FileModule.NAMESPACE), fme.fileModuleError.description) new XPathException(this, errorCode, fme.fileModuleError.description, null, fme) } /** * Flattens out a result to extract an * either error or value. 
Exceptions are * converted to FileModuleErrors.IoError */ private def valueOrError[T](v: Either[Throwable, T]) : Either[FileModuleException, T] = { v match { case Right(r) => Right(r) case Left(fileModuleEx: FileModuleException) => Left(fileModuleEx) case Left(t) => Left(new FileModuleException(FileModuleErrors.IoError, t)) } } private implicit def sv2s(value: Option[StringValue]) : Option[String] = value.map(_.getStringValue) private implicit def bv2b(value: Option[BooleanValue]) : Option[Boolean] = value.map(_.getValue) private implicit def iv2i(value: Option[IntegerValue]) : Option[Int] = value.map(_.getInt) }
adamretter/exist-expath-file-module
src/main/scala/org/exist/xquery/modules/expath/file/ExpathFileModule.scala
Scala
bsd-3-clause
30,837
package edison.search import edison.search.serialization.JsonSerialization.DefaultSerializers._ import edison.search.serialization.{ JsonSerialization, JsonSerializer } import edison.search.tree.Helpers.TreePrettyPrinter import edison.search.tree.IntegerTree import edison.util.SmartSpec class JsonSerializationTest extends SmartSpec { def serialize[T](obj: T)(implicit serializer: JsonSerializer[T]): String = JsonSerialization.serializeToString(obj) def jsonRepr(str: String) = str.stripMargin.replaceAll("[\n ]", "") behavior of "JSON serialization" it must "handle samples with floating point results correctly" in { val sample = Sample(1, 0.0625) // 1/16 serialize(sample) shouldBe """{"value":1,"result":0.0625}""" } it must "be able to serialize an empty Samples" in { serialize(Samples.empty) shouldBe """{"values":[],"size":0}""" } it must "be able to serialize two Samples" in { val samples = Samples(1 -> 10.0, 2 -> 20.0) serialize(samples) shouldBe jsonRepr( """ |{ | "values": [ | {"value": 1, "result": 10.0}, | {"value": 2, "result": 20.0} | ], | "size": 2, | "min": 10.0, | "max": 20.0, | "mean": 15.0, | "sd": 5.0 |} |""" ) } it must "be able to serialize an IntegerTree leaf" in { val samples = Samples(1 -> 10.0, 2 -> 20.0) val tree = IntegerTree(Range(1, 50), List.empty, samples) serialize(tree) shouldBe jsonRepr( s""" |{ | "name": "[1;50)", | "samples": ${serialize(samples)}, | "children": [] |} """ ) } it must "be able to serialize an IntegerTree node with children" in { val leafA = IntegerTree(Range(1, 2), List.empty, Samples(1 -> 10.0)) val leafB = IntegerTree(Range(2, 3), List.empty, Samples(2 -> 20.0)) val root = IntegerTree(Range(1, 3), List(leafA, leafB), Samples(1 -> 10.0, 2 -> 20.0)) serialize(root) shouldBe jsonRepr( s""" |{ | "name": "[1;3)", | "samples": ${serialize(root.samples)}, | "children": [${serialize(leafA)}, ${serialize(leafB)}] |} """ ) } it must "handle inclusive ranges correctly" in { serialize(IntegerTree.empty(Range.inclusive(5, 5))) 
shouldBe jsonRepr( s""" |{ | "name": "[5;5]", | "samples": { "values": [], "size": 0 }, | "children": [] |} """ ) } behavior of "TreePrettyPrinter" it must "pretty print trees" in { val root = IntegerTree(Range(1, 2), List.empty, Samples(1 -> 10.0)) root.json shouldBe """ |{ | "name" : "[1;2)", | "samples" : { | "values" : [ { | "value" : 1, | "result" : 10.0 | } ], | "size" : 1, | "min" : 10.0, | "max" : 10.0, | "mean" : 10.0, | "sd" : 0.0 | }, | "children" : [ ] |} """.stripMargin.trim } it must "pretty print trees in compact mode" in { val root = IntegerTree(Range(1, 2), List.empty, Samples(1 -> 10.0)) root.shortJson shouldBe """ |{ | "name" : "[1;2)", | "samples" : { | "size" : 1, | "mean" : 10.0 | }, | "children" : [ ] |} """.stripMargin.trim } }
pawel-wiejacha/edison
core/src/test/scala/edison/search/serialization/JsonSerializationTest.scala
Scala
mit
3,382
package com.wuyuntao.aeneas.tests.views import java.time.OffsetDateTime import java.util.UUID import com.wuyuntao.aeneas.View case class UserById(val id: UUID, val email: String, val lastLoginTime: OffsetDateTime) extends View
wuyuntao/Aeneas
aeneas-core/src/test/scala/com/wuyuntao/aeneas/tests/views/UserById.scala
Scala
apache-2.0
236
object StringReductions extends App { println(io.Source.stdin.getLines().toList.head.distinct) }
PaulNoth/hackerrank
practice/functional_programming/recursion/string_reductions/StringReductions.scala
Scala
mit
99
package net.jcain.net import akka.actor.FSM.{CurrentState, SubscribeTransitionCallBack, Transition} import akka.actor.{ActorSystem, Props} import akka.io.Tcp.Connect import akka.io.{IO, Tcp} import akka.testkit.{ImplicitSender, TestKit, TestProbe} import akka.util.ByteString import java.net.InetSocketAddress import org.scalatest.exceptions.TestFailedException import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike} import scala.collection.mutable.ListBuffer import scala.concurrent.duration._ import scala.language.reflectiveCalls import scala.util.matching.Regex class MockSMTPSpec extends TestKit(ActorSystem("MockSMTPSpec")) with WordSpecLike with BeforeAndAfterAll with Matchers with ImplicitSender { var port = 9875 val rgen = new java.security.SecureRandom import Handler._ class ServerFixture(label: String) { val tcpProbe = new TestProbe(system) { def expectResponse(regex: Regex): scala.util.matching.Regex.Match = { val input = new ListBuffer[String] while (input.isEmpty || !input.last.endsWith("\\r\\n")) expectMsgPF() { case Tcp.Received(data) => input.append(data.utf8String) } val response = input.mkString regex.findFirstMatchIn(response) match { case None => throw new TestFailedException(s"$response did not match Regex $regex", 5) case Some(m) => m } } } port += 1 val server = system.actorOf(Props(classOf[MockSMTP], tcpProbe.ref, port, None), s"$label-smtp-server") // wait for the server to start up and listen tcpProbe.expectMsg(MockSMTP.Ready) def stop() = system.stop(server) } class HandlerFixture(label: String, val initialGreeting: Option[String] = None) { val dataProbe = TestProbe(s"$label-probe-handler-test") val stateProbe = TestProbe(s"$label-probe-handler-state") val lifeProbe = TestProbe(s"$label-probe-handler-life") val handlerId = "%016x".format(rgen.nextLong()) val handler = system.actorOf(Props(classOf[Handler], handlerId, dataProbe.ref, initialGreeting), s"$label-smtp-handler-$handlerId") lifeProbe.watch(handler) handler ! 
SubscribeTransitionCallBack(stateProbe.ref) if (initialGreeting.isEmpty) { // wait for handler to start up stateProbe.expectMsg(CurrentState(handler, Greeting)) // expect initial greeting dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String shouldBe s"${Handler.InitialGreeting} $handlerId\\r\\n" } } def sendData(text: String) = handler ! Tcp.Received(ByteString(s"$text\\r\\n")) def stop() = system.stop(handler) } "MockSMTP" when { "starting up" should { "bind to given port" in new ServerFixture("svr-start") { // attempt to connect to the port IO(Tcp) ! Connect(new InetSocketAddress("localhost", port)) expectMsgPF() { case Tcp.Connected(_, _) => } stop() } } "RcptTo" when { "GetRecipients" should { "reply with the recipients" in new ServerFixture("svr-rcptto-get-rcpts") { // connect to the port IO(Tcp).tell(Connect(new InetSocketAddress("localhost", port)), tcpProbe.ref) tcpProbe.expectMsgPF() { case Tcp.Connected(_, _) => } val tcp = tcpProbe.lastSender tcp.tell(Tcp.Register(tcpProbe.ref), tcpProbe.ref) val matched = tcpProbe.expectResponse("[0-9a-f]{16}$".r) val handlerId = matched.group(0) // EHLO tcp.tell(Tcp.Write(ByteString("EHLO bedevere.tremtek.com\\r\\n")), tcpProbe.ref) tcpProbe.expectResponse("^250 localhost".r) // MAIL FROM tcp.tell(Tcp.Write(ByteString("MAIL FROM:<admin@tremtek.com>\\r\\n")), tcpProbe.ref) tcpProbe.expectResponse("^250 Ok".r) // RCPT TO tcp.tell(Tcp.Write(ByteString("RCPT TO:<jcain@tremtek.com>\\r\\n")), tcpProbe.ref) tcpProbe.expectResponse("^250 Ok".r) tcp.tell(Tcp.Write(ByteString("RCPT TO:<chanselman@tremtek.com>\\r\\n")), tcpProbe.ref) tcpProbe.expectResponse("^250 Ok".r) tcp.tell(Tcp.Write(ByteString("RCPT TO:<jec@tremtek.com>\\r\\n")), tcpProbe.ref) tcpProbe.expectResponse("^250 Ok".r) // GetRecipients server ! 
MockSMTP.GetRecipients(handlerId) expectMsg(MockSMTP.Recipients(List("jcain@tremtek.com", "chanselman@tremtek.com", "jec@tremtek.com"))) stop() } } } } "Handler" when { "createCommand()" when { "given invalid input" should { "return None" in { Handler.createCommand("HELO?") shouldBe None } } "given valid input" should { "return the correct Command" in { createCommand("HELO bedevere.tremtek.com") shouldBe Some(C_HELO("bedevere.tremtek.com")) createCommand("EHLO bedevere.tremtek.com") shouldBe Some(C_EHLO("bedevere.tremtek.com")) createCommand("MAIL FROM:<admin@tremtek.com>") shouldBe Some(C_MAIL("admin@tremtek.com")) createCommand("RCPT TO:<jcain@tremtek.com>") shouldBe Some(C_RCPT("jcain@tremtek.com")) createCommand("DATA") shouldBe Some(C_DATA) createCommand("RSET") shouldBe Some(C_RSET) createCommand("QUIT") shouldBe Some(C_QUIT) } } } "Greeting" when { "configured to respond with a busy message" should { "respond with the message" in new HandlerFixture("greeting-busy", initialGreeting = Some("554 Service unavailable")) { dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String shouldBe s"${initialGreeting.get} $handlerId\\r\\n" } stop() } } "HELO" should { "respond to HELO and go to Idle state" in new HandlerFixture("greeting-helo") { // EHLO sendData("HELO bedevere.tremtek.com") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith ("250 Hello") } stateProbe.expectMsg(Transition(handler, Greeting, Idle)) stop() } } "EHLO" should { "respond to EHLO and go to Idle state" in new HandlerFixture("greeting-ehlo") { // EHLO sendData("EHLO bedevere.tremtek.com") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith ("250 localhost") } stateProbe.expectMsg(Transition(handler, Greeting, Idle)) stop() } } } "Idle" when { "MAIL FROM" should { "accept the sender address and go to MailFrom state" in new HandlerFixture("idle-mailfrom") { // EHLO sendData("EHLO bedevere.tremtek.com") dataProbe.expectMsgPF() 
{ case Tcp.Write(str, _) => str.utf8String should startWith ("250 localhost") } stateProbe.expectMsg(Transition(handler, Greeting, Idle)) // MAIL FROM sendData("MAIL FROM:<admin@tremtek.com>") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith ("250 Ok") } stateProbe.expectMsg(Transition(handler, Idle, MailFrom)) stop() } } } "MailFrom" when { "RCPT TO" when { "recipient contains NORELAY" should { "respond with 554 message" in new HandlerFixture("mailfrom-rcptto-norelay") { // EHLO sendData("EHLO bedevere.tremtek.com") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith("250 localhost") } stateProbe.expectMsg(Transition(handler, Greeting, Idle)) // MAIL FROM sendData("MAIL FROM:<admin@tremtek.com>") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith("250 Ok") } stateProbe.expectMsg(Transition(handler, Idle, MailFrom)) // RCPT TO sendData("RCPT TO:<jcain-NORELAY@tremtek.com>") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith("554 Relay") } stateProbe.expectNoMsg(1.second) stop() } } "recipient contains NOTFOUND" should { "respond with 550 message" in new HandlerFixture("mailfrom-rcptto-notfound") { // EHLO sendData("EHLO bedevere.tremtek.com") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith("250 localhost") } stateProbe.expectMsg(Transition(handler, Greeting, Idle)) // MAIL FROM sendData("MAIL FROM:<admin@tremtek.com>") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith("250 Ok") } stateProbe.expectMsg(Transition(handler, Idle, MailFrom)) // RCPT TO sendData("RCPT TO:<jcain-NOTFOUND@tremtek.com>") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith("550 User not found") } stateProbe.expectNoMsg(1.second) stop() } } "recipient is normal" should { "accept multiple recipients and go to RcptTo state and remain" in new 
HandlerFixture("mailfrom-rcptto-ok") { // EHLO sendData("EHLO bedevere.tremtek.com") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith("250 localhost") } stateProbe.expectMsg(Transition(handler, Greeting, Idle)) // MAIL FROM sendData("MAIL FROM:<admin@tremtek.com>") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith("250 Ok") } stateProbe.expectMsg(Transition(handler, Idle, MailFrom)) // RCPT TO sendData("RCPT TO:<jcain@tremtek.com>") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith("250 Ok") } stateProbe.expectMsg(Transition(handler, MailFrom, RcptTo)) sendData("RCPT TO:<chanselman@tremtek.com>") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith("250 Ok") } stateProbe.expectNoMsg(1.second) sendData("RCPT TO:<jec@tremtek.com>") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith("250 Ok") } stateProbe.expectNoMsg(1.second) stop() } } } } "RcptTo" when { "X_GET_RCPTS" should { "reply with the recipients" in new HandlerFixture("rcptto-get-rcpts") { // EHLO sendData("EHLO bedevere.tremtek.com") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith("250 localhost") } stateProbe.expectMsg(Transition(handler, Greeting, Idle)) // MAIL FROM sendData("MAIL FROM:<admin@tremtek.com>") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith("250 Ok") } stateProbe.expectMsg(Transition(handler, Idle, MailFrom)) // RCPT TO sendData("RCPT TO:<jcain@tremtek.com>") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith("250 Ok") } stateProbe.expectMsg(Transition(handler, MailFrom, RcptTo)) sendData("RCPT TO:<chanselman@tremtek.com>") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith("250 Ok") } stateProbe.expectNoMsg(1.second) sendData("RCPT TO:<jec@tremtek.com>") dataProbe.expectMsgPF() { case 
Tcp.Write(str, _) => str.utf8String should startWith("250 Ok") } stateProbe.expectNoMsg(1.second) // X_GET_RCPTS sendData("X_GET_RCPTS") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String shouldBe "250 jcain@tremtek.com|chanselman@tremtek.com|jec@tremtek.com\\r\\n" } stop() } } "DATA" should { "go to Data state and receive text" in new HandlerFixture("rcptto-data") { // EHLO sendData("EHLO bedevere.tremtek.com") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith ("250 localhost") } stateProbe.expectMsg(Transition(handler, Greeting, Idle)) // MAIL FROM sendData("MAIL FROM:<admin@tremtek.com>") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith ("250 Ok") } stateProbe.expectMsg(Transition(handler, Idle, MailFrom)) // RCPT TO sendData("RCPT TO:<jcain@tremtek.com>") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith ("250 Ok") } stateProbe.expectMsg(Transition(handler, MailFrom, RcptTo)) // DATA sendData("DATA") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith ("354 End data with") } stateProbe.expectMsg(Transition(handler, RcptTo, Data)) // text sendData("This is a test") stateProbe.expectNoMsg(1.second) sendData("emergency broadcast") stateProbe.expectNoMsg(1.second) sendData("system.") stateProbe.expectNoMsg(1.second) sendData("This is only a test.") stateProbe.expectNoMsg(1.second) stop() } } } "Data" when { "receiving text without the terminator" should { "remain in Data state" in new HandlerFixture("data-text") { // EHLO sendData("EHLO bedevere.tremtek.com") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith ("250 localhost") } stateProbe.expectMsg(Transition(handler, Greeting, Idle)) // MAIL FROM sendData("MAIL FROM:<admin@tremtek.com>") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith ("250 Ok") } stateProbe.expectMsg(Transition(handler, Idle, 
MailFrom)) // RCPT TO sendData("RCPT TO:<jcain@tremtek.com>") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith ("250 Ok") } stateProbe.expectMsg(Transition(handler, MailFrom, RcptTo)) // DATA sendData("DATA") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith ("354 End data with") } stateProbe.expectMsg(Transition(handler, RcptTo, Data)) // text sendData("This is a test of") stateProbe.expectNoMsg(1.second) sendData("the emergency broadcast") stateProbe.expectNoMsg(1.second) sendData("system.") stateProbe.expectNoMsg(1.second) sendData("This is only a test.") stateProbe.expectNoMsg(1.second) stop() } } "receiving text followed by the terminator" should { "store the message and go to Idle state" in new HandlerFixture("data-term") { // EHLO sendData("EHLO bedevere.tremtek.com") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith ("250 localhost") } stateProbe.expectMsg(Transition(handler, Greeting, Idle)) // MAIL FROM sendData("MAIL FROM:<admin@tremtek.com>") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith ("250 Ok") } stateProbe.expectMsg(Transition(handler, Idle, MailFrom)) // RCPT TO sendData("RCPT TO:<jcain@tremtek.com>") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith ("250 Ok") } stateProbe.expectMsg(Transition(handler, MailFrom, RcptTo)) // DATA sendData("DATA") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith ("354 End data with") } stateProbe.expectMsg(Transition(handler, RcptTo, Data)) // text sendData("This is a test of") sendData("the emergency broadcast") sendData("system.") sendData("This is only a test.") // send remaining terminator in pieces List(46, 13, 10).foreach(b => handler ! 
Tcp.Received(ByteString(b))) dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith ("250 Ok: queued") } stateProbe.expectMsg(Transition(handler, Data, Idle)) stop() } } "receiving text with _NOPTR_" should { "respond with a 550 and exit" in new HandlerFixture("data-noptr") { // EHLO sendData("EHLO bedevere.tremtek.com") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith ("250 localhost") } stateProbe.expectMsg(Transition(handler, Greeting, Idle)) // MAIL FROM sendData("MAIL FROM:<admin@tremtek.com>") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith ("250 Ok") } stateProbe.expectMsg(Transition(handler, Idle, MailFrom)) // RCPT TO sendData("RCPT TO:<jcain@tremtek.com>") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith ("250 Ok") } stateProbe.expectMsg(Transition(handler, MailFrom, RcptTo)) // DATA sendData("DATA") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should startWith ("354 End data with") } stateProbe.expectMsg(Transition(handler, RcptTo, Data)) // text sendData("This is a test _NOPTR_.\\r\\n.") dataProbe.expectMsgPF() { case Tcp.Write(str, _) => str.utf8String should include regex "550.*PTR".r } lifeProbe.expectTerminated(handler) stop() } } } } }
jec/MockSMTP
src/test/scala/net/jcain/net/MockSMTPSpec.scala
Scala
bsd-3-clause
17,986
package com.lucidchart.open.nark.controllers import com.lucidchart.open.nark.request.{AppFlash, AppAction, AuthAction, DashboardAction} import com.lucidchart.open.nark.views import com.lucidchart.open.nark.models.{DashboardTagsModel, DashboardModel, TagConverter} import com.lucidchart.open.nark.models.records.{Dashboard, Pagination, TagMap} import com.lucidchart.open.nark.utils.StatsD import play.api.data._ import play.api.data.Forms._ import play.api.libs.json.Json import validation.Constraints import java.util.UUID class DashboardTagsController extends AppController { /* * Get tag and all the dashboards it is assocaited with. */ def tag(name: String) = AuthAction.maybeAuthenticatedUser { implicit userOption => AppAction {implicit request => val dashboardIds = DashboardTagsModel.findDashboardsWithTag(name).map(_.recordId) val dashboards = DashboardModel.findDashboardByID(dashboardIds).filter(!_.deleted) Ok(views.html.dashboardtags.dashboardtag(name, dashboards)) } } /* * Search tags by name. */ def search(term: String, page: Int) = AuthAction.maybeAuthenticatedUser { implicit userOption => AppAction {implicit request => val realPage = page.max(1) val (found, tags) = DashboardTagsModel.search(term, realPage - 1) val dashboardTags = DashboardTagsModel.findDashboardsWithTag(tags) val dashboards = DashboardModel.findDashboardByID(dashboardTags.map(_.recordId).distinct).filter(!_.deleted) Ok(views.html.dashboardtags.search(term, Pagination(realPage, found, DashboardModel.configuredLimit, List(TagConverter.toTagMap[Dashboard](dashboardTags, dashboards))))) } } /* * Search tags by name. Returns json formatted for jquery-tokeninput. */ def searchToJson(term: String) = AuthAction.maybeAuthenticatedUser { implicit userOption => AppAction { implicit request => val (found, matches) = DashboardTagsModel.search(term + "%", 0) Ok(Json.toJson(matches.map{ m => Json.obj("id" -> m, "name" -> m) })) } } } object DashboardTagsController extends DashboardTagsController
lucidsoftware/nark
app/com/lucidchart/open/nark/controllers/DashboardTagsController.scala
Scala
apache-2.0
2,059
package scoverage.macrosupport import scala.reflect.macros.blackbox.Context private object TesterMacro { type TesterContext = Context { type PrefixType = Tester.type } def test(c: TesterContext) = { import c.universe._ q"""println("macro test")""" } }
scoverage/scalac-scoverage-plugin
scalac-scoverage-plugin/src/test/scala-2.11+/scoverage/macrosupport/TesterMacro.scala
Scala
apache-2.0
271
package io.youi.component.support import io.youi.component.Component /** * ThemeComponent is a convenience trait presuming that its companion object is a Theme without a unique selector */ trait ThemedComponent { this: Component => classes += getClass.getSimpleName }
outr/youi
gui/src/main/scala/io/youi/component/support/ThemedComponent.scala
Scala
mit
278
/* Copyright 2009 David Hall, Daniel Ramage Licensed under the Apache License, Version 2.0 (the "License") you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package epic.features import org.scalatest._ import org.scalatest.junit._ import org.scalatest.prop._ import org.junit.runner.RunWith import scala.io.Source @RunWith(classOf[JUnitRunner]) class PorterStemmerTest extends FunSuite with Checkers { test("massive vocabulary test") { val wStream = this.getClass.getClassLoader.getResourceAsStream("lang/eng/stem/vocabulary.txt") val words = Source.fromInputStream(wStream).getLines() val sStream = this.getClass.getClassLoader.getResourceAsStream("lang/eng/stem/stemmed_vocabulary.txt") val stems = Source.fromInputStream(sStream).getLines() try { for ((w, s) <- words zip stems) { assert(PorterStemmer(w) == s, w) } } finally { wStream.close() sStream.close() } } }
langkilde/epic
src/test/scala/epic/features/PorterStemmerTest.scala
Scala
apache-2.0
1,358
package com.blstream.sbtsearchmavenplugin import java.net.{ URL, URLConnection } import sbt.Logger import scala.util.Try case class Artifact(g: String, a: String, latestVersion: String) trait Search { self: MavenOrgSearcher with QueryCleaner with ResultsParser with ArtifactsPrinter => def search(args: Seq[String], log: Logger): Unit = { val results = for { queryString <- args.headOption.toRight[Error]("usage: searchMaven queryString").right cleanedQuery <- cleanQuery(queryString).right jsonResults <- query(cleanedQuery).right artifacts <- parseResults(jsonResults).right } yield { printArtifacts(cleanedQuery)(artifacts) } results.fold( log.warn(_), log.info(_) ) } } trait ArtifactsPrinter { def printArtifacts: String => List[Artifact] => String = query => artifacts => { val separator = "%" val quotesLength = 2 val max = countMaxColumnsSizes(artifacts) artifacts.map { a => val col1Length = max._1 + quotesLength val col2Length = max._2 + quotesLength val group = s""""${a.g}"""" val artifact = s""""${a.a}"""" val version = s""""${a.latestVersion}"""" s"%-${col1Length}s %s %-${col2Length}s %s %s".format(group, separator, artifact, separator, version).trim }.mkString(s"Results for $query:\\n", "\\n", "") } private def countMaxColumnsSizes: List[Artifact] => (Int, Int) = artifacts => artifacts.foldLeft((0, 0))((m, a) => (Math.max(m._1, a.g.length), Math.max(m._2, a.a.length))) } trait QueryCleaner { def cleanQuery: String => Either[Error, String] = rawQuery => { val pattern = "[^a-zA-Z0-9-]".r val q = pattern.replaceAllIn(rawQuery, "") if (q.isEmpty) Left("Empty query, only a-zA-Z0-9- allowed") else Right(q) } } trait MavenOrgSearcher { def query: String => Either[Error, Json] = queryString => { val query = s"http://search.maven.org/solrsearch/select?q=$queryString&rows=20&wt=json" val connMaybe = prepareConnection(query) connMaybe.right.map { conn => scala.io.Source.fromInputStream(conn.getInputStream).mkString } } private def prepareConnection: String => Either[Error, 
URLConnection] = query => { Try { val conn = new URL(query).openConnection() conn.setConnectTimeout(3000) conn.setReadTimeout(3000) Right(conn) }.recover { case _: Exception => Left("Connection failure, try again later.") }.get } } trait ResultsParser { import net.liftweb.json._ implicit val formats = DefaultFormats def parseResults: Json => Either[Error, List[Artifact]] = results => { val json = parse(results) val suggestionsJson = json \\ "spellcheck" \\ "suggestions" if ((json \\ "response" \\ "numFound").extract[Int] > 0) { val artifacts = json \\ "response" \\ "docs" Right(artifacts.extract[List[Artifact]]) } else { suggestionsJson.extractOpt[List[String]] match { case Some(_) => Left("Artifact not found") case None => { val suggestions = (suggestionsJson(1) \\ "suggestion").extract[List[String]].mkString(", ") Left(s"Artifact not found, did you mean: $suggestions?") } } } } }
blstream/sbt-search-maven-plugin
src/main/scala/com/blstream/sbtsearchmavenplugin/search.scala
Scala
mit
3,346
/** * Created by tonirilix on 12/18/15. */ /** * NOTE: Scalas's List, scala.List, differs from Java's java.util.List type in that * Scala Lists are always immutable (whereas Java Lists can be mutable). */ // Creating and initializing a list val oneTwoThree = List(1, 2, 3); /** * NOTE: List in Scala behaves a bit like Java strings. When you call a method in a list * that you might expect it'll mutate the list, it'll create a new list with the new value. */ // Using method ::: to concat elements in a list val oneTwo = List(1, 2); val threeFour = List(3, 4); val oneTwoThreeFour = oneTwo ::: threeFour; println(oneTwo + " and "+ threeFour + " were not mutated"); println("Thus, " + oneTwoThreeFour + " is a new list"); /** * NOTE: If a method is used in operator notation, such as a * b, the method is invoked on the * left operand, as in a.*(b) -- unless the method name ends in a colon. If the method name * ends in a colon, the method is invoked on twoThree, passing 1, like this: twoThree.::(1) * * This is a simple example of operator associativity. */ // Using :: (It's pronounced cons) for prepend a new element to the beginning of an existing list val twoThree2 = List(2, 3); val oneTwoThree2 = 1 :: twoThree2; println(oneTwoThree2); /** * NOTE: "Nil" is used to specify an empty list. The explanation of why you need to put Nil at the end in the bellow code * is that :: is defined on class List. If you try to just say 1 :: 2 :: 3, it won't compile because 3 is an Int, * which doesn't have a :: method */ val oneTwoThree3 = 1 :: 2 :: 3 :: Nil; println("oneTwoThree3 " + oneTwoThree3); println("oneTwoThree3 reverse "+ oneTwoThree3.reverse); /** * NOTE: List offers an append method, it's written :+, but * it's rarely used, because the time it takes to append to a list grows linearly with the size * of the list. * The best option is to prepend and then call "reverse" or use a ListBuffer and then call toList */
tonirilix/lab_scala
03_NextStepsInScala/src/useList.scala
Scala
mit
1,989
/* * Copyright 2017 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.hmrc.bforms.typeclasses import java.time.LocalDateTime import java.time.format.DateTimeFormatter import scala.xml.Elem sealed trait Attribute[T] { def attribute(name: String, values: List[T]): Elem protected def createAttribute(name: String, tpe: String, values: List[String]): Elem = { val attributeElem = <attribute> <attribute_name>{ name }</attribute_name> <attribute_type>{ tpe }</attribute_type> </attribute> val attributeValues = createAttributeValues(values) attributeElem.copy(child = attributeElem.child ++ attributeValues) } private def createAttributeValues(values: List[String]): Elem = { val child = values.map(value => <attribute_value>{ value }</attribute_value>) val attributeValues = <attribute_values></attribute_values> attributeValues.copy(child = child) } } object Attribute { implicit object string extends Attribute[String] { def attribute(a: String, c: List[String]): Elem = { createAttribute(a, "string", c) } } implicit object int extends Attribute[Int] { def attribute(a: String, c: List[Int]): Elem = { createAttribute(a, "int", c.map(_.toString)) } } implicit object long extends Attribute[Long] { def attribute(a: String, c: List[Long]): Elem = { createAttribute(a, "integer", c.map(_.toString)) } } implicit object localDateTime extends Attribute[LocalDateTime] { val formatter = DateTimeFormatter.ofPattern("dd/MM/yyyy HH:mm:ss") def attribute(a: String, c: 
List[LocalDateTime]): Elem = { createAttribute(a, "time", c.map(date => date.format(formatter))) } } }
VlachJosef/bforms
app/uk/gov/hmrc/bforms/typeclasses/Attribute.scala
Scala
apache-2.0
2,262
package io.mediachain.util import com.orientechnologies.orient.core.id.ORecordId import io.mediachain.Types._ import io.mediachain.XorMatchers import org.specs2.Specification object HashingSpec extends Specification with XorMatchers { def is = s2""" $hashesCanonical - Hashes a Canonical, ignoring the 'id' field $differentDataDifferentHash - Non-identical data produces non-identical hash """ def hashesCanonical = { val canonical = Canonical(None, "foobar") val canonicalWithId = Canonical(Some(new ORecordId("#0:1")), "foobar") canonical.multiHash must_== canonicalWithId.multiHash } def differentDataDifferentHash = { val blob1 = ImageBlob(None, "Dogs playing backgammon", "Awww, they think they're people!", "March 15th 2016") val blob2 = blob1.copy(date = "March 15th, 2016") blob1.multiHash must_!= blob2.multiHash } }
mediachain/L-SPACE
l_space/src/test/scala/io/mediachain/util/HashingSpec.scala
Scala
mit
915
package grpcgateway.server import grpcgateway.handlers.GrpcGatewayHandler import io.netty.bootstrap.ServerBootstrap import io.netty.channel.{ChannelFuture, EventLoopGroup} class GrpcGatewayServer private[server] ( port: Int, bootstrap: ServerBootstrap, masterGroup: EventLoopGroup, slaveGroup: EventLoopGroup, services: List[GrpcGatewayHandler] ) { private var channel: Option[ChannelFuture] = None def start(): Unit = { channel = Option(bootstrap.bind(port).sync()) } def shutdown(): Unit = { slaveGroup.shutdownGracefully() masterGroup.shutdownGracefully() services.foreach(_.shutdown()) channel.foreach(_.channel().closeFuture().sync()) } }
btlines/grpcgateway
runtime/src/main/scala/grpcgateway/server/GrpcGatewayServer.scala
Scala
mit
708
//############################################################################ // Programmation IV - 2002 - Week 02 //############################################################################ object M0 { def gcd(a: Int, b: Int): Int = if (b == 0) a else gcd(b, a % b) def factorial(n: Int): Int = if (n == 0) 1 else n * factorial(n - 1) Console.println(gcd(14,21)) Console.println(factorial(5)) Console.println() } //############################################################################ object M1 { def cube(x: Int): Double = x * x * x def sumInts(a: Int, b: Int): Double = if (a > b) 0 else a + sumInts(a + 1, b); def sumCubes(a: Int, b: Int): Double = if (a > b) 0 else cube(a) + sumCubes(a + 1, b); def sumReciprocals(a: Int, b: Int): Double = if (a > b) 0 else 1.0/a + sumReciprocals(a + 1, b); def sumPi(n: Int): Double = { def element(x: Int): Double = 4.0/(4*x+1) - 4.0/(4*x-1); def sumElements(a: Int, b: Int): Double = if (a > b) 0 else element(a) + sumElements(a + 1, b); 4 + sumElements(1,n) } Console.println(sumInts(1,4)) Console.println(sumCubes(1,4)) Console.println(sumReciprocals(1,4)) Console.println(sumCubes(1, 10) + sumReciprocals(10, 20)) Console.println("pi = " + sumPi(20)) Console.println() } //############################################################################ object M2 { def id(x: Int): Double = x; def cube(x: Int): Double = x * x * x; def reciprocal(x: Int): Double = 1.0/x; def sum(f: Int => Double, a: Int, b: Int): Double = if (a > b) 0 else f(a) + sum(f, a + 1, b); def sumInts(a: Int, b: Int): Double = sum(id, a, b); def sumCubes(a: Int, b: Int): Double = sum(cube, a, b); def sumReciprocals(a: Int, b: Int): Double = sum(reciprocal, a, b); def sumPi(n: Int): Double = { def element(x: Int): Double = 4.0/(4*x+1) - 4.0/(4*x-1); 4 + sum(element, 1, n) } Console.println(sumInts(1,4)) Console.println(sumCubes(1,4)) Console.println(sumReciprocals(1,4)) Console.println(sumCubes(1, 10) + sumReciprocals(10, 20)) Console.println("pi = " + 
sumPi(20)) Console.println() } //############################################################################ object M3 { def sum(f: Int => Double, a: Int, b: Int): Double = if (a > b) 0 else f(a) + sum(f, a + 1, b); def sumInts(a: Int, b: Int): Double = sum((xXXXXX => xXXXXX), a, b); def sumCubes(a: Int, b: Int): Double = sum((x => x * x * x), a, b); def sumReciprocals(a: Int, b: Int): Double = sum((x => 1.0/x), a, b); def sumPi(n: Int): Double = 4 + sum((x => 4.0/(4*x+1) - 4.0/(4*x-1)), 1, n); Console.println(sumInts(1,4)) Console.println(sumCubes(1,4)) Console.println(sumReciprocals(1,4)) Console.println(sumCubes(1, 10) + sumReciprocals(10, 20)) Console.println("pi = " + sumPi(20)) Console.println() } //############################################################################ object M4 { def sum(f: Int => Double): (Int, Int) => Double = { def sumF(a: Int, b: Int): Double = if (a > b) 0 else f(a) + sumF(a + 1, b); sumF } def sumInts = sum(x => x) def sumCubes = sum(x => x * x * x) def sumReciprocals = sum(1.0/_) def sumPi = { n: Int => 4 + sum(x => 4.0/(4*x+1) - 4.0/(4*x-1))(1, n) } Console.println(sumInts(1,4)) Console.println(sumCubes(1,4)) Console.println(sumReciprocals(1,4)) Console.println(sumCubes(1, 10) + sumReciprocals(10, 20)) Console.println("pi = " + sumPi(20)) Console.println() } //############################################################################ object M5 { def sum(f: Int => Double): (Int, Int) => Double = { (a, b) => if (a > b) 0 else f(a) + sum(f)(a + 1, b) } def sumInts = sum(x => x) def sumCubes = sum(x => x * x * x) def sumReciprocals = sum(x => 1.0/x) def sumPi = { n: Int => 4 + sum(x => 4.0/(4*x+1) - 4.0/(4*x-1))(1, n) } Console.println(sumInts(1,4)) Console.println(sumCubes(1,4)) Console.println(sumReciprocals(1,4)) Console.println(sumCubes(1, 10) + sumReciprocals(10, 20)) Console.println("pi = " + sumPi(20)) Console.println() } //############################################################################ object M6 { def sum(f: 
Int => Double)(a: Int, b: Int): Double = if (a > b) 0 else f(a) + sum(f)(a + 1, b); def sumInts = sum(x => x)_ def sumCubes = sum(x => x * x * x)_ def sumReciprocals = sum(x => 1.0/x)_ def sumPi = { n: Int => 4 + sum(x => 4.0/(4*x+1) - 4.0/(4*x-1))(1, n) } Console.println(sumInts(1,4)) Console.println(sumCubes(1,4)) Console.println(sumReciprocals(1,4)) Console.println(sumCubes(1, 10) + sumReciprocals(10, 20)) Console.println("pi = " + sumPi(20)) Console.println() } //############################################################################ object M7 { def sum(f: Int => Double)(a: Int, b: Int): Double = { def iter(a: Int, result: Double): Double = if (a > b) result else iter(a + 1, f(a) + result); iter(a, 0) } def sumInts = sum(x => x)_ def sumCubes = sum(x => x * x * x)_ def sumReciprocals = sum(x => 1.0/x)_ def sumPi = { n: Int => 4 + sum(x => 4.0/(4*x+1) - 4.0/(4*x-1))(1, n) } Console.println(sumInts(1,4)) Console.println(sumCubes(1,4)) Console.println(sumReciprocals(1,4)) Console.println(sumCubes(1, 10) + sumReciprocals(10, 20)) Console.println("pi = " + sumPi(20)) Console.println() } //############################################################################ object M8 { def product(f: Int => Double)(a: Int, step: Int, b: Int): Double = if (a > b) 1 else f(a) * product(f)(a + step, step, b); def productPi = { n: Int => product(x=>4.0*x*x/(2*x-1)/(2*x-1))(1,1,n)/n } val pi = 2 * product(x => x * x)(2, 2, 40) / product(x => x * x)(1, 2,40)/40; Console.println("pi = " + productPi(20)) Console.println("pi = " + pi) Console.println() } //############################################################################ object M9 { def accumulate[t](combiner: (t, t) => t, nullValue: t, f: Int => t, next: Int => Int)(a: Int, b: Int): t = if (a > b) nullValue else combiner(f(a), accumulate(combiner, nullValue, f, next)(next(a), b)) def inc(x: Int) = x + 1 def sum(f: Int => Double): (Int, Int) => Double = accumulate((x: Double, y: Double) => x + y, 0d, f, inc) def 
product(f: Int => Double): (Int, Int) => Double = accumulate((x: Double, y: Double) => x * y, 1d, f, inc) def sumInts = sum(x => x) def sumCubes = sum(x => x * x * x) def sumReciprocals = sum(x => 1.0 / x) def sumPi = { n: Int => 4 + sum(x => 4.0/(4*x+1) - 4.0/(4*x-1))(1, n) } def productPi = { n: Int => product(x=>4.0*x*x/(2*x-1)/(2*x-1))(1,n)/n } val pi = 2*product(x => 2*x*2*x)(1,20)/product(x =>(2*x-1)*(2*x-1))(1,20)/40 Console.println(sumInts(1, 4)) Console.println(sumCubes(1, 4)) Console.println(sumReciprocals(1, 4)) Console.println(sumCubes(1, 10) + sumReciprocals(10, 20)) Console.println("pi = " + sumPi(20)) Console.println("pi = " + productPi(20)) Console.println("pi = " + pi) Console.println() } //############################################################################ object MA { val tolerance = 0.0001 def abs(x: Double) = if (x < 0) -x else x def isCloseEnough(x: Double, y: Double) = abs((x - y) / x) < tolerance def fixedPoint(f: Double => Double)(firstGuess: Double) = { def iterate(guess: Double): Double = { val next = f(guess); Console.println(next); if (isCloseEnough(guess, next)) next else iterate(next) } iterate(firstGuess) } def sqrt(x: Double) = fixedPoint(y => (y + x / y) / 2)(1.0) Console.println("sqrt(2) = " + sqrt(2)) Console.println() } //############################################################################ object MB { val tolerance = 0.0001; def abs(x: Double) = if (x < 0) -x else x; def isCloseEnough(x: Double, y: Double) = abs((x - y) / x) < tolerance; def fixedPoint(f: Double => Double)(firstGuess: Double) = { def iterate(guess: Double): Double = { val next = f(guess); Console.println(next); if (isCloseEnough(guess, next)) next else iterate(next) } iterate(firstGuess) } def averageDamp(f: Double => Double)(x: Double) = (x + f(x)) / 2; def sqrt(x: Double) = fixedPoint(averageDamp(y => x/y))(1.0); Console.println("sqrt(2) = " + sqrt(2)) Console.println() } 
//############################################################################ object MC { def sum(f: Int => Double)(a: Int, b: Int): Double = { def iter(a: Int, result: Double): Double = { if (a > b) result else iter(a + 1, result + f(a)) } iter(a, 0) } def product(f: Int => Double)(a: Int, b: Int): Double = { def iter(a: Int, result: Double): Double = { if (a > b) result else iter(a + 1, result * f(a)) } iter(a, 1) } def factorial(n: Int) = product(x => x)(1 , n) Console.println( "1 + 2 + .. + 5 = " + sum(x => x)(1, 5)); Console.println( "1 * 2 * .. * 5 = " + product(x => x)(1, 5)); Console.println() Console.println( "1^2 + 2^2 + .. + 5^2 = " + sum(x => x*x)(1, 5)); Console.println( "1^2 * 2^2 * .. * 5^2 = " + product(x => x*x)(1, 5)); Console.println() Console.println( "factorial(0) = " + factorial(0)) Console.println( "factorial(1) = " + factorial(1)) Console.println( "factorial(2) = " + factorial(2)) Console.println( "factorial(3) = " + factorial(3)) Console.println( "factorial(4) = " + factorial(4)) Console.println( "factorial(5) = " + factorial(5)) Console.println() } //############################################################################ object MD { def reduce(op: (Double,Double) => Double, zero:Double)(f: Int => Double)(a: Int,b: Int): Double = { def iter(a: Int, result: Double): Double = { if (a > b) result else iter(a + 1, op(result, f(a))) } iter(a, zero) } def plus (x:Double,y:Double) = x+y; val sum: (Int => Double) => (Int, Int) => Double = reduce(plus , 0); def times(x:Double,y:Double) = x*y; val product: (Int => Double) => (Int, Int) => Double = reduce(times, 1); def factorial(n: Int) = product(x => x)(1 , n) Console.println( "1 + 2 + .. + 5 = " + sum(x => x)(1, 5)) Console.println( "1 * 2 * .. * 5 = " + product(x => x)(1, 5)) Console.println() Console.println( "1^2 + 2^2 + .. + 5^2 = " + sum(x => x*x)(1, 5)) Console.println( "1^2 * 2^2 * .. 
* 5^2 = " + product(x => x*x)(1, 5)) Console.println() Console.println( "factorial(0) = " + factorial(0)) Console.println( "factorial(1) = " + factorial(1)) Console.println( "factorial(2) = " + factorial(2)) Console.println( "factorial(3) = " + factorial(3)) Console.println( "factorial(4) = " + factorial(4)) Console.println( "factorial(5) = " + factorial(5)) Console.println() } //############################################################################ object ME { def reduce(op: (Double,Double) => Double, zero:Double)(f: Int => Double)(a: Int,b: Int): Double = { def iter(a: Int, result: Double): Double = { if (a > b) result else iter(a + 1, op(result, f(a))) } iter(a, zero) } def sum: (Int => Double) => (Int, Int) => Double = reduce((x,y) => x + y, 0); def product: (Int => Double) => (Int, Int) => Double = reduce((x,y) => x * y, 1); def factorial(n: Int) = product(x => x)(1 , n) Console.println( "1 + 2 + .. + 5 = " + sum(x => x)(1, 5)) Console.println( "1 * 2 * .. * 5 = " + product(x => x)(1, 5)) Console.println() Console.println( "1^2 + 2^2 + .. + 5^2 = " + sum(x => x*x)(1, 5)) Console.println( "1^2 * 2^2 * .. 
* 5^2 = " + product(x => x*x)(1, 5)) Console.println() Console.println( "factorial(0) = " + factorial(0)) Console.println( "factorial(1) = " + factorial(1)) Console.println( "factorial(2) = " + factorial(2)) Console.println( "factorial(3) = " + factorial(3)) Console.println( "factorial(4) = " + factorial(4)) Console.println( "factorial(5) = " + factorial(5)) Console.println() } //############################################################################ object MF { def fib(x: Int): Int = if (x <= 1) x else fib(x - 2) + fib(x - 1) Console.println("fib(0) = " + fib(0)) Console.println("fib(1) = " + fib(1)) Console.println("fib(2) = " + fib(2)) Console.println("fib(3) = " + fib(3)) Console.println("fib(4) = " + fib(4)) Console.println("fib(5) = " + fib(5)) Console.println("fib(6) = " + fib(6)) Console.println("fib(7) = " + fib(7)) Console.println("fib(8) = " + fib(8)) Console.println("fib(9) = " + fib(9)) } //############################################################################ object MG { def fib(x: Int) = { def loop(n: Int, prev: Int, fibn: Int): Int = if (n == x) fibn else loop(n + 1, fibn, fibn + prev) if (x == 0) 0 else loop(1, 0, 1) } Console.println("fib(0) = " + fib(0)) Console.println("fib(1) = " + fib(1)) Console.println("fib(2) = " + fib(2)) Console.println("fib(3) = " + fib(3)) Console.println("fib(4) = " + fib(4)) Console.println("fib(5) = " + fib(5)) Console.println("fib(6) = " + fib(6)) Console.println("fib(7) = " + fib(7)) Console.println("fib(8) = " + fib(8)) Console.println("fib(9) = " + fib(9)) } //############################################################################ object MH { def power(x: Double, y: Int): Double = if (y <= 0) 1 else if (y % 2 == 0) power(x * x, y / 2) else x * power(x, y - 1); Console.println("power(0,0) = " + power(0,0)) Console.println("power(0,1) = " + power(0,1)) Console.println("power(0,2) = " + power(0,2)) Console.println("power(0,3) = " + power(0,3)) Console.println("power(0,4) = " + power(0,4)) 
Console.println("power(0,5) = " + power(0,5)) Console.println("power(0,6) = " + power(0,6)) Console.println("power(0,7) = " + power(0,7)) Console.println("power(0,8) = " + power(0,8)) Console.println() Console.println("power(1,0) = " + power(1,0)) Console.println("power(1,1) = " + power(1,1)) Console.println("power(1,2) = " + power(1,2)) Console.println("power(1,3) = " + power(1,3)) Console.println("power(1,4) = " + power(1,4)) Console.println("power(1,5) = " + power(1,5)) Console.println("power(1,6) = " + power(1,6)) Console.println("power(1,7) = " + power(1,7)) Console.println("power(1,8) = " + power(1,8)) Console.println() Console.println("power(2,0) = " + power(2,0)) Console.println("power(2,1) = " + power(2,1)) Console.println("power(2,2) = " + power(2,2)) Console.println("power(2,3) = " + power(2,3)) Console.println("power(2,4) = " + power(2,4)) Console.println("power(2,5) = " + power(2,5)) Console.println("power(2,6) = " + power(2,6)) Console.println("power(2,7) = " + power(2,7)) Console.println("power(2,8) = " + power(2,8)) Console.println() Console.println("power(3,0) = " + power(3,0)) Console.println("power(3,1) = " + power(3,1)) Console.println("power(3,2) = " + power(3,2)) Console.println("power(3,3) = " + power(3,3)) Console.println("power(3,4) = " + power(3,4)) Console.println("power(3,5) = " + power(3,5)) Console.println("power(3,6) = " + power(3,6)) Console.println("power(3,7) = " + power(3,7)) Console.println("power(3,8) = " + power(3,8)) Console.println() Console.println("power(4,0) = " + power(4,0)) Console.println("power(4,1) = " + power(4,1)) Console.println("power(4,2) = " + power(4,2)) Console.println("power(4,3) = " + power(4,3)) Console.println("power(4,4) = " + power(4,4)) Console.println("power(4,5) = " + power(4,5)) Console.println("power(4,6) = " + power(4,6)) Console.println("power(4,7) = " + power(4,7)) Console.println("power(4,8) = " + power(4,8)) Console.println() Console.println("power(5,0) = " + power(5,0)) 
Console.println("power(5,1) = " + power(5,1)) Console.println("power(5,2) = " + power(5,2)) Console.println("power(5,3) = " + power(5,3)) Console.println("power(5,4) = " + power(5,4)) Console.println("power(5,5) = " + power(5,5)) Console.println("power(5,6) = " + power(5,6)) Console.println("power(5,7) = " + power(5,7)) Console.println("power(5,8) = " + power(5,8)) Console.println() } //############################################################################ object Test { def main(args: Array[String]): Unit = { M0 M1 M2 M3 M4 M5 M6 M7 M8 M9 MA MB MC MD ME MF MG MH () } } //############################################################################
scala/scala
test/files/run/Course-2002-02.scala
Scala
apache-2.0
16,831
package cc.factorie.app.nlp.ner import java.io._ import cc.factorie.app.nlp.lexicon.{LexiconsProvider, StaticLexicons} import cc.factorie._ import cc.factorie.app.chain.{ChainModel, SegmentEvaluation} import cc.factorie.app.nlp._ import cc.factorie.optimize.{AdaGrad, ParameterAveraging, Trainer} import cc.factorie.util._ import cc.factorie.variable._ import scala.reflect.{ClassTag, classTag} import cc.factorie.app.nlp.lemma.LowercaseLemmatizer /** * NER tagger for the CoNLL 2003 corpus * * Training time: ~3 minutes (on blake, 30 Oct. 4:00pm) * tokens per second: 8431.02310444517 * docs per second: 48.24287793720109 (avg doc length = 200 tokens) * * CoNLL 2003 dev set (eng.testa) * OVERALL f1=0.933593 p=0.939802 r=0.927465 (tp=5511 fp=353 fn=431 true=5942 pred=5864) acc=0.985865 (50636/51362) * LOC f1=0.965931 p=0.967249 r=0.964616 (tp=1772 fp=60 fn=65 true=1837 pred=1832) * MISC f1=0.876404 p=0.909091 r=0.845987 (tp=780 fp=78 fn=142 true=922 pred=858) * ORG f1=0.892065 p=0.899848 r=0.884415 (tp=1186 fp=132 fn=155 true=1341 pred=1318) * PER f1=0.958897 p=0.955280 r=0.962541 (tp=1773 fp=83 fn=69 true=1842 pred=1856) * * CoNLL 2003 test set (eng.testb) * OVERALL f1=0.885633 p=0.888315 r=0.882967 (tp=4987 fp=627 fn=661 true=5648 pred=5614) acc=0.973253 (45193/46435) * LOC f1=0.915375 p=0.909953 r=0.920863 (tp=1536 fp=152 fn=132 true=1668 pred=1688) * MISC f1=0.791034 p=0.803231 r=0.779202 (tp=547 fp=134 fn=155 true=702 pred=681) * ORG f1=0.842767 p=0.838498 r=0.847080 (tp=1407 fp=271 fn=254 true=1661 pred=1678) * PER f1=0.940327 p=0.955329 r=0.925788 (tp=1497 fp=70 fn=120 true=1617 pred=1567) * */ class ConllChainNer(implicit mp:ModelProvider[ConllChainNer], nerLexiconFeatures:NerLexiconFeatures) extends ChainNer[BilouConllNerTag]( BilouConllNerDomain, (t, s) => new BilouConllNerTag(t, s), l => l.token, mp.provide, nerLexiconFeatures) with Serializable { def loadDocs(fileName: String): Seq[Document] = 
cc.factorie.app.nlp.load.LoadConll2003(BILOU=true).fromFilename(fileName) def newSpan(sec: Section, start: Int, length: Int, category: String) = new ConllNerSpan(sec, start, length, category) def newBuffer = new ConllNerSpanBuffer } //TODO this serialized model doesn't exist yet? object ConllChainNer extends ConllChainNer()(ModelProvider.classpath(), StaticLexiconFeatures()) with Serializable class OntonotesChainNer()(implicit mp:ModelProvider[OntonotesChainNer], nerLexiconFeatures:NerLexiconFeatures) extends ChainNer[BilouOntonotesNerTag](BilouOntonotesNerDomain, (t, s) => new BilouOntonotesNerTag(t, s), l => l.token, mp.provide, nerLexiconFeatures) { def newBuffer = new OntonotesNerSpanBuffer() def newSpan(sec: Section, start: Int, length: Int, category: String) = new OntonotesNerSpan(sec, start, length, category) } object OntonotesChainNer extends OntonotesChainNer()(ModelProvider.classpath(), StaticLexiconFeatures()) /** * A base class for finite-state named entity recognizers */ abstract class ChainNer[L<:NerTag](val labelDomain: CategoricalDomain[String] with SpanEncoding, val newLabel: (Token, String) => L, labelToToken: L => Token, modelIs: InputStream=null, nerLexiconFeatures: NerLexiconFeatures)(implicit m: ClassTag[L]) extends DocumentAnnotator with Serializable { val prereqAttrs = Seq(classOf[Sentence]) val postAttrs = Seq(m.runtimeClass) val FEATURE_PREFIX_REGEX = "^[^@]*$".r def process(document:Document) = if(document.tokenCount > 0) { if (!document.tokens.head.attr.contains(m.runtimeClass)) document.tokens.map(token => token.attr += newLabel(token, "O")) if (!document.tokens.head.attr.contains(classOf[ChainNERFeatures])) { document.tokens.map(token => {token.attr += new ChainNERFeatures(token)}) addFeatures(document, (t:Token)=>t.attr[ChainNERFeatures]) } document.sentences.collect { case sentence if sentence.nonEmpty => val vars = sentence.tokens.map(_.attr[L]).toSeq model.maximize(vars)(null) } document } else { document } def 
tokenAnnotationString(token: Token) = token.attr[L].categoryValue object ChainNERFeaturesDomain extends CategoricalVectorDomain[String] class ChainNERFeatures(val token: Token) extends BinaryFeatureVectorVariable[String] { def domain = ChainNERFeaturesDomain override def skipNonCategories = true } class ChainNERModel[Features <: CategoricalVectorVar[String]:ClassTag](featuresDomain: CategoricalVectorDomain[String], labelToFeatures: L => Features, labelToToken: L => Token, tokenToLabel: Token => L) extends ChainModel[L, Features, Token](labelDomain, featuresDomain, labelToFeatures, labelToToken, tokenToLabel) val model = new ChainNERModel[ChainNERFeatures](ChainNERFeaturesDomain, l => labelToToken(l).attr[ChainNERFeatures], labelToToken, t => t.attr[L]) val objective = cc.factorie.variable.HammingObjective if (modelIs != null) { deserialize(modelIs) ChainNERFeaturesDomain.freeze() println("found model") } def serialize(stream: java.io.OutputStream): Unit = { import cc.factorie.util.CubbieConversions._ val is = new DataOutputStream(new BufferedOutputStream(stream)) BinarySerializer.serialize(ChainNERFeaturesDomain.dimensionDomain, is) BinarySerializer.serialize(model, is) is.close() } def deserialize(stream: java.io.InputStream): Unit = { import cc.factorie.util.CubbieConversions._ val is = new DataInputStream(new BufferedInputStream(stream)) BinarySerializer.deserialize(ChainNERFeaturesDomain.dimensionDomain, is) BinarySerializer.deserialize(model, is) is.close() } def prefix( prefixSize : Int, cluster : String ) : String = if(cluster.length > prefixSize) cluster.substring(0, prefixSize) else cluster val clusters = JavaHashMap[String, String]() def addFeatures(document: Document, vf: Token => CategoricalVectorVar[String]): Unit = { document.annotators(classOf[ChainNERFeatures]) = ChainNer.this.getClass import cc.factorie.app.strings.simplifyDigits val tokenSequence = document.tokens.toIndexedSeq nerLexiconFeatures.addLexiconFeatures(tokenSequence, vf) for (token <- 
document.tokens) { val features = vf(token) val rawWord = token.string val word = simplifyDigits(rawWord).toLowerCase features += s"W=$word" features += s"SHAPE=${cc.factorie.app.strings.stringShape(rawWord, 2)}" if (token.isPunctuation) features += "PUNCTUATION" if (clusters.nonEmpty && clusters.contains(rawWord)) { features += "CLUS="+prefix(4,clusters(rawWord)) features += "CLUS="+prefix(6,clusters(rawWord)) features += "CLUS="+prefix(10,clusters(rawWord)) features += "CLUS="+prefix(20,clusters(rawWord)) } } for (sentence <- document.sentences) { cc.factorie.app.chain.Observations.addNeighboringFeatures(sentence.tokens,vf,FEATURE_PREFIX_REGEX,-2,2) } val tokenBuffer = new CircularBuffer[CategoricalVectorVar[String]](4) val stringBuffer = new CircularBuffer[String](4) // This is a separate iteration as combining them would be semantically different due to addNeighbouringFeatures(). for (token <- document.tokens) { val tokenStr = token.string val tokenFeatures = vf(token) val simpleLowerStr = simplifyDigits(tokenStr).toLowerCase() if (simpleLowerStr.length < 5){ tokenFeatures += "P="+cc.factorie.app.strings.prefix(simpleLowerStr, 4) tokenFeatures += "S="+cc.factorie.app.strings.suffix(simpleLowerStr, 4) } val nextStr = "NEXTWINDOW="+simpleLowerStr // Add features from window of 4 words before and after var i = 0 while (i < 4) { val curTok = tokenBuffer(i) if (curTok != null) { curTok += nextStr // add next window feature to the token history } val prevStr = stringBuffer(i) if (prevStr != null) { tokenFeatures += prevStr // add previous window feature to the current token } i += 1 } tokenBuffer += vf(token) stringBuffer += "PREVWINDOW="+simpleLowerStr } val tokenMap = JavaHashMap[String,Seq[String]]() for (token <- document.tokens) { val tokenStr = token.string if (token.isCapitalized && token.string.length > 1) { if (!tokenMap.contains(tokenStr)) { //First mention of this token tokenMap += (tokenStr -> vf(token).activeCategories.map(f => "FIRSTMENTION=" + f)) } 
else { //Add first mention features vf(token) ++= tokenMap(tokenStr) } } } document.tokens.foreach(t => if (t.string.matches("[A-Za-z]+")) vf(t) ++= t.charNGrams(2,5).map(n => "NGRAM="+n)) } def sampleOutputString(tokens: Iterable[Token]): String = { val sb = new StringBuffer for (token <- tokens) sb.append( "%s %20s %10s %10s\n".format( if (token.attr[L with LabeledMutableCategoricalVar[String]].valueIsTarget) " " else "*", token.string, token.attr[L with LabeledMutableCategoricalVar[String]].target.categoryValue, token.attr[L].categoryValue)) sb.toString } def train(trainDocs: Seq[Document], testDocs: Seq[Document], rate: Double=0.18, delta: Double=0.066)(implicit random: scala.util.Random): Double = { def labels(docs: Iterable[Document]): Iterable[L with LabeledMutableDiscreteVar] = { docs.flatMap(doc => doc.tokens.map(_.attr[L with LabeledMutableDiscreteVar])) } println("initializing training features...") (trainDocs ++ testDocs).foreach(_.tokens.map(token => token.attr += new ChainNERFeatures(token))) trainDocs.foreach(addFeatures(_, (t:Token)=>t.attr[ChainNERFeatures])) ChainNERFeaturesDomain.freeze() println("initializing testing features...") testDocs.foreach(addFeatures(_, (t:Token)=>t.attr[ChainNERFeatures])) println(sampleOutputString(trainDocs.take(20).last.tokens.take(100))) val trainLabels = labels(trainDocs).toIndexedSeq val testLabels = labels(testDocs).toIndexedSeq val labelDomain: CategoricalDomain[String] = trainLabels.head.domain.asInstanceOf[CategoricalDomain[String]] (trainLabels ++ testLabels).foreach(_.setRandomly) val examples = trainDocs.flatMap(_.sentences.filter(_.length > 1).map(sentence => new model.ChainLikelihoodExample(sentence.tokens.map(_.attr[L with LabeledMutableDiscreteVar])))) val optimizer = new AdaGrad(rate=rate, delta=delta) with ParameterAveraging def evaluate(){ val segmentEvaluation = new SegmentEvaluation[L with CategoricalLabeling[String]]( labelDomain.categories.filter(_.length > 2).map(_.substring(2)), "(B|U)-", 
"(I|L)-" ) trainDocs.foreach(doc => { process(doc) for (sentence <- doc.sentences) segmentEvaluation += sentence.tokens.map(_.attr[L with CategoricalLabeling[String]]) }) println(s"Train accuracy ${objective.accuracy(trainLabels)}") println(segmentEvaluation) if (testDocs.nonEmpty) { val testSegmentEvaluation = new SegmentEvaluation[L with LabeledMutableCategoricalVar[String]]( labelDomain.categories.filter(_.length > 2).map(_.substring(2)), "(B|U)-", "(I|L)-" ) testDocs.foreach(doc => { process(doc) for (sentence <- doc.sentences) testSegmentEvaluation += sentence.tokens.map(_.attr[L with CategoricalLabeling[String]]) }) println(s"Test accuracy ${objective.accuracy(testLabels)}") println(testSegmentEvaluation) } println(model.parameters.tensors.sumInts(t => t.toSeq.count(x => x == 0)).toFloat/model.parameters.tensors.sumInts(_.length)+" sparsity") } println(s"training with ${examples.length} examples") Trainer.onlineTrain(model.parameters, examples, optimizer=optimizer, evaluate=evaluate, maxIterations = 5) val finalEval = new SegmentEvaluation[L with LabeledMutableCategoricalVar[String]](labelDomain.categories.filter(_.length > 2).map(_.substring(2)), "(B|U)-", "(I|L)-") val buf = new StringBuffer buf.append(new LabeledDiscreteEvaluation(testDocs.flatMap(_.tokens.map(_.attr[L with LabeledMutableDiscreteVar])))) for (doc <- testDocs; sentence <- doc.sentences) finalEval += sentence.tokens.map(_.attr[L with LabeledMutableCategoricalVar[String]]) println("final results:") println(finalEval) finalEval.f1 } def printEvaluation(trainDocs: Iterable[Document], testDocs: Iterable[Document], iteration: String): Double = { println(s"TRAIN ${evaluationString(trainDocs)}") val result = evaluationString(testDocs) println(s"TEST $result") result } def evaluationString(documents: Iterable[Document]): Double = { val buf = new StringBuffer buf.append(new LabeledDiscreteEvaluation(documents.flatMap(_.tokens.map(_.attr[L with LabeledMutableDiscreteVar])))) val segmentEvaluation = 
new cc.factorie.app.chain.SegmentEvaluation[L with LabeledMutableCategoricalVar[String]](labelDomain.categories.filter(_.length > 2).map(_.substring(2)), "(B|U)-", "(I|L)-") for (doc <- documents; sentence <- doc.sentences) segmentEvaluation += sentence.tokens.map(_.attr[L with LabeledMutableCategoricalVar[String]]) println(s"Segment evaluation $segmentEvaluation") segmentEvaluation.f1 } } class ChainNerOpts extends cc.factorie.util.CmdOptions with SharedNLPCmdOptions with ModelProviderCmdOptions with DefaultCmdOptions { val saveModel = new CmdOption("save-model", "CoNLLChainNer.factorie", "FILE", "Filename for the model (saving a trained model or reading a running model.") val serialize = new CmdOption("serialize", true, "BOOLEAN", "Whether to serialize at all") val train = new CmdOption("train", List.empty[File], "List[File]", "Filename(s) from which to read training data in CoNLL 2003 one-word-per-lineformat.") val test = new CmdOption("test", List.empty[File], "List[File]", "Filename(s) from which to read test data in CoNLL 2003 one-word-per-lineformat.") val brownClusFile = new CmdOption("brown", "brownBllipClusters", "FILE", "File containing brown clusters.") val trainDir = new CmdOption("train-dir", new File(""), "Dir", "Path to directory of training data.") val testDir = new CmdOption("test-dir", new File(""), "Dir", "Path to directory of test data.") val rate = new CmdOption("rate", 0.18, "DOUBLE", "learning rate") val delta = new CmdOption("delta", 0.066, "DOUBLE", "learning delta") val modelFile = new CmdOption("model-file", "", "STRING", "Filename of the serialized model that you want to load.") val useTagger = new CmdOption("use-tagger", "", "STRING", "Which tagger? 
(remove me later)") val lexicons = new LexiconsProviderCmdOption("lexicons") } object ConllChainNerTrainer extends cc.factorie.util.HyperparameterMain { def evaluateParameters(args:Array[String]): Double = { val opts = new ChainNerOpts implicit val random = new scala.util.Random(0) opts.parse(args) val ner = new ConllChainNer()(ModelProvider.empty, new StaticLexiconFeatures(new StaticLexicons()(opts.lexicons.value))) if (opts.brownClusFile.wasInvoked) { println(s"Reading brown cluster file: ${opts.brownClusFile.value}") for (line <- scala.io.Source.fromFile(opts.brownClusFile.value).getLines()) { val splitLine = line.split("\t") ner.clusters(splitLine(1)) = splitLine(0) } } val trainPortionToTake = if(opts.trainPortion.wasInvoked) opts.trainPortion.value else 1.0 val testPortionToTake = if(opts.testPortion.wasInvoked) opts.testPortion.value else 1.0 val (trainDocsFull, testDocsFull) = if(opts.train.wasInvoked && opts.test.wasInvoked) { opts.train.value.flatMap(f => ner.loadDocs(f.getAbsolutePath)).toSeq -> opts.test.value.flatMap(f => ner.loadDocs(f.getAbsolutePath)).toSeq } else if(opts.trainDir.wasInvoked && opts.testDir.wasInvoked) { opts.trainDir.value.listFiles().flatMap(f => ner.loadDocs(f.getAbsolutePath)).toSeq -> opts.testDir.value.listFiles().flatMap(f => ner.loadDocs(f.getAbsolutePath)).toSeq } else { throw new IllegalArgumentException("You must provide values for either --train and --test or --train-dir and --test-dir") } val trainDocs = trainDocsFull.take((trainDocsFull.length*trainPortionToTake).floor.toInt) val testDocs = testDocsFull.take((testDocsFull.length*testPortionToTake).floor.toInt) println(s"using training set: ${opts.train.value} ; test set: ${opts.test.value}") println(s"$trainPortionToTake of training data; $testPortionToTake of test data:") println(s"using ${trainDocs.length} / ${trainDocsFull.length} train docs, ${trainDocs.map(_.tokenCount).sum} tokens") println(s"using ${testDocs.length} / ${testDocsFull.length} test docs, 
${testDocs.map(_.tokenCount).sum} tokens") val ret = ner.train(trainDocs, testDocs, opts.rate.value, opts.delta.value) if (opts.serialize.value) { println("serializing model to " + opts.saveModel.value) ner.serialize(new FileOutputStream(opts.saveModel.value)) } if(opts.targetAccuracy.wasInvoked) cc.factorie.assertMinimalAccuracy(ret,opts.targetAccuracy.value.toDouble) ret } } object ConllNerOptimizer { def main(args: Array[String]) { val opts = new ChainNerOpts opts.parse(args) opts.serialize.setValue(false) import cc.factorie.util.LogUniformDoubleSampler val rate = HyperParameter(opts.rate, new LogUniformDoubleSampler(1e-3, 1)) val delta = HyperParameter(opts.delta, new LogUniformDoubleSampler(0.01, 0.1)) val qs = new cc.factorie.util.QSubExecutor(10, "cc.factorie.app.nlp.ner.ConllChainNerTrainer") val optimizer = new cc.factorie.util.HyperParameterSearcher(opts, Seq(rate, delta), qs.execute, 100, 90, 60) val result = optimizer.optimize() println("Got results: " + result.mkString(" ")) println("Best rate: " + opts.rate.value + " best delta: " + opts.delta.value) println("Running best configuration...") opts.serialize.setValue(true) import scala.concurrent.Await import scala.concurrent.duration._ Await.result(qs.execute(opts.values.flatMap(_.unParse).toArray), 1.hours) println("Done.") } }
patverga/factorie
src/main/scala/cc/factorie/app/nlp/ner/ChainNer.scala
Scala
apache-2.0
18,678
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.catalyst.util import java.text.ParseException import java.time._ import java.time.format.DateTimeParseException import java.time.temporal.ChronoField.MICRO_OF_SECOND import java.time.temporal.TemporalQueries import java.util.{Locale, TimeZone} import java.util.concurrent.TimeUnit.SECONDS sealed trait TimestampFormatter extends Serializable { /** * Parses a timestamp in a string and converts it to microseconds. * * @param s - string with timestamp to parse * @return microseconds since epoch. 
* @throws ParseException can be thrown by legacy parser * @throws DateTimeParseException can be thrown by new parser * @throws DateTimeException unable to obtain local date or time */ @throws(classOf[ParseException]) @throws(classOf[DateTimeParseException]) @throws(classOf[DateTimeException]) def parse(s: String): Long def format(us: Long): String } class Iso8601TimestampFormatter( pattern: String, zoneId: ZoneId, locale: Locale) extends TimestampFormatter with DateTimeFormatterHelper { @transient protected lazy val formatter = getOrCreateFormatter(pattern, locale) override def parse(s: String): Long = { val parsed = formatter.parse(s) val parsedZoneId = parsed.query(TemporalQueries.zone()) val timeZoneId = if (parsedZoneId == null) zoneId else parsedZoneId val zonedDateTime = toZonedDateTime(parsed, timeZoneId) val epochSeconds = zonedDateTime.toEpochSecond val microsOfSecond = zonedDateTime.get(MICRO_OF_SECOND) Math.addExact(SECONDS.toMicros(epochSeconds), microsOfSecond) } override def format(us: Long): String = { val instant = DateTimeUtils.microsToInstant(us) formatter.withZone(zoneId).format(instant) } } /** * The formatter parses/formats timestamps according to the pattern `yyyy-MM-dd HH:mm:ss.[..fff..]` * where `[..fff..]` is a fraction of second up to microsecond resolution. The formatter does not * output trailing zeros in the fraction. For example, the timestamp `2019-03-05 15:00:01.123400` is * formatted as the string `2019-03-05 15:00:01.1234`. 
* * @param zoneId the time zone identifier in which the formatter parses or format timestamps */ class FractionTimestampFormatter(zoneId: ZoneId) extends Iso8601TimestampFormatter("", zoneId, TimestampFormatter.defaultLocale) { @transient override protected lazy val formatter = DateTimeFormatterHelper.fractionFormatter } object TimestampFormatter { val defaultPattern: String = "yyyy-MM-dd HH:mm:ss" val defaultLocale: Locale = Locale.US def apply(format: String, zoneId: ZoneId, locale: Locale): TimestampFormatter = { new Iso8601TimestampFormatter(format, zoneId, locale) } def apply(format: String, zoneId: ZoneId): TimestampFormatter = { apply(format, zoneId, defaultLocale) } def apply(zoneId: ZoneId): TimestampFormatter = { apply(defaultPattern, zoneId, defaultLocale) } def getFractionFormatter(zoneId: ZoneId): TimestampFormatter = { new FractionTimestampFormatter(zoneId) } }
aosagie/spark
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TimestampFormatter.scala
Scala
apache-2.0
3,855
package reactivemongo.api import scala.concurrent.{ ExecutionContext, Future } import reactivemongo.core.errors.GenericDatabaseException import reactivemongo.api.commands._ import reactivemongo.api.indexes.CollectionIndexesManager /** * A mixin that provides commands about this Collection itself. * * @define autoIndexIdParam If true should automatically add an index on the `_id` field. By default, regular collections will have an indexed `_id` field, in contrast to capped collections. This MongoDB option is deprecated and will be removed in a future release. * @define cappedSizeParam the size of the collection (number of bytes) * @define cappedMaxParam the maximum number of documents this capped collection can contain */ trait CollectionMetaCommands { self: Collection => private implicit lazy val unitBoxReader = CommandCodecs.unitBoxReader(command.pack) private implicit lazy val createWriter = CreateCollection.writer(command.pack) /** * Creates this collection. * * The returned future will be completed with an error if * this collection already exists. * * {{{ * coll.create().recover { * case CommandError.Code(48 /*NamespaceExists*/ ) => * println(s"Collection \\${coll.fullCollectionName} already exists") * } * }}} */ def create()(implicit ec: ExecutionContext): Future[Unit] = command.unboxed(self, Create(None, false), ReadPreference.primary) /** * @param failsIfExists if true fails if the collection already exists (default: false) */ def create(@deprecatedName(Symbol("autoIndexId")) failsIfExists: Boolean = false)(implicit ec: ExecutionContext): Future[Unit] = create().recover { case CommandError.Code(48 /* already exists */ ) if !failsIfExists => () case CommandError.Message( "collection already exists") if !failsIfExists => () } /** * Creates this collection as a capped one. * * The returned future will be completed with an error if this collection already exists. 
* * @param size $cappedSizeParam * @param maxDocuments $cappedMaxParam * @param autoIndexId $autoIndexIdParam */ def createCapped(size: Long, maxDocuments: Option[Int], autoIndexId: Boolean = false)(implicit ec: ExecutionContext): Future[Unit] = command.unboxed( self, Create(Some(Capped(size, maxDocuments)), autoIndexId), ReadPreference.primary) /** * Drops this collection. * * The returned future will be completed with an error * if this collection does not exist. */ @deprecated("Use `drop(Boolean)`", "0.12.0") def drop()(implicit ec: ExecutionContext): Future[Unit] = drop(true).map(_ => {}) private implicit lazy val dropWriter = DropCollection.writer(command.pack) private implicit lazy val dropReader = DropCollectionResult.reader(command.pack) /** * Drops this collection. * * If the collection existed and is successfully dropped, * the returned future will be completed with true. * * If `failIfNotFound` is false and the collection doesn't exist, * the returned future will be completed with false. * * Otherwise in case, the future will be completed with the encountered error. */ def drop(failIfNotFound: Boolean)(implicit ec: ExecutionContext): Future[Boolean] = { command(self, DropCollection, ReadPreference.primary).flatMap { case DropCollectionResult(false) if failIfNotFound => Future.failed[Boolean](GenericDatabaseException( s"fails to drop collection: $name", Some(26))) case DropCollectionResult(dropped) => Future.successful(dropped) } } private implicit lazy val convertWriter = ConvertToCapped.writer(command.pack) /** * Converts this collection to a capped one. * * @param size $cappedSizeParam * @param maxDocuments $cappedMaxParam */ def convertToCapped(size: Long, maxDocuments: Option[Int])(implicit ec: ExecutionContext): Future[Unit] = command.unboxed(self, ConvertToCapped(Capped(size, maxDocuments)), ReadPreference.primary) /** * Renames this collection. 
* * @param to the new name of this collection * @param dropExisting if a collection of name `to` already exists, then drops that collection before renaming this one * * @return a failure if the dropExisting option is false and the target collection already exists */ @deprecated(message = "Use `reactivemongo.api.DBMetaCommands.renameCollection on the admin database instead.", since = "0.12.4") def rename(to: String, dropExisting: Boolean = false)(implicit ec: ExecutionContext): Future[Unit] = { implicit val renameWriter = RenameCollection.writer(command.pack) command.unboxed(self.db, RenameCollection(db.name + "." + name, db.name + "." + to, dropExisting), ReadPreference.primary) } private implicit lazy val statsWriter = CollStats.writer(command.pack) private implicit lazy val statsReader = CollStats.reader(command.pack) /** * Returns various information about this collection. */ def stats()(implicit ec: ExecutionContext): Future[CollStatsResult] = command(self, CollStats(None), ReadPreference.primary) /** * Returns various information about this collection. * * @param scale the scale factor (for example, to get all the sizes in kilobytes) */ def stats(scale: Int)(implicit ec: ExecutionContext): Future[CollStatsResult] = command(self, CollStats(Some(scale)), ReadPreference.primary) /** Returns an index manager for this collection. */ def indexesManager(implicit ec: ExecutionContext): CollectionIndexesManager = CollectionIndexesManager(self.db, name) // Command runner private lazy val command = Command.run(Serialization.internalSerializationPack, failoverStrategy) }
ornicar/ReactiveMongo
driver/src/main/scala/api/CollectionMetaCommands.scala
Scala
apache-2.0
5,774
package im.actor.server.session import akka.actor._ import akka.stream.actor.{ ActorPublisher, ActorSubscriber } import akka.stream.scaladsl._ import akka.stream.{ FlowShape, OverflowStrategy } import scodec.bits._ import im.actor.api.rpc.ClientData import im.actor.server.mtproto.protocol._ import im.actor.server.mtproto.transport.MTPackage sealed trait SessionStreamMessage object SessionStreamMessage { @SerialVersionUID(1L) case class HandleMessageBox(messageBox: MessageBox, clientData: ClientData) extends SessionStreamMessage @SerialVersionUID(1L) case class HandleRpcRequest(messageId: Long, requestBytes: BitVector, clientData: ClientData) extends SessionStreamMessage @SerialVersionUID(1L) case class HandleSubscribe(command: SubscribeCommand) extends SessionStreamMessage @SerialVersionUID(1L) case class SendProtoMessage(message: ProtoMessage with OutgoingProtoMessage) extends SessionStreamMessage case class EnqueuedProtoMessage(message: ProtoMessage, reduceKey: Option[String]) extends SessionStreamMessage } private[session] object SessionStream { type ReduceKey = Option[String] type OutProtoMessage = (ProtoMessage, ReduceKey) type InOrOut = Either[ProtoMessage, OutProtoMessage] def graph( authId: Long, sessionId: Long, rpcHandler: ActorRef, updatesHandler: ActorRef, reSender: ActorRef )(implicit context: ActorContext) = { FlowGraph.partial() { implicit builder โ‡’ import FlowGraph.Implicits._ import SessionStreamMessage._ val discr = builder.add(new SessionMessageDiscriminator) // TODO: think about buffer sizes and overflow strategies val rpc = discr.outRpc.buffer(100, OverflowStrategy.backpressure) val subscribe = discr.outSubscribe.buffer(100, OverflowStrategy.backpressure) val incomingAck = discr.outIncomingAck.buffer(100, OverflowStrategy.backpressure).map(in) val outProtoMessages = discr.outProtoMessage.buffer(100, OverflowStrategy.backpressure).map(out) val outRequestResend = discr.outRequestResend.buffer(100, OverflowStrategy.backpressure).map(in) val 
unmatched = discr.outUnmatched.buffer(100, OverflowStrategy.backpressure) val rpcRequestSubscriber = builder.add(Sink(ActorSubscriber[HandleRpcRequest](rpcHandler))) val rpcResponsePublisher = builder.add(Source(ActorPublisher[ProtoMessage](rpcHandler)).map(out)) val updatesSubscriber = builder.add(Sink(ActorSubscriber[SubscribeCommand](updatesHandler))) val updatesPublisher = builder.add(Source(ActorPublisher[OutProtoMessage](updatesHandler))).map(out) val reSendSubscriber = builder.add(Sink(ActorSubscriber[ReSenderMessage](reSender))) val reSendPublisher = builder.add(Source(ActorPublisher[MTPackage](reSender))) val mergeProto = builder.add(MergePreferred[ReSenderMessage](3)) val mergeProtoPriority = builder.add(MergePreferred[ReSenderMessage](1)) val logging = akka.event.Logging(context.system, s"SessionStream-${authId}-${sessionId}") val log = Sink.foreach[SessionStreamMessage](logging.warning("Unmatched {}", _)) // @format: OFF incomingAck ~> mergeProtoPriority.preferred outProtoMessages ~> mergeProtoPriority ~> mergeProto.preferred outRequestResend ~> mergeProto ~> reSendSubscriber rpc ~> rpcRequestSubscriber rpcResponsePublisher ~> mergeProto subscribe ~> updatesSubscriber updatesPublisher ~> mergeProto unmatched ~> log // @format: ON FlowShape(discr.in, reSendPublisher.outlet) } } import ReSenderMessage._ private def in(m: MessageAck): ReSenderMessage = IncomingAck(m.messageIds) private def in(m: RequestResend): ReSenderMessage = IncomingRequestResend(m.messageId) private def out(m: ProtoMessage): ReSenderMessage = out(m, None) private def out(msg: ProtoMessage, reduceKey: ReduceKey) = OutgoingMessage(msg, reduceKey) private def out(tup: (ProtoMessage, ReduceKey)) = (OutgoingMessage.apply _).tupled(tup) }
jamesbond12/actor-platform
actor-server/actor-session/src/main/scala/im/actor/server/session/SessionStream.scala
Scala
mit
4,145
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.catalyst.expressions.codegen

import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types._

/**
 * Generates a [[Projection]] that returns an [[UnsafeRow]].
 *
 * It generates the code for all the expressions, computes the total length for all the columns
 * (can be accessed via variables), and then copies the data into a scratch buffer space in the
 * form of UnsafeRow (the scratch buffer will grow as needed).
 *
 * @note The returned UnsafeRow will be pointed to a scratch buffer inside the projection.
 */
object GenerateUnsafeProjection extends CodeGenerator[Seq[Expression], UnsafeProjection] {

  /** Returns true iff we support this data type. */
  def canSupport(dataType: DataType): Boolean = dataType match {
    case NullType => true
    case t: AtomicType => true
    case _: CalendarIntervalType => true
    case t: StructType => t.forall(field => canSupport(field.dataType))
    case t: ArrayType if canSupport(t.elementType) => true
    case MapType(kt, vt, _) if canSupport(kt) && canSupport(vt) => true
    case udt: UserDefinedType[_] => canSupport(udt.sqlType)
    case _ => false
  }

  // NOTE: the helpers below return Java *source fragments* as Scala strings; the whitespace
  // inside the interpolated string literals is emitted verbatim into the generated code.

  // TODO: if the nullability of field is correct, we can use it to save null check.
  /**
   * Emits Java code that serializes a struct value (an `InternalRow`) into the buffer.
   * Fast path: if the value is already an `UnsafeRow`, its bytes are copied directly via
   * [[writeUnsafeData]]; otherwise each field is written through [[writeExpressionsToBuffer]].
   *
   * @param input Java expression (as source text) evaluating to the struct value.
   * @return a Java statement block, as a string.
   */
  private def writeStructToBuffer(
      ctx: CodegenContext,
      input: String,
      fieldTypes: Seq[DataType],
      bufferHolder: String): String = {
    // Puts `input` in a local variable to avoid to re-evaluate it if it's a statement.
    val tmpInput = ctx.freshName("tmpInput")
    val fieldEvals = fieldTypes.zipWithIndex.map { case (dt, i) =>
      // Fields of the local row are accessed positionally; no Scala-side evaluation code needed.
      ExprCode("", s"$tmpInput.isNullAt($i)", CodeGenerator.getValue(tmpInput, dt, i.toString))
    }

    s"""
      final InternalRow $tmpInput = $input;
      if ($tmpInput instanceof UnsafeRow) {
        ${writeUnsafeData(ctx, s"((UnsafeRow) $tmpInput)", bufferHolder)}
      } else {
        ${writeExpressionsToBuffer(ctx, tmpInput, fieldEvals, fieldTypes, bufferHolder)}
      }
    """
  }

  /**
   * Emits Java code that evaluates `inputs` and writes each result as one field of an
   * UnsafeRow through an `UnsafeRowWriter` mutable-state instance.
   *
   * Variable-length fields (struct/array/map) are written at the current cursor and then
   * registered with `setOffsetAndSize`; fixed-length fields are written directly.
   *
   * @param row name of the Java `InternalRow` variable being written (may be null only at
   *            top level — see the whole-stage-codegen branch below).
   * @param isTopLevel true only for the outermost row of [[createCode]]; enables the
   *                   reset/zeroOutNullBytes optimizations described inline.
   */
  private def writeExpressionsToBuffer(
      ctx: CodegenContext,
      row: String,
      inputs: Seq[ExprCode],
      inputTypes: Seq[DataType],
      bufferHolder: String,
      isTopLevel: Boolean = false): String = {
    val rowWriterClass = classOf[UnsafeRowWriter].getName
    val rowWriter = ctx.addMutableState(rowWriterClass, "rowWriter",
      v => s"$v = new $rowWriterClass($bufferHolder, ${inputs.length});")

    val resetWriter = if (isTopLevel) {
      // For top level row writer, it always writes to the beginning of the global buffer holder,
      // which means its fixed-size region always in the same position, so we don't need to call
      // `reset` to set up its fixed-size region every time.
      if (inputs.map(_.isNull).forall(_ == "false")) {
        // If all fields are not nullable, which means the null bits never changes, then we don't
        // need to clear it out every time.
        ""
      } else {
        s"$rowWriter.zeroOutNullBytes();"
      }
    } else {
      s"$rowWriter.reset();"
    }

    val writeFields = inputs.zip(inputTypes).zipWithIndex.map {
      case ((input, dataType), index) =>
        // UDTs are serialized as their underlying SQL type.
        val dt = dataType match {
          case udt: UserDefinedType[_] => udt.sqlType
          case other => other
        }
        val tmpCursor = ctx.freshName("tmpCursor")

        val setNull = dt match {
          case t: DecimalType if t.precision > Decimal.MAX_LONG_DIGITS =>
            // Can't call setNullAt() for DecimalType with precision larger than 18.
            s"$rowWriter.write($index, (Decimal) null, ${t.precision}, ${t.scale});"
          case _ => s"$rowWriter.setNullAt($index);"
        }

        val writeField = dt match {
          case t: StructType =>
            s"""
              // Remember the current cursor so that we can calculate how many bytes are
              // written later.
              final int $tmpCursor = $bufferHolder.cursor;
              ${writeStructToBuffer(ctx, input.value, t.map(_.dataType), bufferHolder)}
              $rowWriter.setOffsetAndSize($index, $tmpCursor, $bufferHolder.cursor - $tmpCursor);
            """

          case a @ ArrayType(et, _) =>
            s"""
              // Remember the current cursor so that we can calculate how many bytes are
              // written later.
              final int $tmpCursor = $bufferHolder.cursor;
              ${writeArrayToBuffer(ctx, input.value, et, bufferHolder)}
              $rowWriter.setOffsetAndSize($index, $tmpCursor, $bufferHolder.cursor - $tmpCursor);
            """

          case m @ MapType(kt, vt, _) =>
            s"""
              // Remember the current cursor so that we can calculate how many bytes are
              // written later.
              final int $tmpCursor = $bufferHolder.cursor;
              ${writeMapToBuffer(ctx, input.value, kt, vt, bufferHolder)}
              $rowWriter.setOffsetAndSize($index, $tmpCursor, $bufferHolder.cursor - $tmpCursor);
            """

          case t: DecimalType =>
            s"$rowWriter.write($index, ${input.value}, ${t.precision}, ${t.scale});"

          case NullType => ""

          case _ => s"$rowWriter.write($index, ${input.value});"
        }

        // Skip the null check entirely when the expression is statically non-nullable.
        if (input.isNull == "false") {
          s"""
            ${input.code}
            ${writeField.trim}
          """
        } else {
          s"""
            ${input.code}
            if (${input.isNull}) {
              ${setNull.trim}
            } else {
              ${writeField.trim}
            }
          """
        }
    }

    val writeFieldsCode = if (isTopLevel && (row == null || ctx.currentVars != null)) {
      // TODO: support whole stage codegen
      writeFields.mkString("\n")
    } else {
      assert(row != null, "the input row name cannot be null when generating code to write it.")
      // Split into named helper functions to stay under the JVM method-size limit.
      ctx.splitExpressions(
        expressions = writeFields,
        funcName = "writeFields",
        arguments = Seq("InternalRow" -> row))
    }
    s"""
      $resetWriter
      $writeFieldsCode
    """.trim
  }

  // TODO: if the nullability of array element is correct, we can use it to save null check.
  /**
   * Emits Java code that serializes an `ArrayData` value element-by-element via an
   * `UnsafeArrayWriter`, with a direct byte-copy fast path when the input is already
   * an `UnsafeArrayData`.
   */
  private def writeArrayToBuffer(
      ctx: CodegenContext,
      input: String,
      elementType: DataType,
      bufferHolder: String): String = {
    // Puts `input` in a local variable to avoid to re-evaluate it if it's a statement.
    val tmpInput = ctx.freshName("tmpInput")
    val arrayWriterClass = classOf[UnsafeArrayWriter].getName
    val arrayWriter = ctx.addMutableState(arrayWriterClass, "arrayWriter",
      v => s"$v = new $arrayWriterClass();")
    val numElements = ctx.freshName("numElements")
    val index = ctx.freshName("index")

    val et = elementType match {
      case udt: UserDefinedType[_] => udt.sqlType
      case other => other
    }

    val jt = CodeGenerator.javaType(et)

    // Per-element slot size in the unsafe array layout: primitives are stored inline,
    // everything else stores an 8-byte (offset, length) pair.
    val elementOrOffsetSize = et match {
      case t: DecimalType if t.precision <= Decimal.MAX_LONG_DIGITS => 8
      case _ if CodeGenerator.isPrimitiveType(jt) => et.defaultSize
      case _ => 8  // we need 8 bytes to store offset and length
    }

    val tmpCursor = ctx.freshName("tmpCursor")
    val element = CodeGenerator.getValue(tmpInput, et, index)
    val writeElement = et match {
      case t: StructType =>
        s"""
          final int $tmpCursor = $bufferHolder.cursor;
          ${writeStructToBuffer(ctx, element, t.map(_.dataType), bufferHolder)}
          $arrayWriter.setOffsetAndSize($index, $tmpCursor, $bufferHolder.cursor - $tmpCursor);
        """

      case a @ ArrayType(et, _) =>
        s"""
          final int $tmpCursor = $bufferHolder.cursor;
          ${writeArrayToBuffer(ctx, element, et, bufferHolder)}
          $arrayWriter.setOffsetAndSize($index, $tmpCursor, $bufferHolder.cursor - $tmpCursor);
        """

      case m @ MapType(kt, vt, _) =>
        s"""
          final int $tmpCursor = $bufferHolder.cursor;
          ${writeMapToBuffer(ctx, element, kt, vt, bufferHolder)}
          $arrayWriter.setOffsetAndSize($index, $tmpCursor, $bufferHolder.cursor - $tmpCursor);
        """

      case t: DecimalType =>
        s"$arrayWriter.write($index, $element, ${t.precision}, ${t.scale});"

      case NullType => ""

      case _ => s"$arrayWriter.write($index, $element);"
    }

    val primitiveTypeName =
      if (CodeGenerator.isPrimitiveType(jt)) CodeGenerator.primitiveTypeName(et) else ""
    s"""
      final ArrayData $tmpInput = $input;
      if ($tmpInput instanceof UnsafeArrayData) {
        ${writeUnsafeData(ctx, s"((UnsafeArrayData) $tmpInput)", bufferHolder)}
      } else {
        final int $numElements = $tmpInput.numElements();
        $arrayWriter.initialize($bufferHolder, $numElements, $elementOrOffsetSize);

        for (int $index = 0; $index < $numElements; $index++) {
          if ($tmpInput.isNullAt($index)) {
            $arrayWriter.setNull${elementOrOffsetSize}Bytes($index);
          } else {
            $writeElement
          }
        }
      }
    """
  }

  // TODO: if the nullability of value element is correct, we can use it to save null check.
  /**
   * Emits Java code that serializes a `MapData` value as [key-array numBytes][key array]
   * [value array], with a byte-copy fast path for `UnsafeMapData` inputs.
   */
  private def writeMapToBuffer(
      ctx: CodegenContext,
      input: String,
      keyType: DataType,
      valueType: DataType,
      bufferHolder: String): String = {
    // Puts `input` in a local variable to avoid to re-evaluate it if it's a statement.
    val tmpInput = ctx.freshName("tmpInput")
    val tmpCursor = ctx.freshName("tmpCursor")

    // Writes out unsafe map according to the format described in `UnsafeMapData`.
    s"""
      final MapData $tmpInput = $input;
      if ($tmpInput instanceof UnsafeMapData) {
        ${writeUnsafeData(ctx, s"((UnsafeMapData) $tmpInput)", bufferHolder)}
      } else {
        // preserve 8 bytes to write the key array numBytes later.
        $bufferHolder.grow(8);
        $bufferHolder.cursor += 8;

        // Remember the current cursor so that we can write numBytes of key array later.
        final int $tmpCursor = $bufferHolder.cursor;

        ${writeArrayToBuffer(ctx, s"$tmpInput.keyArray()", keyType, bufferHolder)}
        // Write the numBytes of key array into the first 8 bytes.
        Platform.putLong($bufferHolder.buffer, $tmpCursor - 8, $bufferHolder.cursor - $tmpCursor);

        ${writeArrayToBuffer(ctx, s"$tmpInput.valueArray()", valueType, bufferHolder)}
      }
    """
  }

  /**
   * If the input is already in unsafe format, we don't need to go through all elements/fields,
   * we can directly write it.
   */
  private def writeUnsafeData(ctx: CodegenContext, input: String, bufferHolder: String) = {
    val sizeInBytes = ctx.freshName("sizeInBytes")
    s"""
      final int $sizeInBytes = $input.getSizeInBytes();
      // grow the global buffer before writing data.
      $bufferHolder.grow($sizeInBytes);
      $input.writeToMemory($bufferHolder.buffer, $bufferHolder.cursor);
      $bufferHolder.cursor += $sizeInBytes;
    """
  }

  /**
   * Builds the full projection body: evaluates all expressions, writes them into a
   * `BufferHolder`-backed UnsafeRow, and returns an [[ExprCode]] whose `isNull` is the
   * literal "false" (the produced UnsafeRow is never null) and whose `value` is the
   * mutable-state result row.
   */
  def createCode(
      ctx: CodegenContext,
      expressions: Seq[Expression],
      useSubexprElimination: Boolean = false): ExprCode = {
    val exprEvals = ctx.generateExpressions(expressions, useSubexprElimination)
    val exprTypes = expressions.map(_.dataType)

    val numVarLenFields = exprTypes.count {
      case dt if UnsafeRow.isFixedLength(dt) => false
      // TODO: consider large decimal and interval type
      case _ => true
    }

    val result = ctx.addMutableState("UnsafeRow", "result",
      v => s"$v = new UnsafeRow(${expressions.length});")

    val holderClass = classOf[BufferHolder].getName
    // Initial scratch space: 32 bytes per variable-length field; the holder grows as needed.
    val holder = ctx.addMutableState(holderClass, "holder",
      v => s"$v = new $holderClass($result, ${numVarLenFields * 32});")

    // When every field is fixed-length the buffer never moves, so reset/size updates are no-ops.
    val resetBufferHolder = if (numVarLenFields == 0) {
      ""
    } else {
      s"$holder.reset();"
    }
    val updateRowSize = if (numVarLenFields == 0) {
      ""
    } else {
      s"$result.setTotalSize($holder.totalSize());"
    }

    // Evaluate all the subexpression.
    val evalSubexpr = ctx.subexprFunctions.mkString("\n")

    val writeExpressions =
      writeExpressionsToBuffer(ctx, ctx.INPUT_ROW, exprEvals, exprTypes, holder, isTopLevel = true)

    val code =
      s"""
        $resetBufferHolder
        $evalSubexpr
        $writeExpressions
        $updateRowSize
      """
    ExprCode(code, "false", result)
  }

  protected def canonicalize(in: Seq[Expression]): Seq[Expression] =
    in.map(ExpressionCanonicalizer.execute)

  protected def bind(in: Seq[Expression], inputSchema: Seq[Attribute]): Seq[Expression] =
    in.map(BindReferences.bindReference(_, inputSchema))

  /** Public entry point that also lets callers opt in to subexpression elimination. */
  def generate(
      expressions: Seq[Expression],
      subexpressionEliminationEnabled: Boolean): UnsafeProjection = {
    create(canonicalize(expressions), subexpressionEliminationEnabled)
  }

  protected def create(references: Seq[Expression]): UnsafeProjection = {
    create(references, subexpressionEliminationEnabled = false)
  }

  /**
   * Generates, compiles, and instantiates a `SpecificUnsafeProjection` Java class for the
   * given (already canonicalized/bound) expressions.
   */
  private def create(
      expressions: Seq[Expression],
      subexpressionEliminationEnabled: Boolean): UnsafeProjection = {
    val ctx = newCodeGenContext()
    val eval = createCode(ctx, expressions, subexpressionEliminationEnabled)

    val codeBody = s"""
      public java.lang.Object generate(Object[] references) {
        return new SpecificUnsafeProjection(references);
      }

      class SpecificUnsafeProjection extends ${classOf[UnsafeProjection].getName} {

        private Object[] references;
        ${ctx.declareMutableStates()}

        public SpecificUnsafeProjection(Object[] references) {
          this.references = references;
          ${ctx.initMutableStates()}
        }

        public void initialize(int partitionIndex) {
          ${ctx.initPartition()}
        }

        // Scala.Function1 need this
        public java.lang.Object apply(java.lang.Object row) {
          return apply((InternalRow) row);
        }

        public UnsafeRow apply(InternalRow ${ctx.INPUT_ROW}) {
          ${eval.code.trim}
          return ${eval.value};
        }

        ${ctx.declareAddedFunctions()}
      }
      """

    val code = CodeFormatter.stripOverlappingComments(
      new CodeAndComment(codeBody, ctx.getPlaceHolderToComments()))
    logDebug(s"code for ${expressions.mkString(",")}:\n${CodeFormatter.format(code)}")

    val (clazz, _) = CodeGenerator.compile(code)
    clazz.generate(ctx.references.toArray).asInstanceOf[UnsafeProjection]
  }
}
ioana-delaney/spark
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateUnsafeProjection.scala
Scala
apache-2.0
15,235
package model.swagger

/**
 * Groups a sequence of [[Authorization]] definitions under a single value.
 *
 * NOTE(review): the field name `authorizations` presumably mirrors the Swagger
 * spec's "authorizations" key — confirm against the (de)serializer that
 * consumes this model.
 *
 * @param authorizations the authorization definitions being wrapped
 */
case class Authorizations(authorizations: Seq[Authorization])
dwyks/log2swagger
app/model/swagger/Authorizations.scala
Scala
mit
84
/* * Copyright 2015 Databricks Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.databricks.spark.sql.perf.tpcds import com.databricks.spark.sql.perf.Benchmark trait ImpalaKitQueries extends Benchmark { import ExecutionMode._ // Queries are from // https://github.com/cloudera/impala-tpcds-kit/tree/master/queries-sql92-modified/queries val queries = Seq( ("q19", """ |-- start query 1 in stream 0 using template query19.tpl |select | i_brand_id, | i_brand, | i_manufact_id, | i_manufact, | sum(ss_ext_sales_price) ext_price |from | store_sales | join item on (store_sales.ss_item_sk = item.i_item_sk) | join customer on (store_sales.ss_customer_sk = customer.c_customer_sk) | join customer_address on (customer.c_current_addr_sk = customer_address.ca_address_sk) | join store on (store_sales.ss_store_sk = store.s_store_sk) | join date_dim on (store_sales.ss_sold_date_sk = date_dim.d_date_sk) |where | --ss_date between '1999-11-01' and '1999-11-30' | ss_sold_date_sk between 2451484 and 2451513 | and d_moy = 11 | and d_year = 1999 | and i_manager_id = 7 | and substr(ca_zip, 1, 5) <> substr(s_zip, 1, 5) |group by | i_brand, | i_brand_id, | i_manufact_id, | i_manufact |order by | ext_price desc, | i_brand, | i_brand_id, | i_manufact_id, | i_manufact |limit 100 |-- end query 1 in stream 0 using template query19.tpl """.stripMargin), ("q27", """ |-- start query 1 in stream 0 using template query27.tpl |select | i_item_id, | s_state, | -- grouping(s_state) g_state, | 
avg(ss_quantity) agg1, | avg(ss_list_price) agg2, | avg(ss_coupon_amt) agg3, | avg(ss_sales_price) agg4 |from | store_sales | join store on (store_sales.ss_store_sk = store.s_store_sk) | join customer_demographics on (store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk) | join item on (store_sales.ss_item_sk = item.i_item_sk) | join date_dim on (store_sales.ss_sold_date_sk = date_dim.d_date_sk) |where | -- ss_date between '1998-01-01' and '1998-12-31' | ss_sold_date_sk between 2450815 and 2451179 -- partition key filter | and d_year = 1998 | and s_state in ('WI', 'CA', 'TX', 'FL', 'WA', 'TN') | and cd_gender = 'F' | and cd_marital_status = 'W' | and cd_education_status = 'Primary' |group by | -- rollup(i_item_id, s_state) | i_item_id, | s_state |order by | i_item_id, | s_state |limit 100 |-- end query 1 in stream 0 using template query27.tpl """.stripMargin), ("q3", """ |-- start query 1 in stream 0 using template query3.tpl |select | dt.d_year, | -- year(ss_date) as d_year, | -- case | -- when ss_sold_date_sk between 2451149 and 2451179 then 1998 | -- when ss_sold_date_sk between 2451514 and 2451544 then 1999 | -- when ss_sold_date_sk between 2451880 and 2451910 then 2000 | -- when ss_sold_date_sk between 2452245 and 2452275 then 2001 | -- when ss_sold_date_sk between 2452610 and 2452640 then 2002 | -- end as d_year, | item.i_brand_id brand_id, | item.i_brand brand, | sum(ss_ext_sales_price) sum_agg |from | store_sales | join item on (store_sales.ss_item_sk = item.i_item_sk) | join date_dim dt on (dt.d_date_sk = store_sales.ss_sold_date_sk) |where | item.i_manufact_id = 436 | and dt.d_moy = 12 | -- and (ss_date between '1998-12-01' and '1998-12-31' | -- or ss_date between '1999-12-01' and '1999-12-31' | -- or ss_date between '2000-12-01' and '2000-12-31' | -- or ss_date between '2001-12-01' and '2001-12-31' | -- or ss_date between '2002-12-01' and '2002-12-31') | and (ss_sold_date_sk between 2451149 and 2451179 | or ss_sold_date_sk between 2451514 and 
2451544 | or ss_sold_date_sk between 2451880 and 2451910 | or ss_sold_date_sk between 2452245 and 2452275 | or ss_sold_date_sk between 2452610 and 2452640) |group by | d_year, | item.i_brand, | item.i_brand_id |order by | d_year, | sum_agg desc, | brand_id |-- end query 1 in stream 0 using template query3.tpl |limit 100 """.stripMargin), ("q34", """ |-- start query 1 in stream 0 using template query34.tpl |select | c_last_name, | c_first_name, | c_salutation, | c_preferred_cust_flag, | ss_ticket_number, | cnt |from | (select | ss_ticket_number, | ss_customer_sk, | count(*) cnt | from | store_sales | join household_demographics on (store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk) | join store on (store_sales.ss_store_sk = store.s_store_sk) | join date_dim on (store_sales.ss_sold_date_sk = date_dim.d_date_sk) | where | date_dim.d_year in (1998, 1998 + 1, 1998 + 2) | and (date_dim.d_dom between 1 and 3 | or date_dim.d_dom between 25 and 28) | and (household_demographics.hd_buy_potential = '>10000' | or household_demographics.hd_buy_potential = 'unknown') | and household_demographics.hd_vehicle_count > 0 | and (case when household_demographics.hd_vehicle_count > 0 then household_demographics.hd_dep_count / household_demographics.hd_vehicle_count else null end) > 1.2 | and store.s_county in ('Saginaw County', 'Sumner County', 'Appanoose County', 'Daviess County', 'Fairfield County', 'Raleigh County', 'Ziebach County', 'Williamson County') | and ss_sold_date_sk between 2450816 and 2451910 -- partition key filter | group by | ss_ticket_number, | ss_customer_sk | ) dn |join customer on (dn.ss_customer_sk = customer.c_customer_sk) |where | cnt between 15 and 20 |order by | c_last_name, | c_first_name, | c_salutation, | c_preferred_cust_flag desc |limit 1000 |-- end query 1 in stream 0 using template query34.tpl """.stripMargin), ("q42", """ |-- start query 1 in stream 0 using template query42.tpl |select | d_year, | i_category_id, | i_category, | 
sum(ss_ext_sales_price) as total_price |from | store_sales | join item on (store_sales.ss_item_sk = item.i_item_sk) | join date_dim dt on (dt.d_date_sk = store_sales.ss_sold_date_sk) |where | item.i_manager_id = 1 | and dt.d_moy = 12 | and dt.d_year = 1998 | -- and ss_date between '1998-12-01' and '1998-12-31' | and ss_sold_date_sk between 2451149 and 2451179 -- partition key filter |group by | d_year, | i_category_id, | i_category |order by | -- sum(ss_ext_sales_price) desc, | total_price desc, | d_year, | i_category_id, | i_category |limit 100 |-- end query 1 in stream 0 using template query42.tpl """.stripMargin), ("q43", """ |-- start query 1 in stream 0 using template query43.tpl |select | s_store_name, | s_store_id, | sum(case when (d_day_name = 'Sunday') then ss_sales_price else null end) sun_sales, | sum(case when (d_day_name = 'Monday') then ss_sales_price else null end) mon_sales, | sum(case when (d_day_name = 'Tuesday') then ss_sales_price else null end) tue_sales, | sum(case when (d_day_name = 'Wednesday') then ss_sales_price else null end) wed_sales, | sum(case when (d_day_name = 'Thursday') then ss_sales_price else null end) thu_sales, | sum(case when (d_day_name = 'Friday') then ss_sales_price else null end) fri_sales, | sum(case when (d_day_name = 'Saturday') then ss_sales_price else null end) sat_sales |from | store_sales | join store on (store_sales.ss_store_sk = store.s_store_sk) | join date_dim on (store_sales.ss_sold_date_sk = date_dim.d_date_sk) |where | s_gmt_offset = -5 | and d_year = 1998 | -- and ss_date between '1998-01-01' and '1998-12-31' | and ss_sold_date_sk between 2450816 and 2451179 -- partition key filter |group by | s_store_name, | s_store_id |order by | s_store_name, | s_store_id, | sun_sales, | mon_sales, | tue_sales, | wed_sales, | thu_sales, | fri_sales, | sat_sales |limit 100 |-- end query 1 in stream 0 using template query43.tpl """.stripMargin), ("q46", """ |-- start query 1 in stream 0 using template query46.tpl |select | 
c_last_name, | c_first_name, | ca_city, | bought_city, | ss_ticket_number, | amt, | profit |from | (select | ss_ticket_number, | ss_customer_sk, | ca_city bought_city, | sum(ss_coupon_amt) amt, | sum(ss_net_profit) profit | from | store_sales | join store on (store_sales.ss_store_sk = store.s_store_sk) | join household_demographics on (store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk) | join date_dim on (store_sales.ss_sold_date_sk = date_dim.d_date_sk) | join customer_address on (store_sales.ss_addr_sk = customer_address.ca_address_sk) | where | store.s_city in ('Midway', 'Concord', 'Spring Hill', 'Brownsville', 'Greenville') | and (household_demographics.hd_dep_count = 5 | or household_demographics.hd_vehicle_count = 3) | and date_dim.d_dow in (6, 0) | and date_dim.d_year in (1999, 1999 + 1, 1999 + 2) | -- and ss_date between '1999-01-01' and '2001-12-31' | -- and ss_sold_date_sk between 2451180 and 2452275 -- partition key filter | and ss_sold_date_sk in (2451181, 2451182, 2451188, 2451189, 2451195, 2451196, 2451202, 2451203, 2451209, 2451210, 2451216, 2451217, | 2451223, 2451224, 2451230, 2451231, 2451237, 2451238, 2451244, 2451245, 2451251, 2451252, 2451258, 2451259, | 2451265, 2451266, 2451272, 2451273, 2451279, 2451280, 2451286, 2451287, 2451293, 2451294, 2451300, 2451301, | 2451307, 2451308, 2451314, 2451315, 2451321, 2451322, 2451328, 2451329, 2451335, 2451336, 2451342, 2451343, | 2451349, 2451350, 2451356, 2451357, 2451363, 2451364, 2451370, 2451371, 2451377, 2451378, 2451384, 2451385, | 2451391, 2451392, 2451398, 2451399, 2451405, 2451406, 2451412, 2451413, 2451419, 2451420, 2451426, 2451427, | 2451433, 2451434, 2451440, 2451441, 2451447, 2451448, 2451454, 2451455, 2451461, 2451462, 2451468, 2451469, | 2451475, 2451476, 2451482, 2451483, 2451489, 2451490, 2451496, 2451497, 2451503, 2451504, 2451510, 2451511, | 2451517, 2451518, 2451524, 2451525, 2451531, 2451532, 2451538, 2451539, 2451545, 2451546, 2451552, 2451553, | 2451559, 2451560, 
2451566, 2451567, 2451573, 2451574, 2451580, 2451581, 2451587, 2451588, 2451594, 2451595, | 2451601, 2451602, 2451608, 2451609, 2451615, 2451616, 2451622, 2451623, 2451629, 2451630, 2451636, 2451637, | 2451643, 2451644, 2451650, 2451651, 2451657, 2451658, 2451664, 2451665, 2451671, 2451672, 2451678, 2451679, | 2451685, 2451686, 2451692, 2451693, 2451699, 2451700, 2451706, 2451707, 2451713, 2451714, 2451720, 2451721, | 2451727, 2451728, 2451734, 2451735, 2451741, 2451742, 2451748, 2451749, 2451755, 2451756, 2451762, 2451763, | 2451769, 2451770, 2451776, 2451777, 2451783, 2451784, 2451790, 2451791, 2451797, 2451798, 2451804, 2451805, | 2451811, 2451812, 2451818, 2451819, 2451825, 2451826, 2451832, 2451833, 2451839, 2451840, 2451846, 2451847, | 2451853, 2451854, 2451860, 2451861, 2451867, 2451868, 2451874, 2451875, 2451881, 2451882, 2451888, 2451889, | 2451895, 2451896, 2451902, 2451903, 2451909, 2451910, 2451916, 2451917, 2451923, 2451924, 2451930, 2451931, | 2451937, 2451938, 2451944, 2451945, 2451951, 2451952, 2451958, 2451959, 2451965, 2451966, 2451972, 2451973, | 2451979, 2451980, 2451986, 2451987, 2451993, 2451994, 2452000, 2452001, 2452007, 2452008, 2452014, 2452015, | 2452021, 2452022, 2452028, 2452029, 2452035, 2452036, 2452042, 2452043, 2452049, 2452050, 2452056, 2452057, | 2452063, 2452064, 2452070, 2452071, 2452077, 2452078, 2452084, 2452085, 2452091, 2452092, 2452098, 2452099, | 2452105, 2452106, 2452112, 2452113, 2452119, 2452120, 2452126, 2452127, 2452133, 2452134, 2452140, 2452141, | 2452147, 2452148, 2452154, 2452155, 2452161, 2452162, 2452168, 2452169, 2452175, 2452176, 2452182, 2452183, | 2452189, 2452190, 2452196, 2452197, 2452203, 2452204, 2452210, 2452211, 2452217, 2452218, 2452224, 2452225, | 2452231, 2452232, 2452238, 2452239, 2452245, 2452246, 2452252, 2452253, 2452259, 2452260, 2452266, 2452267, | 2452273, 2452274) | group by | ss_ticket_number, | ss_customer_sk, | ss_addr_sk, | ca_city | ) dn | join customer on (dn.ss_customer_sk = 
customer.c_customer_sk) | join customer_address current_addr on (customer.c_current_addr_sk = current_addr.ca_address_sk) |where | current_addr.ca_city <> bought_city |order by | c_last_name, | c_first_name, | ca_city, | bought_city, | ss_ticket_number |limit 100 |-- end query 1 in stream 0 using template query46.tpl """.stripMargin), ("q52", """ |-- start query 1 in stream 0 using template query52.tpl |select | d_year, | i_brand_id, | i_brand, | sum(ss_ext_sales_price) ext_price |from | store_sales | join item on (store_sales.ss_item_sk = item.i_item_sk) | join date_dim dt on (store_sales.ss_sold_date_sk = dt.d_date_sk) |where | i_manager_id = 1 | and d_moy = 12 | and d_year = 1998 | -- and ss_date between '1998-12-01' and '1998-12-31' | and ss_sold_date_sk between 2451149 and 2451179 -- partition key filter |group by | d_year, | i_brand, | i_brand_id |order by | d_year, | ext_price desc, | i_brand_id |limit 100 |-- end query 1 in stream 0 using template query52.tpl """.stripMargin), ("q53", """ |-- start query 1 in stream 0 using template query53.tpl |select | * |from | (select | i_manufact_id, | sum(ss_sales_price) sum_sales | -- avg(sum(ss_sales_price)) over(partition by i_manufact_id) avg_quarterly_sales | from | store_sales | join item on (store_sales.ss_item_sk = item.i_item_sk) | join store on (store_sales.ss_store_sk = store.s_store_sk) | join date_dim on (store_sales.ss_sold_date_sk = date_dim.d_date_sk) | where | ss_sold_date_sk between 2451911 and 2452275 -- partition key filter | -- ss_date between '2001-01-01' and '2001-12-31' | and d_month_seq in(1212, 1212 + 1, 1212 + 2, 1212 + 3, 1212 + 4, 1212 + 5, 1212 + 6, 1212 + 7, 1212 + 8, 1212 + 9, 1212 + 10, 1212 + 11) | and ( | (i_category in('Books', 'Children', 'Electronics') | and i_class in('personal', 'portable', 'reference', 'self-help') | and i_brand in('scholaramalgamalg #14', 'scholaramalgamalg #7', 'exportiunivamalg #9', 'scholaramalgamalg #9') | ) | or | (i_category in('Women', 'Music', 'Men') | 
and i_class in('accessories', 'classical', 'fragrances', 'pants') | and i_brand in('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1') | ) | ) | group by | i_manufact_id, | d_qoy | ) tmp1 |-- where |-- case when avg_quarterly_sales > 0 then abs(sum_sales - avg_quarterly_sales) / avg_quarterly_sales else null end > 0.1 |order by | -- avg_quarterly_sales, | sum_sales, | i_manufact_id |limit 100 |-- end query 1 in stream 0 using template query53.tpl """.stripMargin), ("q55", """ |-- start query 1 in stream 0 using template query55.tpl |select | i_brand_id, | i_brand, | sum(ss_ext_sales_price) ext_price |from | store_sales | join item on (store_sales.ss_item_sk = item.i_item_sk) | join date_dim on (store_sales.ss_sold_date_sk = date_dim.d_date_sk) |where | i_manager_id = 36 | and d_moy = 12 | and d_year = 2001 | -- and ss_date between '2001-12-01' and '2001-12-31' | and ss_sold_date_sk between 2452245 and 2452275 -- partition key filter |group by | i_brand, | i_brand_id |order by | ext_price desc, | i_brand_id |limit 100 |-- end query 1 in stream 0 using template query55.tpl """.stripMargin), ("q59", """ |-- start query 1 in stream 0 using template query59.tpl |select | s_store_name1, | s_store_id1, | d_week_seq1, | sun_sales1 / sun_sales2, | mon_sales1 / mon_sales2, | tue_sales1 / tue_sales2, | wed_sales1 / wed_sales2, | thu_sales1 / thu_sales2, | fri_sales1 / fri_sales2, | sat_sales1 / sat_sales2 |from | (select | s_store_name s_store_name1, | wss.d_week_seq d_week_seq1, | s_store_id s_store_id1, | sun_sales sun_sales1, | mon_sales mon_sales1, | tue_sales tue_sales1, | wed_sales wed_sales1, | thu_sales thu_sales1, | fri_sales fri_sales1, | sat_sales sat_sales1 | from | (select | d_week_seq, | ss_store_sk, | sum(case when(d_day_name = 'Sunday') then ss_sales_price else null end) sun_sales, | sum(case when(d_day_name = 'Monday') then ss_sales_price else null end) mon_sales, | sum(case when(d_day_name = 'Tuesday') then ss_sales_price else 
null end) tue_sales, | sum(case when(d_day_name = 'Wednesday') then ss_sales_price else null end) wed_sales, | sum(case when(d_day_name = 'Thursday') then ss_sales_price else null end) thu_sales, | sum(case when(d_day_name = 'Friday') then ss_sales_price else null end) fri_sales, | sum(case when(d_day_name = 'Saturday') then ss_sales_price else null end) sat_sales | from | store_sales | join date_dim on (store_sales.ss_sold_date_sk = date_dim.d_date_sk) | where | -- ss_date between '1998-10-01' and '1999-09-30' | ss_sold_date_sk between 2451088 and 2451452 | group by | d_week_seq, | ss_store_sk | ) wss | join store on (wss.ss_store_sk = store.s_store_sk) | join date_dim d on (wss.d_week_seq = d.d_week_seq) | where | d_month_seq between 1185 and 1185 + 11 | ) y | join | (select | s_store_name s_store_name2, | wss.d_week_seq d_week_seq2, | s_store_id s_store_id2, | sun_sales sun_sales2, | mon_sales mon_sales2, | tue_sales tue_sales2, | wed_sales wed_sales2, | thu_sales thu_sales2, | fri_sales fri_sales2, | sat_sales sat_sales2 | from | (select | d_week_seq, | ss_store_sk, | sum(case when(d_day_name = 'Sunday') then ss_sales_price else null end) sun_sales, | sum(case when(d_day_name = 'Monday') then ss_sales_price else null end) mon_sales, | sum(case when(d_day_name = 'Tuesday') then ss_sales_price else null end) tue_sales, | sum(case when(d_day_name = 'Wednesday') then ss_sales_price else null end) wed_sales, | sum(case when(d_day_name = 'Thursday') then ss_sales_price else null end) thu_sales, | sum(case when(d_day_name = 'Friday') then ss_sales_price else null end) fri_sales, | sum(case when(d_day_name = 'Saturday') then ss_sales_price else null end) sat_sales | from | store_sales | join date_dim on (store_sales.ss_sold_date_sk = date_dim.d_date_sk) | where | -- ss_date between '1999-10-01' and '2000-09-30' | ss_sold_date_sk between 2451088 and 2451452 | group by | d_week_seq, | ss_store_sk | ) wss | join store on (wss.ss_store_sk = store.s_store_sk) | join 
date_dim d on (wss.d_week_seq = d.d_week_seq) | where | d_month_seq between 1185 + 12 and 1185 + 23 | ) x | on (y.s_store_id1 = x.s_store_id2) |where | d_week_seq1 = d_week_seq2 - 52 |order by | s_store_name1, | s_store_id1, | d_week_seq1 |limit 100 |-- end query 1 in stream 0 using template query59.tpl """.stripMargin), ("q63", """ |-- start query 1 in stream 0 using template query63.tpl |select | * |from | (select | i_manager_id, | sum(ss_sales_price) sum_sales | -- avg(sum(ss_sales_price)) over(partition by i_manager_id) avg_monthly_sales | from | store_sales | join item on (store_sales.ss_item_sk = item.i_item_sk) | join store on (store_sales.ss_store_sk = store.s_store_sk) | join date_dim on (store_sales.ss_sold_date_sk = date_dim.d_date_sk) | where | ss_sold_date_sk between 2451911 and 2452275 -- partition key filter | -- ss_date between '2001-01-01' and '2001-12-31' | and d_month_seq in (1212, 1212 + 1, 1212 + 2, 1212 + 3, 1212 + 4, 1212 + 5, 1212 + 6, 1212 + 7, 1212 + 8, 1212 + 9, 1212 + 10, 1212 + 11) | and ( | (i_category in('Books', 'Children', 'Electronics') | and i_class in('personal', 'portable', 'refernece', 'self-help') | and i_brand in('scholaramalgamalg #14', 'scholaramalgamalg #7', 'exportiunivamalg #9', 'scholaramalgamalg #9') | ) | or | (i_category in('Women', 'Music', 'Men') | and i_class in('accessories', 'classical', 'fragrances', 'pants') | and i_brand in('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1') | ) | ) | group by | i_manager_id, | d_moy | ) tmp1 |-- where |-- case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 |order by | i_manager_id, | -- avg_monthly_sales, | sum_sales |limit 100 |-- end query 1 in stream 0 using template query63.tpl """.stripMargin), ("q65", """ |--q65 |-- start query 1 in stream 0 using template query65.tpl |select | s_store_name, | i_item_desc, | sc.revenue, | i_current_price, | i_wholesale_cost, | i_brand |from | 
(select | ss_store_sk, | ss_item_sk, | sum(ss_sales_price) as revenue | from | store_sales | join date_dim on (store_sales.ss_sold_date_sk = date_dim.d_date_sk) | where | -- ss_date between '2001-01-01' and '2001-12-31' | ss_sold_date_sk between 2451911 and 2452275 -- partition key filter | and d_month_seq between 1212 and 1212 + 11 | group by | ss_store_sk, | ss_item_sk | ) sc | join item on (sc.ss_item_sk = item.i_item_sk) | join store on (sc.ss_store_sk = store.s_store_sk) | join | (select | ss_store_sk, | avg(revenue) as ave | from | (select | ss_store_sk, | ss_item_sk, | sum(ss_sales_price) as revenue | from | store_sales | join date_dim on (store_sales.ss_sold_date_sk = date_dim.d_date_sk) | where | -- ss_date between '2001-01-01' and '2001-12-31' | ss_sold_date_sk between 2451911 and 2452275 -- partition key filter | and d_month_seq between 1212 and 1212 + 11 | group by | ss_store_sk, | ss_item_sk | ) sa | group by | ss_store_sk | ) sb on (sc.ss_store_sk = sb.ss_store_sk) -- 676 rows |where | sc.revenue <= 0.1 * sb.ave |order by | s_store_name, | i_item_desc |limit 100 |-- end query 1 in stream 0 using template query65.tpl """.stripMargin), ("q68", """ |-- start query 1 in stream 0 using template query68.tpl |select | c_last_name, | c_first_name, | ca_city, | bought_city, | ss_ticket_number, | extended_price, | extended_tax, | list_price |from | (select | ss_ticket_number, | ss_customer_sk, | ca_city bought_city, | sum(ss_ext_sales_price) extended_price, | sum(ss_ext_list_price) list_price, | sum(ss_ext_tax) extended_tax | from | store_sales | join store on (store_sales.ss_store_sk = store.s_store_sk) | join household_demographics on (store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk) | join date_dim on (store_sales.ss_sold_date_sk = date_dim.d_date_sk) | join customer_address on (store_sales.ss_addr_sk = customer_address.ca_address_sk) | where | store.s_city in('Midway', 'Fairview') | --and date_dim.d_dom between 1 and 2 | --and date_dim.d_year 
in(1999, 1999 + 1, 1999 + 2) | -- and ss_date between '1999-01-01' and '2001-12-31' | -- and dayofmonth(ss_date) in (1,2) | -- and ss_sold_date_sk in (2451180, 2451181, 2451211, 2451212, 2451239, 2451240, 2451270, 2451271, 2451300, 2451301, 2451331, | -- 2451332, 2451361, 2451362, 2451392, 2451393, 2451423, 2451424, 2451453, 2451454, 2451484, 2451485, | -- 2451514, 2451515, 2451545, 2451546, 2451576, 2451577, 2451605, 2451606, 2451636, 2451637, 2451666, | -- 2451667, 2451697, 2451698, 2451727, 2451728, 2451758, 2451759, 2451789, 2451790, 2451819, 2451820, | -- 2451850, 2451851, 2451880, 2451881, 2451911, 2451912, 2451942, 2451943, 2451970, 2451971, 2452001, | -- 2452002, 2452031, 2452032, 2452062, 2452063, 2452092, 2452093, 2452123, 2452124, 2452154, 2452155, | -- 2452184, 2452185, 2452215, 2452216, 2452245, 2452246) | and (household_demographics.hd_dep_count = 5 | or household_demographics.hd_vehicle_count = 3) | and d_date between '1999-01-01' and '1999-03-31' | and ss_sold_date_sk between 2451180 and 2451269 -- partition key filter (3 months) | group by | ss_ticket_number, | ss_customer_sk, | ss_addr_sk, | ca_city | ) dn | join customer on (dn.ss_customer_sk = customer.c_customer_sk) | join customer_address current_addr on (customer.c_current_addr_sk = current_addr.ca_address_sk) |where | current_addr.ca_city <> bought_city |order by | c_last_name, | ss_ticket_number |limit 100 |-- end query 1 in stream 0 using template query68.tpl """.stripMargin), ("q7", """ |-- start query 1 in stream 0 using template query7.tpl |select | i_item_id, | avg(ss_quantity) agg1, | avg(ss_list_price) agg2, | avg(ss_coupon_amt) agg3, | avg(ss_sales_price) agg4 |from | store_sales | join customer_demographics on (store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk) | join item on (store_sales.ss_item_sk = item.i_item_sk) | join promotion on (store_sales.ss_promo_sk = promotion.p_promo_sk) | join date_dim on (ss_sold_date_sk = d_date_sk) |where | cd_gender = 'F' | and 
cd_marital_status = 'W' | and cd_education_status = 'Primary' | and (p_channel_email = 'N' | or p_channel_event = 'N') | and d_year = 1998 | -- and ss_date between '1998-01-01' and '1998-12-31' | and ss_sold_date_sk between 2450815 and 2451179 -- partition key filter |group by | i_item_id |order by | i_item_id |limit 100 |-- end query 1 in stream 0 using template query7.tpl """.stripMargin), ("q73", """ |-- start query 1 in stream 0 using template query73.tpl |select | c_last_name, | c_first_name, | c_salutation, | c_preferred_cust_flag, | ss_ticket_number, | cnt |from | (select | ss_ticket_number, | ss_customer_sk, | count(*) cnt | from | store_sales | join household_demographics on (store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk) | join store on (store_sales.ss_store_sk = store.s_store_sk) | -- join date_dim on (store_sales.ss_sold_date_sk = date_dim.d_date_sk) | where | store.s_county in ('Saginaw County', 'Sumner County', 'Appanoose County', 'Daviess County') | -- and date_dim.d_dom between 1 and 2 | -- and date_dim.d_year in(1998, 1998 + 1, 1998 + 2) | -- and ss_date between '1999-01-01' and '2001-12-02' | -- and dayofmonth(ss_date) in (1,2) | -- partition key filter | -- and ss_sold_date_sk in (2450816, 2450846, 2450847, 2450874, 2450875, 2450905, 2450906, 2450935, 2450936, 2450966, 2450967, | -- 2450996, 2450997, 2451027, 2451028, 2451058, 2451059, 2451088, 2451089, 2451119, 2451120, 2451149, | -- 2451150, 2451180, 2451181, 2451211, 2451212, 2451239, 2451240, 2451270, 2451271, 2451300, 2451301, | -- 2451331, 2451332, 2451361, 2451362, 2451392, 2451393, 2451423, 2451424, 2451453, 2451454, 2451484, | -- 2451485, 2451514, 2451515, 2451545, 2451546, 2451576, 2451577, 2451605, 2451606, 2451636, 2451637, | -- 2451666, 2451667, 2451697, 2451698, 2451727, 2451728, 2451758, 2451759, 2451789, 2451790, 2451819, | -- 2451820, 2451850, 2451851, 2451880, 2451881) | and (household_demographics.hd_buy_potential = '>10000' | or 
household_demographics.hd_buy_potential = 'unknown') | and household_demographics.hd_vehicle_count > 0 | and case when household_demographics.hd_vehicle_count > 0 then household_demographics.hd_dep_count / household_demographics.hd_vehicle_count else null end > 1 | and ss_sold_date_sk between 2451180 and 2451269 -- partition key filter (3 months) | group by | ss_ticket_number, | ss_customer_sk | ) dj | join customer on (dj.ss_customer_sk = customer.c_customer_sk) |where | cnt between 1 and 5 |order by | cnt desc |limit 1000 |-- end query 1 in stream 0 using template query73.tpl """.stripMargin), ("q79", """ |-- start query 1 in stream 0 using template query79.tpl |select | c_last_name, | c_first_name, | substr(s_city, 1, 30) as city, | ss_ticket_number, | amt, | profit |from | (select | ss_ticket_number, | ss_customer_sk, | s_city, | sum(ss_coupon_amt) amt, | sum(ss_net_profit) profit | from | store_sales | join household_demographics on (store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk) | join date_dim on (store_sales.ss_sold_date_sk = date_dim.d_date_sk) | join store on (store_sales.ss_store_sk = store.s_store_sk) | where | store.s_number_employees between 200 and 295 | and (household_demographics.hd_dep_count = 8 | or household_demographics.hd_vehicle_count > 0) | and date_dim.d_dow = 1 | and date_dim.d_year in (1998, 1998 + 1, 1998 + 2) | -- and ss_date between '1998-01-01' and '2000-12-25' | -- 156 days | and d_date between '1999-01-01' and '1999-03-31' | and ss_sold_date_sk between 2451180 and 2451269 -- partition key filter | group by | ss_ticket_number, | ss_customer_sk, | ss_addr_sk, | s_city | ) ms | join customer on (ms.ss_customer_sk = customer.c_customer_sk) |order by | c_last_name, | c_first_name, | -- substr(s_city, 1, 30), | city, | profit |limit 100 |-- end query 1 in stream 0 using template query79.tpl """.stripMargin), ("q8", """ | |-- start query 1 in stream 0 using template query8.tpl |select | s_store_name, | sum(ss_net_profit) 
|from | store_sales | join store on (store_sales.ss_store_sk = store.s_store_sk) | -- join date_dim on (store_sales.ss_sold_date_sk = date_dim.d_date_sk) | join | (select | a.ca_zip | from | (select | substr(ca_zip, 1, 5) ca_zip, | count( *) cnt | from | customer_address | join customer on (customer_address.ca_address_sk = customer.c_current_addr_sk) | where | c_preferred_cust_flag = 'Y' | group by | ca_zip | having | count( *) > 10 | ) a | left semi join | (select | substr(ca_zip, 1, 5) ca_zip | from | customer_address | where | substr(ca_zip, 1, 5) in ('89436', '30868', '65085', '22977', '83927', '77557', '58429', '40697', '80614', '10502', '32779', | '91137', '61265', '98294', '17921', '18427', '21203', '59362', '87291', '84093', '21505', '17184', '10866', '67898', '25797', | '28055', '18377', '80332', '74535', '21757', '29742', '90885', '29898', '17819', '40811', '25990', '47513', '89531', '91068', | '10391', '18846', '99223', '82637', '41368', '83658', '86199', '81625', '26696', '89338', '88425', '32200', '81427', '19053', | '77471', '36610', '99823', '43276', '41249', '48584', '83550', '82276', '18842', '78890', '14090', '38123', '40936', '34425', | '19850', '43286', '80072', '79188', '54191', '11395', '50497', '84861', '90733', '21068', '57666', '37119', '25004', '57835', | '70067', '62878', '95806', '19303', '18840', '19124', '29785', '16737', '16022', '49613', '89977', '68310', '60069', '98360', | '48649', '39050', '41793', '25002', '27413', '39736', '47208', '16515', '94808', '57648', '15009', '80015', '42961', '63982', | '21744', '71853', '81087', '67468', '34175', '64008', '20261', '11201', '51799', '48043', '45645', '61163', '48375', '36447', | '57042', '21218', '41100', '89951', '22745', '35851', '83326', '61125', '78298', '80752', '49858', '52940', '96976', '63792', | '11376', '53582', '18717', '90226', '50530', '94203', '99447', '27670', '96577', '57856', '56372', '16165', '23427', '54561', | '28806', '44439', '22926', '30123', '61451', '92397', 
'56979', '92309', '70873', '13355', '21801', '46346', '37562', '56458', | '28286', '47306', '99555', '69399', '26234', '47546', '49661', '88601', '35943', '39936', '25632', '24611', '44166', '56648', | '30379', '59785', '11110', '14329', '93815', '52226', '71381', '13842', '25612', '63294', '14664', '21077', '82626', '18799', | '60915', '81020', '56447', '76619', '11433', '13414', '42548', '92713', '70467', '30884', '47484', '16072', '38936', '13036', | '88376', '45539', '35901', '19506', '65690', '73957', '71850', '49231', '14276', '20005', '18384', '76615', '11635', '38177', | '55607', '41369', '95447', '58581', '58149', '91946', '33790', '76232', '75692', '95464', '22246', '51061', '56692', '53121', | '77209', '15482', '10688', '14868', '45907', '73520', '72666', '25734', '17959', '24677', '66446', '94627', '53535', '15560', | '41967', '69297', '11929', '59403', '33283', '52232', '57350', '43933', '40921', '36635', '10827', '71286', '19736', '80619', | '25251', '95042', '15526', '36496', '55854', '49124', '81980', '35375', '49157', '63512', '28944', '14946', '36503', '54010', | '18767', '23969', '43905', '66979', '33113', '21286', '58471', '59080', '13395', '79144', '70373', '67031', '38360', '26705', | '50906', '52406', '26066', '73146', '15884', '31897', '30045', '61068', '45550', '92454', '13376', '14354', '19770', '22928', | '97790', '50723', '46081', '30202', '14410', '20223', '88500', '67298', '13261', '14172', '81410', '93578', '83583', '46047', | '94167', '82564', '21156', '15799', '86709', '37931', '74703', '83103', '23054', '70470', '72008', '49247', '91911', '69998', | '20961', '70070', '63197', '54853', '88191', '91830', '49521', '19454', '81450', '89091', '62378', '25683', '61869', '51744', | '36580', '85778', '36871', '48121', '28810', '83712', '45486', '67393', '26935', '42393', '20132', '55349', '86057', '21309', | '80218', '10094', '11357', '48819', '39734', '40758', '30432', '21204', '29467', '30214', '61024', '55307', '74621', '11622', | 
'68908', '33032', '52868', '99194', '99900', '84936', '69036', '99149', '45013', '32895', '59004', '32322', '14933', '32936', | '33562', '72550', '27385', '58049', '58200', '16808', '21360', '32961', '18586', '79307', '15492') | ) b | on (a.ca_zip = b.ca_zip) | ) v1 on (substr(store.s_zip, 1, 2) = substr(v1.ca_zip, 1, 2)) |where | ss_date between '2002-01-01' and '2002-04-01' | -- and d_qoy = 1 | -- and d_year = 2002 |group by | s_store_name |order by | s_store_name |limit 100; |-- end query 1 in stream 0 using template query8.tpl """.stripMargin), ("q82", """ |-- start query 1 in stream 0 using template query82.tpl |select | i_item_id, | i_item_desc, | i_current_price |from | store_sales | join item on (store_sales.ss_item_sk = item.i_item_sk) | join inventory on (item.i_item_sk = inventory.inv_item_sk) | -- join date_dim on (inventory.inv_date_sk = date_dim.d_date_sk) |where | i_current_price between 30 and 30 + 30 | and i_manufact_id in (437, 129, 727, 663) | and inv_quantity_on_hand between 100 and 500 | and inv_date between '2002-05-30' and '2002-07-29' | -- and d_date between cast('2002-05-30' as date) and (cast('2002-05-30' as date) + 60) |group by | i_item_id, | i_item_desc, | i_current_price |order by | i_item_id |limit 100 |-- end query 1 in stream 0 using template query82.tpl """.stripMargin), ("q89", """ |-- start query 1 in stream 0 using template query89.tpl |select | * |from | (select | i_category, | i_class, | i_brand, | s_store_name, | s_company_name, | d_moy, | sum(ss_sales_price) sum_sales | -- avg(sum(ss_sales_price)) over (partition by i_category, i_brand, s_store_name, s_company_name) avg_monthly_sales | from | store_sales | join item on (store_sales.ss_item_sk = item.i_item_sk) | join store on (store_sales.ss_store_sk = store.s_store_sk) | join date_dim on (store_sales.ss_sold_date_sk = date_dim.d_date_sk) | where | -- ss_date between '2000-01-01' and '2000-12-31' | ss_sold_date_sk between 2451545 and 2451910 -- partition key filter | and 
d_year in (2000) | and ((i_category in('Home', 'Books', 'Electronics') | and i_class in('wallpaper', 'parenting', 'musical')) | or (i_category in('Shoes', 'Jewelry', 'Men') | and i_class in('womens', 'birdal', 'pants')) | ) | group by | i_category, | i_class, | i_brand, | s_store_name, | s_company_name, | d_moy | ) tmp1 |-- where |-- case when(avg_monthly_sales <> 0) then(abs(sum_sales - avg_monthly_sales) / avg_monthly_sales) else null end > 0.1 |order by | -- sum_sales - avg_monthly_sales, | sum_sales, | s_store_name |limit 100 |-- end query 1 in stream 0 using template query89.tpl """.stripMargin), ("q98", """ |-- start query 1 in stream 0 using template query98.tpl |select | i_item_desc, | i_category, | i_class, | i_current_price, | sum(ss_ext_sales_price) as itemrevenue | -- sum(ss_ext_sales_price) * 100 / sum(sum(ss_ext_sales_price)) over (partition by i_class) as revenueratio |from | store_sales | join item on (store_sales.ss_item_sk = item.i_item_sk) | join date_dim on (store_sales.ss_sold_date_sk = date_dim.d_date_sk) |where | i_category in('Jewelry', 'Sports', 'Books') | -- and d_date between cast('2001-01-12' as date) and (cast('2001-01-12' as date) + 30) | -- and d_date between '2001-01-12' and '2001-02-11' | -- and ss_date between '2001-01-12' and '2001-02-11' | -- and ss_sold_date_sk between 2451922 and 2451952 -- partition key filter | and ss_sold_date_sk between 2451911 and 2451941 -- partition key filter (1 calendar month) | and d_date between '2001-01-01' and '2001-01-31' |group by | i_item_id, | i_item_desc, | i_category, | i_class, | i_current_price |order by | i_category, | i_class, | i_item_id, | i_item_desc | -- revenueratio |limit 1000 |-- end query 1 in stream 0 using template query98.tpl """.stripMargin), ("ss_max", """ |select | count(*) as total, | count(ss_sold_date_sk) as not_null_total, | count(distinct ss_sold_date_sk) as unique_days, | max(ss_sold_date_sk) as max_ss_sold_date_sk, | max(ss_sold_time_sk) as max_ss_sold_time_sk, | 
max(ss_item_sk) as max_ss_item_sk, | max(ss_customer_sk) as max_ss_customer_sk, | max(ss_cdemo_sk) as max_ss_cdemo_sk, | max(ss_hdemo_sk) as max_ss_hdemo_sk, | max(ss_addr_sk) as max_ss_addr_sk, | max(ss_store_sk) as max_ss_store_sk, | max(ss_promo_sk) as max_ss_promo_sk |from store_sales """.stripMargin) ).map { case (name, sqlText) => Query(name, sqlText, description = "", executionMode = CollectResults) } val queriesMap = queries.map(q => q.name -> q).toMap val originalQueries = Seq( ("q3", """ select d_year ,item.i_brand_id brand_id ,item.i_brand brand ,sum(ss_ext_sales_price) sum_agg from date_dim dt JOIN store_sales on dt.d_date_sk = store_sales.ss_sold_date_sk JOIN item on store_sales.ss_item_sk = item.i_item_sk where item.i_manufact_id = 436 and dt.d_moy=12 group by d_year ,item.i_brand ,item.i_brand_id order by d_year ,sum_agg desc ,brand_id limit 100"""), ("q7", """ select i_item_id, avg(ss_quantity) agg1, avg(ss_list_price) agg2, avg(ss_coupon_amt) agg3, avg(ss_sales_price) agg4 from store_sales JOIN customer_demographics ON store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk JOIN date_dim ON store_sales.ss_sold_date_sk = date_dim.d_date_sk JOIN item ON store_sales.ss_item_sk = item.i_item_sk JOIN promotion ON store_sales.ss_promo_sk = promotion.p_promo_sk where cd_gender = 'F' and cd_marital_status = 'W' and cd_education_status = 'Primary' and (p_channel_email = 'N' or p_channel_event = 'N') and d_year = 1998 group by i_item_id order by i_item_id limit 100"""), ("q19", """ select i_brand_id, i_brand, i_manufact_id, i_manufact, sum(ss_ext_sales_price) as ext_price from date_dim JOIN store_sales ON date_dim.d_date_sk = store_sales.ss_sold_date_sk JOIN item ON store_sales.ss_item_sk = item.i_item_sk JOIN customer ON store_sales.ss_customer_sk = customer.c_customer_sk JOIN customer_address ON customer.c_current_addr_sk = customer_address.ca_address_sk JOIN store ON store_sales.ss_store_sk = store.s_store_sk where i_manager_id=7 and d_moy=11 and 
d_year=1999 and substr(ca_zip,1,5) <> substr(s_zip,1,5) group by i_brand ,i_brand_id ,i_manufact_id ,i_manufact order by ext_price desc ,i_brand ,i_brand_id ,i_manufact_id ,i_manufact limit 100"""), ("q27", """ select i_item_id, s_state, avg(ss_quantity) agg1, avg(ss_list_price) agg2, avg(ss_coupon_amt) agg3, avg(ss_sales_price) agg4 from store_sales JOIN customer_demographics ON store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk JOIN date_dim ON store_sales.ss_sold_date_sk = date_dim.d_date_sk JOIN store ON store_sales.ss_store_sk = store.s_store_sk JOIN item ON store_sales.ss_item_sk = item.i_item_sk where cd_gender = 'F' and cd_marital_status = 'W' and cd_education_status = 'Primary' and d_year = 1998 and s_state = 'TN' group by i_item_id, s_state order by i_item_id ,s_state limit 100"""), ("q34", """ select c_last_name ,c_first_name ,c_salutation ,c_preferred_cust_flag ,ss_ticket_number ,cnt from (select ss_ticket_number ,ss_customer_sk ,count(*) cnt from store_sales JOIN date_dim ON store_sales.ss_sold_date_sk = date_dim.d_date_sk JOIN store ON store_sales.ss_store_sk = store.s_store_sk JOIN household_demographics ON store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk where (date_dim.d_dom between 1 and 3 or date_dim.d_dom between 25 and 28) and (household_demographics.hd_buy_potential = '>10000' or household_demographics.hd_buy_potential = 'unknown') and household_demographics.hd_vehicle_count > 0 and (case when household_demographics.hd_vehicle_count > 0 then household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count else null end) > 1.2 and date_dim.d_year in (1998,1998+1,1998+2) and store.s_county in ('Williamson County','Williamson County','Williamson County','Williamson County', 'Williamson County','Williamson County','Williamson County','Williamson County') group by ss_ticket_number,ss_customer_sk) dn JOIN customer ON dn.ss_customer_sk = customer.c_customer_sk WHERE cnt between 15 and 20 order by 
c_last_name,c_first_name,c_salutation,c_preferred_cust_flag desc"""), ("q42", """ select d_year ,item.i_category_id ,item.i_category ,sum(ss_ext_sales_price) as s from date_dim dt JOIN store_sales ON dt.d_date_sk = store_sales.ss_sold_date_sk JOIN item ON store_sales.ss_item_sk = item.i_item_sk where item.i_manager_id = 1 and dt.d_moy=12 and dt.d_year=1998 group by d_year ,item.i_category_id ,item.i_category order by s desc,d_year ,i_category_id ,i_category limit 100"""), ("q43", """ select s_store_name, s_store_id, sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales from date_dim JOIN store_sales ON date_dim.d_date_sk = store_sales.ss_sold_date_sk JOIN store ON store.s_store_sk = store_sales.ss_store_sk where s_gmt_offset = -5 and d_year = 1998 group by s_store_name, s_store_id order by s_store_name, s_store_id,sun_sales,mon_sales,tue_sales,wed_sales,thu_sales,fri_sales,sat_sales limit 100"""), ("q46", """ select c_last_name ,c_first_name ,ca_city ,bought_city ,ss_ticket_number ,amt,profit from (select ss_ticket_number ,ss_customer_sk ,ca_city as bought_city ,sum(ss_coupon_amt) as amt ,sum(ss_net_profit) as profit from store_sales JOIN date_dim ON store_sales.ss_sold_date_sk = date_dim.d_date_sk JOIN store ON store_sales.ss_store_sk = store.s_store_sk JOIN household_demographics ON store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk JOIN customer_address ON store_sales.ss_addr_sk = customer_address.ca_address_sk where 
(household_demographics.hd_dep_count = 5 or household_demographics.hd_vehicle_count= 3) and date_dim.d_dow in (6,0) and date_dim.d_year in (1999,1999+1,1999+2) and store.s_city in ('Midway','Fairview','Fairview','Fairview','Fairview') group by ss_ticket_number,ss_customer_sk,ss_addr_sk,ca_city) dn JOIN customer ON dn.ss_customer_sk = customer.c_customer_sk JOIN customer_address ON customer.c_current_addr_sk = customer_address.ca_address_sk where customer_address.ca_city <> dn.bought_city order by c_last_name ,c_first_name ,ca_city ,bought_city ,ss_ticket_number limit 100"""), ("q52", """ select d_year ,item.i_brand_id brand_id ,item.i_brand brand ,sum(ss_ext_sales_price) as ext_price from date_dim JOIN store_sales ON date_dim.d_date_sk = store_sales.ss_sold_date_sk JOIN item ON store_sales.ss_item_sk = item.i_item_sk where item.i_manager_id = 1 and date_dim.d_moy=12 and date_dim.d_year=1998 group by d_year ,item.i_brand ,item.i_brand_id order by d_year ,ext_price desc ,brand_id limit 100"""), ("q55", """ select i_brand_id as brand_id, i_brand as brand, sum(store_sales.ss_ext_sales_price) ext_price from date_dim JOIN store_sales ON date_dim.d_date_sk = store_sales.ss_sold_date_sk JOIN item ON store_sales.ss_item_sk = item.i_item_sk where i_manager_id=36 and d_moy=12 and d_year=2001 group by i_brand, i_brand_id order by ext_price desc, brand_id limit 100 """), ("q59", """ |select | s_store_name1, | s_store_id1, | d_week_seq1, | sun_sales1 / sun_sales2, | mon_sales1 / mon_sales2, | tue_sales1 / tue_sales2, | wed_sales1 / wed_sales2, | thu_sales1 / thu_sales2, | fri_sales1 / fri_sales2, | sat_sales1 / sat_sales2 |from | (select | /*+ MAPJOIN(store, date_dim) */ | s_store_name s_store_name1, | wss.d_week_seq d_week_seq1, | s_store_id s_store_id1, | sun_sales sun_sales1, | mon_sales mon_sales1, | tue_sales tue_sales1, | wed_sales wed_sales1, | thu_sales thu_sales1, | fri_sales fri_sales1, | sat_sales sat_sales1 | from | (select | /*+ MAPJOIN(date_dim) */ | d_week_seq, | 
ss_store_sk, | sum(case when(d_day_name = 'Sunday') then ss_sales_price else null end) sun_sales, | sum(case when(d_day_name = 'Monday') then ss_sales_price else null end) mon_sales, | sum(case when(d_day_name = 'Tuesday') then ss_sales_price else null end) tue_sales, | sum(case when(d_day_name = 'Wednesday') then ss_sales_price else null end) wed_sales, | sum(case when(d_day_name = 'Thursday') then ss_sales_price else null end) thu_sales, | sum(case when(d_day_name = 'Friday') then ss_sales_price else null end) fri_sales, | sum(case when(d_day_name = 'Saturday') then ss_sales_price else null end) sat_sales | from | store_sales | join date_dim on (store_sales.ss_sold_date_sk = date_dim.d_date_sk) | where | -- ss_date between '1998-10-01' and '1999-09-30' | ss_sold_date_sk between 2451088 and 2451452 | group by | d_week_seq, | ss_store_sk | ) wss | join store on (wss.ss_store_sk = store.s_store_sk) | join date_dim d on (wss.d_week_seq = d.d_week_seq) | where | d_month_seq between 1185 and 1185 + 11 | ) y | join | (select | /*+ MAPJOIN(store, date_dim) */ | s_store_name s_store_name2, | wss.d_week_seq d_week_seq2, | s_store_id s_store_id2, | sun_sales sun_sales2, | mon_sales mon_sales2, | tue_sales tue_sales2, | wed_sales wed_sales2, | thu_sales thu_sales2, | fri_sales fri_sales2, | sat_sales sat_sales2 | from | (select | /*+ MAPJOIN(date_dim) */ | d_week_seq, | ss_store_sk, | sum(case when(d_day_name = 'Sunday') then ss_sales_price else null end) sun_sales, | sum(case when(d_day_name = 'Monday') then ss_sales_price else null end) mon_sales, | sum(case when(d_day_name = 'Tuesday') then ss_sales_price else null end) tue_sales, | sum(case when(d_day_name = 'Wednesday') then ss_sales_price else null end) wed_sales, | sum(case when(d_day_name = 'Thursday') then ss_sales_price else null end) thu_sales, | sum(case when(d_day_name = 'Friday') then ss_sales_price else null end) fri_sales, | sum(case when(d_day_name = 'Saturday') then ss_sales_price else null end) sat_sales | 
from | store_sales | join date_dim on (store_sales.ss_sold_date_sk = date_dim.d_date_sk) | where | -- ss_date between '1999-10-01' and '2000-09-30' | ss_sold_date_sk between 2451088 and 2451452 | group by | d_week_seq, | ss_store_sk | ) wss | join store on (wss.ss_store_sk = store.s_store_sk) | join date_dim d on (wss.d_week_seq = d.d_week_seq) | where | d_month_seq between 1185 + 12 and 1185 + 23 | ) x | on (y.s_store_id1 = x.s_store_id2) |where | d_week_seq1 = d_week_seq2 - 52 |order by | s_store_name1, | s_store_id1, | d_week_seq1 |limit 100 """.stripMargin), ("q68", """ select c_last_name ,c_first_name ,ca_city ,bought_city ,ss_ticket_number ,extended_price ,extended_tax ,list_price from (select ss_ticket_number ,ss_customer_sk ,ca_city as bought_city ,sum(ss_ext_sales_price) as extended_price ,sum(ss_ext_list_price) as list_price ,sum(ss_ext_tax) as extended_tax from store_sales JOIN date_dim ON store_sales.ss_sold_date_sk = date_dim.d_date_sk JOIN store ON store_sales.ss_store_sk = store.s_store_sk JOIN household_demographics ON store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk JOIN customer_address ON store_sales.ss_addr_sk = customer_address.ca_address_sk where date_dim.d_dom between 1 and 2 and (household_demographics.hd_dep_count = 5 or household_demographics.hd_vehicle_count= 3) and date_dim.d_year in (1999,1999+1,1999+2) and store.s_city in ('Midway','Fairview') group by ss_ticket_number ,ss_customer_sk ,ss_addr_sk,ca_city) dn JOIN customer ON dn.ss_customer_sk = customer.c_customer_sk JOIN customer_address ON customer.c_current_addr_sk = customer_address.ca_address_sk where customer_address.ca_city <> dn.bought_city order by c_last_name ,ss_ticket_number limit 100"""), ("q73", """ select c_last_name ,c_first_name ,c_salutation ,c_preferred_cust_flag ,ss_ticket_number ,cnt from (select ss_ticket_number ,ss_customer_sk ,count(*) cnt from store_sales JOIN date_dim ON store_sales.ss_sold_date_sk = date_dim.d_date_sk JOIN store ON 
store_sales.ss_store_sk = store.s_store_sk JOIN household_demographics ON store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk where date_dim.d_dom between 1 and 2 and (household_demographics.hd_buy_potential = '>10000' or household_demographics.hd_buy_potential = 'unknown') and household_demographics.hd_vehicle_count > 0 and case when household_demographics.hd_vehicle_count > 0 then household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count else null end > 1 and date_dim.d_year in (1998,1998+1,1998+2) and store.s_county in ('Williamson County','Williamson County','Williamson County','Williamson County') group by ss_ticket_number,ss_customer_sk) dj JOIN customer ON dj.ss_customer_sk = customer.c_customer_sk where cnt between 5 and 10 order by cnt desc"""), ("q79", """ select c_last_name,c_first_name,substr(s_city,1,30) as s_city,ss_ticket_number,amt,profit from (select ss_ticket_number ,ss_customer_sk ,store.s_city ,sum(ss_coupon_amt) amt ,sum(ss_net_profit) profit from store_sales JOIN date_dim ON store_sales.ss_sold_date_sk = date_dim.d_date_sk JOIN store ON store_sales.ss_store_sk = store.s_store_sk JOIN household_demographics ON store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk where (household_demographics.hd_dep_count = 8 or household_demographics.hd_vehicle_count > 0) and date_dim.d_dow = 1 and date_dim.d_year in (1998,1998+1,1998+2) and store.s_number_employees between 200 and 295 group by ss_ticket_number,ss_customer_sk,ss_addr_sk,store.s_city) ms JOIN customer on ms.ss_customer_sk = customer.c_customer_sk order by c_last_name,c_first_name,s_city, profit limit 100"""), ("qSsMax", """ |select | count(*) as total, | count(ss_sold_date_sk) as not_null_total, | count(distinct ss_sold_date_sk) as unique_days, | max(ss_sold_date_sk) as max_ss_sold_date_sk, | max(ss_sold_time_sk) as max_ss_sold_time_sk, | max(ss_item_sk) as max_ss_item_sk, | max(ss_customer_sk) as max_ss_customer_sk, | max(ss_cdemo_sk) as max_ss_cdemo_sk, | 
max(ss_hdemo_sk) as max_ss_hdemo_sk, | max(ss_addr_sk) as max_ss_addr_sk, | max(ss_store_sk) as max_ss_store_sk, | max(ss_promo_sk) as max_ss_promo_sk |from store_sales """.stripMargin) ).map { case (name, sqlText) => Query(name, sqlText, description = "original query", executionMode = CollectResults) } val interactiveQueries = Seq("q19", "q42", "q52", "q55", "q63", "q68", "q73", "q98").map(queriesMap) val reportingQueries = Seq("q3","q7","q27","q43", "q53", "q89").map(queriesMap) val deepAnalyticQueries = Seq("q34", "q46", "q59", "q65", "q79", "ss_max").map(queriesMap) val impalaKitQueries = interactiveQueries ++ reportingQueries ++ deepAnalyticQueries }
bit1129/spark-sql-perf
src/main/scala/com/databricks/spark/sql/perf/tpcds/ImpalaKitQueries.scala
Scala
apache-2.0
69,756
package scala.slick.test.memory

import org.junit.Test
import org.junit.Assert._
import scala.slick.testutil.TestDBs
import scala.slick.memory.{DistributedDriver, DistributedBackend}

/** Test for the DistributedDriver: runs queries that span two independent
  * database backends (an in-memory H2 and an in-memory Derby instance) and
  * verifies that the distributed profile can read from, and join across, both. */
class DistributedQueryingTest {
  // Two distinct test databases; the distributed driver federates over both.
  val tdb1 = TestDBs.H2Mem
  val tdb2 = TestDBs.DerbyMem
  // Combined profile built from the two underlying drivers.
  val dProfile = new DistributedDriver(tdb1.driver, tdb2.driver).profile

  // Table "tdb1_T" lives in the first database. The table class is defined
  // inside a block so that tdb1's profile imports stay locally scoped.
  val ts = {
    import tdb1.profile.simple._
    class T(tag: Tag) extends Table[(Int, Int, String)](tag, "tdb1_T") {
      def id = column[Int]("id", O.PrimaryKey)
      def a = column[Int]("a")
      def b = column[String]("b")
      def * = (id, a, b)
    }
    TableQuery[T]
  }

  // Table "tdb2_U" lives in the second database. Same shape as T, but the
  // class is declared at member level using fully-qualified lifted types.
  class U(tag: scala.slick.lifted.Tag) extends tdb2.profile.Table[(Int, Int, String)](tag, "tdb2_U") {
    import tdb2.profile.simple._
    def id = column[Int]("id", O.PrimaryKey)
    def a = column[Int]("a")
    def b = column[String]("b")
    def * = (id, a, b)
  }
  val us = scala.slick.lifted.TableQuery[U]

  // Fixture rows for each table; lower-case strings go to T, upper-case to U.
  val tData = Seq((1, 1, "a"), (2, 1, "b"), (3, 2, "c"), (4, 2, "d"), (5, 3, "e"), (6, 3, "f"))
  val uData = Seq((1, 1, "A"), (2, 1, "B"), (3, 2, "C"), (4, 2, "D"), (5, 3, "E"), (6, 3, "F"))

  /** Sets up both databases, opens a distributed session over them, and hands
    * the test body the two per-backend sessions plus the distributed session.
    * Nested try/finally guarantees each database is cleaned up even when the
    * other one (or the test body) fails. */
  def runTest[T](f: (tdb1.profile.Backend#Session, tdb2.profile.Backend#Session, DistributedBackend#Session) => T) {
    tdb1.cleanUpBefore()
    try {
      val db1 = tdb1.createDB()
      tdb2.cleanUpBefore()
      try {
        val db2 = tdb2.createDB()
        val db = DistributedBackend.Database(db1, db2)
        db.withSession { s =>
          // The distributed session's component sessions are exposed as a
          // sequence; index order matches the Database(db1, db2) order above,
          // hence the downcasts to each backend's concrete session type.
          f(s.sessions(0).asInstanceOf[tdb1.profile.Backend#Session],
            s.sessions(1).asInstanceOf[tdb2.profile.Backend#Session],
            s)
        }
      } finally tdb2.cleanUpAfter()
    } finally tdb1.cleanUpAfter()
  }

  @Test def test1 = runTest { (s1, s2, sDist) =>
    // Phase 1: create and populate T through db1's own session, then
    // round-trip the fixture data.
    {
      import tdb1.profile.simple._
      implicit val session = s1
      ts.ddl.create
      ts ++= tData
      assertEquals(tData.toSet, ts.run.toSet)
    }
    // Phase 2: same for U through db2's own session.
    {
      import tdb2.profile.simple._
      implicit val session = s2
      us.ddl.create
      us ++= uData
      assertEquals(uData.toSet, us.run.toSet)
    }
    // Phase 3: through the distributed profile, read each table and compute
    // the full cross product of T and U across the two databases.
    {
      import dProfile.simple._
      implicit val session = sDist
      assertEquals(tData.toSet, ts.run.toSet)
      assertEquals(uData.toSet, us.run.toSet)
      assertEquals(
        tData.flatMap(t => uData.map(u => (t, u))).toSet,
        ts.flatMap(t => us.map(u => (t, u))).run.toSet
      )
    }
  }
}
dvinokurov/slick
slick-testkit/src/test/scala/scala/slick/test/memory/DistributedQueryingTest.scala
Scala
bsd-2-clause
2,433
package twatcher.logics import twatcher.globals.{db, twitter} import twatcher.models._ import twatcher.actors._ import play.api.libs.concurrent.Akka import play.api.libs.concurrent.Execution.Implicits.defaultContext import play.api.Logger import play.api.Play.current import scala.concurrent.Future import scala.concurrent.duration._ import scala.sys.process._ import akka.actor.Props import akka.pattern.ask import akka.util.Timeout object BatchLogic { private[this] val batchActor = Akka.system.actorOf(Props[BatchActor]) private[this] implicit val timeout = Timeout(5.seconds) def check(): Future[Boolean] = { val resultFut = for { periodDay <- db.run(Configs.get).map(_.period) accountList <- db.run(Accounts.get).map(_.toList) _ <- TwitterLogic.insertTweetsAll(twitter, accountList) result <- isActiveFut(periodDay, accountList) runningSt <- batchActor ? IsRunning() } yield (result, runningSt, accountList) resultFut onSuccess { case (true, _, _) => { // Do not have to run script Logger.info("You are alive!") } case (false, RunningStatus(false), accountList) => { // Execute script Logger.info("You are dead!") db.run(Scripts.get).foreach { scripts => batchActor ! ScriptList(scripts.toList) batchActor ! AccountList(accountList) } } case _ => { Logger.info("batching has been already doing.") } } resultFut.map(_._1) } /** * Check accounts and exit program */ def batch() = { val resultFut = check() resultFut onSuccess { case true => exit() case false => // BatchActor kills App so here have nothing to do. } resultFut onFailure { case e: Throwable => { e.printStackTrace() exit() } } } /** * Check whecher accounts are active or not */ private[this] def isActiveFut(periodDay: Int, accountList: List[Account]): Future[Boolean] = TwitterLogic.isActiveAll(twitter, periodDay, accountList) def exit() { (10 to 1 by -1).foreach { n => Logger.info(s"exit program in $n seconds...") Thread.sleep(1000L) } if (isWindows) { "cmd /c exit.bat".! } else { "./exit".! 
} } private[this] def isWindows: Boolean = System.getProperty("os.name") contains "Windows" /** * Listen Batch Actor whether app can exit. * Execute exit script if app can. */ private[this] def listenToExit() = { Akka.system.scheduler.schedule( initialDelay = 10.seconds , interval = 10.seconds , receiver = batchActor , message = Exit() ) } }
srd7/twatcher
app/logics/BatchLogic.scala
Scala
mit
2,710
/** * _____ _ _ _____ _ * | __| |_ ___|_|___ ___| __|___| |_ _ ___ ___ * |__ | _| _| | | . |__ | . | | | | -_| _| * |_____|_| |_| |_|_|_|_ |_____|___|_|\_/|___|_| * |___| * * File: Stringsolver.scala * Author: Mikaรซl Mayer * Date: 27.11.2013 * Purpose;Provides all algorithms related to synthesis */ package ch.epfl.lara.synthesis.stringsolver import java.util.regex.Pattern import scala.collection.mutable.ArrayBuffer import scala.collection.mutable.{HashMap => MMap} import scala.collection.mutable.ListBuffer import scala.concurrent._ import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.duration._ import scala.util.matching.Regex import ProgramSet._ import org.apache.commons.lang.StringEscapeUtils /** * An extension delivers start and end position to which it applies, * and a way to create a SSpecialConversion out of a SSubStr. */ trait Extension { def apply(s: String, output: String): Seq[(Int, Int, SSubStr => SSpecialConversion)] } /** Class to use StriSynth very easily */ object CurrentInstance { def HELP = println(""" NEW Triggers learning of a new program. Automatic first time. Transform "input" ==> "output" ("input", index) ==> "output" The variable TRANSFORM will contain the resulting program After one or multiple TRANSFORM examples, you can invoke disambiguate(input1, input2...) to check for ambiguities. For example: "MikaelM" ==> "MM"; disambiguate("MarionM", "LeonP") Reduce (List("input1", ...), index) ==> "output" List("input1", ...) ==> "output" The variable REDUCE will contain the resulting program Split examples (can provide several examples) "input" ==> ("output1", "output2", ..., "...") The variable SPLIT will contain the resulting program. Partition examples. PARTITIONTYPE("example1-1", "example1-2", ...) The variable PARTITION will contain the resulting program. Filter examples. YES ==> "input1" YES ==> ("input1", ...) NO ==> "input2" NO ==> List("input2", ...) The variable FILTER will contain the resulting program. 
Program composition val split = SPLIT val reduce = REDUCE val program = (TRANSFORM as map) | REDUCE Exporting the program PROGRAM in Powershell to "script.ps1" HELP Displays this help """) private sealed trait LearningType private object MAPTYPE extends LearningType { override def toString = "TRANSFORM or REDUCE" } private object PARTITIONTYPE extends LearningType { override def toString = "PARTITION" } private object FILTERTYPE extends LearningType { override def toString = "FILTER" } private object SPLITTYPE extends LearningType { override def toString = "SPLIT" } private object ALL extends LearningType { override def toString = "all type" } private var _currentSolver: StringSolver = null; private var currentType: LearningType = ALL private var programToRecompute = true; private def createNew(): Unit = { _currentSolver = StringSolver() } def NEW = { currentType = ALL CurrentInstance.createNew() } import Program._ def PROGRAM: ExportableWithType[List[String], String] = ReduceProgram(_currentProgram) def REDUCE: ExportableWithType[List[String], String] = ReduceProgram(_currentProgram) def MAP: ExportableWithType[String, String] = { println("The use of MAP is deprecated. 
Use the synonym TRANSFORM"); TransformProgram(_currentProgram) } def TRANSFORM: ExportableWithType[String, String] = TransformProgram(_currentProgram) def PARTITION: ExportableWithType[List[String], List[List[String]]] = _currentPartitionProgram def FILTER: ExportableWithType[List[String], List[String]] = _currentFilterProgram def CANCEL = currentType match { case ALL => println("Nothing to cancel") case MAPTYPE => currentSolver.cancelLast() solve() case PARTITIONTYPE => if(partitionExamples.nonEmpty) { partitionExamples = partitionExamples.init solve() } else println("Nothing to cancel") case FILTERTYPE => if(filterExamples.nonEmpty) { filterExamples = filterExamples.init solve() } else println("Nothing to cancel") case SPLITTYPE => _currentSolver.cancelLast() solve() } def SPLIT: SplitProgram = _currentSplitProgram def currentSolver = if(_currentSolver == null) { NEW _currentSolver } else _currentSolver import StringSolver.{InputOutputExample, Input_state, SplitExample, PartitionExample} private var _currentProgram: Program = null private var _currentSplitProgram: SplitProgram = null private var _currentPartitionProgram: PartitionProgram = null private var _currentFilterProgram: FilterProgram = null private var partitionExamples = List[PartitionExample]() private def _disambiguate(programs: Seq[Program], inputs: Seq[String]): Unit = { if(programs.isEmpty) return; if(currentSolver.isVerbose) println("Found several programs to disambiguate, the first one is :") if(currentSolver.isVerbose) println(programs.head) object Ambiguous { def unapply(i: (String, Int)): Option[Stream[(String, String)]] = { val originalOutput = try { programs.head.apply(i._1, i._2) } catch { case e: Exception => "" } val alternativeOutputs = programs.tail.toStream.flatMap{ (p: Program) => //println(s"Evaluating $p on $i") try { Some((p, p(i._1, i._2))) } catch { case e: Exception => None } } val ambiguities = alternativeOutputs.filter(pp => pp._2 != originalOutput) if(ambiguities.isEmpty) 
None else { var seen: Set[String] = Set(originalOutput) Some((i._1, originalOutput) #:: (ambiguities flatMap { case (prog, output) if(!seen(output)) => seen += output Some((i._1, output)) case _ => None })) } } } inputs.zipWithIndex.collectFirst { case input@Ambiguous(originalAlternative) => println("Ambiguity found. What is the right output? First is current") originalAlternative.foreach { case (original, alternative) => println("\"" + StringEscapeUtils.escapeJava(original) + "\" ==> \"" + StringEscapeUtils.escapeJava(alternative) + "\"") } }.getOrElse(println("No ambiguity found. Try to increase currentSolver.numBestPrograms = 10")) } def disambiguate(inputs: String*): Unit = disambiguate(inputs.toList) def disambiguate(inputs: List[String]): Unit = { currentType match { case ALL => println("No example given. Type DOC to get the documentation") case MAPTYPE => _currentSolver.solveNBest() match { case Some(programs) => _disambiguate(programs, inputs) case None => println("No map/reduce program found. To cancel the last example, please type CANCEL. To reset, call NEW") } case _ => println("Disambiguation currently works only for TRANSFORM") } } def solve(): Unit = currentType match { case ALL => println("No example given. Type DOC to get the documentation") case MAPTYPE => _currentSolver.solve() match { case Some(program) => _currentProgram = program println(_currentProgram) case None => println("No map/reduce program found. To cancel the last example, please type CANCEL. To reset, call NEW") } case PARTITIONTYPE => partitionExamples.length match { case 0 => println("Please write two partition examples like ==>(\"part1\", \"part2\") before continuing") case 1 => println("Please write one more partition example like ==>(\"part1\", \"part2\")") case n => //First regroup partitions sharing similar elements. 
val prepartitions = partitionExamples.toArray var i = 0 for(p <- partitionExamples) { p.next = null p.prev = null p.index = i i += 1 } i = 0 while(i < prepartitions.length) { var j = i + 1 val partition1 = prepartitions(i) while(j < prepartitions.length) { val partition2 = prepartitions(j) if(partition1.partition.intersect(partition2.partition).nonEmpty) { partition1.mergeNextWith(partition2) } j += 1 } i += 1 } val partitions = partitionExamples.map(_.computeUnique).filter(_.nonEmpty) val input = partitions.zipWithIndex.flatMap{case (p, i) => p.map(e => (e, i.toString))}; Service.getPartition(input.toList) match { case Some((c, c2, f)) => c.solve() match { case Some(p) => _currentPartitionProgram = PartitionProgram(p) println(_currentPartitionProgram) case None => println("No PARTITION program found. CANCEL the last example or NEW to create a new program") } case None => println("No PARTITION program found. CANCEL the last example or NEW to create a new program") } } case FILTERTYPE => if(filterExamples.length == 0) println("Need at least one OK==>(example) to learn FILTER. Type RESET to reset") else { Service.getFilter(filterExamples.flatten) match { case Some((ss, m)) => ss.solve() match { case Some(s) => _currentFilterProgram = FilterProgram(s, m) println(_currentFilterProgram) case None => println("No FILTER program found. Add a new example, CANCEL the last example or NEW to create a new program") } case None => println("No FILTER program found. Add a new example, CANCEL the last example or NEW to create a new program") } } case SPLITTYPE => _currentSolver.solve() match { case Some(program) => _currentSplitProgram = SplitProgram(program) println(_currentSplitProgram) case None => println("No split program found. To cancel the last example, please type CANCEL. 
To reset, call NEW") } } def timed[A](block: => A): A = { val start = System.currentTimeMillis() val result = block val end = System.currentTimeMillis() println((end-start)+"ms to complete") result } implicit class StringWrapper(input: String) { def ==>(output: String): ExportableWithType[String, String] = timed { currentType match { case ALL | MAPTYPE => if(currentType != MAPTYPE) println("Learning TRANSFORM") currentType = MAPTYPE currentSolver.add(InputOutputExample(Input_state(IndexedSeq(input), 0), output, false)) solve() TRANSFORM case _ => println("Stopped learning a " + currentType + " program.\n") NEW ==>(output) } } } implicit class StringIndexWrapper(inputIndex: (String, Int)) { def ==>(output: String): ExportableWithType[String, String] = timed { currentType match { case ALL | MAPTYPE => if(currentType != MAPTYPE) println("Learning TRANSFORM") currentType = MAPTYPE currentSolver.add(InputOutputExample(Input_state(IndexedSeq(inputIndex._1), inputIndex._2-1), output, true)) solve() TRANSFORM case _ => println("Stopped learning a " + currentType + " program.\n") NEW ==>(output) } } } implicit class TupleListWrapper(inputsIndex: List[String]) { def ==>(output: String): ExportableWithType[List[String], String] = timed { currentType match { case ALL | MAPTYPE => if(currentType != MAPTYPE) println("Learning REDUCE") currentType = MAPTYPE currentSolver.add(InputOutputExample(Input_state(inputsIndex.toIndexedSeq, 0), output, false)) solve() REDUCE case _ => println("Stopped learning a " + currentType + " program.\n") NEW ==>(output) } } } implicit class TupleListIndexWrapper(inputsIndex: (List[String], Int)) { def ==>(output: String): ExportableWithType[List[String], String] = timed { currentType match { case ALL | MAPTYPE => if(currentType != MAPTYPE) println("Learning REDUCE") currentType = MAPTYPE currentSolver.add(InputOutputExample(Input_state(inputsIndex._1.toIndexedSeq, inputsIndex._2-1), output, true)) solve() REDUCE case _ => println("Stopped learning a 
" + currentType + " program.\n") NEW ==>(output) } } } implicit class TupleWrapper2(inputs: (String, String)) { def ==>(output: String): ExportableWithType[List[String], String] = List(inputs._1, inputs._2) ==> output } implicit class TupleWrapper3(inputs: (String, String, String)) { def ==>(output: String): ExportableWithType[List[String], String] = List(inputs._1, inputs._2, inputs._3) ==> output } implicit class TupleWrapper4(inputs: (String, String, String, String)) { def ==>(output: String): ExportableWithType[List[String], String] = List(inputs._1, inputs._2, inputs._3, inputs._4) ==> output } implicit class TupleWrapper2Index(inputs: ((String, String), Int)) { def ==>(output: String): ExportableWithType[List[String], String] = (List(inputs._1._1, inputs._1._2), inputs._2) ==> output } implicit class TupleWrapper3Index(inputs: ((String, String, String), Int)) { def ==>(output: String): ExportableWithType[List[String], String] = (List(inputs._1._1, inputs._1._2, inputs._1._3), inputs._2) ==> output } implicit class TupleWrapper4Index(inputs: ((String, String, String, String), Int)) { def ==>(output: String): ExportableWithType[List[String], String] = (List(inputs._1._1, inputs._1._2, inputs._1._3, inputs._1._4), inputs._2) ==> output } implicit class SplitWrapper(input: String) { def ==>(output1: String, output2: String, outputs: String*): SplitProgram = ==>(output1::output2::outputs.toList) def ==>(outputs: List[String]): SplitProgram = timed { currentType match { case ALL | SPLITTYPE => if(currentType != SPLITTYPE) println("Learning SPLIT") currentType = SPLITTYPE currentSolver.add(SplitExample(input, outputs.takeWhile(s => s != "..."))) solve() SPLIT case _ => println("Stopped learning a " + currentType + " program.\n") NEW this.==>(outputs) } } } def ==>(partition: String*): ExportableWithType[List[String], List[List[String]]] = timed {currentType match { case ALL | PARTITIONTYPE => if(currentType == ALL) { partitionExamples = Nil println("Learning 
PARTITION") } currentType = PARTITIONTYPE partitionExamples = partitionExamples ++ List(PartitionExample(partition.toList)) solve() PARTITION case _ => println("Stopped learning a " + currentType + " program.\n") NEW ==>(partition: _*) } } var filterExamples: List[List[(String, Boolean)]] = Nil sealed abstract class FilterToken(positive: Boolean) { private def addExamples(strs: List[String]) = { filterExamples = filterExamples ++ List(strs.map(s => (s, positive))) solve() } private def addAndCheck(accepted: List[String]): ExportableWithType[List[String], List[String]] = timed {currentType match { case ALL | FILTERTYPE => if(currentType == ALL) { filterExamples = Nil println("Learning FILTER") } currentType = FILTERTYPE addExamples(accepted) FILTER case _ => println("Stopped learning a " + currentType + " program.\n") NEW addAndCheck(accepted) } } def ==>(accepted: String*):ExportableWithType[List[String], List[String]] = { ==>(accepted.toList) FILTER } def ==>(accepted: List[String]):ExportableWithType[List[String], List[String]] = addAndCheck(accepted) } object YES extends FilterToken(true) object NO extends FilterToken(false) val OK, Ok, ok, Yes, yes = YES val NOTOK, NotOk, Notok, notok, No, no = NO sealed trait MapConstant object map extends MapConstant implicit class MapWrapper[In, Out](e: ExportableWithType[In, Out]) { def as(dummy: MapConstant): ExportableWithType[List[In], List[Out]] = Mapper(e) } } /**StringSolver object * Used to create a StringSolver instance to solve input/output problems. 
*/ object StringSolver { import Program._ import ProgramSet._ import Evaluator._ import Implicits._ import scala.language._ final val debugActive = false private type PrevNumberString = String type Output_state = String //case class Output_state(value: String, position: Int) type S = String case class Input_state(inputs: IndexedSeq[String], position: Int) type ฯƒ = Input_state type Regular_Expression = RegExp type W[Node] = Map[(Node, Node), Set[SAtomicExpr]] type Start = Int type End = Int type Index = Int case class InputOutputExample(inputState: Input_state, output: Output_state, indexSet: Boolean) case class SplitExample(input: String, outputs: List[String]) //def debug(s: String) = if(debugActive) println(s) case class PreExample(index: Int, output: String) /*implicit class Wrapper(index: Int) { def ==>(output: String) = PreExample(index, output) } implicit class Wrapper2(input: String) { def index(remaining: PreExample) = InputOutputExample(Input_state(IndexedSeq(input), remaining.index-1), remaining.output, true) def index(i: Int) = Input_state(IndexedSeq(input), i-1) def ==>(output: String) = InputOutputExample(Input_state(IndexedSeq(input), 0), output, false) } implicit class WrapperInputIndex(inputIndex: (String, Int)) { def ==>(output: String) = InputOutputExample(Input_state(IndexedSeq(inputIndex._1), inputIndex._2-1), output, true) } implicit class Wrapper3(inputs: List[String]) { def index(remaining: PreExample) = InputOutputExample(Input_state(inputs.toIndexedSeq, remaining.index-1), remaining.output, true) def index(i: Int) = Input_state(inputs.toIndexedSeq, i-1) def ==>(output: String) = InputOutputExample(Input_state(inputs.toIndexedSeq, 0), output, false) } implicit class WrapperSplit(input: String) { def ==>(output1: String, outputs: String*): SplitExample = { ==>(output1::outputs.toList); } def ==>(outputs: List[String]): SplitExample = { SplitExample(input, outputs.takeWhile(s => s != "...")) } }*/ object PartitionExample { // Inserts sequence 
p2 into sequence p1. Needs that p2.prev == null def mergeSequences(p1: PartitionExample, p2: PartitionExample): Unit = { if(p1 == null || p2 == null) return; if(p1.index == p2.index || p1 == p2) return mergeSequences(p1.next, p2.next); if(p1.index < p2.index) { if(p1.next != null) { if(p1.next.index > p2.index) { val p2next = p2.next p2next.prev = null p1.next.prev = p2 p2.next = p1.next p2.prev = p1 p1.next = p2 mergeSequences(p2, p2next) } else if(p1.next.index == p2.index) { return; // It's ok !! } else { // p1.next.index < p2.index mergeSequences(p1.next, p2) } } else { // p1.next == null if(p2.prev == null) { p1.next = p2 p2.prev = p1 } else { mergeSequences(p1, p2.computeFirst()) } } } else { mergeSequences(p2, p1) } } } case class PartitionExample(partition: List[String]) { var index: Int = 0 var next: PartitionExample = null // index of next is greater than index var prev: PartitionExample = null // index of prev is less than index def computeUnique(): List[String] = if(prev != null) List() else computeNext() def computeNext(): List[String] = if(next != null) partition.union(next.computeNext()) else partition def isConnectedWith(p: PartitionExample): Boolean = { this == p || (p != null && ( (p.index > index && next != null && next.isConnectedWith(p)) || (p.index < index && prev != null && prev.isConnectedWith(p)))) } def computeFirst(): PartitionExample = if(prev == null) this else prev.computeFirst() def mergeNextWith(p: PartitionExample): PartitionExample = { if(isConnectedWith(p)) return this; if(p.index == index) return this; if(p.index < index) { p.mergeNextWith(this) return this } if(this.next == null && p.prev == null) { // Simple merge: just concatenate. 
this.next = p p.prev = this return this } PartitionExample.mergeSequences(this.computeFirst(), p.computeFirst()) return this } } implicit def toPartitionExample2(input: (String, String)) = PartitionExample(List(input._1, input._2)) implicit def toPartitionExample3(input: (String, String, String)) = PartitionExample(List(input._1, input._2, input._3)) implicit def toPartitionExample3(input: (String, String, String, String)) = PartitionExample(List(input._1, input._2, input._3, input._4)) def apply(): StringSolver = new StringSolver() def apply(example: InputOutputExample, remaining: InputOutputExample*): Program = { val examples = StringSolver() for(e <- (example::remaining.toList)) { if(e.indexSet) { examples.add(e.inputState, e.output) } else { examples.add(e.inputState.inputs, e.output) } } examples.solve().getOrElse(null) } def apply(input: List[List[String]], output: List[String]): Option[Program] = { val solver = apply() (input zip output) foreach { case (i, o) => solver.add(i, o) } solver.solve() } def apply(example: SplitExample, remaining: SplitExample*): SplitProgram = { val examples = StringSolver() for(e <- (example::remaining.toList)) { examples.add(e); } val res = examples.solve().getOrElse(null) if(res != null) SplitProgram(res) else null } //def apply(example: PartitionExample, remaining: PartitionExample*): PartitionProgram = { // null //Service.getPartition(examples, c, c2, opt) //} //implicit def indexedSeqToInputState(arr: IndexedSeq[String]) = Input_state(arr, IndexedSeq[String]()) } /** * Instance solving the problem iteratively */ class StringSolver { import Program._ import ProgramSet._ import Evaluator._ import Implicits._ import scala.language._ import StringSolver._ private var ff = new StringSolverAlgorithms() private var currentPrograms: IndexedSeq[STraceExpr] = null private var singlePrograms = ArrayBuffer[IndexedSeq[STraceExpr]]() private var multiPrograms = ArrayBuffer[IndexedSeq[STraceExpr]]() //private var previousOutputs: 
IndexedSeq[String] = Array[String]() private var inputList = List[List[String]]() private var outputList = List[List[String]]() private var extra_time_to_merge = 2f private var extra_time_to_compute_loop = 0.5f private var extra_time_to_resolve = 2f private var index_number = 0 var numBestPrograms = 5 def copy(): StringSolver= { // TODO: better copy method val d = new StringSolver() d.currentPrograms = this.currentPrograms d.singlePrograms = ArrayBuffer(this.singlePrograms : _*) d.inputList = this.inputList d.outputList = this.outputList d.extra_time_to_merge = extra_time_to_merge d.extra_time_to_compute_loop = extra_time_to_compute_loop d.extra_time_to_resolve = extra_time_to_resolve d.index_number = index_number d.ff = ff.copy() d } /** * Proportion of the original time to give to compute loops */ def setExtraTimeToComputeLoops(f: Float) = {extra_time_to_compute_loop = f; this} /** * Proportion of the original time to resolve from computing loops */ def setExtraTimeToResolve(f: Float) = {extra_time_to_resolve = f; this} /** * Possibility to reset the counter used to number files */ def resetCounter() = { index_number = 0; this} /** * Sets the current input/output position example to i */ def setPosition(i: Int) = {index_number = i; this} /** * Use dots ... to trigger manual loop research */ def setUseDots(b: Boolean) = {ff.useDots = b; this} /** * Use numbering from previous input option */ def setUseNumbers(b: Boolean, undoBuffer: UndoBuffer = null) = { if(undoBuffer != null) undoBuffer.add(((last: Boolean) => () => ff.numbering = last)(ff.numbering)); ff.numbering = b; this } /** * Loop level. 0 will not look for loops */ def setLoopLevel(i: Int) = {ff.DEFAULT_REC_LOOP_LEVEL = i; this} /** * Timeout in seconds to add a new input/output example. * This is approximate. 
Default is 15s */ def setTimeout(seconds: Int) = {ff.TIMEOUT_SECONDS = seconds; this} /** * If looking for loops, what could be the maximum separator length */ def setMaxSeparatorLength(length: Int) = {ff.MAX_SEPARATOR_LENGTH = length; this} /** * If only interesting positions (aka word, special chars and digit separators) * are considered when looking for loops */ def setOnlyInterestingPositions(b: Boolean) = {ff.onlyInterestingPositions = b; this} /** * Outputs programs steps. Useful for debugging an other. */ def setVerbose(b: Boolean) = {ff.verbose = b; this} def isVerbose = ff.verbose /** * Allows to iterate over inputs. */ def setIterateInput(b: Boolean) = ff.iterateInput = b /** * Allows to use the example index for positions */ def setUseIndexForPosition(b: Boolean, undoBuffer: UndoBuffer = null) = { if(undoBuffer != null) undoBuffer.add(((last: Boolean) => () => ff.useIndexForPosition = last)(ff.useIndexForPosition)); ff.useIndexForPosition = b } /** * Retrieves statistics */ def getStatistics(): String = ff.statistics() /** * Advanced stats. */ def setAdvancedStats(b: Boolean) = ff.advanced_stats = b /** * Extra time to merge as a proportion of timeout */ def setExtraTimeToMerge(f: Float) = extra_time_to_merge = f /* Undo redo mechanism */ sealed trait Undo { def undo() } case class UndoAction(f : () => Unit) extends Undo { def undo() = f() } import collection.mutable.ListBuffer case class UndoBuffer(l: ListBuffer[Undo] = ListBuffer[Undo]()) extends Undo { UndoList = this::UndoList def undo() = l.toList.reverse.foreach(el => el.undo()) def add(f: () => Unit) = l += UndoAction(f) } var UndoList = List[Undo]() def cancelLast(): Unit = UndoList match { case head::tail => UndoList = tail head.undo() case Nil => println("Nothing to cancel") } /**Adds a new inputs/outputs example. 
**/ def add(input: Seq[String], output: Seq[String]): Seq[STraceExpr] = { val undo = UndoBuffer() if(!(output.exists(out => out.exists(_.isDigit)))) { // If not digit for output, we don't use numbers. setUseNumbers(false, undo) } if(inputList != Nil && input == inputList.last) { // Parsing case. We use index for position setUseIndexForPosition(true, undo) } undo.add(((last: List[List[String]]) => () => inputList = last)(inputList)) undo.add(((last: List[List[String]]) => () => outputList = last)(outputList)) undo.add(((last: Int) => () => index_number = last)(index_number)) inputList = inputList ++ List(input.toList) outputList = outputList ++ List(output.toList) val iv = input.toIndexedSeq val ov = output.toIndexedSeq val tmpIndexNumber = index_number if(Main.debug) Service.debug(input.mkString(",")+" ("+index_number+")==>"+output.mkString(",")) if(Main.debug) Service.debug("Looking for programs for this new entry...") val fetchPrograms = future { for(out <- ov) yield ff.generateStr(Input_state(iv, tmpIndexNumber), out, ff.DEFAULT_REC_LOOP_LEVEL) } index_number += 1 var tmp = ff.DEFAULT_REC_LOOP_LEVEL val newProgramSets : IndexedSeq[STraceExpr] = try { Await.result(fetchPrograms, ff.TIMEOUT_SECONDS.seconds) } catch { case e: TimeoutException => if(output.exists(_.indexOf("...") != -1 && ff.useDots)) { // Stop first phase of computing GenerateStr if there are still dots to compute. 
ff.timeoutGenerateStr = true try { Await.result(fetchPrograms, (ff.TIMEOUT_SECONDS * extra_time_to_compute_loop).seconds) } catch { case e: TimeoutException => ff.DEFAULT_REC_LOOP_LEVEL = 0 ff.timeout = true Await.result(fetchPrograms, (ff.TIMEOUT_SECONDS * extra_time_to_resolve).seconds) } } else { ff.timeoutGenerateStr = true ff.DEFAULT_REC_LOOP_LEVEL = 0 // No loops this time, especially if there is no ff.timeout = true Await.result(fetchPrograms, (ff.TIMEOUT_SECONDS * extra_time_to_resolve).seconds) } case e: Throwable => throw e } if(Main.debug) Service.debug("Adding the resulting program sets...") ff.DEFAULT_REC_LOOP_LEVEL = tmp ff.timeout = false val res = add(newProgramSets, undo) if(Main.debug) Service.debug("Computation done.") res } /**Adds a new program set **/ def add(newProgramSets: IndexedSeq[STraceExpr], undo: UndoBuffer): IndexedSeq[STraceExpr] = { if(currentPrograms == null) { if(undo != null) { undo.add(() => currentPrograms = null) } currentPrograms = newProgramSets } else { val waiting_seconds = (ff.TIMEOUT_SECONDS * extra_time_to_merge).toInt if(ff.verbose) println(s"Computing intersection with previous programs... (waiting $waiting_seconds seconds)") val intersectionParams = for(i <-0 until currentPrograms.length) yield { IntersectParam(None, currentPrograms(i).examplePosition, newProgramSets(i).examplePosition, false, ff.useIndexForPosition) } val intersectionsFuture = future { for(i <-0 until currentPrograms.length) yield { //println(s"Intersecting programs $i") intersect(currentPrograms(i), newProgramSets(i))(intersectionParams(i)) } //(currentPrograms zip newProgramSets) map { case (a, b) => intersect(a, b) } } val intersections = try { Await.result(intersectionsFuture, waiting_seconds.seconds) } catch { case e: TimeoutException => if(ff.verbose) println(s"Intersection took too much time! Resolving what has been done... 
(waiting ${ff.TIMEOUT_SECONDS * extra_time_to_resolve} seconds)") intersectionParams.foreach{ f => f.timeout = true} try { Await.result(intersectionsFuture, (ff.TIMEOUT_SECONDS * extra_time_to_resolve).seconds) } catch { case e: TimeoutException => if(ff.verbose) println("Resolving took too much time! Intersection cancelled") currentPrograms map {_ => SEmpty} } //throw e } if(undo != null) undo.add(((last: IndexedSeq[STraceExpr]) => () => currentPrograms = last)(currentPrograms)) currentPrograms = intersections } if(undo != null) undo.add(((last: ArrayBuffer[IndexedSeq[STraceExpr]]) => () => singlePrograms = last)(singlePrograms)) singlePrograms += newProgramSets if(debugActive) verifyCurrentState() newProgramSets } /**Adds a new program set **/ def add(newProgramSets: STraceExpr): STraceExpr = { add(IndexedSeq(newProgramSets), null)(0) } /**Adds a new input/output example. * If the best program already matches the input/output example, * it is not recomputed. **/ def add(input: Seq[String], output: String): STraceExpr = { val res = add(input, IndexedSeq(output)) res(0) } /**Adds a new input/output example. * If the best program already matches the input/output example, * it is not recomputed. **/ def add(input: String, output: String): STraceExpr = { val res = add(IndexedSeq(input), IndexedSeq(output)) res(0) } /**Adds a new input/output example. * If the best program already matches the input/output example, * it is not recomputed. **/ def add(inputOutput: Input_state, output: Output_state ): STraceExpr = { this.index_number = inputOutput.position val res = add(inputOutput.inputs, IndexedSeq(output)) res(0) } /**Adds a new input/output example. * If the best program already matches the input/output example, * it is not recomputed. 
**/ def add(input: String, output: String, index: Int): STraceExpr = { this.index_number = index val res = add(IndexedSeq(input), IndexedSeq(output)) res(0) } /** * Adds a new input/output example */ def add(inputoutput: String): STraceExpr = { val arrow = inputoutput.indexOf("->") if(arrow != -1) { val input = inputoutput.substring(0, arrow).trim() val output = inputoutput.substring(arrow+2).trim() add(List(input), output) }else throw new Exception("No separator such as | or -> found") } /** * Adds a new input/output example with | * @param inputoutput the input/output, can be multiline, separated by | * @param ninput number of inputs per line. */ def add(inputoutput: String, ninputs: Int = 1): Seq[STraceExpr] = { if(inputoutput contains '\n') { inputoutput.split('\n').map{add(_, ninputs)}.last } else { val pipe = inputoutput.indexOf("|") if(pipe != -1) { val elems = inputoutput.split("\\|").map(_.trim()).toList add(elems.take(ninputs), elems.drop(ninputs)) } else throw new Exception("No separator such as | or -> found") } } /** * Adds a new input/output example using the InputOutputExample API. */ def add(e: InputOutputExample) { if(e.indexSet) { add(e.inputState, e.output) } else { add(e.inputState.inputs, e.output) } } /** Adds a new input/output example */ def add(e: SplitExample) { for((output, i) <- e.outputs.zipWithIndex) { add(e.input, e.outputs(i), i+1) } } /** * Solved a piped example. Returns a piped example. * AB | CD | XY => EF | GH * * If multiline, solve the complete instance, e.g. 
* a | A | a1 a | A | a1 * b | B | b1 => b | B | b1 * c c | C | c1 * d d | D | d1 */ def solve(input: String, rawMode: Boolean = false): String = { if(input.indexOf('\n') != -1 && !rawMode) { val problems = input.split('\n').map(_.split("\\|").toList.map(_.trim())).toList val iolength = (0 /: problems) { case (a, io) => Math.max(a, io.length)} val ilength = (iolength /: problems) { case (a, io) => Math.min(a, io.length)} var adding_inputoutput = true (for(i <- problems.toList) yield { if(i.length == iolength) { if(adding_inputoutput) { add(i.take(ilength), i.drop(ilength)) i.mkString(" | ") } else throw new Exception("All examples must be at the beginning") } else if(i.length == ilength) { adding_inputoutput = false i.mkString(" | ") +" | "+solve(i).mkString(" | ") } else throw new Exception("All examples must be at the beginning") }) mkString "\n" } else { if(input.indexOf('|') != -1 && !rawMode) { val elems = input.split("\\|").map(_.trim()).toList solve(elems).mkString(" | ") } else { solve(List(input)).mkString } } } /** Returns the best solutions to the problem for the whole output */ def solveAll(): List[Option[Program]] = for(i <- (0 until currentPrograms.length).toList) yield solve(i) /** Returns the best solution to the last problem for the whole output */ def solveLasts(): List[Option[Program]] = for(i <- (0 until currentPrograms.length).toList) yield solveLast(i) def solve(): Option[Program] = solve(0) def solveNBest(): Option[Seq[Program]] = solveNBest(0) /** Returns the best solution to the problem for the whole output */ def solve(nth: Int): Option[Program] = if(currentPrograms != null) try { val res = Some(currentPrograms(nth).takeBest) if(debugActive) verifyCurrentState() res } catch { case e: java.lang.Error => if(isVerbose) { println(e.getMessage()) println(e.getStackTrace().mkString("\n")) } None case e: Exception => if(isVerbose) { println(e.getMessage()) println(e.getStackTrace().mkString("\n")) } None } else None /** Returns the best solution 
to the problem for the whole output */ def solveNBest(nth: Int): Option[Seq[Program]] = if(currentPrograms != null) try { val res = Some(currentPrograms(nth).takeNBest(numBestPrograms)) if(debugActive) verifyCurrentState() res.map(_.map(_._2)) } catch { case e: java.lang.Error => if(isVerbose) { println(e.getMessage()) println(e.getStackTrace().mkString("\n")) } None case e: Exception => if(isVerbose) { println(e.getMessage()) println(e.getStackTrace().mkString("\n")) } None } else None /** Returns the best solution to the problem for the whole output */ def solveLast(nth: Int = 0): Option[Program] = if(singlePrograms != null) try { val res = Some(singlePrograms(singlePrograms.length - 1)(nth).takeBest) if(debugActive) verifyCurrentState() res } catch { case _: java.lang.Error => None case _: Exception => None } else None /** Returns the best solution to the problem for the whole output */ def solveNBestLast(nth: Int = 0): Option[Seq[Program]] = if(singlePrograms != null) try { val res = Some(singlePrograms(singlePrograms.length - 1)(nth).takeNBest(numBestPrograms)) if(debugActive) verifyCurrentState() res.map(_.map(_._2)) } catch { case _: java.lang.Error => None case _: Exception => None } else None def takeBest[T <: Program](s: ProgramSet[T]): Program = s.takeBest /** * Solves for a new instance of input. */ def solve(input: Seq[String]): Seq[String] = { //println(s"Solving for input $input") val res = currentPrograms map (progSet => try { //println(s"progSet $progSet") val prog = progSet.takeBest val r = evalProg(prog)(Input_state(input.toIndexedSeq, index_number)) r.asString } catch { case _: java.lang.Error => "" case _: Exception => "" } ) index_number += 1 //previousOutputs = res.toIndexedSeq res } /** * Solves using the last input/output example * CAREFUL: It increments the index_number, leading to undesirable results. 
*/ def solveLast(input: Seq[String]): Seq[String] = { //println(s"Solving for input $input") val res = singlePrograms(singlePrograms.length - 1) map (progSet => try { //println(s"progSet $progSet") val prog = progSet.takeBest val r = evalProg(prog)(Input_state(input.toIndexedSeq, index_number)) r.asString } catch { case _: java.lang.Error => "" case _: Exception => "" } ) index_number += 1 //previousOutputs = res.toIndexedSeq res } /** * Verifies the current state so that the resulting program * works for everybody. */ private def verifyCurrentState() = { var previousOutputsTmp = IndexedSeq[String]() var tmpIndex = 0 for((inputs, outputs) <- (inputList zip outputList).view; index = { val tmp = tmpIndex; tmpIndex += 1; tmp }; (output, progs) <- (outputs zip currentPrograms).view; prog = progs.takeBest ) { previousOutputsTmp = outputs.toIndexedSeq evalProg(prog)(Input_state(inputs.toIndexedSeq, index)) match { case StringValue(res) => if(ff.useDots) { val reg = output.split("\\Q...\\E").map(Pattern.quote(_)).mkString(".*").r.anchored assert(reg.findFirstIn(res) != None) } else { assert(res == output) } case BottomValue => assert(false) case _ => } } } } class StringSolverAlgorithms { import Program._ import ProgramSet._ import Evaluator._ import Implicits._ import scala.language._ import StringSolver._ // Parameter: Are we using dots to describe not finished loops. var useDots = true // Adds the last output number-only as input for the next program. 
var numbering = true // If a substring to extract is a space, should it be extracted from the inputs var extractSpaces = false // Use date conversion (english) def useDates: Boolean = extensions.indexOf(Dates) != -1 def useDates_=(b: Boolean) = { if(b) extensions = (Dates::extensions).distinct else extensions = extensions.filterNot(_ ==Dates) } var extensions = List[Extension]() def copy(): StringSolverAlgorithms = { val a = new StringSolverAlgorithms() a.useDots = useDots a.numbering = numbering a.extractSpaces = extractSpaces a.TIMEOUT_SECONDS = TIMEOUT_SECONDS a.DEFAULT_REC_LOOP_LEVEL = DEFAULT_REC_LOOP_LEVEL a.MAX_SEPARATOR_LENGTH = MAX_SEPARATOR_LENGTH a.onlyInterestingPositions = onlyInterestingPositions a.verbose = verbose a } useDates = true final val dots = "..." def dotsAtPosition(s: String, k: Int) = s.substring(k, Math.min(k + dots.length, s.length)) == dots var TIMEOUT_SECONDS = 15 var DEFAULT_REC_LOOP_LEVEL = 1 var MAX_SEPARATOR_LENGTH = 1 var onlyInterestingPositions = false var verbose = false // Possibility to iterate over input var iterateInput = true // Set to true if we want to extract stuff from the same string. var useIndexForPosition = false @volatile private var mTimeout = false @volatile private var mTimeoutPhaseGenerateStr = false def timeoutGenerateStr = mTimeoutPhaseGenerateStr def timeoutGenerateStr_=(v: Boolean) = { if(verbose && v) println("TimeoutGeneratestr") mTimeoutPhaseGenerateStr = v } def timeout = mTimeout def timeout_=(v: Boolean): Unit = { if(verbose && v) println("Timeout") mTimeout = v mTimeoutPhaseGenerateStr = v if(v) { ifTimeOut.success(SEmpty) // TODO : Timeout should allow the loop to finish its iteration. // So that if the loop is found, it will finish to compute it. 
ifTimeOut = promise[STraceExpr] } } private var ifTimeOut = promise[STraceExpr] /**synthesis algorithm*/ def generateStringProgram(S: Set[(ฯƒ, S)]) = { var T = Set.empty[(Set[ฯƒ], STraceExpr)] for((ฯƒ, s) <- S) T = T + ((Set(ฯƒ),generateStr(ฯƒ, s, DEFAULT_REC_LOOP_LEVEL))) T = generatePartition(T) val ฯƒp = for((ฯƒ, s) <- S) yield ฯƒ var B = Map[Set[ฯƒ], Bool]() for((ฯƒt, et) <- T) { B += ฯƒt -> generateBoolClassifier(ฯƒt, ฯƒp -- ฯƒt) } val P = T.toList.sortBy{case (ฯƒi, ei) => sizePrograms(ei)} map { case (ฯƒi, ei) => (B(ฯƒi), ei) } SSwitch(P) } def generatePartition(s: Set[(Set[ฯƒ], STraceExpr)]): Set[(Set[ฯƒ], STraceExpr)] = ??? def generateBoolClassifier(ฯƒ1: Set[ฯƒ], ฯƒ2: Set[ฯƒ]) = ??? /** * learnd the set of all SubStr expressions in our language that can be used to extract a given substring from a given string. */ implicit val cacheGenerateStr = MMap[(Input_state, Output_state, Int), STraceExpr]() def generateStr(ฯƒ: Input_state, s: Output_state, rec_loop_level: Int) = cached((ฯƒ, s, rec_loop_level), cacheGenerateStr) { //debug(s"generateStr on: $ฯƒ, $s and $rec_loop_level") val รฑ = (0 to s.length).toSet val ns = 0 val nt = s.length val ฮพ = (for(i <- 0 until s.length; j <- (i+1) to s.length) yield i->j).toSet if(verbose) println(s"Building constant map for $s") var W = Map[(Int, Int), Set[SAtomicExpr]]() for((i, j) <- ฮพ if !timeout) { W += (i,j) -> (Set(SConstStr(s.e(i, j-1)))) } if(verbose) println("Looking for longest substrings...") val dotsPositions = "\\.\\.\\.".r.findAllMatchIn(s).map(mt => mt.start(0)).toSet /** * Heuristict to deal with substrings of longest size first. 
*/ val longestSizeFirst = (ij : (Int, Int)) => ij._1 - ij._2 var interestingEdges1 = (for( i <- 0 until s.length; j <- (i+1) to s.length; ฯƒvi <- ฯƒ.inputs if ฯƒvi.indexOf(s.substring(i, j)) >= 0) yield (i, j)).toSet // Removes edges which belong to a greater substring if they are not themselves longest numbers //if(verbose) println(s"found ${interestingEdges1.size} substrings:\n"+(interestingEdges1 map { case (i, j) => s"[$i,$j]"+s.substring(i, j)})) val longestInterestingEdges = ((interestingEdges1.toList filterNot { case (i, j) => val isNumber = s.substring(i, j).isNumber ((((interestingEdges1 contains ((i, j+1))) && (!dotsPositions(j))) && (isNumber implies s(j).isDigit)) || ((interestingEdges1 contains ((i-1, j))) && (isNumber implies s(i-1).isDigit))) }))sortBy(longestSizeFirst) if(verbose) println(s"found ${longestInterestingEdges.length} longest substrings:\n"+(longestInterestingEdges map { case (i, j) => s"[$i,$j]"+s.substring(i, j)})) val remaining_edges = ฮพ.filter{ case (i,j ) => !longestInterestingEdges.contains((i, j)) }.toList.sortBy(longestSizeFirst) /** * Heuristic to compute separators start positions. 
* Takes the positions having the same chars before the dots */ val presentSeparators = s.toList.zipWithIndex.flatMap{ case (char, i) => if(ProgramSet.isCommonSeparator(char.toString)) List(i) else Nil} val preferredStart = longestInterestingEdges.map(_1).toSet ++ presentSeparators val preferredEnd = longestInterestingEdges.map(_2).toSet ++ presentSeparators var preferredSeparatorStart: Set[Int] = presentSeparators.toSet var preferredSeparatorEnd: Set[Int] = preferredSeparatorStart.map(_+1) if(!dotsPositions.isEmpty) { val middleCandidates = (for(dpos <- dotsPositions; i <- (dpos-1) to 0 by -1; ss = s.substring(i, dpos); endpos <- Pattern.quote(ss).r.findAllMatchIn(s).map(_.end(0)) if(endpos != dpos) ) yield endpos) .groupBy(i => i).toList .sortBy{ case (key, value) => -value.size } .map(_._1) if(middleCandidates.nonEmpty) { preferredSeparatorEnd = middleCandidates.toSet preferredSeparatorStart = preferredSeparatorEnd ++ preferredSeparatorEnd.map(_ - 1) } } for((i, j) <- longestInterestingEdges.toIterable ++ remaining_edges if !timeout && !mTimeoutPhaseGenerateStr) { W += (i,j) -> (W.getOrElse((i, j), Set()) ++ (generateSubString(ฯƒ, s.substring(i, j), i))) } if(timeout && verbose) println("exited loop of generateStr because timed out") val previous = SDag(รฑ, ns, nt, ฮพ, W): STraceExpr val Wp = generateLoop(ฯƒ, s, W, rec_loop_level)( previous, preferredStart=preferredStart++preferredSeparatorStart, preferredSeparatorStart=preferredSeparatorStart, preferredEnd = preferredEnd++preferredSeparatorEnd, preferredSeparatorEnd=preferredSeparatorEnd) SDag(รฑ, ns, nt, ฮพ, Wp).setIndex(ฯƒ.position): STraceExpr } /** * Specializes a DAG by removing all positions except those between k1 and k2. 
Removes all corresponding edges */ def specializeDag(dag: STraceExpr, k1: Int, k2: Int, orElse: => STraceExpr): STraceExpr = dag match { case SDag(รฑ, ns: Int, nt, ฮพ, a) => val x = ฮพ.asInstanceOf[Set[(Int, Int)]] val nn = รฑ.asInstanceOf[Set[Int]] val aa = a.asInstanceOf[W[Int]] def ok(i: Int): Boolean = k1 <= i && i <= k2 def ok2(ij: (Int, Int)): Boolean = ok(ij._1) && ok(ij._2) SDag(nn.filter(ok), k1, k2, x.filter(ok2), aa.filterKeys(ok2)) case e => orElse } /** * Specializes a DAG by removing all edges except the one between k1 and k2 and other local positions */ def extractDag(dag: STraceExpr, k1: Int, k2: Int, notablePositions: Set[Int], orElse: => STraceExpr): STraceExpr = dag match { case SDag(รฑ, ns: Int, nt, ฮพ, a) => val x = ฮพ.asInstanceOf[Set[(Int, Int)]] val nn = รฑ.asInstanceOf[Set[Int]] val aa = a.asInstanceOf[W[Int]] def ok1(i: Int): Boolean = k1 == i || notablePositions(i) def ok2(i: Int): Boolean = i == k2 || notablePositions(i) def ok3(ij: (Int, Int)): Boolean = ok1(ij._1) && ok2(ij._2) && ij._1 >= k1 && ij._2 <= k2 SDag(Set(k1, k2)++notablePositions, k1, k2, x.filter(ok3), aa) case e => orElse } /** * In this section, we discuss how to infer the set of all Loop constructors that can be used to generate some unknown part of a given output string s from a given input state ฯƒ. In the process, we would also identify the unknown part of the output string that the Loop constructor can generate. Procedure GenerateLoop performs this task effectively, and involves the following steps: 1. Guess three positions within the output string k1, k2, and k3. 2. Unify the set of trace expressions that can generate s[k1 : k2] with the set of trace expressions that can generate s[k2 : k3] to obtain a new set of string expressions, say ~e that uses the loop iterator w. The unification algorithm is explained below. 3. Obtain the set of substrings obtained by running the string expressions ~e on input ฯƒ. 
If this set contains a singleton string that matches s[k1 : k3] for some k3, then we conclude that s[k1 : k3] can be generated by Loop(w : ~e). Otherwise ignore. The unification algorithm is same as the intersection algorithm except with the following replacement to Eq. 2 in Figure 4. IntersectPos(k1; k2) = (k2 - k1)w + k1 if k1 != k2 The key idea above is to guess a set of loop bodies by unifying the sets of trace expressions associated with the substrings s[k1 : k2] and s[k2 : k3], and then test the validity of the conjectured set of loops. For performance reasons, we do not recursively invoke GenerateLoop (in the call that it makes to GenerateStr). This allows us to discover all single loops. Nested loops may be discovered by controlling the recursion depth. */ var w_id = 0 def generateLoop(ฯƒ: Input_state, s: Output_state, W: W[Int], rec_loop_level: Int) (current: STraceExpr, preferredStart: Set[Int], preferredSeparatorStart: Set[Int], preferredEnd: Set[Int], preferredSeparatorEnd: Set[Int]): W[Int] = { if(rec_loop_level <= 0) return W if(verbose) println(s"Looking for loops for $ฯƒ => $s") //var WpLite = W // Create a copy? 
var Wp = W // Do not create a copy val LITE = 0 val FULL = 1 val preferredPositions = preferredStart ++ preferredEnd if(verbose) { println(s"preferredPositions = $preferredPositions") } val w = Identifier(if(w_id <= 25) ('a' + w_id.toChar).toChar.toString else "w" + (w_id-25)); w_id += 1 val positionToCheckEnd: (Int, Int) => Boolean = new ((Int, Int) => Boolean) { val l = ScalaRegExp.convertToken(LowerTok) val u = ScalaRegExp.convertToken(UpperTok) val n = ScalaRegExp.convertToken(NumTok) val w = ScalaRegExp.convertToken(SpaceTok) val acceptable: Set[Int] = s"[^a-zA-Z0-9 ]|$l|$u|$n|$w".r.findAllMatchIn(s).toSet.flatMap{(m: Regex.Match) =>Set(m.end(0))} def apply(i: Int, liteOrFull: Int) = if(onlyInterestingPositions) { acceptable(i) } else if(liteOrFull == LITE) acceptable(i) else true } val positionToCheckStart: (Int, Int) => Boolean = new ((Int, Int) => Boolean) { val l = ScalaRegExp.convertToken(LowerTok) val u = ScalaRegExp.convertToken(UpperTok) val n = ScalaRegExp.convertToken(NumTok) val w = ScalaRegExp.convertToken(SpaceTok) val acceptable: Set[Int] = s"[^a-zA-Z0-9 ]|$l|$u|$n|$w".r.findAllMatchIn(s).toSet.flatMap{(m: Regex.Match) =>Set(m.start(0))} def apply(i: Int, liteOrFull: Int) = if(onlyInterestingPositions) { acceptable(i) } else if(liteOrFull == LITE) acceptable(i) else true } def subDag(k1: Int, k2: Int, liteOrFull: Int): STraceExpr = { if(liteOrFull == LITE) { current match { case sd: SDag[_] => extractDag(current, k1, k2, preferredPositions, SEmpty) case _ => SEmpty } } else { if(rec_loop_level == 1) { specializeDag(current, k1, k2, generateStr(ฯƒ, s.substring(k1, k2), rec_loop_level - 1)) } else { generateStr(ฯƒ, s.substring(k1, k2), rec_loop_level - 1) } } } def preferredSeparatorStartFirst(i: Iterable[Int], liteOrFull: Int): Iterable[Int] = { i.filter(preferredSeparatorStart) ++ (if(liteOrFull == LITE) Nil else i.filterNot(preferredSeparatorStart)) } def preferredStartFirst(i: Iterable[Int], liteOrFull: Int): Iterable[Int] = { 
i.filter(preferredStart) ++ (if(liteOrFull == LITE) Nil else i.filterNot(preferredStart)) } def preferredSeparatorEndFirst(i: Iterable[Int], liteOrFull: Int): Iterable[Int] = { i.filter(preferredSeparatorEnd) ++ (if(liteOrFull == LITE) Nil else i.filterNot(preferredSeparatorEnd)) } def preferredEndFirst(i: Iterable[Int], liteOrFull: Int): Iterable[Int] = { i.filter(preferredEnd) ++ (if(liteOrFull == LITE) Nil else i.filterNot(preferredEnd)) } // Priority if dots found in string. def endingRange(liteOrFull: Int): Iterable[Int] = if(useDots) { s.indexOf("...") match { case -1 => Range(2, s.length+1) // Nothing can be done. case k3 => k3 :: preferredEndFirst((Range(2, s.length+1).toList.filterNot(_ == k3)), liteOrFull).toList } } else Range(2, s.length+1) // Two loops versions, one with lite loops (no more than 1 expression in the loop) // the other allows more expressions. if(verbose) println(s"Acceptable starts:"+preferredStart.toList.sortBy(i=>i)) if(verbose) println(s"Acceptable sep starts:"+preferredSeparatorStart.toList.sortBy(i=>i)) if(verbose) println(s"Acceptable ends:"+preferredEnd.toList.sortBy(i=>i)) if(verbose) println(s"Acceptable sep ends:"+preferredSeparatorEnd.toList.sortBy(i=>i)) for(liteOrFull <- (LITE to FULL).view; //dummy = (if(verbose) println(s"Looping ${if(liteOrFull == LITE) "LITE" else "FULL"}") else ()); k3_range = endingRange(liteOrFull).filter(positionToCheckEnd(_, liteOrFull)); //dummy2 = (if(verbose) println(s"k3 range: $k3_range") else ()); k3 <- k3_range.view; //dummy4 = (if(verbose) println(s"k3 =: $k3") else ()); ksep_range = preferredSeparatorEndFirst(k3-1 to 1 by -1, liteOrFull).filter(positionToCheckStart(_, liteOrFull)); //dummy3 = (if(verbose) println(s"ksep range: $ksep_range") else ()); ksep <- ksep_range.view; //dummy5 = (if(verbose) println(s"ksep $ksep") else ()); e2 = subDag(ksep, k3, liteOrFull); k2_range = preferredSeparatorStartFirst(ksep to (ksep - MAX_SEPARATOR_LENGTH) by -1, FULL); //dummy7 = (if(verbose) 
println(s"k2_range $k2_range") else ()); k2 <- k2_range.view; optionSeparator = if(ksep > k2) Some(ConstStr(s.substring(k2, ksep))) else None; //dummy6 = (if(verbose) println(s"k2 $k2") else ()); if(k2 == ksep || ProgramSet.isCommonSeparator(optionSeparator.get.s)); k1_range = preferredStartFirst(k2-1 to 0 by -1, liteOrFull).filter(positionToCheckStart(_, liteOrFull)); //dummy8 = (if(verbose) println(s"k1_range $k1_range") else ()); k1 <- k1_range.view; //dummy9 = (if(verbose) println(s"k1 $k1") else ()); e1 = subDag(k1, k2, liteOrFull)) { if(timeout) {if(verbose) println("exited loop of generateLoop because timed out"); return Wp } if(verbose) println(s"Going to unify '${s.substring(k1, k2)}' and '${s.substring(ksep, k3)}' separated by '${s.substring(k2, ksep)}'") val (e, time) = timedScope(if(liteOrFull == LITE) { unify(e1, e2, w, ฯƒ.position, ฯƒ.position, iterateInput) // If unify results only in constants } else { // If full, can take much more time per unification. val res = future{unify(e1, e2, w, ฯƒ.position, ฯƒ.position, iterateInput)} Await.result(first(res, ifTimeOut.future), 10.days) }) stats_unifications += 1 stats_time_unifications += time if(sizePrograms(e) != 0) { // Loops to find other data on the left val bestLoop = e.takeBest if(bestLoop.uses(w)) { var stop = false val prog = Loop(w, bestLoop, optionSeparator) var i = 0 while(!stop && (i == 0 || useDots && dotsAtPosition(s, k3))) { Loop.setStartIndex(prog, i) val resulting_strings = Evaluator.evalProg(prog)(ฯƒ) match { case StringValue(p) if p != "" => Some(p) case _ => stop = true; None } val body = if(i != 0) {replaceSTraceExpr(e)( { case l@ Linear(a, v, b) => if(w == v && a >= 0) { Linear(a, v, b + a*i) // Try with a previous step } else l } )} else e val newLoop = SLoop(w, body, optionSeparator) resulting_strings match { case Some(res) => val start: Int = if(i == 0) k1 else { // First matching occurence given that dots do not count. 
val firstOccurrence = s.indexOf(res) if(firstOccurrence != -1) { firstOccurrence } else if(useDots && s.indexOf(dots) != -1) { // Find a match until three dots (0 until s.length) find { case i => val dotsafterI = s.indexOf(dots, i) if(dotsafterI != -1) { res.startsWith(s.substring(i, dotsafterI)) } else { false } } match { case Some(i) => i case None => stop = true; s.length + 1 } } else { stop = true; s.length + 1// nothing to find. } } val k4 = start + res.length if(k4 <= s.length && s.substring(start, k4) == res) { // The match is exact && res.length > k3 - k1 || useDot val matchingDots = useDots && k4 < s.length && dotsAtPosition(s, k4) if(matchingDots || start < k1 || k4 > k3) { Wp = Wp + (((start, k4))->(Wp((start, k4)) ++ Set(SLoop(w, e, optionSeparator)))) if(matchingDots) { // If dots, then the match can be extended after the dotS. Wp = Wp + (((start, k4+dots.length))->(Wp((start, k4+dots.length)) ++ Set(SLoop(w, e, optionSeparator)))) if(verbose) println(s"Found dotted loop in ${s} (returns $res) [${Printer(newLoop.takeBest)}]") } else { if(verbose) println(s"Found loop in ${s} (returns $res) [${Printer(newLoop.takeBest)}] weight=${Weights.weight(newLoop.takeBest)}") } } // Checks if the match can be extended on the left (i.e. by changing the counters offset by -1) } else if(useDots) { // If we use dots '...' to match the remaining. val positionNotMatch: Option[Int] = (start until k4) find { k => k < s.length && s(k) != res(k-start) } positionNotMatch match { case Some(p) if s(p) == dots(0) => if(dotsAtPosition(s, p)) { Wp = Wp + (((start, p+dots.length))->(Wp((start, p+dots.length)) ++ Set(newLoop))) if(verbose) println(s"Found dotted loop in ${s} (returns $res) [${Printer(newLoop.takeBest)}]") } case _ => } } case _ => } i = i -1 } } } } Wp } /** * Generate all atomic expressions which can generate a string s from input states. 
*/ implicit val cacheGenerateSubstring = MMap[(Input_state, String), Set[SAtomicExpr]]() def generateSubString(ฯƒ: Input_state, s: String, pos: Int = -1): Set[SAtomicExpr] = cached((ฯƒ, s), cacheGenerateSubstring){ var result = Set.empty[SAtomicExpr] if(!extractSpaces && s == " ") return result //if(verbose) println(s"Going to extract $s from $ฯƒ") for(vi <- 0 until ฯƒ.inputs.length if !timeoutGenerateStr) { val ฯƒvi = ฯƒ.inputs(vi) for((k, m) <- s substringWithCaseOf ฯƒvi if !timeoutGenerateStr) { val Y1 = generatePosition(ฯƒvi, k) val Y2 = generatePosition(ฯƒvi, k + s.length) /*if(debugActive) { for(y1 <- Y1; y <- y1) { assert(evalProg(y)(Input_state(IndexedSeq(ฯƒvi), 1)) == IntValue(k)) } for(y2 <- Y2; y <- y2) { assert(evalProg(y)(Input_state(IndexedSeq(ฯƒvi), 1)) == IntValue(k + s.length)) } }*/ val newResult = SSubStr(InputString(vi), Y1, Y2, m) newResult.setPos(ฯƒvi, s, k, k + s.length) newResult.weightMalus = if(k == pos) {-1} else {0} result = result + newResult } for(extension <- extensions; (start, end, programMaker) <- extension(ฯƒvi, s)) { val Y1 = generatePosition(ฯƒvi, start) val Y2 = generatePosition(ฯƒvi, end) val ss = SSubStr(InputString(vi), Y1, Y2, SSubStrFlag(List(NORMAL))) ss.setPos(ฯƒvi, s, start, end) val program = programMaker(ss) result += program } // TODO : Put this into an extension. if(s.isNumber && numbering) { for((start, end, offset) <- s subnumberIncNegativeOf ฯƒvi) { // Numbers that can be obtained from ฯƒvi by changing by steps for example. val Y1 = generatePosition(ฯƒvi, start) val Y2 = generatePosition(ฯƒvi, end+1) val possibleLengths = (if(s(0) != '0') {// It means that the generated length might be lower. SIntSemiLinearSet(1, s.length-1, s.length) } else SIntSemiLinearSet(s.length, 1, s.length)) if(!possibleLengths.isEmpty) result = result + SNumber(SSubStr(InputString(vi), Y1, Y2, SSubStrFlag(List(NORMAL))), possibleLengths, offset) } } } // Generates numbers from previous numbers in output strings. 
if(s.isNumber && numbering) { result += SCounter.fromExample(s, ฯƒ.position) } result } /** * Compute the set of all tokenseq between two given positions. */ private var computedForString = "" private var computedForList = List[Token]() private var cacheComputeTokenSeq = MMap[(Start, End), Set[(TokenSeq, (List[Start], List[End]))]]() def computetokenSeq(s: String, listTokens: List[Token]): MMap[(Start, End), Set[(TokenSeq, (List[Start], List[End]))]] = if(s == computedForString && (listTokens eq computedForList)) cacheComputeTokenSeq else { if(verbose) println(s"Compute token seq for " + s.substring(0, Math.min(s.length, 10)) + "...") val finalstart = 0 val finalend = s.length-1 var res = MMap[(Start, End), Set[(TokenSeq, (List[Start], List[End]))]]() def addMapping(i: Start, j: End, s: TokenSeq, index: (List[Start], List[End])) = if(s.t.isEmpty || s.t.exists(_ != NonDotTok)) res += (i, j) -> (res.getOrElse((i, j), Set()) + ((s, index))) //def removeMapping(t: TokenSeq) = res //= res.mapValues(s => s.filterNot(_._1 == t)) val tokenPositions: Map[Token, (List[Start], List[End])] = (listTokens map { token => token -> ScalaRegExp.computePositionsOfToken(token, s) }) toMap // Maps a position to a set of tokens with indexes val startTokens: Map[Start,List[(Token, (List[Start], List[End]))]] = tokenPositions.toList .flatMap{ case (tok, indexes@(liststart, listend)) => liststart map (i => (tok, i, listend))} .groupBy{case (tok, start, end) => start} .mapValues(list => list map { case (tok, start, index) => (tok, tokenPositions(tok)) }) // Maps a position and a token to its end position val endTokensFromStart: Map[Start, Map[Token, End]] = tokenPositions.toList .flatMap{ case (tok, indexes@(liststart, listend)) => liststart map(i => (tok, i, listend))} .groupBy{case (tok, start, end) => start} .mapValues(list => list map { case (tok, start, end) => //val ss = s.substring(start) val closestEnd = ScalaRegExp.computeFirstPositionEndingWith(tok, s, start) (tok, start + 
closestEnd.get)} toMap) // enumerate tokens sequence of length 0 //val epsilonRange = (finalstart to finalend).toList zip (-1 until finalend).toList for(i <- finalstart to (finalend+1)) { addMapping(i, i-1, TokenSeq(), ((finalstart to (finalend+1)).toList, (-1 to finalend).toList)) } // enumerate tokens sequence of length 1 //addMapping(finalstart, finalstart-1, TokenSeq(StartTok)) // Simple token starting at 0 //addMapping(finalend+1, finalend, TokenSeq(EndTok)) // Simple token starting at 0 for(i <- finalstart to finalend) { for((tok, index) <- startTokens.getOrElse(i, Nil) if tok != StartTok && tok != EndTok ) { // But can be empty val end = endTokensFromStart(i)(tok) if(end >= i) addMapping(i, end, TokenSeq(tok), index) // Simple token starting at 0 } } // enumerate tokens sequences of two tokens. val currentMapping = res for(((start, end), tokseqset) <- currentMapping; (tokseq, index) <- tokseqset if tokseq.t.size == 1 && end != finalend) { val contiguousTokens = startTokens(end + 1) val lastToken = tokseq.t.lastOption.getOrElse(null) for((tok2, index2) <- contiguousTokens if tok2 != lastToken) { val endnew = endTokensFromStart(end + 1)(tok2) val newTokenseq = TokenSeq(tokseq.t ++ List(tok2)) addMapping(start, endnew, newTokenseq, ScalaRegExp.computePositionsOfRegExp(newTokenseq, s)) } } // enumerate tokens sequences of three tokens. /*val currentMapping2 = res for(((start, end), tokseqset) <- currentMapping2; (tokseq, index) <- tokseqset if tokseq.t.size == 2 && end != finalend) { val contiguousTokens = startTokens(end + 1) val lastToken = tokseq.t.lastOption.getOrElse(null) for((tok2, index2) <- contiguousTokens if tok2 != lastToken) { // To prevent NonDotTok to appear twice. val endnew = endTokensFromStart(end + 1)(tok2) val newTokenseq = TokenSeq(tokseq.t ++ List(tok2)) addMapping(start, endnew, newTokenseq, ScalaRegExp.computePositionsOfRegExp(newTokenseq, s)) } }*/ // Add startoken and endtoken if it is related. 
val currentMapping3 = res for(((start, end), tokseqset) <- currentMapping3; (tokseq, (liststart, listend)) <- tokseqset) { if(start == finalstart) { addMapping(start, end, TokenSeq(StartTok::tokseq.t), (List(liststart.head), List(listend.head))) } if(start == finalend) { addMapping(start, end, TokenSeq(tokseq.t ++ List(EndTok)), (List(liststart.last), List(listend.last))) } } cacheComputeTokenSeq = res computedForString = s computedForList = listTokens if(verbose) println(s"Computation finished") res } /** * Returns a list of (Start, End) for tokens matching at the position, with the common index) */ def matchingTokenSeq(s: String, atPos: Int, listTokens: List[Token]=Program.listNonEmptyTokens) : Iterable[(Start, End, TokenSeq, TokenSeq, List[Index])] = { val ms = computetokenSeq(s, listTokens) for(i <- atPos to 0 by -1; j <- (atPos-1) until s.length; //if(i != atPos || j != atPos -1); // No doubly empty regexps. (tok1, (liststart1, listend1)) <- ms.getOrElse((i, atPos-1), Set()); (tok2, (liststart2, listend2)) <- ms.getOrElse((atPos, j), Set())) yield { val res1 = RegexpPositionsInString.computePositionsEndingWith(tok1, s).map(_ + 1) val res2 = RegexpPositionsInString.computePositionsStartingWith(tok2, s) val intersections = res1 intersect res2 //val intersections = (listend1 map {case end => end + 1}) intersect (liststart2 map { case start => start }) (i, j, tok1, tok2, intersections) } } /** * Returns an integer c such that position k is the cth for regexp in string */ def th_match_of(k: Int, _for: RegExp, in: String): Int = { ScalaRegExp.computePositionsStartingWith(_for, in).indexOf(k) + 1 } /** * Returns the total number of matches of this regex in the string in. 
*/ def total_number_of_matches(_for: RegExp, in: String): Int = { ScalaRegExp.computePositionsEndingWith(_for, in).length } var cache_hit = 0 var cache_call = 0 var advanced_stats = false var advanced_cache = Map[Any, Int]() /** * Initialize the statistics */ def initStats() = { cache_hit = 0; cache_call = 0 } initStats() /** * Generate a cache */ def cached[T, A](s: T, cache: MMap[T, A])(f: => A) = { cache_call += 1 if(cache contains s) { //if(verbose) println("Cache hit") cache_hit += 1 if(advanced_stats) advanced_cache = advanced_cache + (s -> (advanced_cache.getOrElse(s, 0) + 1)) } cache.getOrElseUpdate(s, f) } /** * Generates a set of algebraic positions for a given position and a string. */ implicit val cache = MMap[(String, Int), Set[SPosition]]() def generatePosition(ฯƒ: String, k: Int) = cached((ฯƒ, k), cache){ if(verbose) println(s"Generating position $k in $ฯƒ") var result = Set[SPosition](SCPos(k), SCPos(-ฯƒ.length+k-1)) implicit val (tokenSet, mapping) = Reps(ฯƒ) for((_, _, r1@TokenSeq(t_list1), r2@TokenSeq(t_list2), intersections) <- matchingTokenSeq(ฯƒ, atPos=k, listTokens=tokenSet)) { val c = intersections.indexOf(k) //println(Printer(r1) + " before, " + Printer(r2) + " after") if( c >= 0) { // This if false for strange tokenizations. //val c = th_match_of(k1, _for=r12, in=s) val cp = intersections.length //val cp = total_number_of_matches(_for=r12, in=s) assert(cp >= 1) val r1p = generateRegex(r1, ฯƒ) // Expands all tokens. val r2p = generateRegex(r2, ฯƒ) val res = SPos(r1p, r2p, Set(c + 1, -(cp-c))) result += res } } result } def generateRegex(r: Regular_Expression, s: String)(implicit map: Map[Token, List[Token]]): SRegExp = { r match { case TokenSeq(l) => STokenSeq(l map ((t: Token) => IParts(s, t))) } } /** * Creates the equivalence class of a token. 
*/ //implicit val cacheIParts = MMap[String, Map[Token,List[Token]]]() def IPart_s(s: String) = { val listTokens = Program.listNonEmptyTokens val res: Map[Token,List[Token]] = listTokens.map (tok => (tok, ScalaRegExp.computePositionsOfToken(tok, s))) .groupBy( t => t._2) .mapValues(t => t map (_._1)) .values .map(t => (t.head, t)) .toMap res } def IParts(ss: String, t: Token)(implicit map: Map[Token,List[Token]] = IPart_s(ss)): SToken = if(t == StartTok || t == EndTok) SToken(Set(t))(Program.listTokens) else SToken(map(t).toSet)(Program.listTokens) private var cacheReps = MMap[String, (List[Token], Map[Token,List[Token]])]() /** Returns a subset of equivalent tokens */ def Reps(s: String): (List[Token], Map[Token,List[Token]]) = cached(s, cacheReps){ val res = IPart_s(s) (res.values.toList .map(t => t.head), res) } private def insert[Repr, That](e: Repr, p: List[Repr], n: Int, betterThan: (Repr, Repr) => Boolean): List[Repr] = n match { case 0 => List(e) case i => if(p == Nil) List(e) else if(betterThan(e, p.head)) e::p.take(i-1) else { p.head::insert(e, p.tail, n-1, betterThan) } } private def insert5(e: (String, Int), p: List[(String, Int)]) = { insert(e, p, 5, { (si: (String, Int), tj: (String, Int)) => si._2 > tj._2}) } var stats_unifications = 0 var stats_time_unifications = 0L def statistics(): String = { val average = if(stats_unifications == 0) 0 else stats_time_unifications.toFloat/stats_unifications //("Number of elements in the cache 1:" + cacheIParts.size) + "\n"+ ("Number of elements in cacheGenerateStr:" + cacheGenerateStr.size) + "\n"+ ("Number of elements in cacheGenerateSubstring:" + cacheGenerateSubstring.size) + "\n"+ ("Number of elements in the cache:" + cache.size) + "\n"+ ("Number of times unified DAGS:" + stats_unifications) + s" in average of $average ns\n"+ ("Number of cache reuse:" + cache_hit + "/" + cache_call) + "\n"+ (if(advanced_stats) { val most_elements = advanced_cache.foldLeft(Nil: List[(String, Int)]) { case (p, (e, i) )=> 
insert5((e.toString, i), p)} most_elements mkString "\n" } else "") } /** * Empty the caches. */ def emptyCaches() = { //cacheIParts.clear() cacheGenerateStr.clear() cacheGenerateSubstring.clear() cache.clear() } def min(a: Int, b: Int, c: Int) = { if(a < b) { if(a < c) { a } else { c } } else { // b <= a if(b < c) { b } else { c } } } def LevenshteinDistance(s: String, t: String): Int = { val m = s.length val n = t.length // for all i and j, d[i,j] will hold the Levenshtein distance between // the first i characters of s and the first j characters of t; // note that d has (m+1)*(n+1) values val d = Array.ofDim[Int](m+1, n+1) //clear all elements in d // set each element to zero // source prefixes can be transformed into empty string by // dropping all characters (1 to m) foreach { i => d(i)(0) = i } // target prefixes can be reached from empty source prefix // by inserting every characters (1 to n) foreach { j => d(0)(j) = j } (1 to n) foreach { j => (1 to m) foreach { i => if(s(i-1) == t(j-1)) d(i)(j) = d(i-1)(j-1) // no operation required else d(i)(j) = min( d(i-1)(j) + 1, // a deletion d(i)(j-1) + 1, // an insertion d(i-1)(j-1) + 1 // a substitution ) } } return d(m)(n) } }
MikaelMayer/StringSolver
src/main/scala/ch/epfl/lara/synthesis/stringsolver/StringSolver.scala
Scala
gpl-2.0
75,644
import sbt._ import java.io.File import org.apache.commons.io.FileUtils import sbt.Keys._ object RestSeverDemoBuild extends Build { lazy val accTest = config("at") extend Test lazy val unitTest = config("ut") extend Test lazy val systemTest = config("st") extend Test lazy val VMaxRealtimeMonitorProject = Project( id = "rest-sever-demo", base = file("."), settings = Defaults.defaultSettings ++ Seq( sourcesInBase in Test := false, unmanagedSourceDirectories in Test += baseDirectory.value / "src" / "ut" / "java", unmanagedSourceDirectories in Test += baseDirectory.value / "src" / "ut" / "scala", unmanagedResourceDirectories in Test += baseDirectory.value / "src" / "ut" / "resources", unmanagedSourceDirectories in Test += baseDirectory.value / "src" / "at" / "java", unmanagedSourceDirectories in Test += baseDirectory.value / "src" / "at" / "scala", unmanagedResourceDirectories in Test += baseDirectory.value / "src" / "at" / "resources", unmanagedSourceDirectories in Test += baseDirectory.value / "src" / "st" / "java", unmanagedSourceDirectories in Test += baseDirectory.value / "src" / "st" / "scala", unmanagedResourceDirectories in Test += baseDirectory.value / "src" / "st" / "resources" ) ).configs( accTest ) .settings( inConfig(accTest)(Defaults.testSettings) : _*) .settings(testOptions in accTest := Seq(Tests.Argument("-l","SlowAT"))) .configs( unitTest ) .settings( inConfig(unitTest)(Defaults.testSettings) : _*) .configs( systemTest ) .settings( inConfig(systemTest)(Defaults.testSettings) : _*) def removeAllSvn(base: File): Unit = { for (x <- base.listFiles) { if (x.isDirectory) { if (x.getName == ".svn") FileUtils.deleteDirectory(x) else removeAllSvn(x) } } } }
LeagueForHacker/Rest-Sever-Demo
project/Build.scala
Scala
mit
1,842
/* * bytefrog: a tracing framework for the JVM. For more information * see http://code-pulse.com/bytefrog * * Copyright (C) 2014 Applied Visions - http://securedecisions.avi.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.secdec.bytefrog.fileapi.tracefile.entry import java.io.IOException import java.io.InputStream import java.io.PrintStream import scala.collection.Map import scala.io.Source import com.secdec.bytefrog.fileapi.data.MethodIdMapJson import com.secdec.bytefrog.fileapi.io.IOUtils.closeableWithCloseAfter import com.secdec.bytefrog.fileapi.io.zip.ZipFileBuilderToken import com.secdec.bytefrog.fileapi.tracefile.TraceFileBuilder object MethodIdEntry extends TraceFileEntry[Map[Int, String]] { val path = "method-ids.json" def reader: TraceFileEntryReader[Map[Int, String]] = new Reader def writer(builder: TraceFileBuilder): TraceFileEntryWriter[Map[Int, String]] = new Writer(builder.vend(path)) private class Reader extends TraceFileEntryReader[Map[Int, String]] { def read(content: InputStream)(callback: Map[Int, String] => Unit): Unit = { try { val s = Source.fromInputStream(content).mkString val parsed = MethodIdMapJson.deserialize(s) callback(parsed) } catch { case e: IOException => //noop } finally { content.close } } } private class Writer(token: ZipFileBuilderToken) extends TraceFileEntryWriter[Map[Int, String]] { private val methodIds = collection.mutable.Map[Int, String]() def write(ids: Map[Int, String]): Unit = { methodIds ++= ids } def finish(): 
Unit = { val s = MethodIdMapJson.serialize(methodIds) token.resource.openOutput closeAfter { out => val ps = new PrintStream(out) ps.print(s) } token.completionCallback() } } }
secdec/bytefrog-clients
file-api/src/main/scala/com/secdec/bytefrog/fileapi/tracefile/entry/MethodIdEntry.scala
Scala
apache-2.0
2,286
package spinoco.fs2.cassandra.internal import spinoco.fs2.cassandra.CType /** * Created by pach on 11/06/16. */ trait ListColumnInstance[C[_],V] object ListColumnInstance { implicit def seqInstance[V](implicit ev:CType[V]):ListColumnInstance[Seq,V] = new ListColumnInstance[Seq,V] { } implicit def listInstance[V](implicit ev:CType[V]):ListColumnInstance[List,V] = new ListColumnInstance[List,V] { } implicit def vectorInstance[V](implicit ev:CType[V]):ListColumnInstance[Vector,V] = new ListColumnInstance[Vector,V] { } }
Spinoco/fs2-cassandra
core/src/main/scala/spinoco/fs2/cassandra/internal/ListColumnInstance.scala
Scala
mit
554
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.spark /** */ object IgniteDataFrameSettings { /** * Name of DataSource format for loading data from Apache Ignite. */ val FORMAT_IGNITE = "ignite" /** * Config option to specify path to ignite config file. * Config from this file will be used to connect to existing Ignite cluster. * * @note All nodes for executing Spark task forcibly will be started in client mode. * * @example {{{ * val igniteDF = spark.read.format(IGNITE) * .option(OPTION_CONFIG_FILE, CONFIG_FILE) * // other options ... * .load() * }}} */ val OPTION_CONFIG_FILE = "config" /** * Config option to specify Ignite SQL table name to load data from. * * @example {{{ * val igniteDF = spark.read.format(IGNITE) * // other options ... * .option(OPTION_TABLE, "mytable") * .load() * }}} * * @see [[org.apache.ignite.cache.QueryEntity#tableName]] */ val OPTION_TABLE = "table" /** * Config option to specify newly created Ignite SQL table parameters. * Value of these option will be used in `CREATE TABLE ... WITH "option value goes here"` * * @example {{{ * val igniteDF = spark.write.format(IGNITE) * // other options ... 
* .option( OPTION_CREATE_TABLE_PARAMETERS, "backups=1, template=replicated") * .save() * }}} * * @see [[https://apacheignite-sql.readme.io/docs/create-table]] */ val OPTION_CREATE_TABLE_PARAMETERS = "createTableParameters" /** * Config option to specify comma separated list of primary key fields for a newly created Ignite SQL table. * * @example {{{ * val igniteDF = spark.write.format(IGNITE) * // other options ... * .option(OPTION_CREATE_TABLE_PRIMARY_KEY_FIELDS, "id") * .save() * }}} * * @see [[https://apacheignite-sql.readme.io/docs/create-table]] */ val OPTION_CREATE_TABLE_PRIMARY_KEY_FIELDS = "primaryKeyFields" /** * Config option for saving data frame. * Internally all SQL inserts are done through `IgniteDataStreamer`. * This options sets `allowOverwrite` property of streamer. * If `true` then row with same primary key value will be written to the table. * If `false` then row with same primary key value will be skipped. Existing row will be left in the table. * Default value if `false`. * * @example {{{ * val igniteDF = spark.write.format(IGNITE) * // other options ... * .option(OPTION_STREAMER_ALLOW_OVERWRITE, true) * .save() * }}} * * @see [[org.apache.ignite.IgniteDataStreamer]] * @see [[org.apache.ignite.IgniteDataStreamer#allowOverwrite(boolean)]] */ val OPTION_STREAMER_ALLOW_OVERWRITE = "streamerAllowOverwrite" /** * Config option for saving data frame. * Internally all SQL inserts are done through `IgniteDataStreamer`. * This options sets `autoFlushFrequency` property of streamer. * * @example {{{ * val igniteDF = spark.write.format(IGNITE) * // other options ... * .option(OPTION_STREAMING_FLUSH_FREQUENCY, 10000) * .save() * }}} * * @see [[org.apache.ignite.IgniteDataStreamer]] * @see [[org.apache.ignite.IgniteDataStreamer#autoFlushFrequency(long)]] */ val OPTION_STREAMER_FLUSH_FREQUENCY = "streamerFlushFrequency" /** * Config option for saving data frame. * Internally all SQL inserts are done through `IgniteDataStreamer`. 
* This options sets perNodeBufferSize` property of streamer. * * @example {{{ * val igniteDF = spark.write.format(IGNITE) * // other options ... * .option(OPTION_STREAMING_PER_NODE_BUFFER_SIZE, 1024) * .save() * }}} * * @see [[org.apache.ignite.IgniteDataStreamer]] * @see [[org.apache.ignite.IgniteDataStreamer#perNodeBufferSize(int)]] */ val OPTION_STREAMER_PER_NODE_BUFFER_SIZE = "streamerPerNodeBufferSize" /** * Config option for saving data frame. * Internally all SQL inserts are done through `IgniteDataStreamer`. * This options sets `perNodeParallelOperations` property of streamer. * * @example {{{ * val igniteDF = spark.write.format(IGNITE) * // other options ... * .option(OPTION_STREAMING_PER_NODE_PARALLEL_OPERATIONS, 42) * .save() * }}} * * @see [[org.apache.ignite.IgniteDataStreamer]] * @see [[org.apache.ignite.IgniteDataStreamer#perNodeParallelOperations(int)]] */ val OPTION_STREAMER_PER_NODE_PARALLEL_OPERATIONS = "streamerPerNodeParallelOperations" }
vladisav/ignite
modules/spark/src/main/scala/org/apache/ignite/spark/IgniteDataFrameSettings.scala
Scala
apache-2.0
5,685
package scala.meta.internal import java.io.ByteArrayOutputStream import java.io.InputStream import java.nio.charset.StandardCharsets import java.nio.file.Files import java.util import scala.meta.internal.classpath._ import scala.meta.io.AbsolutePath import scala.tools.asm._ import scala.tools.asm.ClassReader._ import scala.tools.asm.tree._ import scala.collection.JavaConverters._ import scala.meta.internal.scalacp.ScalaSigAttribute import scala.meta.internal.scalacp.ScalaSigNode import scala.reflect.internal.pickling.ByteCodecs import scala.tools.scalap.Main import scala.tools.scalap.scalax.rules.scalasig.ByteCode import scala.tools.scalap.scalax.rules.scalasig.ScalaSig import scala.tools.scalap.scalax.rules.scalasig.ScalaSigAttributeParsers package object metacp { implicit class XtensionClassNode(node: ClassNode) { def scalaSig: Option[ScalaSigNode] = { if (node.attrs == null) None else { for { scalaSigAttribute <- node.attrs.asScala.collectFirst { case ScalaSigAttribute(scalaSig) => scalaSig } scalaSig <- { if (scalaSigAttribute.table.nonEmpty) Some(scalaSigAttribute) else fromScalaSigAnnotation } } yield ScalaSigNode(node.name + ".class", scalaSig) } } private def fromScalaSigAnnotation: Option[ScalaSig] = { if (node.visibleAnnotations == null) None else { node.visibleAnnotations.asScala.collectFirst { case annot if annot.desc == Main.SCALA_SIG_ANNOTATION || annot.desc == Main.SCALA_LONG_SIG_ANNOTATION => annot.values.asScala match { case Seq("bytes", anyBytes) => val baos = new ByteArrayOutputStream() val bytes: Array[Byte] = anyBytes match { case bytesString: String => bytesString.getBytes(StandardCharsets.UTF_8) case bytesArray: util.ArrayList[_] => bytesArray.asScala.foreach { case bytesString: String => baos.write(bytesString.getBytes(StandardCharsets.UTF_8)) } baos.toByteArray case els => throw new IllegalArgumentException(els.getClass.getName) } val length = ByteCodecs.decode(bytes) val bytecode = ByteCode(bytes.take(length)) 
ScalaSigAttributeParsers.parse(bytecode) } } } } } implicit class XtensionAsmPathOps(path: AbsolutePath) { def toClassNode: ClassNode = { readInputStreamToClassNode(Files.newInputStream(path.toNIO)) } } implicit class XtensionAsmClassfileOps(classfile: Classfile) { def toClassNode: ClassNode = { readInputStreamToClassNode(classfile.openInputStream()) } def hasScalaSig: Boolean = { val classNode = readInputStreamToClassNode(classfile.openInputStream()) classNode.attrs != null && classNode.attrs.asScala.exists(_.`type` match { case "Scala" | "ScalaSig" => true case _ => false }) } } private def readInputStreamToClassNode(in: InputStream): ClassNode = { val node = new ClassNode() try { new ClassReader(in).accept( node, Array(ScalaSigAttribute), SKIP_CODE | SKIP_DEBUG | SKIP_FRAMES ) node } finally { in.close() } } }
olafurpg/scalameta
semanticdb/metacp/src/main/scala/scala/meta/internal/metacp/package.scala
Scala
bsd-3-clause
3,464
package wakfutcp.protocol sealed trait Message { def id: Short } trait ClientMessage extends Message { def arch: Byte } trait ServerMessage extends Message object ClientMessage { val HeaderSize: Int = 5 } object ServerMessage { val HeaderSize: Int = 4 }
OpenWakfu/wakfutcp
protocol/src/main/scala/wakfutcp/protocol/Message.scala
Scala
mit
267
/** * Intel Intrinsics for Lightweight Modular Staging Framework * https://github.com/ivtoskov/lms-intrinsics * Department of Computer Science, ETH Zurich, Switzerland * __ _ __ _ _ * / /____ ___ _____ (_)____ / /_ _____ (_)____ _____ (_)_____ _____ * / // __ `__ \\ / ___/______ / // __ \\ / __// ___// // __ \\ / ___// // ___// ___/ * / // / / / / /(__ )/_____// // / / // /_ / / / // / / /(__ )/ // /__ (__ ) * /_//_/ /_/ /_//____/ /_//_/ /_/ \\__//_/ /_//_/ /_//____//_/ \\___//____/ * * Copyright (C) 2017 Ivaylo Toskov (itoskov@ethz.ch) * Alen Stojanov (astojanov@inf.ethz.ch) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package ch.ethz.acl.intrinsics object Utils { /** * Wraps a single line of text with given length * * @param str The string to be wrapped * @param wrapLength The length of a line of the returned string * @param newLineStr Newline character * @param wrapLongWords Indicates whether to wrap words longer than the length * @return The wrapped string */ def wrap(str: String, wrapLength: Int, newLineStr: String = scala.util.Properties.lineSeparator, wrapLongWords: Boolean = false): String = { if (str == null) { return null } val inputLineLength = str.length() var offset = 0 val wrappedLine = new StringBuilder(inputLineLength + 32) while (offset < inputLineLength) { if (str.charAt(offset) == ' ') { offset += 1 } else { if(inputLineLength - offset <= wrapLength) { wrappedLine.append(str.substring(offset)) return wrappedLine.toString() } var spaceToWrapAt = str.lastIndexOf(' ', wrapLength + offset) if (spaceToWrapAt >= offset) { wrappedLine.append(str.substring(offset, spaceToWrapAt)) wrappedLine.append(newLineStr) offset = spaceToWrapAt + 1 } else { if (wrapLongWords) { wrappedLine.append(str.substring(offset, wrapLength + offset)) wrappedLine.append(newLineStr) offset += wrapLength } else { spaceToWrapAt = str.indexOf(' ', wrapLength + offset) if (spaceToWrapAt >= 0) { wrappedLine.append(str.substring(offset, spaceToWrapAt)) wrappedLine.append(newLineStr) offset = spaceToWrapAt + 1 } else { wrappedLine.append(str.substring(offset)) offset = inputLineLength } } } } } wrappedLine.append(str.substring(offset)) wrappedLine.toString() } }
ivtoskov/lms-intrinsics
src/main/scala/ch/ethz/acl/intrinsics/Utils.scala
Scala
apache-2.0
3,248
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql import java.text.SimpleDateFormat import java.util.{Date, Locale, Properties, UUID} import scala.collection.JavaConverters._ import org.apache.spark.annotation.InterfaceStability import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.catalyst.analysis.{EliminateSubqueryAliases, UnresolvedRelation} import org.apache.spark.sql.catalyst.catalog._ import org.apache.spark.sql.catalyst.plans.logical.{AnalysisBarrier, InsertIntoTable, LogicalPlan} import org.apache.spark.sql.execution.SQLExecution import org.apache.spark.sql.execution.command.DDLUtils import org.apache.spark.sql.execution.datasources.{CreateTable, DataSource, LogicalRelation} import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Utils import org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2 import org.apache.spark.sql.sources.BaseRelation import org.apache.spark.sql.sources.v2._ import org.apache.spark.sql.types.StructType /** * Interface used to write a [[Dataset]] to external storage systems (e.g. file systems, * key-value stores, etc). Use `Dataset.write` to access this. 
* * @since 1.4.0 */ @InterfaceStability.Stable final class DataFrameWriter[T] private[sql](ds: Dataset[T]) { private val df = ds.toDF() /** * Specifies the behavior when data or table already exists. Options include: * - `SaveMode.Overwrite`: overwrite the existing data. * - `SaveMode.Append`: append the data. * - `SaveMode.Ignore`: ignore the operation (i.e. no-op). * - `SaveMode.ErrorIfExists`: default option, throw an exception at runtime. * * @since 1.4.0 */ def mode(saveMode: SaveMode): DataFrameWriter[T] = { this.mode = saveMode this } /** * Specifies the behavior when data or table already exists. Options include: * - `overwrite`: overwrite the existing data. * - `append`: append the data. * - `ignore`: ignore the operation (i.e. no-op). * - `error` or `errorifexists`: default option, throw an exception at runtime. * * @since 1.4.0 */ def mode(saveMode: String): DataFrameWriter[T] = { this.mode = saveMode.toLowerCase(Locale.ROOT) match { case "overwrite" => SaveMode.Overwrite case "append" => SaveMode.Append case "ignore" => SaveMode.Ignore case "error" | "errorifexists" | "default" => SaveMode.ErrorIfExists case _ => throw new IllegalArgumentException(s"Unknown save mode: $saveMode. " + "Accepted save modes are 'overwrite', 'append', 'ignore', 'error', 'errorifexists'.") } this } /** * Specifies the underlying output data source. Built-in options include "parquet", "json", etc. * * @since 1.4.0 */ def format(source: String): DataFrameWriter[T] = { this.source = source this } /** * Adds an output option for the underlying data source. * * You can set the following option(s): * <ul> * <li>`timeZone` (default session local timezone): sets the string that indicates a timezone * to be used to format timestamps in the JSON/CSV datasources or partition values.</li> * </ul> * * @since 1.4.0 */ def option(key: String, value: String): DataFrameWriter[T] = { this.extraOptions += (key -> value) this } /** * Adds an output option for the underlying data source. 
* * @since 2.0.0 */ def option(key: String, value: Boolean): DataFrameWriter[T] = option(key, value.toString) /** * Adds an output option for the underlying data source. * * @since 2.0.0 */ def option(key: String, value: Long): DataFrameWriter[T] = option(key, value.toString) /** * Adds an output option for the underlying data source. * * @since 2.0.0 */ def option(key: String, value: Double): DataFrameWriter[T] = option(key, value.toString) /** * (Scala-specific) Adds output options for the underlying data source. * * You can set the following option(s): * <ul> * <li>`timeZone` (default session local timezone): sets the string that indicates a timezone * to be used to format timestamps in the JSON/CSV datasources or partition values.</li> * </ul> * * @since 1.4.0 */ def options(options: scala.collection.Map[String, String]): DataFrameWriter[T] = { this.extraOptions ++= options this } /** * Adds output options for the underlying data source. * * You can set the following option(s): * <ul> * <li>`timeZone` (default session local timezone): sets the string that indicates a timezone * to be used to format timestamps in the JSON/CSV datasources or partition values.</li> * </ul> * * @since 1.4.0 */ def options(options: java.util.Map[String, String]): DataFrameWriter[T] = { this.options(options.asScala) this } /** * Partitions the output by the given columns on the file system. If specified, the output is * laid out on the file system similar to Hive's partitioning scheme. As an example, when we * partition a dataset by year and then month, the directory layout would look like: * * - year=2016/month=01/ * - year=2016/month=02/ * * Partitioning is one of the most widely used techniques to optimize physical data layout. * It provides a coarse-grained index for skipping unnecessary data reads when queries have * predicates on the partitioned columns. 
In order for partitioning to work well, the number * of distinct values in each column should typically be less than tens of thousands. * * This is applicable for all file-based data sources (e.g. Parquet, JSON) starting with Spark * 2.1.0. * * @since 1.4.0 */ @scala.annotation.varargs def partitionBy(colNames: String*): DataFrameWriter[T] = { this.partitioningColumns = Option(colNames) this } /** * Buckets the output by the given columns. If specified, the output is laid out on the file * system similar to Hive's bucketing scheme. * * This is applicable for all file-based data sources (e.g. Parquet, JSON) starting with Spark * 2.1.0. * * @since 2.0 */ @scala.annotation.varargs def bucketBy(numBuckets: Int, colName: String, colNames: String*): DataFrameWriter[T] = { this.numBuckets = Option(numBuckets) this.bucketColumnNames = Option(colName +: colNames) this } /** * Sorts the output in each bucket by the given columns. * * This is applicable for all file-based data sources (e.g. Parquet, JSON) starting with Spark * 2.1.0. * * @since 2.0 */ @scala.annotation.varargs def sortBy(colName: String, colNames: String*): DataFrameWriter[T] = { this.sortColumnNames = Option(colName +: colNames) this } /** * Saves the content of the `DataFrame` at the specified path. * * @since 1.4.0 */ def save(path: String): Unit = { this.extraOptions += ("path" -> path) save() } /** * Saves the content of the `DataFrame` as the specified table. 
* * @since 1.4.0 */ def save(): Unit = { if (source.toLowerCase(Locale.ROOT) == DDLUtils.HIVE_PROVIDER) { throw new AnalysisException("Hive data source can only be used with tables, you can not " + "write files of Hive data source directly.") } assertNotBucketed("save") val cls = DataSource.lookupDataSource(source, df.sparkSession.sessionState.conf) if (classOf[DataSourceV2].isAssignableFrom(cls)) { val ds = cls.newInstance() ds match { case ws: WriteSupport => val options = new DataSourceOptions((extraOptions ++ DataSourceV2Utils.extractSessionConfigs( ds = ds.asInstanceOf[DataSourceV2], conf = df.sparkSession.sessionState.conf)).asJava) // Using a timestamp and a random UUID to distinguish different writing jobs. This is good // enough as there won't be tons of writing jobs created at the same second. val jobId = new SimpleDateFormat("yyyyMMddHHmmss", Locale.US) .format(new Date()) + "-" + UUID.randomUUID() val writer = ws.createWriter(jobId, df.logicalPlan.schema, mode, options) if (writer.isPresent) { runCommand(df.sparkSession, "save") { WriteToDataSourceV2(writer.get(), df.logicalPlan) } } // Streaming also uses the data source V2 API. So it may be that the data source implements // v2, but has no v2 implementation for batch writes. In that case, we fall back to saving // as though it's a V1 source. case _ => saveToV1Source() } } else { saveToV1Source() } } private def saveToV1Source(): Unit = { // Code path for data source v1. runCommand(df.sparkSession, "save") { DataSource( sparkSession = df.sparkSession, className = source, partitionColumns = partitioningColumns.getOrElse(Nil), options = extraOptions.toMap).planForWriting(mode, AnalysisBarrier(df.logicalPlan)) } } /** * Inserts the content of the `DataFrame` to the specified table. It requires that * the schema of the `DataFrame` is the same as the schema of the table. * * @note Unlike `saveAsTable`, `insertInto` ignores the column names and just uses position-based * resolution. 
For example: * * {{{ * scala> Seq((1, 2)).toDF("i", "j").write.mode("overwrite").saveAsTable("t1") * scala> Seq((3, 4)).toDF("j", "i").write.insertInto("t1") * scala> Seq((5, 6)).toDF("a", "b").write.insertInto("t1") * scala> sql("select * from t1").show * +---+---+ * | i| j| * +---+---+ * | 5| 6| * | 3| 4| * | 1| 2| * +---+---+ * }}} * * Because it inserts data to an existing table, format or options will be ignored. * * @since 1.4.0 */ def insertInto(tableName: String): Unit = { insertInto(df.sparkSession.sessionState.sqlParser.parseTableIdentifier(tableName)) } private def insertInto(tableIdent: TableIdentifier): Unit = { assertNotBucketed("insertInto") if (partitioningColumns.isDefined) { throw new AnalysisException( "insertInto() can't be used together with partitionBy(). " + "Partition columns have already been defined for the table. " + "It is not necessary to use partitionBy()." ) } runCommand(df.sparkSession, "insertInto") { InsertIntoTable( table = UnresolvedRelation(tableIdent), partition = Map.empty[String, Option[String]], query = df.logicalPlan, overwrite = mode == SaveMode.Overwrite, ifPartitionNotExists = false) } } private def getBucketSpec: Option[BucketSpec] = { if (sortColumnNames.isDefined && numBuckets.isEmpty) { throw new AnalysisException("sortBy must be used together with bucketBy") } numBuckets.map { n => BucketSpec(n, bucketColumnNames.get, sortColumnNames.getOrElse(Nil)) } } private def assertNotBucketed(operation: String): Unit = { if (getBucketSpec.isDefined) { if (sortColumnNames.isEmpty) { throw new AnalysisException(s"'$operation' does not support bucketBy right now") } else { throw new AnalysisException(s"'$operation' does not support bucketBy and sortBy right now") } } } private def assertNotPartitioned(operation: String): Unit = { if (partitioningColumns.isDefined) { throw new AnalysisException( s"'$operation' does not support partitioning") } } /** * Saves the content of the `DataFrame` as the specified table. 
* * In the case the table already exists, behavior of this function depends on the * save mode, specified by the `mode` function (default to throwing an exception). * When `mode` is `Overwrite`, the schema of the `DataFrame` does not need to be * the same as that of the existing table. * * When `mode` is `Append`, if there is an existing table, we will use the format and options of * the existing table. The column order in the schema of the `DataFrame` doesn't need to be same * as that of the existing table. Unlike `insertInto`, `saveAsTable` will use the column names to * find the correct column positions. For example: * * {{{ * scala> Seq((1, 2)).toDF("i", "j").write.mode("overwrite").saveAsTable("t1") * scala> Seq((3, 4)).toDF("j", "i").write.mode("append").saveAsTable("t1") * scala> sql("select * from t1").show * +---+---+ * | i| j| * +---+---+ * | 1| 2| * | 4| 3| * +---+---+ * }}} * * In this method, save mode is used to determine the behavior if the data source table exists in * Spark catalog. We will always overwrite the underlying data of data source (e.g. a table in * JDBC data source) if the table doesn't exist in Spark catalog, and will always append to the * underlying data of data source if the table already exists. * * When the DataFrame is created from a non-partitioned `HadoopFsRelation` with a single input * path, and the data source provider can be mapped to an existing Hive builtin SerDe (i.e. ORC * and Parquet), the table is persisted in a Hive compatible format, which means other systems * like Hive will be able to read this table. Otherwise, the table is persisted in a Spark SQL * specific format. 
* * @since 1.4.0 */ def saveAsTable(tableName: String): Unit = { saveAsTable(df.sparkSession.sessionState.sqlParser.parseTableIdentifier(tableName)) } private def saveAsTable(tableIdent: TableIdentifier): Unit = { val catalog = df.sparkSession.sessionState.catalog val tableExists = catalog.tableExists(tableIdent) val db = tableIdent.database.getOrElse(catalog.getCurrentDatabase) val tableIdentWithDB = tableIdent.copy(database = Some(db)) val tableName = tableIdentWithDB.unquotedString (tableExists, mode) match { case (true, SaveMode.Ignore) => // Do nothing case (true, SaveMode.ErrorIfExists) => throw new AnalysisException(s"Table $tableIdent already exists.") case (true, SaveMode.Overwrite) => // Get all input data source or hive relations of the query. val srcRelations = df.logicalPlan.collect { case LogicalRelation(src: BaseRelation, _, _, _) => src case relation: HiveTableRelation => relation.tableMeta.identifier } val tableRelation = df.sparkSession.table(tableIdentWithDB).queryExecution.analyzed EliminateSubqueryAliases(tableRelation) match { // check if the table is a data source table (the relation is a BaseRelation). case LogicalRelation(dest: BaseRelation, _, _, _) if srcRelations.contains(dest) => throw new AnalysisException( s"Cannot overwrite table $tableName that is also being read from") // check hive table relation when overwrite mode case relation: HiveTableRelation if srcRelations.contains(relation.tableMeta.identifier) => throw new AnalysisException( s"Cannot overwrite table $tableName that is also being read from") case _ => // OK } // Drop the existing table catalog.dropTable(tableIdentWithDB, ignoreIfNotExists = true, purge = false) createTable(tableIdentWithDB) // Refresh the cache of the table in the catalog. 
catalog.refreshTable(tableIdentWithDB) case _ => createTable(tableIdent) } } private def createTable(tableIdent: TableIdentifier): Unit = { val storage = DataSource.buildStorageFormatFromOptions(extraOptions.toMap) val tableType = if (storage.locationUri.isDefined) { CatalogTableType.EXTERNAL } else { CatalogTableType.MANAGED } val tableDesc = CatalogTable( identifier = tableIdent, tableType = tableType, storage = storage, schema = new StructType, provider = Some(source), partitionColumnNames = partitioningColumns.getOrElse(Nil), bucketSpec = getBucketSpec) runCommand(df.sparkSession, "saveAsTable")(CreateTable(tableDesc, mode, Some(df.logicalPlan))) } /** * Saves the content of the `DataFrame` to an external database table via JDBC. In the case the * table already exists in the external database, behavior of this function depends on the * save mode, specified by the `mode` function (default to throwing an exception). * * Don't create too many partitions in parallel on a large cluster; otherwise Spark might crash * your external database systems. * * You can set the following JDBC-specific option(s) for storing JDBC: * <ul> * <li>`truncate` (default `false`): use `TRUNCATE TABLE` instead of `DROP TABLE`.</li> * </ul> * * In case of failures, users should turn off `truncate` option to use `DROP TABLE` again. Also, * due to the different behavior of `TRUNCATE TABLE` among DBMS, it's not always safe to use this. * MySQLDialect, DB2Dialect, MsSqlServerDialect, DerbyDialect, and OracleDialect supports this * while PostgresDialect and default JDBCDirect doesn't. For unknown and unsupported JDBCDirect, * the user option `truncate` is ignored. * * @param url JDBC database url of the form `jdbc:subprotocol:subname` * @param table Name of the table in the external database. * @param connectionProperties JDBC database connection arguments, a list of arbitrary string * tag/value. Normally at least a "user" and "password" property * should be included. 
"batchsize" can be used to control the * number of rows per insert. "isolationLevel" can be one of * "NONE", "READ_COMMITTED", "READ_UNCOMMITTED", "REPEATABLE_READ", * or "SERIALIZABLE", corresponding to standard transaction * isolation levels defined by JDBC's Connection object, with default * of "READ_UNCOMMITTED". * @since 1.4.0 */ def jdbc(url: String, table: String, connectionProperties: Properties): Unit = { assertNotPartitioned("jdbc") assertNotBucketed("jdbc") // connectionProperties should override settings in extraOptions. this.extraOptions ++= connectionProperties.asScala // explicit url and dbtable should override all this.extraOptions += ("url" -> url, "dbtable" -> table) format("jdbc").save() } /** * Saves the content of the `DataFrame` in JSON format (<a href="http://jsonlines.org/"> * JSON Lines text format or newline-delimited JSON</a>) at the specified path. * This is equivalent to: * {{{ * format("json").save(path) * }}} * * You can set the following JSON-specific option(s) for writing JSON files: * <ul> * <li>`compression` (default `null`): compression codec to use when saving to file. This can be * one of the known case-insensitive shorten names (`none`, `bzip2`, `gzip`, `lz4`, * `snappy` and `deflate`). </li> * <li>`dateFormat` (default `yyyy-MM-dd`): sets the string that indicates a date format. * Custom date formats follow the formats at `java.text.SimpleDateFormat`. This applies to * date type.</li> * <li>`timestampFormat` (default `yyyy-MM-dd'T'HH:mm:ss.SSSXXX`): sets the string that * indicates a timestamp format. Custom date formats follow the formats at * `java.text.SimpleDateFormat`. This applies to timestamp type.</li> * <li>`encoding` (by default it is not set): specifies encoding (charset) of saved json * files. If it is not set, the UTF-8 charset will be used. 
</li> * <li>`lineSep` (default `\\n`): defines the line separator that should be used for writing.</li> * </ul> * * @since 1.4.0 */ def json(path: String): Unit = { format("json").save(path) } /** * Saves the content of the `DataFrame` in Parquet format at the specified path. * This is equivalent to: * {{{ * format("parquet").save(path) * }}} * * You can set the following Parquet-specific option(s) for writing Parquet files: * <ul> * <li>`compression` (default is the value specified in `spark.sql.parquet.compression.codec`): * compression codec to use when saving to file. This can be one of the known case-insensitive * shorten names(`none`, `snappy`, `gzip`, and `lzo`). This will override * `spark.sql.parquet.compression.codec`.</li> * </ul> * * @since 1.4.0 */ def parquet(path: String): Unit = { format("parquet").save(path) } /** * Saves the content of the `DataFrame` in ORC format at the specified path. * This is equivalent to: * {{{ * format("orc").save(path) * }}} * * You can set the following ORC-specific option(s) for writing ORC files: * <ul> * <li>`compression` (default is the value specified in `spark.sql.orc.compression.codec`): * compression codec to use when saving to file. This can be one of the known case-insensitive * shorten names(`none`, `snappy`, `zlib`, and `lzo`). This will override * `orc.compress` and `spark.sql.orc.compression.codec`. If `orc.compress` is given, * it overrides `spark.sql.orc.compression.codec`.</li> * </ul> * * @since 1.5.0 * @note Currently, this method can only be used after enabling Hive support */ def orc(path: String): Unit = { format("orc").save(path) } /** * Saves the content of the `DataFrame` in a text file at the specified path. * The DataFrame must have only one column that is of string type. * Each row becomes a new line in the output file. 
For example: * {{{ * // Scala: * df.write.text("/path/to/output") * * // Java: * df.write().text("/path/to/output") * }}} * * You can set the following option(s) for writing text files: * <ul> * <li>`compression` (default `null`): compression codec to use when saving to file. This can be * one of the known case-insensitive shorten names (`none`, `bzip2`, `gzip`, `lz4`, * `snappy` and `deflate`). </li> * <li>`lineSep` (default `\\n`): defines the line separator that should be used for writing.</li> * </ul> * * @since 1.6.0 */ def text(path: String): Unit = { format("text").save(path) } /** * Saves the content of the `DataFrame` in CSV format at the specified path. * This is equivalent to: * {{{ * format("csv").save(path) * }}} * * You can set the following CSV-specific option(s) for writing CSV files: * <ul> * <li>`sep` (default `,`): sets a single character as a separator for each * field and value.</li> * <li>`quote` (default `"`): sets a single character used for escaping quoted values where * the separator can be part of the value. If an empty string is set, it uses `u0000` * (null character).</li> * <li>`escape` (default `\\`): sets a single character used for escaping quotes inside * an already quoted value.</li> * <li>`charToEscapeQuoteEscaping` (default `escape` or `\\0`): sets a single character used for * escaping the escape for the quote character. The default value is escape character when escape * and quote characters are different, `\\0` otherwise.</li> * <li>`escapeQuotes` (default `true`): a flag indicating whether values containing * quotes should always be enclosed in quotes. Default is to escape all values containing * a quote character.</li> * <li>`quoteAll` (default `false`): a flag indicating whether all values should always be * enclosed in quotes. 
Default is to only escape values containing a quote character.</li> * <li>`header` (default `false`): writes the names of columns as the first line.</li> * <li>`nullValue` (default empty string): sets the string representation of a null value.</li> * <li>`compression` (default `null`): compression codec to use when saving to file. This can be * one of the known case-insensitive shorten names (`none`, `bzip2`, `gzip`, `lz4`, * `snappy` and `deflate`). </li> * <li>`dateFormat` (default `yyyy-MM-dd`): sets the string that indicates a date format. * Custom date formats follow the formats at `java.text.SimpleDateFormat`. This applies to * date type.</li> * <li>`timestampFormat` (default `yyyy-MM-dd'T'HH:mm:ss.SSSXXX`): sets the string that * indicates a timestamp format. Custom date formats follow the formats at * `java.text.SimpleDateFormat`. This applies to timestamp type.</li> * <li>`ignoreLeadingWhiteSpace` (default `true`): a flag indicating whether or not leading * whitespaces from values being written should be skipped.</li> * <li>`ignoreTrailingWhiteSpace` (default `true`): a flag indicating defines whether or not * trailing whitespaces from values being written should be skipped.</li> * </ul> * * @since 2.0.0 */ def csv(path: String): Unit = { format("csv").save(path) } /** * Wrap a DataFrameWriter action to track the QueryExecution and time cost, then report to the * user-registered callback functions. */ private def runCommand(session: SparkSession, name: String)(command: LogicalPlan): Unit = { val qe = session.sessionState.executePlan(command) try { val start = System.nanoTime() // call `QueryExecution.toRDD` to trigger the execution of commands. 
SQLExecution.withNewExecutionId(session, qe)(qe.toRdd) val end = System.nanoTime() session.listenerManager.onSuccess(name, qe, end - start) } catch { case e: Exception => session.listenerManager.onFailure(name, qe, e) throw e } } /////////////////////////////////////////////////////////////////////////////////////// // Builder pattern config options /////////////////////////////////////////////////////////////////////////////////////// private var source: String = df.sparkSession.sessionState.conf.defaultDataSourceName private var mode: SaveMode = SaveMode.ErrorIfExists private val extraOptions = new scala.collection.mutable.HashMap[String, String] private var partitioningColumns: Option[Seq[String]] = None private var bucketColumnNames: Option[Seq[String]] = None private var numBuckets: Option[Int] = None private var sortColumnNames: Option[Seq[String]] = None }
lxsmnv/spark
sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
Scala
apache-2.0
27,221
package com.softwaremill.bootzooka.test

import com.softwaremill.bootzooka.common.sql.SqlDatabase
import com.softwaremill.bootzooka.email.application.{DummyEmailService, EmailTemplatingEngine}
import com.softwaremill.bootzooka.user.application.{UserDao, UserService}
import com.softwaremill.bootzooka.user.domain.User
import org.scalatest.concurrent.ScalaFutures

import scala.concurrent.ExecutionContext

/**
 * Mixin for test suites that need user-related services wired against a real database.
 * The concrete suite supplies the database via the abstract `sqlDatabase` member;
 * everything else is constructed lazily on first use so subclasses can initialize
 * the database before the wiring runs.
 */
trait TestHelpersWithDb extends TestHelpers with ScalaFutures {

  // In-memory stand-ins so tests never send real e-mail.
  lazy val emailService = new DummyEmailService()
  lazy val emailTemplatingEngine = new EmailTemplatingEngine
  // DAO and service wired against the suite-provided database.
  // `passwordHashing` presumably comes from TestHelpers — not visible here.
  lazy val userDao = new UserDao(sqlDatabase)
  lazy val userService = new UserService(userDao, emailService, emailTemplatingEngine, passwordHashing)

  // Implemented by the concrete test suite (e.g. pointing at an embedded test DB).
  def sqlDatabase: SqlDatabase

  // NOTE(review): the global execution context is generally discouraged in library
  // code, but is acceptable for test helpers where the caller cannot inject one.
  implicit lazy val ec: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global

  /**
   * Creates a random user (via `newRandomUser`, presumably from TestHelpers) and
   * persists it, blocking until the insert completes (`futureValue` from ScalaFutures).
   *
   * @param password optional plain-text password for the generated user
   * @return the persisted user
   */
  def newRandomStoredUser(password: Option[String] = None): User = {
    val u = newRandomUser(password)
    userDao.add(u).futureValue
    u
  }
}
ldrygala/bootzooka
backend/src/test/scala/com/softwaremill/bootzooka/test/TestHelpersWithDb.scala
Scala
apache-2.0
1,041
package com.github.bruneli.phyqty

/*
 * Copyright 2016 Renaud Bruneliere
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Decimal (power-of-ten) unit multiplier, e.g. kilo = 10^3, milli = 10^-3.
 *
 * Two multipliers are considered equal when they scale by the same power of
 * ten, regardless of their display symbol.
 *
 * @param symbol   display symbol of the prefix (e.g. "k", "m")
 * @param exponent power of ten applied by this multiplier
 * @author bruneli
 */
case class DecimalMultiplier(symbol: String, exponent: Int) extends LinearTransform {

  /** Scales a magnitude by 10^exponent. */
  def value(magnitude: Double): Double = magnitude * math.pow(10.0, exponent)

  /** Inverse multiplier, scaling by 10^-exponent. */
  override def inverseTransform: DecimalMultiplier = DecimalMultiplier(s"1/$symbol", -exponent)

  // Composition with another converter. Decimal-by-decimal composition adds
  // exponents and collapses to the identity when they cancel.
  // NOTE(review): the `case _ => ???` fallthrough throws NotImplementedError for
  // any other converter kind — kept as-is since other subtypes are not visible here.
  override def *(that: PhyUnitConverter): PhyUnitConverter = that match {
    case IdentityConverter => this
    case decimal: DecimalMultiplier =>
      if (this.exponent + decimal.exponent == 0) {
        IdentityConverter
      } else {
        DecimalMultiplier(s"$symbol${that.symbol}", this.exponent + decimal.exponent)
      }
    case linear: LinearMultiplier =>
      LinearMultiplier(s"$symbol${that.symbol}", this.value(1.0) * linear.multiplier)
    case _ => ???
  }

  // Division mirrors multiplication with subtracted exponents.
  override def /(that: PhyUnitConverter): PhyUnitConverter = that match {
    case IdentityConverter => this
    case decimal: DecimalMultiplier =>
      if (this.exponent - decimal.exponent == 0) {
        IdentityConverter
      } else {
        DecimalMultiplier(s"$symbol${that.symbol}", this.exponent - decimal.exponent)
      }
    case linear: LinearMultiplier =>
      LinearMultiplier(s"$symbol/${that.symbol}", this.value(1.0) / linear.multiplier)
    case _ => ???
  }

  // Equality is defined by the exponent alone: the symbol is display-only.
  override def equals(obj: Any): Boolean = obj match {
    case DecimalMultiplier(_, thatExponent) => this.exponent == thatExponent
    case _ => false
  }

  // Fix: `equals` was overridden without `hashCode`, breaking the
  // equals/hashCode contract (equal multipliers could land in different hash
  // buckets of a HashMap/HashSet). Hash on the same field `equals` compares.
  override def hashCode: Int = exponent.##
}

object DecimalMultiplier {

  // Fix: the symbols of the positive prefixes for 10^15, 10^12 and 10^9 were
  // shifted by one step ("E", "G", "T" respectively). The exponents were right;
  // the symbols now follow the SI standard: P = 10^15, T = 10^12, G = 10^9.
  // NOTE(review): `eta` is kept for source compatibility but is a misspelling
  // of the SI prefix name "peta"; prefer the `peta` alias below.
  val (eta, femto) = coupledDecimalMultipliers("P", "f", 15)
  val (tera, pico) = coupledDecimalMultipliers("T", "p", 12)
  val (giga, nano) = coupledDecimalMultipliers("G", "n", 9)
  val (mega, micro) = coupledDecimalMultipliers("M", "micro", 6)
  val (kilo, milli) = coupledDecimalMultipliers("k", "m", 3)
  val (hecto, centi) = coupledDecimalMultipliers("hecto", "c", 2)
  val (deca, deci) = coupledDecimalMultipliers("deca", "d", 1)

  /** Correctly-spelled alias for the 10^15 prefix (see note on `eta`). */
  val peta: DecimalMultiplier = eta

  /**
   * Builds a prefix together with its exact inverse.
   *
   * @param symbol        symbol of the positive-exponent prefix
   * @param inverseSymbol symbol of the negative-exponent prefix
   * @param exponent      magnitude of the power of ten
   * @return (multiplier, inverse) pair with exponents +exponent and -exponent
   */
  def coupledDecimalMultipliers(symbol: String, inverseSymbol: String, exponent: Int) = {
    val multiplier = DecimalMultiplier(symbol, exponent)
    val inverse = DecimalMultiplier(inverseSymbol, -exponent)
    multiplier -> inverse
  }
}
bruneli/phyqty
src/main/scala/com/github/bruneli/phyqty/DecimalMultiplier.scala
Scala
apache-2.0
2,777
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.github.mandar2812.dynaml.jupyter

import almond.util.ThreadUtil.singleThreadedExecutionContext
import almond.channels.zeromq.ZeromqThreads
import almond.kernel.install.Install
import almond.kernel.{Kernel, KernelThreads}
import almond.logger.{Level, LoggerContext}
import caseapp._

/**
 * Command-line entry point for the DynaML Jupyter kernel (built on almond).
 *
 * Two modes, selected by the parsed options:
 *  - `--install`: registers the kernel spec with Jupyter and exits (0 on
 *    success, 1 on failure);
 *  - otherwise: requires a connection file and runs the kernel until shutdown.
 */
object DynaMLKernel extends CaseApp[Options] {

  def run(options: Options, args: RemainingArgs): Unit = {

    // Install mode: both branches call sys.exit, so execution never falls
    // through to the kernel-startup code below when --install was given.
    if (options.install)
      Install.installOrError(
        defaultId = "dynaml-scala",
        defaultDisplayName = "Dynaml/Scala",
        language = "scala",
        options = options.installOptions
      ) match {
        case Left(e) =>
          Console.err.println(s"Error: $e")
          sys.exit(1)
        case Right(dir) =>
          println(s"Installed DynaML Jupyter kernel under $dir")
          sys.exit(0)
      }

    // Run mode requires a connection file; missing one is a usage error.
    val connectionFile = options.connectionFile.getOrElse {
      Console.err.println(
        "No connection file passed, and installation not asked. Run with --install to install the kernel, " +
          "or pass a connection file via --connection-file to run the kernel."
      )
      sys.exit(1)
    }

    // Parse the requested log level; an unrecognized level is fatal.
    val logCtx = Level.fromString(options.log) match {
      case Left(err) =>
        Console.err.println(err)
        sys.exit(1)
      case Right(level) =>
        LoggerContext.stderr(level)
    }

    val log = logCtx(getClass)

    // Dedicated thread pools: ZeroMQ transport, kernel bookkeeping, and a
    // single-threaded executor for the interpreter itself (REPL state is not
    // thread-safe, hence single-threaded).
    val zeromqThreads = ZeromqThreads.create("dynaml-scala-kernel")
    val kernelThreads = KernelThreads.create("dynaml-scala-kernel")
    val interpreterEc = singleThreadedExecutionContext("dynaml-scala-interpreter")

    log.info("Running kernel")
    // Blocks here for the lifetime of the kernel session.
    Kernel.create(new DynaMLJupyter(), interpreterEc, kernelThreads, logCtx)
      .flatMap(_.runOnConnectionFile(connectionFile, "dynaml", zeromqThreads))
      .unsafeRunSync()
  }
}
transcendent-ai-labs/DynaML
dynaml-notebook/src/main/scala/io/github/mandar2812/dynaml/jupyter/DynaMLKernel.scala
Scala
apache-2.0
2,578
package com.datawizards.dmg.metadata

import com.datawizards.dmg.annotations.column

// Test fixture for metadata extraction: the @column annotation overrides the
// generated column name for `name` to "companyName"; the other fields keep
// their own names.
case class Company(
  @column(name="companyName") name: String,
  address: String,
  industry: String
)
mateuszboryn/data-model-generator
src/test/scala/com/datawizards/dmg/metadata/Company.scala
Scala
apache-2.0
191
/**
 * Copyright (C) 2010 Orbeon, Inc.
 *
 * This program is free software; you can redistribute it and/or modify it under the terms of the
 * GNU Lesser General Public License as published by the Free Software Foundation; either version
 * 2.1 of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
 * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU Lesser General Public License for more details.
 *
 * The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
 */
package org.orbeon.oxf.util

import collection.JavaConverters._
import java.util.{List => JList, Map => JMap}
import org.apache.commons.pool.{BasePoolableObjectFactory, ObjectPool}
import org.orbeon.oxf.cache.InternalCacheKey
import org.orbeon.oxf.cache.ObjectCache
import org.orbeon.oxf.xml.dom4j.LocationData
import org.orbeon.oxf.xml.NamespaceMapping
import org.orbeon.saxon.Configuration
import org.orbeon.saxon.functions.FunctionLibrary
import org.orbeon.saxon.functions.FunctionLibraryList
import org.orbeon.saxon.om.{Item, ValueRepresentation}
import org.orbeon.saxon.sxpath._
import org.orbeon.saxon.trans.XPathException
import org.orbeon.saxon.value.SequenceExtent

import scala.util.control.NonFatal

/**
 * XPath expressions cache.
 *
 * Compiled expressions are pooled per (expression, function library, namespaces,
 * variable names, AVT-flag) cache key, so repeated evaluations reuse compilation work.
 *
 * NOTE(review): this file was stored with mis-encoded arrow characters (mojibake for
 * `⇒`/`→`/`←`); they have been restored as ASCII `=>`/`->`/`<-` throughout.
 */
object XPathCache {

  import XPath._

  private val XPathCacheName        = "cache.xpath"
  private val XPathCacheDefaultSize = 200

  private val Logger = LoggerFactory.createLogger(getClass)

  // Bundle of everything needed to evaluate an expression in context.
  case class XPathContext(
    namespaceMapping   : NamespaceMapping,
    variableToValueMap : JMap[String, ValueRepresentation],
    functionLibrary    : FunctionLibrary,
    functionContext    : FunctionContext,
    baseURI            : String,
    locationData       : LocationData
  )

  object XPathContext {

    // FIX: this factory used to discard its `functionLibrary`, `functionContext`,
    // `baseURI` and `locationData` arguments, passing `null` literals in their
    // place, so values supplied by callers were silently dropped. The parameters
    // are now forwarded. `reporter` is accepted for source compatibility but has
    // no corresponding field on the case class, so it remains unused.
    def apply(
      ns              : NamespaceMapping                 = NamespaceMapping.EmptyMapping,
      vars            : Map[String, ValueRepresentation] = Map.empty,
      functionLibrary : FunctionLibrary                  = null,
      functionContext : FunctionContext                  = null,
      baseURI         : String                           = null,
      locationData    : LocationData                     = null,
      reporter        : Reporter                         = null
    ): XPathContext =
      XPathContext(
        namespaceMapping   = ns,
        variableToValueMap = vars.asJava,
        functionLibrary    = functionLibrary,
        functionContext    = functionContext,
        baseURI            = baseURI,
        locationData       = locationData
      )
  }

  /** Whether the throwable is a dynamic (non-static) XPath error. */
  def isDynamicXPathError(t: Throwable) = t match {
    case e: XPathException if ! e.isStaticError => true
    case _                                      => false
  }

  // Evaluate an XPath expression on the document and return a List of native Java objects (i.e. String, Boolean,
  // etc.), but NodeInfo wrappers are preserved.
  // 7 external usages
  def evaluate(
    contextItem        : Item,
    xpathString        : String,
    namespaceMapping   : NamespaceMapping,
    variableToValueMap : JMap[String, ValueRepresentation],
    functionLibrary    : FunctionLibrary,
    functionContext    : FunctionContext,
    baseURI            : String,
    locationData       : LocationData,
    reporter           : Reporter
  ): JList[AnyRef] =
    evaluate(
      Seq(contextItem).asJava,
      1,
      xpathString,
      namespaceMapping,
      variableToValueMap,
      functionLibrary,
      functionContext,
      baseURI,
      locationData,
      reporter
    )

  // Evaluate an XPath expression on the document and return a List of native Java objects (i.e. String, Boolean,
  // etc.), but NodeInfo wrappers are preserved.
  // 2 external usages
  def evaluate(
    contextItems       : JList[Item],
    contextPosition    : Int,
    xpathString        : String,
    namespaceMapping   : NamespaceMapping,
    variableToValueMap : JMap[String, ValueRepresentation],
    functionLibrary    : FunctionLibrary,
    functionContext    : FunctionContext,
    baseURI            : String,
    locationData       : LocationData,
    reporter           : Reporter
  ): JList[AnyRef] = {

    val xpathExpression =
      getXPathExpression(
        XPath.GlobalConfiguration,
        contextItems,
        contextPosition,
        xpathString,
        namespaceMapping,
        variableToValueMap,
        functionLibrary,
        baseURI,
        isAVT = false,
        locationData
      )

    withEvaluation(xpathString, xpathExpression, locationData, reporter) {
      xpathExpression.evaluateKeepNodeInfo(functionContext)
    }
  }

  // If passed a sequence of size 1, return the contained object. This makes sense since XPath 2 says that "An item is
  // identical to a singleton sequence containing that item." It's easier for callers to switch on the item time.
  def normalizeSingletons(seq: Seq[AnyRef]): AnyRef = if (seq.size == 1) seq.head else seq

  // Evaluate an XPath expression on the document and keep Item objects in the result
  // 2 external usages
  def evaluateKeepItems(
    xpathString     : String,
    contextItem     : Item,
    contextPosition : Int = 1)(implicit
    xpathContext    : XPathContext
  ): Seq[Item] =
    evaluateKeepItems(
      contextItems       = List(contextItem).asJava,
      contextPosition    = contextPosition,
      xpathString        = xpathString,
      namespaceMapping   = xpathContext.namespaceMapping,
      variableToValueMap = xpathContext.variableToValueMap,
      functionLibrary    = xpathContext.functionLibrary,
      functionContext    = xpathContext.functionContext,
      baseURI            = xpathContext.baseURI,
      locationData       = xpathContext.locationData,
      reporter           = null
    ).asScala

  // Evaluate an XPath expression on the document and keep Item objects in the result
  // 4 external usages
  def evaluateKeepItems(
    contextItems       : JList[Item],
    contextPosition    : Int,
    xpathString        : String,
    namespaceMapping   : NamespaceMapping,
    variableToValueMap : JMap[String, ValueRepresentation],
    functionLibrary    : FunctionLibrary,
    functionContext    : FunctionContext,
    baseURI            : String,
    locationData       : LocationData,
    reporter           : Reporter
  ): JList[Item] = {

    val xpathExpression =
      getXPathExpression(
        XPath.GlobalConfiguration,
        contextItems,
        contextPosition,
        xpathString,
        namespaceMapping,
        variableToValueMap,
        functionLibrary,
        baseURI,
        isAVT = false,
        locationData
      )

    withEvaluation(xpathString, xpathExpression, locationData, reporter) {
      xpathExpression.evaluateKeepItems(functionContext)
    }
  }

  // Evaluate an XPath expression on the document and keep Item objects in the result
  // 1 external usage
  def evaluateSingleKeepItems(
    contextItems       : JList[Item],
    contextPosition    : Int,
    xpathString        : String,
    namespaceMapping   : NamespaceMapping,
    variableToValueMap : JMap[String, ValueRepresentation],
    functionLibrary    : FunctionLibrary,
    functionContext    : FunctionContext,
    baseURI            : String,
    locationData       : LocationData,
    reporter           : Reporter
  ): Item = {

    val xpathExpression =
      getXPathExpression(
        XPath.GlobalConfiguration,
        contextItems,
        contextPosition,
        xpathString,
        namespaceMapping,
        variableToValueMap,
        functionLibrary,
        baseURI,
        isAVT = false,
        locationData
      )

    withEvaluation(xpathString, xpathExpression, locationData, reporter) {
      xpathExpression.evaluateSingleKeepItemOrNull(functionContext)
    }
  }

  // Evaluate the expression as a variable value usable by Saxon in further XPath expressions
  // 1 external usage
  def evaluateAsExtent(
    contextItems       : JList[Item],
    contextPosition    : Int,
    xpathString        : String,
    namespaceMapping   : NamespaceMapping,
    variableToValueMap : JMap[String, ValueRepresentation],
    functionLibrary    : FunctionLibrary,
    functionContext    : FunctionContext,
    baseURI            : String,
    locationData       : LocationData,
    reporter           : Reporter
  ): SequenceExtent = {

    val xpathExpression =
      getXPathExpression(
        XPath.GlobalConfiguration,
        contextItems,
        contextPosition,
        xpathString,
        namespaceMapping,
        variableToValueMap,
        functionLibrary,
        baseURI,
        isAVT = false,
        locationData
      )

    withEvaluation(xpathString, xpathExpression, locationData, reporter) {
      xpathExpression.evaluateAsExtent(functionContext)
    }
  }

  // Evaluate an XPath expression on the document
  // 2 external usages
  def evaluateSingleWithContext(
    xpathContext : XPathContext,
    contextItem  : Item,
    xpathString  : String,
    reporter     : Reporter
  ): AnyRef =
    evaluateSingle(
      Seq(contextItem).asJava,
      1,
      xpathString,
      xpathContext.namespaceMapping,
      xpathContext.variableToValueMap,
      xpathContext.functionLibrary,
      xpathContext.functionContext,
      xpathContext.baseURI,
      xpathContext.locationData,
      reporter
    )

  // Evaluate an XPath expression on the document
  // 2 external usages
  def evaluateSingle(
    contextItem        : Item,
    xpathString        : String,
    namespaceMapping   : NamespaceMapping,
    variableToValueMap : JMap[String, ValueRepresentation],
    functionLibrary    : FunctionLibrary,
    functionContext    : FunctionContext,
    baseURI            : String,
    locationData       : LocationData,
    reporter           : Reporter
  ): AnyRef =
    evaluateSingle(
      Seq(contextItem).asJava,
      1,
      xpathString,
      namespaceMapping,
      variableToValueMap,
      functionLibrary,
      functionContext,
      baseURI,
      locationData,
      reporter
    )

  // Evaluate an XPath expression on the document
  // 2 external usages
  def evaluateSingle(
    contextItems       : JList[Item],
    contextPosition    : Int,
    xpathString        : String,
    namespaceMapping   : NamespaceMapping,
    variableToValueMap : JMap[String, ValueRepresentation],
    functionLibrary    : FunctionLibrary,
    functionContext    : FunctionContext,
    baseURI            : String,
    locationData       : LocationData,
    reporter           : Reporter
  ) = {

    val xpathExpression =
      getXPathExpression(
        XPath.GlobalConfiguration,
        contextItems,
        contextPosition,
        xpathString,
        namespaceMapping,
        variableToValueMap,
        functionLibrary,
        baseURI,
        isAVT = false,
        locationData
      )

    withEvaluation(xpathString, xpathExpression, locationData, reporter) {
      xpathExpression.evaluateSingleKeepNodeInfoOrNull(functionContext)
    }
  }

  // Evaluate an XPath expression on the document as an attribute value template, and return its string value
  // 1 external usage
  def evaluateAsAvt(xpathContext: XPathContext, contextItem: Item, xpathString: String, reporter: Reporter): String =
    evaluateAsAvt(
      Seq(contextItem).asJava,
      1,
      xpathString,
      xpathContext.namespaceMapping,
      xpathContext.variableToValueMap,
      xpathContext.functionLibrary,
      xpathContext.functionContext,
      xpathContext.baseURI,
      xpathContext.locationData,
      reporter
    )

  // Evaluate an XPath expression on the document as an attribute value template, and return its string value
  // 1 external usage
  def evaluateAsAvt(
    contextItem        : Item,
    xpathString        : String,
    namespaceMapping   : NamespaceMapping,
    variableToValueMap : JMap[String, ValueRepresentation],
    functionLibrary    : FunctionLibrary,
    functionContext    : FunctionContext,
    baseURI            : String,
    locationData       : LocationData,
    reporter           : Reporter
  ): String =
    evaluateAsAvt(
      Seq(contextItem).asJava,
      1,
      xpathString,
      namespaceMapping,
      variableToValueMap,
      functionLibrary,
      functionContext,
      baseURI,
      locationData,
      reporter
    )

  // Evaluate an XPath expression on the document as an attribute value template, and return its string value
  // 3 external usages
  def evaluateAsAvt(
    contextItems       : JList[Item],
    contextPosition    : Int,
    xpathString        : String,
    namespaceMapping   : NamespaceMapping,
    variableToValueMap : JMap[String, ValueRepresentation],
    functionLibrary    : FunctionLibrary,
    functionContext    : FunctionContext,
    baseURI            : String,
    locationData       : LocationData,
    reporter           : Reporter
  ): String = {

    val xpathExpression =
      getXPathExpression(
        XPath.GlobalConfiguration,
        contextItems,
        contextPosition,
        xpathString,
        namespaceMapping,
        variableToValueMap,
        functionLibrary,
        baseURI,
        isAVT = true,
        locationData
      )

    withEvaluation(xpathString, xpathExpression, locationData, reporter) {
      Option(xpathExpression.evaluateSingleKeepNodeInfoOrNull(functionContext)) map (_.toString) orNull // FIXME: can ever return null?
    }
  }

  // Evaluate an XPath expression and return its string value
  // 3 external usages
  def evaluateAsString(
    contextItem        : Item,
    xpathString        : String,
    namespaceMapping   : NamespaceMapping,
    variableToValueMap : JMap[String, ValueRepresentation],
    functionLibrary    : FunctionLibrary,
    functionContext    : FunctionContext,
    baseURI            : String,
    locationData       : LocationData,
    reporter           : Reporter
  ): String =
    evaluateAsString(
      Seq(contextItem).asJava,
      1,
      xpathString,
      namespaceMapping,
      variableToValueMap,
      functionLibrary,
      functionContext,
      baseURI,
      locationData,
      reporter
    )

  // Evaluate an XPath expression and return its string value
  // 6 external usages
  def evaluateAsString(
    contextItems       : JList[Item],
    contextPosition    : Int,
    xpathString        : String,
    namespaceMapping   : NamespaceMapping,
    variableToValueMap : JMap[String, ValueRepresentation],
    functionLibrary    : FunctionLibrary,
    functionContext    : FunctionContext,
    baseURI            : String,
    locationData       : LocationData,
    reporter           : Reporter
  ): String = {

    val xpathExpression =
      getXPathExpression(
        XPath.GlobalConfiguration,
        contextItems,
        contextPosition,
        makeStringExpression(xpathString),
        namespaceMapping,
        variableToValueMap,
        functionLibrary,
        baseURI,
        isAVT = false,
        locationData
      )

    withEvaluation(xpathString, xpathExpression, locationData, reporter) {
      Option(xpathExpression.evaluateSingleKeepNodeInfoOrNull(functionContext)) map (_.toString) orNull
    }
  }

  // No call from XForms
  def getXPathExpression(
    configuration : Configuration,
    contextItem   : Item,
    xpathString   : String,
    locationData  : LocationData
  ): PooledXPathExpression =
    getXPathExpression(configuration, contextItem, xpathString, null, null, null, null, locationData)

  // No call from XForms
  def getXPathExpression(
    configuration    : Configuration,
    contextItem      : Item,
    xpathString      : String,
    namespaceMapping : NamespaceMapping,
    locationData     : LocationData
  ): PooledXPathExpression =
    getXPathExpression(configuration, contextItem, xpathString, namespaceMapping, null, null, null, locationData)

  // No call from XForms
  def getXPathExpression(
    configuration      : Configuration,
    contextItem        : Item,
    xpathString        : String,
    namespaceMapping   : NamespaceMapping,
    variableToValueMap : JMap[String, ValueRepresentation],
    functionLibrary    : FunctionLibrary,
    baseURI            : String,
    locationData       : LocationData
  ): PooledXPathExpression =
    getXPathExpression(
      configuration,
      Seq(contextItem).asJava,
      1,
      xpathString,
      namespaceMapping,
      variableToValueMap,
      functionLibrary,
      baseURI,
      isAVT = false,
      locationData
    )

  // Look up (or create) the expression pool for the given cache key and borrow a
  // pooled expression from it, configured with the given context items and variables.
  private def getXPathExpression(
    configuration      : Configuration,
    contextItems       : JList[Item],
    contextPosition    : Int,
    xpathString        : String,
    namespaceMapping   : NamespaceMapping,
    variableToValueMap : JMap[String, ValueRepresentation],
    functionLibrary    : FunctionLibrary,
    baseURI            : String,
    isAVT              : Boolean,
    locationData       : LocationData
  ): PooledXPathExpression = {
    try {
      // Find pool from cache
      val validity = 0L
      val cache = ObjectCache.instance(XPathCacheName, XPathCacheDefaultSize)
      val cacheKeyString = new StringBuilder(xpathString)

      if (functionLibrary ne null) { // This is ok
        cacheKeyString.append('|')
        cacheKeyString.append(functionLibrary.hashCode.toString)
      }

      // NOTE: Mike Kay confirms on 2007-07-04 that compilation depends on the namespace context, so we need
      // to use it as part of the cache key.
      if (namespaceMapping ne null) {
        // NOTE: Hash is mandatory in NamespaceMapping
        cacheKeyString.append('|')
        cacheKeyString.append(namespaceMapping.hash)
      }

      // NOTE: Make sure to copy the values in the key set, as the set returned by the map keeps a pointer to the
      // Map! This can cause the XPath cache to keep a reference to variable values, which in turn can keep a
      // reference all the way to e.g. an XFormsContainingDocument.
      val variableNames = Option(variableToValueMap) map (_.keySet.asScala.toList) getOrElse List()

      if (variableNames.nonEmpty) {
        // There are some variables in scope. They must be part of the key
        // TODO: Put this in static state as this can be determined statically once and for all
        for (variableName <- variableNames) {
          cacheKeyString.append('|')
          cacheKeyString.append(variableName)
        }
      }

      // Add this to the key as evaluating "name" as XPath or as AVT is very different!
      cacheKeyString.append('|')
      cacheKeyString.append(isAVT.toString)

      // TODO: Add baseURI to cache key (currently, baseURI is pretty much unused)

      val pooledXPathExpression = {
        val cacheKey = new InternalCacheKey("XPath Expression2", cacheKeyString.toString)
        var pool = cache.findValid(cacheKey, validity).asInstanceOf[ObjectPool[PooledXPathExpression]]
        if (pool eq null) {
          pool = createXPathPool(configuration, xpathString, namespaceMapping, variableNames, functionLibrary, baseURI, isAVT, locationData)
          cache.add(cacheKey, validity, pool)
        }
        // Get object from pool
        pool.borrowObject
      }

      // Set context items and position
      pooledXPathExpression.setContextItems(contextItems, contextPosition)

      // Set variables
      pooledXPathExpression.setVariables(variableToValueMap)

      pooledXPathExpression
    } catch {
      case NonFatal(t) => throw handleXPathException(t, xpathString, "preparing XPath expression", locationData)
    }
  }

  // Create a soft-reference pool of compiled expressions for one cache key.
  private def createXPathPool(
    xpathConfiguration : Configuration,
    xpathString        : String,
    namespaceMapping   : NamespaceMapping,
    variableNames      : List[String],
    functionLibrary    : FunctionLibrary,
    baseURI            : String,
    isAVT              : Boolean,
    locationData       : LocationData
  ): ObjectPool[PooledXPathExpression] = {
    // TODO: pool should have at least one hard reference
    val factory = new XPathCachePoolableObjectFactory(
      configurationOrDefault(xpathConfiguration),
      xpathString,
      namespaceMapping,
      variableNames,
      functionLibrary,
      baseURI,
      isAVT,
      locationData
    )
    val pool = new SoftReferenceObjectPool(factory)
    factory.pool = pool
    pool
  }

  def createPoolableXPathExpression(
    independentContext : IndependentContext,
    xpathString        : String,
    isAVT              : Boolean,
    pool               : ObjectPool[PooledXPathExpression],
    variables          : List[(String, XPathVariable)]
  ): PooledXPathExpression =
    new PooledXPathExpression(
      compileExpressionWithStaticContext(independentContext, xpathString, isAVT),
      pool,
      variables
    )

  // Not sure if/when configuration can be null, but it shouldn't be
  private def configurationOrDefault(configuration: Configuration) =
    Option(configuration) getOrElse XPath.GlobalConfiguration

  // Commons-pool factory that lazily compiles expressions on demand.
  private class XPathCachePoolableObjectFactory(
    xpathConfiguration : Configuration,
    xpathString        : String,
    namespaceMapping   : NamespaceMapping,
    variableNames      : List[String],
    functionLibrary    : FunctionLibrary,
    baseURI            : String,
    isAVT              : Boolean,
    locationData       : LocationData
  ) extends BasePoolableObjectFactory[PooledXPathExpression] {

    // NOTE: storing the FunctionLibrary in cache is ok if it doesn't hold dynamic references (case of global XFormsFunctionLibrary)
    var pool: ObjectPool[PooledXPathExpression] = _

    // Create and compile an XPath expression object
    def makeObject: PooledXPathExpression = {
      if (Logger.isDebugEnabled)
        Logger.debug("makeObject(" + xpathString + ")")

      // Create context
      val independentContext = new IndependentContext(xpathConfiguration)
      independentContext.getConfiguration.setURIResolver(XPath.URIResolver)

      // Set the base URI if specified
      if (baseURI ne null)
        independentContext.setBaseURI(baseURI)

      // Declare namespaces
      if (namespaceMapping ne null)
        for ((prefix, uri) <- namespaceMapping.mapping)
          independentContext.declareNamespace(prefix, uri)

      // Declare variables (we don't use the values here, just the names)
      val variables =
        if (variableNames ne null)
          for {
            name <- variableNames
            variable = independentContext.declareVariable("", name)
          } yield name -> variable
        else
          Nil

      // Add function library
      if (functionLibrary ne null)
        independentContext.getFunctionLibrary.asInstanceOf[FunctionLibraryList].libraryList.asInstanceOf[JList[FunctionLibrary]].add(0, functionLibrary)

      createPoolableXPathExpression(independentContext, xpathString, isAVT, pool, variables)
    }

    override def destroyObject(o: PooledXPathExpression): Unit = ()
  }

  // Run `body`, optionally timing it for the reporter, converting failures via
  // handleXPathException, and always returning the expression to its pool.
  private def withEvaluation[T](xpathString: String, xpathExpression: PooledXPathExpression, locationData: LocationData, reporter: Reporter)(body: => T): T =
    try {
      if (reporter ne null) {
        val startTime = System.nanoTime
        val result = body
        val totalTimeMicroSeconds = (System.nanoTime - startTime) / 1000 // never smaller than 1000 ns on OS X
        if (totalTimeMicroSeconds > 0)
          reporter(xpathString, totalTimeMicroSeconds)
        result
      } else
        body
    } catch {
      case NonFatal(t) => throw handleXPathException(t, xpathString, "evaluating XPath expression", locationData)
    } finally
      xpathExpression.returnToPool()
}
brunobuzzi/orbeon-forms
src/main/scala/org/orbeon/oxf/util/XPathCache.scala
Scala
lgpl-2.1
23,660
package chess package format case class UciMove(orig: Pos, dest: Pos, promotion: Option[PromotableRole] = None) { def keys = orig.key + dest.key def uci = keys + promotionString def keysPiotr = orig.piotrStr + dest.piotrStr def piotr = keysPiotr + promotionString def promotionString = promotion.fold("")(_.forsyth.toString) } object UciMove extends scalaz.std.OptionInstances with scalaz.syntax.ToTraverseOps { def apply(move: String): Option[UciMove] = for { orig โ† Pos.posAt(move take 2) dest โ† Pos.posAt(move drop 2 take 2) promotion = move lift 4 flatMap Role.promotable } yield UciMove(orig, dest, promotion) def piotr(move: String): Option[UciMove] = for { orig โ† move.headOption flatMap Pos.piotr dest โ† move lift 1 flatMap Pos.piotr promotion = move lift 2 flatMap Role.promotable } yield UciMove(orig, dest, promotion) def readList(moves: String): Option[List[UciMove]] = moves.split(' ').toList.map(apply).sequence def writeList(moves: List[UciMove]): String = moves.map(_.uci) mkString " " def readListPiotr(moves: String): Option[List[UciMove]] = moves.split(' ').toList.map(piotr).sequence def writeListPiotr(moves: List[UciMove]): String = moves.map(_.piotr) mkString " " }
psuter/scalachess
src/main/scala/format/UciMove.scala
Scala
mit
1,282
package im.actor.server.api.rpc.service.encryption import im.actor.api.rpc.encryption._ import im.actor.api.rpc._ import im.actor.server.{ ImplicitSessionRegion, ImplicitAuthService, BaseAppSuite } import scala.concurrent.Await import scala.concurrent.duration._ import scala.util.Random final class EncryptionServiceSpec extends BaseAppSuite with ImplicitAuthService with ImplicitSessionRegion { it should "create key group and load keys" in keyGroup it should "create and load ephermal keys" in ephermalKeys "SendEncryptedPackage" should "ignore ignored keys" in ignoredKeys lazy val service = new EncryptionServiceImpl def keyGroup() = { val (alice, aliceAuthId, aliceAuthSid, _) = createUser() val (bob, bobAuthId, bobAuthSid, _) = createUser() val aliceClientData = ClientData(aliceAuthId, 1, Some(AuthData(alice.id, aliceAuthSid, 42))) val bobClientData = ClientData(bobAuthId, 1, Some(AuthData(bob.id, bobAuthSid, 42))) val supportedEncryptions = Vector("sup1", "sup2", "sup3") val identityKey = ApiEncryptionKey(1L, "idalg", Some(Array[Byte](1, 2, 3)), Some(Array[Byte](1))) val keys = Vector(ApiEncryptionKey(2L, "keyalg", Some(Array[Byte](3, 4, 5)), Some(Array[Byte](3)))) val signatures = Vector( ApiEncryptionKeySignature(2L, "signalg", Array[Byte](4)), ApiEncryptionKeySignature(2L, "signalg", Array[Byte](5)), ApiEncryptionKeySignature(4L, "signalg", Array[Byte](5)) ) val keyGroupId = { implicit val clientData = aliceClientData whenReady(service.handleCreateNewKeyGroup( identityKey = identityKey, supportedEncryptions = supportedEncryptions, keys = keys, signatures = signatures ))(_.toOption.get.keyGroupId) } { implicit val clientData = bobClientData whenReady(service.handleLoadPublicKeyGroups(getUserOutPeer(alice.id, bobAuthId))) { resp โ‡’ inside(resp) { case Ok(ResponsePublicKeyGroups(Vector(kg))) โ‡’ kg.keyGroupId shouldBe keyGroupId kg.keys.map(_.keyId) shouldBe keys.map(_.keyId) kg.signatures.map(_.keyId) shouldBe signatures.map(_.keyId) kg.supportedEncryption 
shouldBe supportedEncryptions } } whenReady(service.handleLoadPublicKey(getUserOutPeer(alice.id, bobAuthId), keyGroupId, Vector(keys.head.keyId))) { resp โ‡’ inside(resp) { case Ok(ResponsePublicKeys( ks, signs )) โ‡’ ks.map(_.keyId) shouldBe keys.map(_.keyId) signs.map(_.keyId) shouldBe signatures.take(2).map(_.keyId) } } } } def ephermalKeys() = { val (alice, aliceAuthId, aliceAuthSid, _) = createUser() val (bob, bobAuthId, bobAuthSid, _) = createUser() val aliceClientData = ClientData(aliceAuthId, 1, Some(AuthData(alice.id, aliceAuthSid, 42))) val bobClientData = ClientData(bobAuthId, 1, Some(AuthData(bob.id, bobAuthSid, 42))) val supportedEncryptions = Vector("sup1", "sup2", "sup3") val identityKey = ApiEncryptionKey(1L, "idalg", Some(Array[Byte](1, 2, 3)), Some(Array[Byte](1))) val keys = Vector(ApiEncryptionKey(2L, "keyalg", Some(Array[Byte](3, 4, 5)), Some(Array[Byte](3)))) val signatures = Vector( ApiEncryptionKeySignature(2L, "signalg", Array[Byte](4)), ApiEncryptionKeySignature(2L, "signalg", Array[Byte](5)), ApiEncryptionKeySignature(4L, "signalg", Array[Byte](5)) ) val ephKeys = Vector( ApiEncryptionKey(5L, "ephkeyalg", Some(Array[Byte](8, 9, 10)), Some(Array[Byte](3))), ApiEncryptionKey(6L, "ephkeyalg", Some(Array[Byte](8, 9, 10)), Some(Array[Byte](3))) ) val ephSignatures = Vector( ApiEncryptionKeySignature(5L, "ephsignalg", Array[Byte](40)), ApiEncryptionKeySignature(5L, "ephsignalg", Array[Byte](41)), ApiEncryptionKeySignature(6L, "ephsignalg", Array[Byte](60)) ) val keyGroupId = { implicit val clientData = aliceClientData val keyGroupId = whenReady(service.handleCreateNewKeyGroup( identityKey = identityKey, supportedEncryptions = supportedEncryptions, keys = keys, signatures = signatures ))(_.toOption.get.keyGroupId) whenReady(service.handleUploadPreKey( keyGroupId, ephKeys, ephSignatures ))(identity) keyGroupId } { implicit val clientData = bobClientData whenReady(service.handleLoadPrePublicKeys( getUserOutPeer(alice.id, bobAuthId), keyGroupId )) 
{ resp โ‡’ inside(resp) { case Ok(ResponsePublicKeys(Vector(k), sigs)) โ‡’ ephKeys.map(_.keyId) should contain(k.keyId) sigs.map(_.keyId).distinct shouldBe Vector(k.keyId) } } whenReady(service.handleLoadPublicKey( getUserOutPeer(alice.id, bobAuthId), keyGroupId, Vector(ephKeys.head.keyId) )) { resp โ‡’ inside(resp) { case Ok(ResponsePublicKeys(Vector(k), signs)) โ‡’ k.keyId shouldBe ephKeys.head.keyId signs.map(_.keyId) shouldBe ephSignatures.take(2).map(_.keyId) } } } } def ignoredKeys() = { val (alice, aliceAuthId, aliceAuthSid, _) = createUser() val (bob, bobAuthId, bobAuthSid, _) = createUser() val aliceClientData = ClientData(aliceAuthId, 1, Some(AuthData(alice.id, aliceAuthSid, 42))) val bobClientData = ClientData(bobAuthId, 1, Some(AuthData(bob.id, bobAuthSid, 42))) val (aliceKeyGroupId1, aliceKeys1) = createKeyGroup()(aliceClientData) val (aliceKeyGroupId2, _) = createKeyGroup()(aliceClientData) val (bobKeyGroupId, _) = createKeyGroup()(bobClientData) { implicit val clientData = bobClientData whenReady(service.handleSendEncryptedPackage( randomId = Random.nextLong(), destPeers = Vector(getUserOutPeer(alice.id, bobAuthId)), ignoredKeyGroups = Vector(ApiKeyGroupId(alice.id, 1), ApiKeyGroupId(alice.id, aliceKeyGroupId2)), encryptedBox = ApiEncryptedBox( keys = aliceKeys1 map (key โ‡’ ApiEncyptedBoxKey(alice.id, aliceKeyGroupId1, key.keyAlg, key.keyMaterial.get)), algType = "", encPackage = Array(), senderKeyGroupId = bobKeyGroupId, Vector.empty ) )) { resp โ‡’ inside(resp) { case Ok(resp: ResponseSendEncryptedPackage) โ‡’ resp.missedKeyGroups shouldBe empty resp.obsoleteKeyGroups shouldBe empty } } } } private def createKeyGroup()(implicit clientData: ClientData) = { val supportedEncryptions = Vector("sup1") val identityKey = ApiEncryptionKey(Random.nextLong(), "idalg", Some(Array[Byte](1, 2, 3)), Some(Array[Byte](1))) val keys = Vector(ApiEncryptionKey(Random.nextLong(), "keyalg", Some(Array[Byte](3, 4, 5)), Some(Array[Byte](3)))) val signatures = Vector( 
ApiEncryptionKeySignature(keys.head.keyId, "signalg", Array[Byte](4)) ) val keyGroupId = Await.result(service.handleCreateNewKeyGroup( identityKey = identityKey, supportedEncryptions = supportedEncryptions, keys = keys, signatures = signatures ), 5.seconds).toOption.get.keyGroupId (keyGroupId, keys) } }
actorapp/actor-platform
actor-server/actor-tests/src/test/scala/im/actor/server/api/rpc/service/encryption/EncryptionServiceSpec.scala
Scala
agpl-3.0
7,306
package org.pico.statsd.impl import java.net.InetSocketAddress import java.nio.ByteBuffer import java.nio.channels.DatagramChannel import org.pico.disposal.std.autoCloseable._ import org.pico.event.{Bus, Sink, SinkSource} import org.pico.statsd.StatsdClientException case class UdpEmitFailed( address: InetSocketAddress, buffer: ByteBuffer, sentBytes: Int) object UdpEmitter { def apply(addressLookup: () => InetSocketAddress): SinkSource[ByteBuffer, UdpEmitFailed] = { val clientChannel: DatagramChannel = try { DatagramChannel.open } catch { case e: Exception => throw StatsdClientException("Failed to start StatsD client", e) } val errors = Bus[UdpEmitFailed] val sink = Sink[ByteBuffer] { buffer => val address = addressLookup() val sentBytes = clientChannel.send(buffer, address) if (buffer.limit() != sentBytes) { errors.publish(UdpEmitFailed(address, buffer, sentBytes)) } } sink.disposes(clientChannel) SinkSource.from[ByteBuffer, UdpEmitFailed](sink, errors) } }
pico-works/pico-statsd
pico-statsd/src/main/scala/org/pico/statsd/impl/UdpEmitter.scala
Scala
mit
1,073
/* * Copyright 2018 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.hmrc.play.config import javax.inject.Inject import play.api.Configuration class OptimizelyConfig @Inject() (configuration: Configuration) { val url: Option[String] = for { baseUrl <- configuration.getOptional[String]("optimizely.url") projectId <- configuration.getOptional[String]("optimizely.projectId") } yield { s"$baseUrl$projectId.js" } }
nicf82/play-ui
src/main/play-26/uk/gov/hmrc/play/config/OptimizelyConfig.scala
Scala
apache-2.0
1,003
package com.wixpress.common.specs2.impostisers import java.lang.invoke.{MethodHandles, MethodType} import java.lang.reflect.{Constructor, Method} /** * Invokes methods as defined in their [[getDeclaringClass]], bypassing any overrides. * This is useful for invoking the actual implementations of non-abstract methods of dynamyc proxies (generated by Java reflection or ByteBuddy). * Uses method handles and `invokespecial`. * Based on https://stackoverflow.com/a/58800339/10035812 */ object SpecialMethodInvoker extends SpecialMethodInvoker { private val java8ClassVersion = 52 private val javaClassVersion = System.getProperty("java.class.version").toFloat private val instance = if(javaClassVersion <= java8ClassVersion) java8Invoker // in some jdk11 implementations we get IllegalAccessException: "no private access for invokespecial" // so we fallback to the java 8 method, which only warns of "An illegal reflective access operation has occurred" else java9AndLaterInvoker orElse java8Invoker override def invoke(impl: AnyRef, method: Method, args: Array[AnyRef]): AnyRef = instance.invoke(impl, method, args) private lazy val java8Invoker = new SpecialMethodInvoker { private val constructor: Constructor[MethodHandles.Lookup] = classOf[MethodHandles.Lookup].getDeclaredConstructor(classOf[Class[_]]) constructor.setAccessible(true) override def invoke(impl: AnyRef, method: Method, args: Array[AnyRef]): AnyRef = { val clazz = method.getDeclaringClass constructor.newInstance(clazz) .in(clazz) .unreflectSpecial(method, clazz) .bindTo(impl) .invokeWithArguments(args:_*) } } private lazy val java9AndLaterInvoker = new SpecialMethodInvoker { override def invoke(proxy: AnyRef, method: Method, args: Array[AnyRef]): AnyRef = { MethodHandles.lookup .findSpecial( method.getDeclaringClass, method.getName, MethodType.methodType(method.getReturnType, method.getParameterTypes), method.getDeclaringClass) .bindTo(proxy) .invokeWithArguments(args:_*) } } } sealed trait SpecialMethodInvoker { outer 
=> def invoke(impl: AnyRef, method: Method, args: Array[AnyRef]): AnyRef def orElse(that: SpecialMethodInvoker) = new SpecialMethodInvoker { override def invoke(impl: AnyRef, method: Method, args: Array[AnyRef]): AnyRef = { try outer.invoke(impl, method, args) catch { case _: Throwable => that.invoke(impl, method, args) } } } }
wix/specs2-jmock
src/main/scala/com/wixpress/common/specs2/impostisers/SpecialMethodInvoker.scala
Scala
bsd-3-clause
2,707
/** * Copyright 2017 https://github.com/sndnv * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package controllers import javax.inject.Inject import akka.actor.Address import core3.http.controllers.noauth.ClientController import noisecluster.jvm.control.ServiceState import noisecluster.jvm.control.cluster.Messages._ import noisecluster.jvm.control.cluster._ import play.api.Environment import play.api.data.Forms._ import play.api.data._ import play.api.libs.json._ import play.filters.csrf.CSRF import scala.concurrent.{ExecutionContext, Future} import scala.util.control.NonFatal class System @Inject()(control: SourceService, appService: vili.ApplicationService)(implicit ec: ExecutionContext, environment: Environment) extends ClientController() { private implicit val serviceStateWrites = Writes[ServiceState] { state => JsString(state.toString) } private implicit val nodeStateWrites = Json.writes[NodeState] private implicit val nodeInfoWrites = Json.writes[NodeInfo] private implicit val akkaAddressWrites = Writes[Address] { address => JsString(address.toString) } private implicit val memberInfoWrites = Json.writes[MemberInfo] private implicit val clusterStateWrites = Json.writes[ClusterState] def root() = PublicAction( { (request, _) => implicit val r = request Future.successful(Redirect("/home")) } ) def home() = PublicAction( { (request, _) => implicit val r = request implicit val token = CSRF.getToken Future.successful(Ok(views.html.home("Home"))) } ) def nodes() = PublicAction( 
{ (request, _) => implicit val r = request implicit val token = CSRF.getToken Future.successful(Ok(views.html.nodes("Nodes"))) } ) def cluster() = PublicAction( { (request, _) => implicit val r = request implicit val token = CSRF.getToken Future.successful(Ok(views.html.cluster("Cluster"))) } ) def status() = PublicAction( { (request, _) => implicit val r = request control.getClusterState.map { state => Ok( Json.obj( "sources" -> control.activeSources, "targets" -> control.activeTargets, "state" -> state ) ) } } ) def processMessage = PublicAction( { (request, _) => implicit val r = request System.Forms.message.bindFromRequest.fold( form => { throw new IllegalArgumentException(s"Failed to validate input: [${form.errors}]") } , params => { try { val messages: Seq[ControlMessage] = (params.service.toLowerCase, params.action.toLowerCase) match { case ("audio", "play") => if(params.target.getOrElse("") == "self") { Seq(StartAudio(), UnmuteHost()) } else { Seq(StartTransport(), UnmuteHost()) } case ("audio", "quiet") => if(params.target.getOrElse("") == "self") { Seq(MuteHost(), StopAudio()) } else { Seq(MuteHost(), StopTransport()) } case ("host", "volume") => params.level match { case Some(level) => Seq(SetHostVolume(level)) case None => throw new IllegalArgumentException( s"No [level] parameter supplied to service [host] and action [volume]" ) } case ("host", "mute") => Seq(MuteHost()) case ("host", "unmute") => Seq(UnmuteHost()) case ("audio", "start") => Seq(StartAudio()) case ("audio", "stop") => Seq(StopAudio()) case ("transport", "start") => Seq(StartTransport()) case ("transport", "stop") => Seq(StopTransport()) case ("application", "stop") => Seq(StopApplication(restart = false)) case ("application", "restart") => Seq(StopApplication(restart = true)) case ("host", "stop") => Seq(StopHost(restart = false)) case ("host", "restart") => Seq(StopHost(restart = true)) case _ => throw new IllegalArgumentException( s"Unexpected service [${params.service}] and/or 
action [${params.action}] requested" ) } params.target match { case Some("self") => messages.foreach { message => control.processMessage(message) } case Some(target) => messages.foreach { message => control.forwardMessage(target, message) } case None => messages.foreach { message => control.forwardMessage(message) } } Future.successful(NoContent) } catch { case NonFatal(e) => e.printStackTrace() Future.successful(InternalServerError(s"Exception encountered: [$e]")) } } ) } ) def processClusterAction = PublicAction( { (request, _) => implicit val r = request System.Forms.action.bindFromRequest.fold( form => { throw new IllegalArgumentException(s"Failed to validate input: [${form.errors}]") } , params => { (params.action.toLowerCase match { case "down" => control.setTargetToDown(params.target) case "leave" => control.setTargetToLeaving(params.target) case _ => throw new IllegalArgumentException(s"Unexpected action [${params.action}] requested") }).map { result => if (result) { NoContent } else { InternalServerError("Unable to complete operation") } }.recover { case NonFatal(e) => InternalServerError(s"Exception encountered: [$e]") } } ) } ) } object System { case class MessageRequest(target: Option[String], service: String, action: String, level: Option[Int]) case class ClusterActionRequest(target: String, action: String) object Forms { val message = Form( mapping( "target" -> optional(nonEmptyText), "service" -> nonEmptyText, "action" -> nonEmptyText, "level" -> optional(number) )(MessageRequest.apply)(MessageRequest.unapply) ) val action = Form( mapping( "target" -> nonEmptyText, "action" -> nonEmptyText )(ClusterActionRequest.apply)(ClusterActionRequest.unapply) ) } }
sndnv/noisecluster
vili/app/controllers/System.scala
Scala
apache-2.0
7,058
package demo.components.semanticui import chandu0101.macros.tojs.GhPagesMacros import chandu0101.scalajs.react.components.semanticui._ import demo.components.CodeExample import japgolly.scalajs.react._ import japgolly.scalajs.react.vdom.html_<^._ object SuiContainerDemo { val code = GhPagesMacros.exampleSource // EXAMPLE:START case class Backend($ : BackendScope[Unit, Unit]) { def render() = <.div( CodeExample(code, "SuiContainer")( <.b("A standard container"), SuiContainer()( <.p("Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Aenean commodo ligula eget dolor. Aenean massa strong. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Donec quam felis, ultricies nec, pellentesque eu, pretium quis, sem. Nulla consequat massa quis enim. Donec pede justo, fringilla vel, aliquet nec, vulputate eget, arcu. In enim justo, rhoncus ut, imperdiet a, venenatis vitae, justo. Nullam dictum felis eu pede link mollis pretium. Integer tincidunt. Cras dapibus. Vivamus elementum semper nisi. Aenean vulputate eleifend tellus. Aenean leo ligula, porttitor eu, consequat vitae, eleifend ac, enim. Aliquam lorem ante, dapibus in, viverra quis, feugiat a, tellus. Phasellus viverra nulla ut metus varius laoreet. Quisque rutrum. Aenean imperdiet. Etiam ultricies nisi vel augue. Curabitur ullamcorper ultricies nisi.") ) ) ) } val component = ScalaComponent .builder[Unit]("SuiContainerDemo") .renderBackend[Backend] .build // EXAMPLE:END def apply() = component() }
chandu0101/scalajs-react-components
demo/src/main/scala/demo/components/semanticui/SuiContainerDemo.scala
Scala
apache-2.0
1,613
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.hive import java.io.File import java.nio.charset.StandardCharsets import java.nio.file.{Files, Paths} import scala.sys.process._ import org.apache.hadoop.conf.Configuration import org.apache.spark.{SecurityManager, SparkConf, TestUtils} import org.apache.spark.sql.{QueryTest, Row, SparkSession} import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.catalyst.catalog.CatalogTableType import org.apache.spark.sql.test.SQLTestUtils import org.apache.spark.util.Utils /** * Test HiveExternalCatalog backward compatibility. * * Note that, this test suite will automatically download spark binary packages of different * versions to a local directory `/tmp/spark-test`. If there is already a spark folder with * expected version under this local directory, e.g. `/tmp/spark-test/spark-2.0.3`, we will skip the * downloading for this spark version. 
*/ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils { private val wareHousePath = Utils.createTempDir(namePrefix = "warehouse") private val tmpDataDir = Utils.createTempDir(namePrefix = "test-data") // For local test, you can set `sparkTestingDir` to a static value like `/tmp/test-spark`, to // avoid downloading Spark of different versions in each run. private val sparkTestingDir = new File("/tmp/test-spark") private val unusedJar = TestUtils.createJarWithClasses(Seq.empty) override def afterAll(): Unit = { try { Utils.deleteRecursively(wareHousePath) Utils.deleteRecursively(tmpDataDir) Utils.deleteRecursively(sparkTestingDir) } finally { super.afterAll() } } private def tryDownloadSpark(version: String, path: String): Unit = { // Try a few mirrors first; fall back to Apache archive val mirrors = (0 until 2).flatMap { _ => try { Some(getStringFromUrl("https://www.apache.org/dyn/closer.lua?preferred=true")) } catch { // If we can't get a mirror URL, skip it. No retry. case _: Exception => None } } val sites = mirrors.distinct :+ "https://archive.apache.org/dist" logInfo(s"Trying to download Spark $version from $sites") for (site <- sites) { val filename = s"spark-$version-bin-hadoop2.7.tgz" val url = s"$site/spark/spark-$version/$filename" logInfo(s"Downloading Spark $version from $url") try { getFileFromUrl(url, path, filename) val downloaded = new File(sparkTestingDir, filename).getCanonicalPath val targetDir = new File(sparkTestingDir, s"spark-$version").getCanonicalPath Seq("mkdir", targetDir).! val exitCode = Seq("tar", "-xzf", downloaded, "-C", targetDir, "--strip-components=1").! Seq("rm", downloaded).! // For a corrupted file, `tar` returns non-zero values. However, we also need to check // the extracted file because `tar` returns 0 for empty file. val sparkSubmit = new File(sparkTestingDir, s"spark-$version/bin/spark-submit") if (exitCode == 0 && sparkSubmit.exists()) { return } else { Seq("rm", "-rf", targetDir).! 
} } catch { case ex: Exception => logWarning(s"Failed to download Spark $version from $url: ${ex.getMessage}") } } fail(s"Unable to download Spark $version") } private def genDataDir(name: String): String = { new File(tmpDataDir, name).getCanonicalPath } private def getFileFromUrl(urlString: String, targetDir: String, filename: String): Unit = { val conf = new SparkConf // if the caller passes the name of an existing file, we want doFetchFile to write over it with // the contents from the specified url. conf.set("spark.files.overwrite", "true") val securityManager = new SecurityManager(conf) val hadoopConf = new Configuration val outDir = new File(targetDir) if (!outDir.exists()) { outDir.mkdirs() } // propagate exceptions up to the caller of getFileFromUrl Utils.doFetchFile(urlString, outDir, filename, conf, securityManager, hadoopConf) } private def getStringFromUrl(urlString: String): String = { val contentFile = File.createTempFile("string-", ".txt") contentFile.deleteOnExit() // exceptions will propagate to the caller of getStringFromUrl getFileFromUrl(urlString, contentFile.getParent, contentFile.getName) val contentPath = Paths.get(contentFile.toURI) new String(Files.readAllBytes(contentPath), StandardCharsets.UTF_8) } override def beforeAll(): Unit = { super.beforeAll() val tempPyFile = File.createTempFile("test", ".py") // scalastyle:off line.size.limit Files.write(tempPyFile.toPath, s""" |from pyspark.sql import SparkSession |import os | |spark = SparkSession.builder.enableHiveSupport().getOrCreate() |version_index = spark.conf.get("spark.sql.test.version.index", None) | |spark.sql("create table data_source_tbl_{} using json as select 1 i".format(version_index)) | |spark.sql("create table hive_compatible_data_source_tbl_{} using parquet as select 1 i".format(version_index)) | |json_file = "${genDataDir("json_")}" + str(version_index) |spark.range(1, 2).selectExpr("cast(id as int) as i").write.json(json_file) |spark.sql("create table 
external_data_source_tbl_{}(i int) using json options (path '{}')".format(version_index, json_file)) | |parquet_file = "${genDataDir("parquet_")}" + str(version_index) |spark.range(1, 2).selectExpr("cast(id as int) as i").write.parquet(parquet_file) |spark.sql("create table hive_compatible_external_data_source_tbl_{}(i int) using parquet options (path '{}')".format(version_index, parquet_file)) | |json_file2 = "${genDataDir("json2_")}" + str(version_index) |spark.range(1, 2).selectExpr("cast(id as int) as i").write.json(json_file2) |spark.sql("create table external_table_without_schema_{} using json options (path '{}')".format(version_index, json_file2)) | |parquet_file2 = "${genDataDir("parquet2_")}" + str(version_index) |spark.range(1, 3).selectExpr("1 as i", "cast(id as int) as p", "1 as j").write.parquet(os.path.join(parquet_file2, "p=1")) |spark.sql("create table tbl_with_col_overlap_{} using parquet options(path '{}')".format(version_index, parquet_file2)) | |spark.sql("create view v_{} as select 1 i".format(version_index)) """.stripMargin.getBytes("utf8")) // scalastyle:on line.size.limit PROCESS_TABLES.testingVersions.zipWithIndex.foreach { case (version, index) => val sparkHome = new File(sparkTestingDir, s"spark-$version") if (!sparkHome.exists()) { tryDownloadSpark(version, sparkTestingDir.getCanonicalPath) } val args = Seq( "--name", "prepare testing tables", "--master", "local[2]", "--conf", "spark.ui.enabled=false", "--conf", "spark.master.rest.enabled=false", "--conf", s"spark.sql.warehouse.dir=${wareHousePath.getCanonicalPath}", "--conf", s"spark.sql.test.version.index=$index", "--driver-java-options", s"-Dderby.system.home=${wareHousePath.getCanonicalPath}", tempPyFile.getCanonicalPath) runSparkSubmit(args, Some(sparkHome.getCanonicalPath), false) } tempPyFile.delete() } test("backward compatibility") { val args = Seq( "--class", PROCESS_TABLES.getClass.getName.stripSuffix("$"), "--name", "HiveExternalCatalog backward compatibility test", 
"--master", "local[2]", "--conf", "spark.ui.enabled=false", "--conf", "spark.master.rest.enabled=false", "--conf", s"spark.sql.warehouse.dir=${wareHousePath.getCanonicalPath}", "--driver-java-options", s"-Dderby.system.home=${wareHousePath.getCanonicalPath}", unusedJar.toString) runSparkSubmit(args) } } object PROCESS_TABLES extends QueryTest with SQLTestUtils { // Tests the latest version of every release line. val testingVersions = Seq("2.3.3", "2.4.0") protected var spark: SparkSession = _ def main(args: Array[String]): Unit = { val session = SparkSession.builder() .enableHiveSupport() .getOrCreate() spark = session import session.implicits._ testingVersions.indices.foreach { index => Seq( s"data_source_tbl_$index", s"hive_compatible_data_source_tbl_$index", s"external_data_source_tbl_$index", s"hive_compatible_external_data_source_tbl_$index", s"external_table_without_schema_$index").foreach { tbl => val tableMeta = spark.sharedState.externalCatalog.getTable("default", tbl) // make sure we can insert and query these tables. session.sql(s"insert into $tbl select 2") checkAnswer(session.sql(s"select * from $tbl"), Row(1) :: Row(2) :: Nil) checkAnswer(session.sql(s"select i from $tbl where i > 1"), Row(2)) // make sure we can rename table. val newName = tbl + "_renamed" sql(s"ALTER TABLE $tbl RENAME TO $newName") val readBack = spark.sharedState.externalCatalog.getTable("default", newName) val actualTableLocation = readBack.storage.locationUri.get.getPath val expectedLocation = if (tableMeta.tableType == CatalogTableType.EXTERNAL) { tableMeta.storage.locationUri.get.getPath } else { spark.sessionState.catalog.defaultTablePath(TableIdentifier(newName, None)).getPath } assert(actualTableLocation == expectedLocation) // make sure we can alter table location. 
withTempDir { dir => val path = dir.toURI.toString.stripSuffix("/") sql(s"ALTER TABLE ${tbl}_renamed SET LOCATION '$path'") val readBack = spark.sharedState.externalCatalog.getTable("default", tbl + "_renamed") val actualTableLocation = readBack.storage.locationUri.get.getPath val expected = dir.toURI.getPath.stripSuffix("/") assert(actualTableLocation == expected) } } // test permanent view checkAnswer(sql(s"select i from v_$index"), Row(1)) // SPARK-22356: overlapped columns between data and partition schema in data source tables val tbl_with_col_overlap = s"tbl_with_col_overlap_$index" assert(spark.table(tbl_with_col_overlap).columns === Array("i", "p", "j")) checkAnswer(spark.table(tbl_with_col_overlap), Row(1, 1, 1) :: Row(1, 1, 1) :: Nil) assert(sql("desc " + tbl_with_col_overlap).select("col_name") .as[String].collect().mkString(",").contains("i,p,j")) } } }
WindCanDie/spark
sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
Scala
apache-2.0
11,544
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.sources import org.apache.spark.rdd.RDD import org.apache.spark.sql._ import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types._ import org.apache.spark.unsafe.types.UTF8String private[sql] abstract class DataSourceTest extends QueryTest { protected def sqlTest(sqlString: String, expectedAnswer: Seq[Row], enableRegex: Boolean = false) { test(sqlString) { withSQLConf(SQLConf.SUPPORT_QUOTED_REGEX_COLUMN_NAME.key -> enableRegex.toString) { checkAnswer(spark.sql(sqlString), expectedAnswer) } } } } class DDLScanSource extends RelationProvider { override def createRelation( sqlContext: SQLContext, parameters: Map[String, String]): BaseRelation = { SimpleDDLScan( parameters("from").toInt, parameters("TO").toInt, parameters("Table"))(sqlContext.sparkSession) } } case class SimpleDDLScan( from: Int, to: Int, table: String)(@transient val sparkSession: SparkSession) extends BaseRelation with TableScan { override def sqlContext: SQLContext = sparkSession.sqlContext override def schema: StructType = StructType(Seq( StructField("intType", IntegerType, nullable = false).withComment(s"test comment $table"), 
StructField("stringType", StringType, nullable = false), StructField("dateType", DateType, nullable = false), StructField("timestampType", TimestampType, nullable = false), StructField("doubleType", DoubleType, nullable = false), StructField("bigintType", LongType, nullable = false), StructField("tinyintType", ByteType, nullable = false), StructField("decimalType", DecimalType.USER_DEFAULT, nullable = false), StructField("fixedDecimalType", DecimalType(5, 1), nullable = false), StructField("binaryType", BinaryType, nullable = false), StructField("booleanType", BooleanType, nullable = false), StructField("smallIntType", ShortType, nullable = false), StructField("floatType", FloatType, nullable = false), StructField("mapType", MapType(StringType, StringType)), StructField("arrayType", ArrayType(StringType)), StructField("structType", StructType(StructField("f1", StringType) :: StructField("f2", IntegerType) :: Nil ) ) )) override def needConversion: Boolean = false override def buildScan(): RDD[Row] = { // Rely on a type erasure hack to pass RDD[InternalRow] back as RDD[Row] sparkSession.sparkContext.parallelize(from to to).map { e => InternalRow(UTF8String.fromString(s"people$e"), e * 2) }.asInstanceOf[RDD[Row]] } }
akopich/spark
sql/core/src/test/scala/org/apache/spark/sql/sources/DataSourceTest.scala
Scala
apache-2.0
3,495
package com.signalcollect import language.higherKinds import org.scalacheck.Arbitrary import org.scalacheck.Arbitrary._ import org.scalacheck.Gen import org.scalacheck.Gen._ import org.scalatest.FlatSpec import org.scalatest.ShouldMatchers import org.scalatest.prop.Checkers import org.scalatest.mock.EasyMockSugar import com.signalcollect.examples.PageRankVertex import com.signalcollect.examples.PageRankEdge import com.signalcollect.interfaces.SignalMessageWithSourceId import com.signalcollect.util.TestAnnouncements class VertexSpec extends FlatSpec with ShouldMatchers with Checkers with EasyMockSugar with TestAnnouncements { lazy val smallInt = Gen.chooseNum(0, 100) lazy val smallDouble = Gen.chooseNum(0.0, 10.0) lazy val signalMapEntry = for { k <- smallInt v <- smallDouble } yield (k, v) lazy val signalMap = containerOf[List, (Int, Double)](signalMapEntry).map(_.toMap) lazy val outEdgeIds = containerOf[Set, Int](smallInt) implicit def arbSignalMap[Map[Int, Double]] = Arbitrary(signalMap) implicit def arbEdgeIds[Set[Int]] = Arbitrary(outEdgeIds) "PageRankVertex" should "correctly collect and signal" in { check( (incomingSignals: Map[Int, Double], outgoingEdges: Set[Int]) => { val id = "test" val mockGraphEditor = mock[GraphEditor[Any, Any]] val v = new PageRankVertex(id) for (id <- outgoingEdges) { v.addEdge(new PageRankEdge(id), mockGraphEditor) } v.afterInitialization(mockGraphEditor) for ((sourceId, signal) <- incomingSignals) { v.deliverSignalWithSourceId(signal, sourceId, mockGraphEditor) } if (!incomingSignals.isEmpty) { assert(v.scoreCollect > 0, "vertex received messages, should want to collect") v.executeCollectOperation(mockGraphEditor) v.state should equal(0.15 + 0.85 * incomingSignals.values.sum +- 0.0001) if (!outgoingEdges.isEmpty) { assert(v.scoreSignal > 0, "vertex updated state, should want to signal") expecting { for (targetId <- outgoingEdges) { call(mockGraphEditor.sendToWorkerForVertexIdHash( SignalMessageWithSourceId(targetId, id, v.state / 
outgoingEdges.size), targetId.hashCode)) } } whenExecuting(mockGraphEditor) { v.executeSignalOperation(mockGraphEditor) } } } true }, minSuccessful(1000)) } }
mageru/signal-collect
src/test/scala/com/signalcollect/VertexSpec.scala
Scala
apache-2.0
2,505
package chana.reactor case object AskView
wandoulabs/chana
src/main/scala/chana/reactor/AskView.scala
Scala
apache-2.0
43
// Generated by <a href="http://scalaxb.org/">scalaxb</a>. package eveapi.xml.account.eve.CharacterAffiliation import scala.concurrent.Future /** usage: val obj = scalaxb.fromXML[eveapi.xml.account.eve.CharacterAffiliation.Foo](node) val document = scalaxb.toXML[eveapi.xml.account.eve.CharacterAffiliation.Foo](obj, "foo", eveapi.xml.account.eve.CharacterAffiliation.defaultScope) **/ object `package` extends XMLProtocol { } trait XMLProtocol extends scalaxb.XMLStandardTypes { implicit lazy val executionContext = scala.concurrent.ExecutionContext.Implicits.global val defaultScope = scalaxb.toScope(Some("xs") -> "http://www.w3.org/2001/XMLSchema", Some("xsi") -> "http://www.w3.org/2001/XMLSchema-instance") implicit lazy val CharacterAffiliationEveapiFormat: scalaxb.XMLFormat[eveapi.xml.account.eve.CharacterAffiliation.Eveapi] = new DefaultCharacterAffiliationEveapiFormat {} implicit lazy val CharacterAffiliationResultFormat: scalaxb.XMLFormat[eveapi.xml.account.eve.CharacterAffiliation.Result] = new DefaultCharacterAffiliationResultFormat {} implicit lazy val CharacterAffiliationRowsetFormat: scalaxb.XMLFormat[eveapi.xml.account.eve.CharacterAffiliation.Rowset] = new DefaultCharacterAffiliationRowsetFormat {} implicit lazy val CharacterAffiliationRowFormat: scalaxb.XMLFormat[eveapi.xml.account.eve.CharacterAffiliation.Row] = new DefaultCharacterAffiliationRowFormat {} trait DefaultCharacterAffiliationEveapiFormat extends scalaxb.ElemNameParser[eveapi.xml.account.eve.CharacterAffiliation.Eveapi] { val targetNamespace: Option[String] = None def parser(node: scala.xml.Node, stack: List[scalaxb.ElemName]): Parser[eveapi.xml.account.eve.CharacterAffiliation.Eveapi] = phrase((scalaxb.ElemName(None, "currentTime")) ~ (scalaxb.ElemName(None, "result")) ~ (scalaxb.ElemName(None, "cachedUntil")) ^^ { case p1 ~ p2 ~ p3 => eveapi.xml.account.eve.CharacterAffiliation.Eveapi(scalaxb.fromXML[String](p1, scalaxb.ElemName(node) :: stack), 
scalaxb.fromXML[eveapi.xml.account.eve.CharacterAffiliation.Result](p2, scalaxb.ElemName(node) :: stack), scalaxb.fromXML[String](p3, scalaxb.ElemName(node) :: stack), scala.collection.immutable.ListMap(List( (node \\ "@version").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[BigInt](x, scalaxb.ElemName(node) :: stack)) } map { "@version" -> _ } ).flatten[(String, scalaxb.DataRecord[Any])]: _*)) }) override def writesAttribute(__obj: eveapi.xml.account.eve.CharacterAffiliation.Eveapi, __scope: scala.xml.NamespaceBinding): scala.xml.MetaData = { var attr: scala.xml.MetaData = scala.xml.Null __obj.attributes.toList map { case ("@version", _) => attr = scala.xml.Attribute(null, "version", __obj.version.toString, attr) case (key, x) => attr = scala.xml.Attribute((x.namespace map { __scope.getPrefix(_) }).orNull, x.key.orNull, x.value.toString, attr) } attr } def writesChildNodes(__obj: eveapi.xml.account.eve.CharacterAffiliation.Eveapi, __scope: scala.xml.NamespaceBinding): Seq[scala.xml.Node] = Seq.concat(scalaxb.toXML[String](__obj.currentTime, None, Some("currentTime"), __scope, false), scalaxb.toXML[eveapi.xml.account.eve.CharacterAffiliation.Result](__obj.result, None, Some("result"), __scope, false), scalaxb.toXML[String](__obj.cachedUntil, None, Some("cachedUntil"), __scope, false)) } trait DefaultCharacterAffiliationResultFormat extends scalaxb.ElemNameParser[eveapi.xml.account.eve.CharacterAffiliation.Result] { val targetNamespace: Option[String] = None def parser(node: scala.xml.Node, stack: List[scalaxb.ElemName]): Parser[eveapi.xml.account.eve.CharacterAffiliation.Result] = phrase((scalaxb.ElemName(None, "rowset")) ^^ { case p1 => eveapi.xml.account.eve.CharacterAffiliation.Result(scalaxb.fromXML[eveapi.xml.account.eve.CharacterAffiliation.Rowset](p1, scalaxb.ElemName(node) :: stack)) }) def writesChildNodes(__obj: eveapi.xml.account.eve.CharacterAffiliation.Result, __scope: scala.xml.NamespaceBinding): Seq[scala.xml.Node] = 
(scalaxb.toXML[eveapi.xml.account.eve.CharacterAffiliation.Rowset](__obj.rowset, None, Some("rowset"), __scope, false)) } trait DefaultCharacterAffiliationRowsetFormat extends scalaxb.ElemNameParser[eveapi.xml.account.eve.CharacterAffiliation.Rowset] { val targetNamespace: Option[String] = None def parser(node: scala.xml.Node, stack: List[scalaxb.ElemName]): Parser[eveapi.xml.account.eve.CharacterAffiliation.Rowset] = phrase(safeRep(scalaxb.ElemName(None, "row")) ^^ { case p1 => eveapi.xml.account.eve.CharacterAffiliation.Rowset(p1.toSeq map { scalaxb.fromXML[eveapi.xml.account.eve.CharacterAffiliation.Row](_, scalaxb.ElemName(node) :: stack) }, scala.collection.immutable.ListMap(List( (node \\ "@columns").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[String](x, scalaxb.ElemName(node) :: stack)) } map { "@columns" -> _ }, (node \\ "@key").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[String](x, scalaxb.ElemName(node) :: stack)) } map { "@key" -> _ }, (node \\ "@name").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[String](x, scalaxb.ElemName(node) :: stack)) } map { "@name" -> _ } ).flatten[(String, scalaxb.DataRecord[Any])]: _*)) }) override def writesAttribute(__obj: eveapi.xml.account.eve.CharacterAffiliation.Rowset, __scope: scala.xml.NamespaceBinding): scala.xml.MetaData = { var attr: scala.xml.MetaData = scala.xml.Null __obj.attributes.toList map { case ("@columns", _) => attr = scala.xml.Attribute(null, "columns", __obj.columns.toString, attr) case ("@key", _) => attr = scala.xml.Attribute(null, "key", __obj.key.toString, attr) case ("@name", _) => attr = scala.xml.Attribute(null, "name", __obj.name.toString, attr) case (key, x) => attr = scala.xml.Attribute((x.namespace map { __scope.getPrefix(_) }).orNull, x.key.orNull, x.value.toString, attr) } attr } def writesChildNodes(__obj: eveapi.xml.account.eve.CharacterAffiliation.Rowset, __scope: scala.xml.NamespaceBinding): Seq[scala.xml.Node] = 
(__obj.row flatMap { scalaxb.toXML[eveapi.xml.account.eve.CharacterAffiliation.Row](_, None, Some("row"), __scope, false) }) } trait DefaultCharacterAffiliationRowFormat extends scalaxb.XMLFormat[eveapi.xml.account.eve.CharacterAffiliation.Row] with scalaxb.CanWriteChildNodes[eveapi.xml.account.eve.CharacterAffiliation.Row] { val targetNamespace: Option[String] = None import scalaxb.ElemName._ def reads(seq: scala.xml.NodeSeq, stack: List[scalaxb.ElemName]): Either[String, eveapi.xml.account.eve.CharacterAffiliation.Row] = seq match { case node: scala.xml.Node => Right(eveapi.xml.account.eve.CharacterAffiliation.Row(scala.collection.immutable.ListMap(List( (node \\ "@allianceID").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[BigInt](x, scalaxb.ElemName(node) :: stack)) } map { "@allianceID" -> _ }, (node \\ "@allianceName").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[String](x, scalaxb.ElemName(node) :: stack)) } map { "@allianceName" -> _ }, (node \\ "@characterID").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[BigInt](x, scalaxb.ElemName(node) :: stack)) } map { "@characterID" -> _ }, (node \\ "@characterName").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[String](x, scalaxb.ElemName(node) :: stack)) } map { "@characterName" -> _ }, (node \\ "@corporationID").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[BigInt](x, scalaxb.ElemName(node) :: stack)) } map { "@corporationID" -> _ }, (node \\ "@corporationName").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[String](x, scalaxb.ElemName(node) :: stack)) } map { "@corporationName" -> _ }, (node \\ "@factionID").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[BigInt](x, scalaxb.ElemName(node) :: stack)) } map { "@factionID" -> _ }, (node \\ "@factionName").headOption map { x => scalaxb.DataRecord(x, node, scalaxb.fromXML[String](x, scalaxb.ElemName(node) :: stack)) } map { 
"@factionName" -> _ } ).flatten[(String, scalaxb.DataRecord[Any])]: _*))) case _ => Left("reads failed: seq must be scala.xml.Node") } override def writesAttribute(__obj: eveapi.xml.account.eve.CharacterAffiliation.Row, __scope: scala.xml.NamespaceBinding): scala.xml.MetaData = { var attr: scala.xml.MetaData = scala.xml.Null __obj.attributes.toList map { case ("@allianceID", _) => attr = scala.xml.Attribute(null, "allianceID", __obj.allianceID.toString, attr) case ("@allianceName", _) => attr = scala.xml.Attribute(null, "allianceName", __obj.allianceName.toString, attr) case ("@characterID", _) => attr = scala.xml.Attribute(null, "characterID", __obj.characterID.toString, attr) case ("@characterName", _) => attr = scala.xml.Attribute(null, "characterName", __obj.characterName.toString, attr) case ("@corporationID", _) => attr = scala.xml.Attribute(null, "corporationID", __obj.corporationID.toString, attr) case ("@corporationName", _) => attr = scala.xml.Attribute(null, "corporationName", __obj.corporationName.toString, attr) case ("@factionID", _) => attr = scala.xml.Attribute(null, "factionID", __obj.factionID.toString, attr) case ("@factionName", _) => attr = scala.xml.Attribute(null, "factionName", __obj.factionName.toString, attr) case (key, x) => attr = scala.xml.Attribute((x.namespace map { __scope.getPrefix(_) }).orNull, x.key.orNull, x.value.toString, attr) } attr } def writesChildNodes(__obj: eveapi.xml.account.eve.CharacterAffiliation.Row, __scope: scala.xml.NamespaceBinding): Seq[scala.xml.Node] = Nil } }
scala-eveapi/eveapi
xml/src/main/scala/eveapi/xml/eve/CharacterAffiliation/xmlprotocol.scala
Scala
mit
10,089
// // Copyright 2010-2020 Paytronix Systems, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package com.paytronix.utils.validation import scalaz.{@@, NonEmptyList, Tag, Failure, Success, ValidationNel} import scalaz.Id.Id import scalaz.Kleisli.kleisli import scalaz.Tags.First import scalaz.std.option.optionFirst import scalaz.std.string.stringInstance import scalaz.syntax.foldable.ToFoldableOps /* .foldLeft, .intercalate */ import shapeless.{::, HList, HNil, Poly2} import shapeless.ops.hlist.RightFolder import com.paytronix.utils.scala.result.{Failed, FailedG, Okay, Result, ResultG} import NonEmptyList.nels /** * Basic primitives for composing validations of values that can transform the value as it's being validated (e.g. instead of just testing a * value is numeric, convert it to an int as well). * * The fundamental type being used is `Validated[A]`, which is a type alias for `Either[List[ValidationError], A]` where the Left side indicates * the value did not pass validation and contains one or more errors for the value, and the Right side indicates successful validation. * * Functions which take some value and produce a validated one are called `A => Validated[B]` with `A` being the input type (e.g. `String`) and * `B` being the output type (e.g. `Int`). Most validation functions have the same type, and do not perform any particular modification of the value. 
* For example, a function that validates the length of a string would be of type `String => Validated[String]`. * * Such functions can be composed together using the convenience operator `and`, for example: * {{{ * nonBlank() and int() and greaterThan(0) * }}} * * Where `nonEmpty` validates a string is not empty, `int` validates a string is a valid integer and converts it to an `Int`, and `greaterThan` tests * that an `Int` is not less than or equal to a particular value. * * Validations can be applied to values via the `is` operator (provided by the implicit `valueOps`): * {{{ * Some("foobar") is (nonBlank() and noLongerThan(3)) == Right("foobar") * }}} */ object base { /** The type of a validated value. `Left(errors)` indicates the value did not pass validation, and `Right(v)` indicates it did pass. */ type Validated[+A] = ValidationNel[ValidationError, A] /** Type of field paths, represented by a list of field names. Empty list indicates a scalar value */ type ErrorPath = List[String] /** * Type of validation errors, which contain some error code, potentially some arguments for formatting the error code, and the * formatted version. */ final case class ValidationError ( /** Some regular code string, e.g. "null_field" indicating what type of error */ code: String, /** A human readable default equivalent in english */ text: String, /** Path to the possibly-nested error. `Nil` for a scalar value. 
See the `field` function in [[com.paytronix.utils.validation.fields]] */ location: ErrorPath = Nil, /** A textual representation of the invalid input value, or `None` if the input value is too complex to display in text */ invalidInput: Option[String] = None ) { /** Make a new `ValidationError` with the given segment prepended to the location, indicating it is for a nested value */ def nest(segment: String): ValidationError = copy(location = segment :: location) /** Supply the textual representation of the invalid input value */ def withInvalidInput(s: String): ValidationError = copy(invalidInput = Some(s)) override def toString: String = ( (location match { case Nil => "" case _ => "At " + location.mkString("/") + ": " }) + code + ": " + text ) } object ValidationError { /** Create a validation error with no code value */ def apply(message: String): ValidationError = ValidationError(message, message) } /** Make a `Success` with a value */ def success[A](value: A): Validated[A] = Success(value) /** Make a `Failure` with a `NonEmptyList` of the given errors */ def failure(error: ValidationError, errors: ValidationError*): Validated[Nothing] = Failure(nels(error, errors: _*)) /** * Combine a series of `Validated` values from a `HList` into a single `Validated` value containing either the validated * values in a `HList`, or all the validation errors encountered. 
* * For example: * {{{ * validate ( * field("foo", "foo" is nonBlank()) :: * field("bar", 1 is positive()) :: * HNil * ) * == Success("foo" :: 1 :: HNil): Validated[String :: Int :: HNil] * * validate ( * field("foo", "foo" is nonBlank()) :: * field("bar", -1 is positive()) :: * HNil * ) * == Failure(NonEmptyList(ValidationError("At bar: invalid_negative_or_zero: positive value required")) * }}} */ def validate[L <: HList](in: L)(implicit folder: RightFolder[L, Validated[HNil], combineValidated.type]): folder.Out = in.foldRight(Success(HNil): Validated[HNil])(combineValidated) object combineValidated extends Poly2 { implicit def caseValidatedValidated[A, B <: HList] = at[Validated[A], Validated[B]] { (l, r) => identity[Validated[A :: B]] { (l, r) match { case (Failure(newErrors), Failure(existingErrors)) => Failure(newErrors append existingErrors) case (Failure(newErrors), _ ) => Failure(newErrors) case (_, Failure(existingErrors)) => Failure(existingErrors) case (Success(newValue), Success(existingValues)) => Success(newValue :: existingValues) } } } } /** Attach a field name to any errors in a `Validated` value */ def field[A](name: String, result: Validated[A]): Validated[A] = result.leftMap(_.map(_.nest(name))) /** Attach a field name to any errors in a `Validated` value */ def field[A, B](name: String, func: A => Validated[B]): A => Validated[B] = in => field(name, func(in)) /** Wrap a validation function `A => Validated[B]` such that any errors it yields will have a field name added */ def field[A, B >: A, C](name: String, container: String => A, func: B => Validated[C]): Validated[C] = field(name, func(container(name))) type ValidationErrorMap = Map[ErrorPath, NonEmptyList[ValidationError]] /** Convert a list of `ValidationError`s to a map by field name */ def validationErrorsToMap(in: NonEmptyList[ValidationError]): ValidationErrorMap = in.foldLeft[ValidationErrorMap](Map.empty) { (m, ve) => val errors = m.get(ve.location) match { case Some(errors) => ve 
<:: errors case None => nels(ve) } m + (ve.location -> errors) } /** * Convert a list of `ValidationError`s to error text, suitable for display to a console. * * Default formatting is like: * {{{ * field 1: error 1 * error 2 * field 2: error 1 * field three: error 1 * error 2 * }}} * * but can be customized: * * `groupSeparator` goes between each group. if bundled is true then there is a group per field. if `false`, then per error. defaults to "\n" * `fieldSeparator` goes between the field and the error text. defaults to ": " * `errorSeparator` goes between each error. defaults to "\n" * `locationSeparator` goes between each component of the field location. * `invalidInputPrefix` goes before the invalid input, if `withInvalidInputs` is `true`. * `invalidInputSuffix` goes after the invalid input, if `withInvalidInputs` is `true`. * `indented` controls whether an automatic indent before each error after the first for a given field is inserted, and only works when bundled is `true` (the default) * `bundled` indicates whether multiple errors for a single field will be bundled together under a single field heading. defaults to `true` * `withInvalidInputs` indicates whether the original input value (if available) will be added to the end of each error message. defaults to `true` * * Another example that emits a single line output: * {{{ * validationErrorsToString(..., groupSeparator=". ", fieldSeparator=": ", errorSeparator="; ", indented=false) + "." * == "field 1: error 1; error 2. field 2: error1. field three: error 1; error 2." 
* }}} */ def validationErrorsToString ( in: NonEmptyList[ValidationError], groupSeparator: String = "\n", fieldSeparator: String = ": ", errorSeparator: String = "\n", locationSeparator: String = "/", invalidInputPrefix: String = " (invalid input was: ", invalidInputSuffix: String = ")", indented: Boolean = true, bundled: Boolean = true, withInvalidInputs: Boolean = true ): String = { val m = validationErrorsToMap(in) ( for ((key, keyString) <- m.keys.toSeq.map(k => (k, k.mkString(locationSeparator))).sortBy(_._2)) yield { val errors = m(key).map { error => error.text + (error.invalidInput match { case Some(s) if withInvalidInputs => invalidInputPrefix + s + invalidInputSuffix case _ => "" }) } val prefix = if (keyString != "") keyString + fieldSeparator else "" if (bundled) { prefix + errors.intercalate(errorSeparator + (if (indented) (" " * prefix.length) else "")) } else { errors.map(prefix + _).intercalate(errorSeparator) } } ).mkString(groupSeparator) } /** Validation error for missing values */ val missingValueError = ValidationError("null_field", "value is required") /** Validation error for values that are present but shouldn't be */ val nonMissingValueError = ValidationError("non_null_field", "value should not be present") /** The ValidationError given when no more specific error is available */ val generalError = ValidationError("invalid_value", "value is invalid") /** * Apply some validation but if it succeeds ignore the result and use the input. 
* That is, just assert the condition, but do not use the conversion of some validation */ def onlyAssert[A](f: A => Validated[_]): A => Validated[A] = in => f(in).map(_ => in) /** Apply a validation only if a boolean is true */ def when[A](condition: Boolean)(f: A => Validated[A]): A => Validated[A] = in => if (condition) f(in) else Success(in) /** Apply some predicate function, failing validation if the predicate fails */ def predicateE[A](error: ValidationError)(p: A => Boolean): A => Validated[A] = a => if (p(a)) Success(a) else Failure(nels(error)) /** Accept the value with the first validation function `A => Validated[B]` which doesn't fail */ def any[A, B](fs: NonEmptyList[A => Validated[B]]) = anyE(generalError)(fs) /** Accept the value with the first validation function `A => Validated[B]` which doesn't fail */ def anyE[A, B](error: ValidationError)(fs: NonEmptyList[A => Validated[B]]): A => Validated[B] = { def unFirst[A](in: A @@ First): A = Tag.unsubst[A, Id, First](in) a => unFirst(fs.foldMap(f => First(f(a).toOption))) match { case Some(result) => Success(result) case None => Failure(nels(error)) } } /** Extend a validation kleisli `A => Validated[B]` with the `and` combinator, equivalent to reversed kleisli composition */ implicit class validationFunctionOps[A, B](f: A => Validated[B]) { /** Compose a validation function with another, from left to right */ def and[C](g: B => Validated[C]): A => Validated[C] = a => f(a) match { case Success(b) => g(b) case Failure(errors) => Failure(errors) } /** Map the output value of the validation function */ def map[C](g: B => C): A => Validated[C] = a => f(a).map(g) } /** Extend a `Validated[A]` with the `and` combinator */ implicit class validatedOps[A](lhs: Validated[A]) { /** Apply a `ValidationFunction` to a `Validated` value */ def and[B](rhs: A => Validated[B]): Validated[B] = lhs match { case Success(b) => rhs(b) case Failure(errors) => Failure(errors) } /** Convert the `Validated` to a `ResultG` 
containing the same information */ def toResultG: ResultG[NonEmptyList[ValidationError], A] = lhs match { case Success(a) => Okay(a) case Failure(errors) => FailedG("validation failed", errors) } /** Convert the `Validated` to a simple `Result` with all validation errors packed into the message */ def toResult: Result[A] = lhs match { case Success(a) => Okay(a) case Failure(errors) => Failed(validationErrorsToString(errors)) } /** Yield the `Success` value or throw an exception with the validation errors as the message */ def orThrow: A = toResult.orThrow } /** Implicitly extend any value with an `is` operator that can be used to apply a validation function to the value */ implicit class valueOps[A](lhs: A) { /** `value is (validationFunction)` applies the given validation(s) to the value */ def is[B](f: A => Validated[B]): Validated[B] = f(lhs) /** alias for `is` that works better for plurals */ def are[B](f: A => Validated[B]): Validated[B] = is(f) } }
paytronix/utils-open
validation/src/main/scala/com/paytronix/utils/validation/base.scala
Scala
apache-2.0
14,795
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.catalyst.analysis import java.sql.Timestamp import org.apache.spark.sql.catalyst.analysis.TypeCoercion._ import org.apache.spark.sql.catalyst.dsl.expressions._ import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.plans.PlanTest import org.apache.spark.sql.catalyst.plans.logical._ import org.apache.spark.sql.catalyst.rules.{Rule, RuleExecutor} import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types._ import org.apache.spark.unsafe.types.CalendarInterval class TypeCoercionSuite extends AnalysisTest { // scalastyle:off line.size.limit // The following table shows all implicit data type conversions that are not visible to the user. 
// +----------------------+----------+-----------+-------------+----------+------------+-----------+------------+------------+-------------+------------+----------+---------------+------------+----------+-------------+----------+----------------------+---------------------+-------------+--------------+ // | Source Type\\CAST TO | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | BinaryType | BooleanType | StringType | DateType | TimestampType | ArrayType | MapType | StructType | NullType | CalendarIntervalType | DecimalType | NumericType | IntegralType | // +----------------------+----------+-----------+-------------+----------+------------+-----------+------------+------------+-------------+------------+----------+---------------+------------+----------+-------------+----------+----------------------+---------------------+-------------+--------------+ // | ByteType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(3, 0) | ByteType | ByteType | // | ShortType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(5, 0) | ShortType | ShortType | // | IntegerType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(10, 0) | IntegerType | IntegerType | // | LongType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(20, 0) | LongType | LongType | // | DoubleType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(30, 15) | DoubleType | IntegerType | // | FloatType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | 
StringType | X | X | X | X | X | X | X | DecimalType(14, 7) | FloatType | IntegerType | // | Dec(10, 2) | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(10, 2) | Dec(10, 2) | IntegerType | // | BinaryType | X | X | X | X | X | X | X | BinaryType | X | StringType | X | X | X | X | X | X | X | X | X | X | // | BooleanType | X | X | X | X | X | X | X | X | BooleanType | StringType | X | X | X | X | X | X | X | X | X | X | // | StringType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | BinaryType | X | StringType | DateType | TimestampType | X | X | X | X | X | DecimalType(38, 18) | DoubleType | X | // | DateType | X | X | X | X | X | X | X | X | X | StringType | DateType | TimestampType | X | X | X | X | X | X | X | X | // | TimestampType | X | X | X | X | X | X | X | X | X | StringType | DateType | TimestampType | X | X | X | X | X | X | X | X | // | ArrayType | X | X | X | X | X | X | X | X | X | X | X | X | ArrayType* | X | X | X | X | X | X | X | // | MapType | X | X | X | X | X | X | X | X | X | X | X | X | X | MapType* | X | X | X | X | X | X | // | StructType | X | X | X | X | X | X | X | X | X | X | X | X | X | X | StructType* | X | X | X | X | X | // | NullType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | BinaryType | BooleanType | StringType | DateType | TimestampType | ArrayType | MapType | StructType | NullType | CalendarIntervalType | DecimalType(38, 18) | DoubleType | IntegerType | // | CalendarIntervalType | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | CalendarIntervalType | X | X | X | // 
// +----------------------+----------+-----------+-------------+----------+------------+-----------+------------+------------+-------------+------------+----------+---------------+------------+----------+-------------+----------+----------------------+---------------------+-------------+--------------+
// Note: StructType* is castable when all the internal child types are castable according to the table.
// Note: ArrayType* is castable when the element type is castable according to the table.
// Note: MapType* is castable when both the key type and the value type are castable according to the table.
// scalastyle:on line.size.limit

// Assert that `from` implicitly casts to `to` and that the resulting expression has
// type `expected`, checked both for a representative non-null value and for a null
// literal of `from` (built by `default`/`createNull` below).
private def shouldCast(from: DataType, to: AbstractDataType, expected: DataType): Unit = {
  // Check default value
  val castDefault = TypeCoercion.ImplicitTypeCasts.implicitCast(default(from), to)
  assert(DataType.equalsIgnoreCompatibleNullability(
    castDefault.map(_.dataType).getOrElse(null), expected),
    s"Failed to cast $from to $to")

  // Check null value
  val castNull = TypeCoercion.ImplicitTypeCasts.implicitCast(createNull(from), to)
  assert(DataType.equalsIgnoreCaseAndNullability(
    castNull.map(_.dataType).getOrElse(null), expected),
    s"Failed to cast $from to $to")
}

// Assert that NO implicit cast from `from` to `to` exists, for either a default
// value or a null literal of `from` (implicitCast returns None in both cases).
private def shouldNotCast(from: DataType, to: AbstractDataType): Unit = {
  // Check default value
  val castDefault = TypeCoercion.ImplicitTypeCasts.implicitCast(default(from), to)
  assert(castDefault.isEmpty, s"Should not be able to cast $from to $to, but got $castDefault")

  // Check null value
  val castNull = TypeCoercion.ImplicitTypeCasts.implicitCast(createNull(from), to)
  assert(castNull.isEmpty, s"Should not be able to cast $from to $to, but got $castNull")
}

// Build a representative non-null literal expression of the given type; for arrays
// and maps the default literal is placed inside the container so the element /
// key / value types are the ones exercised by the cast.
private def default(dataType: DataType): Expression = dataType match {
  case ArrayType(internalType: DataType, _) =>
    CreateArray(Seq(Literal.default(internalType)))
  case MapType(keyDataType: DataType, valueDataType: DataType, _) =>
    CreateMap(Seq(Literal.default(keyDataType), Literal.default(valueDataType)))
  case _ =>
    Literal.default(dataType)
}

// Build a null literal of the given type; mirrors `default` above, but with null
// payloads, so the null-value branch of shouldCast/shouldNotCast is exercised.
private def createNull(dataType: DataType): Expression = dataType match {
  case ArrayType(internalType: DataType, _) =>
    CreateArray(Seq(Literal.create(null, internalType)))
  case MapType(keyDataType: DataType, valueDataType: DataType, _) =>
    CreateMap(Seq(Literal.create(null, keyDataType), Literal.create(null, valueDataType)))
  case _ =>
    Literal.create(null, dataType)
}

// Type groups used by checkTypeCasting to enumerate castable vs non-castable targets.
val integralTypes: Seq[DataType] =
  Seq(ByteType, ShortType, IntegerType, LongType)
val fractionalTypes: Seq[DataType] =
  Seq(DoubleType, FloatType, DecimalType.SYSTEM_DEFAULT, DecimalType(10, 2))
val numericTypes: Seq[DataType] = integralTypes ++ fractionalTypes
val atomicTypes: Seq[DataType] =
  numericTypes ++ Seq(BinaryType, BooleanType, StringType, DateType, TimestampType)
val complexTypes: Seq[DataType] =
  Seq(ArrayType(IntegerType),
    ArrayType(StringType),
    MapType(StringType, StringType),
    new StructType().add("a1", StringType),
    new StructType().add("a1", StringType).add("a2", IntegerType))
val allTypes: Seq[DataType] =
  atomicTypes ++ complexTypes ++ Seq(NullType, CalendarIntervalType)

// Check whether the type `checkedType` can be cast to all the types in `castableTypes`,
// but cannot be cast to the other types in `allTypes`.
private def checkTypeCasting(checkedType: DataType, castableTypes: Seq[DataType]): Unit = {
  val nonCastableTypes = allTypes.filterNot(castableTypes.contains)
  castableTypes.foreach { tpe =>
    shouldCast(checkedType, tpe, tpe)
  }
  nonCastableTypes.foreach { tpe =>
    shouldNotCast(checkedType, tpe)
  }
}

// Assert that `widenFunc` resolves `expected` as the wider common type of `t1`/`t2`;
// when `isSymmetric` the same expectation is re-checked with the arguments swapped.
private def checkWidenType(
    widenFunc: (DataType, DataType) => Option[DataType],
    t1: DataType,
    t2: DataType,
    expected: Option[DataType],
    isSymmetric: Boolean = true): Unit = {
  var found = widenFunc(t1, t2)
  assert(found == expected,
    s"Expected $expected as wider common type for $t1 and $t2, found $found")
  // Test both directions to make sure the widening is symmetric.
if (isSymmetric) {
    found = widenFunc(t2, t1)
    assert(found == expected,
      s"Expected $expected as wider common type for $t2 and $t1, found $found")
  }
}

test("implicit type cast - ByteType") {
  val checkedType = ByteType
  checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
  // against the abstract DecimalType, ByteType resolves to its minimal decimal
  shouldCast(checkedType, DecimalType, DecimalType.ByteDecimal)
  shouldCast(checkedType, NumericType, checkedType)
  shouldCast(checkedType, IntegralType, checkedType)
}

test("implicit type cast - ShortType") {
  val checkedType = ShortType
  checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
  shouldCast(checkedType, DecimalType, DecimalType.ShortDecimal)
  shouldCast(checkedType, NumericType, checkedType)
  shouldCast(checkedType, IntegralType, checkedType)
}

test("implicit type cast - IntegerType") {
  val checkedType = IntegerType
  checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
  shouldCast(IntegerType, DecimalType, DecimalType.IntDecimal)
  shouldCast(checkedType, NumericType, checkedType)
  shouldCast(checkedType, IntegralType, checkedType)
}

test("implicit type cast - LongType") {
  val checkedType = LongType
  checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
  shouldCast(checkedType, DecimalType, DecimalType.LongDecimal)
  shouldCast(checkedType, NumericType, checkedType)
  shouldCast(checkedType, IntegralType, checkedType)
}

test("implicit type cast - FloatType") {
  val checkedType = FloatType
  checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
  shouldCast(checkedType, DecimalType, DecimalType.FloatDecimal)
  shouldCast(checkedType, NumericType, checkedType)
  // fractional types must not be implicitly narrowed to IntegralType
  shouldNotCast(checkedType, IntegralType)
}

test("implicit type cast - DoubleType") {
  val checkedType = DoubleType
  checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
  shouldCast(checkedType, DecimalType, DecimalType.DoubleDecimal)
  shouldCast(checkedType, NumericType, checkedType)
  shouldNotCast(checkedType,
IntegralType) } test("implicit type cast - DecimalType(10, 2)") { val checkedType = DecimalType(10, 2) checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType)) shouldCast(checkedType, DecimalType, checkedType) shouldCast(checkedType, NumericType, checkedType) shouldNotCast(checkedType, IntegralType) } test("implicit type cast - BinaryType") { val checkedType = BinaryType checkTypeCasting(checkedType, castableTypes = Seq(checkedType, StringType)) shouldNotCast(checkedType, DecimalType) shouldNotCast(checkedType, NumericType) shouldNotCast(checkedType, IntegralType) } test("implicit type cast - BooleanType") { val checkedType = BooleanType checkTypeCasting(checkedType, castableTypes = Seq(checkedType, StringType)) shouldNotCast(checkedType, DecimalType) shouldNotCast(checkedType, NumericType) shouldNotCast(checkedType, IntegralType) } test("implicit type cast - StringType") { val checkedType = StringType val nonCastableTypes = complexTypes ++ Seq(BooleanType, NullType, CalendarIntervalType) checkTypeCasting(checkedType, castableTypes = allTypes.filterNot(nonCastableTypes.contains)) shouldCast(checkedType, DecimalType, DecimalType.SYSTEM_DEFAULT) shouldCast(checkedType, NumericType, NumericType.defaultConcreteType) shouldNotCast(checkedType, IntegralType) } test("implicit type cast - DateType") { val checkedType = DateType checkTypeCasting(checkedType, castableTypes = Seq(checkedType, StringType, TimestampType)) shouldNotCast(checkedType, DecimalType) shouldNotCast(checkedType, NumericType) shouldNotCast(checkedType, IntegralType) } test("implicit type cast - TimestampType") { val checkedType = TimestampType checkTypeCasting(checkedType, castableTypes = Seq(checkedType, StringType, DateType)) shouldNotCast(checkedType, DecimalType) shouldNotCast(checkedType, NumericType) shouldNotCast(checkedType, IntegralType) } test("implicit type cast - ArrayType(StringType)") { val checkedType = ArrayType(StringType) val nonCastableTypes = complexTypes ++ 
Seq(BooleanType, NullType, CalendarIntervalType) checkTypeCasting(checkedType, castableTypes = allTypes.filterNot(nonCastableTypes.contains).map(ArrayType(_))) nonCastableTypes.map(ArrayType(_)).foreach(shouldNotCast(checkedType, _)) shouldNotCast(ArrayType(DoubleType, containsNull = false), ArrayType(LongType, containsNull = false)) shouldNotCast(checkedType, DecimalType) shouldNotCast(checkedType, NumericType) shouldNotCast(checkedType, IntegralType) } test("implicit type cast - MapType(StringType, StringType)") { val checkedType = MapType(StringType, StringType) checkTypeCasting(checkedType, castableTypes = Seq(checkedType)) shouldNotCast(checkedType, DecimalType) shouldNotCast(checkedType, NumericType) shouldNotCast(checkedType, IntegralType) } test("implicit type cast - StructType().add(\\"a1\\", StringType)") { val checkedType = new StructType().add("a1", StringType) checkTypeCasting(checkedType, castableTypes = Seq(checkedType)) shouldNotCast(checkedType, DecimalType) shouldNotCast(checkedType, NumericType) shouldNotCast(checkedType, IntegralType) } test("implicit type cast - NullType") { val checkedType = NullType checkTypeCasting(checkedType, castableTypes = allTypes) shouldCast(checkedType, DecimalType, DecimalType.SYSTEM_DEFAULT) shouldCast(checkedType, NumericType, NumericType.defaultConcreteType) shouldCast(checkedType, IntegralType, IntegralType.defaultConcreteType) } test("implicit type cast - CalendarIntervalType") { val checkedType = CalendarIntervalType checkTypeCasting(checkedType, castableTypes = Seq(checkedType)) shouldNotCast(checkedType, DecimalType) shouldNotCast(checkedType, NumericType) shouldNotCast(checkedType, IntegralType) } test("eligible implicit type cast - TypeCollection") { shouldCast(NullType, TypeCollection(StringType, BinaryType), StringType) shouldCast(StringType, TypeCollection(StringType, BinaryType), StringType) shouldCast(BinaryType, TypeCollection(StringType, BinaryType), BinaryType) shouldCast(StringType, 
TypeCollection(BinaryType, StringType), StringType) shouldCast(IntegerType, TypeCollection(IntegerType, BinaryType), IntegerType) shouldCast(IntegerType, TypeCollection(BinaryType, IntegerType), IntegerType) shouldCast(BinaryType, TypeCollection(BinaryType, IntegerType), BinaryType) shouldCast(BinaryType, TypeCollection(IntegerType, BinaryType), BinaryType) shouldCast(IntegerType, TypeCollection(StringType, BinaryType), StringType) shouldCast(IntegerType, TypeCollection(BinaryType, StringType), StringType) shouldCast(DecimalType.SYSTEM_DEFAULT, TypeCollection(IntegerType, DecimalType), DecimalType.SYSTEM_DEFAULT) shouldCast(DecimalType(10, 2), TypeCollection(IntegerType, DecimalType), DecimalType(10, 2)) shouldCast(DecimalType(10, 2), TypeCollection(DecimalType, IntegerType), DecimalType(10, 2)) shouldCast(IntegerType, TypeCollection(DecimalType(10, 2), StringType), DecimalType(10, 2)) shouldCast(StringType, TypeCollection(NumericType, BinaryType), DoubleType) shouldCast( ArrayType(StringType, false), TypeCollection(ArrayType(StringType), StringType), ArrayType(StringType, false)) shouldCast( ArrayType(StringType, true), TypeCollection(ArrayType(StringType), StringType), ArrayType(StringType, true)) } test("ineligible implicit type cast - TypeCollection") { shouldNotCast(IntegerType, TypeCollection(DateType, TimestampType)) } test("tightest common bound for types") { def widenTest(t1: DataType, t2: DataType, expected: Option[DataType]): Unit = checkWidenType(TypeCoercion.findTightestCommonType, t1, t2, expected) // Null widenTest(NullType, NullType, Some(NullType)) // Boolean widenTest(NullType, BooleanType, Some(BooleanType)) widenTest(BooleanType, BooleanType, Some(BooleanType)) widenTest(IntegerType, BooleanType, None) widenTest(LongType, BooleanType, None) // Integral widenTest(NullType, ByteType, Some(ByteType)) widenTest(NullType, IntegerType, Some(IntegerType)) widenTest(NullType, LongType, Some(LongType)) widenTest(ShortType, IntegerType, Some(IntegerType)) 
widenTest(ShortType, LongType, Some(LongType)) widenTest(IntegerType, LongType, Some(LongType)) widenTest(LongType, LongType, Some(LongType)) // Floating point widenTest(NullType, FloatType, Some(FloatType)) widenTest(NullType, DoubleType, Some(DoubleType)) widenTest(FloatType, DoubleType, Some(DoubleType)) widenTest(FloatType, FloatType, Some(FloatType)) widenTest(DoubleType, DoubleType, Some(DoubleType)) // Integral mixed with floating point. widenTest(IntegerType, FloatType, Some(FloatType)) widenTest(IntegerType, DoubleType, Some(DoubleType)) widenTest(IntegerType, DoubleType, Some(DoubleType)) widenTest(LongType, FloatType, Some(FloatType)) widenTest(LongType, DoubleType, Some(DoubleType)) // No up-casting for fixed-precision decimal (this is handled by arithmetic rules) widenTest(DecimalType(2, 1), DecimalType(3, 2), None) widenTest(DecimalType(2, 1), DoubleType, None) widenTest(DecimalType(2, 1), IntegerType, None) widenTest(DoubleType, DecimalType(2, 1), None) // StringType widenTest(NullType, StringType, Some(StringType)) widenTest(StringType, StringType, Some(StringType)) widenTest(IntegerType, StringType, None) widenTest(LongType, StringType, None) // TimestampType widenTest(NullType, TimestampType, Some(TimestampType)) widenTest(TimestampType, TimestampType, Some(TimestampType)) widenTest(DateType, TimestampType, Some(TimestampType)) widenTest(IntegerType, TimestampType, None) widenTest(StringType, TimestampType, None) // ComplexType widenTest(NullType, MapType(IntegerType, StringType, false), Some(MapType(IntegerType, StringType, false))) widenTest(NullType, StructType(Seq()), Some(StructType(Seq()))) widenTest(StringType, MapType(IntegerType, StringType, true), None) widenTest(ArrayType(IntegerType), StructType(Seq()), None) widenTest( StructType(Seq(StructField("a", IntegerType))), StructType(Seq(StructField("b", IntegerType))), None) widenTest( StructType(Seq(StructField("a", IntegerType, nullable = false))), StructType(Seq(StructField("a", 
DoubleType, nullable = false))), Some(StructType(Seq(StructField("a", DoubleType, nullable = false))))) widenTest( StructType(Seq(StructField("a", IntegerType, nullable = false))), StructType(Seq(StructField("a", IntegerType, nullable = false))), Some(StructType(Seq(StructField("a", IntegerType, nullable = false))))) widenTest( StructType(Seq(StructField("a", IntegerType, nullable = false))), StructType(Seq(StructField("a", IntegerType, nullable = true))), Some(StructType(Seq(StructField("a", IntegerType, nullable = true))))) widenTest( StructType(Seq(StructField("a", IntegerType, nullable = true))), StructType(Seq(StructField("a", IntegerType, nullable = false))), Some(StructType(Seq(StructField("a", IntegerType, nullable = true))))) widenTest( StructType(Seq(StructField("a", IntegerType, nullable = true))), StructType(Seq(StructField("a", IntegerType, nullable = true))), Some(StructType(Seq(StructField("a", IntegerType, nullable = true))))) withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") { widenTest( StructType(Seq(StructField("a", IntegerType))), StructType(Seq(StructField("A", IntegerType))), None) } withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") { checkWidenType( TypeCoercion.findTightestCommonType, StructType(Seq(StructField("a", IntegerType), StructField("B", IntegerType))), StructType(Seq(StructField("A", IntegerType), StructField("b", IntegerType))), Some(StructType(Seq(StructField("a", IntegerType), StructField("B", IntegerType)))), isSymmetric = false) } widenTest( ArrayType(IntegerType, containsNull = true), ArrayType(IntegerType, containsNull = false), Some(ArrayType(IntegerType, containsNull = true))) widenTest( MapType(IntegerType, StringType, valueContainsNull = true), MapType(IntegerType, StringType, valueContainsNull = false), Some(MapType(IntegerType, StringType, valueContainsNull = true))) widenTest( new StructType() .add("arr", ArrayType(IntegerType, containsNull = true), nullable = false), new StructType() .add("arr", 
ArrayType(IntegerType, containsNull = false), nullable = true), Some(new StructType() .add("arr", ArrayType(IntegerType, containsNull = true), nullable = true))) } test("wider common type for decimal and array") { def widenTestWithStringPromotion( t1: DataType, t2: DataType, expected: Option[DataType], isSymmetric: Boolean = true): Unit = { checkWidenType(TypeCoercion.findWiderTypeForTwo, t1, t2, expected, isSymmetric) } def widenTestWithoutStringPromotion( t1: DataType, t2: DataType, expected: Option[DataType], isSymmetric: Boolean = true): Unit = { checkWidenType( TypeCoercion.findWiderTypeWithoutStringPromotionForTwo, t1, t2, expected, isSymmetric) } // Decimal widenTestWithStringPromotion( DecimalType(2, 1), DecimalType(3, 2), Some(DecimalType(3, 2))) widenTestWithStringPromotion( DecimalType(2, 1), DoubleType, Some(DoubleType)) widenTestWithStringPromotion( DecimalType(2, 1), IntegerType, Some(DecimalType(11, 1))) widenTestWithStringPromotion( DecimalType(2, 1), LongType, Some(DecimalType(21, 1))) // ArrayType widenTestWithStringPromotion( ArrayType(ShortType, containsNull = true), ArrayType(DoubleType, containsNull = false), Some(ArrayType(DoubleType, containsNull = true))) widenTestWithStringPromotion( ArrayType(TimestampType, containsNull = false), ArrayType(StringType, containsNull = true), Some(ArrayType(StringType, containsNull = true))) widenTestWithStringPromotion( ArrayType(ArrayType(IntegerType), containsNull = false), ArrayType(ArrayType(LongType), containsNull = false), Some(ArrayType(ArrayType(LongType), containsNull = false))) widenTestWithStringPromotion( ArrayType(MapType(IntegerType, FloatType), containsNull = false), ArrayType(MapType(LongType, DoubleType), containsNull = false), Some(ArrayType(MapType(LongType, DoubleType), containsNull = false))) widenTestWithStringPromotion( ArrayType(new StructType().add("num", ShortType), containsNull = false), ArrayType(new StructType().add("num", LongType), containsNull = false), Some(ArrayType(new 
StructType().add("num", LongType), containsNull = false))) widenTestWithStringPromotion( ArrayType(IntegerType, containsNull = false), ArrayType(DecimalType.IntDecimal, containsNull = false), Some(ArrayType(DecimalType.IntDecimal, containsNull = true))) // MapType widenTestWithStringPromotion( MapType(ShortType, TimestampType, valueContainsNull = true), MapType(DoubleType, StringType, valueContainsNull = false), Some(MapType(DoubleType, StringType, valueContainsNull = true))) widenTestWithStringPromotion( MapType(IntegerType, ArrayType(TimestampType), valueContainsNull = false), MapType(LongType, ArrayType(StringType), valueContainsNull = true), Some(MapType(LongType, ArrayType(StringType), valueContainsNull = true))) widenTestWithStringPromotion( MapType(IntegerType, MapType(ShortType, TimestampType), valueContainsNull = false), MapType(LongType, MapType(DoubleType, StringType), valueContainsNull = false), Some(MapType(LongType, MapType(DoubleType, StringType), valueContainsNull = false))) widenTestWithStringPromotion( MapType(IntegerType, new StructType().add("num", ShortType), valueContainsNull = false), MapType(LongType, new StructType().add("num", LongType), valueContainsNull = false), Some(MapType(LongType, new StructType().add("num", LongType), valueContainsNull = false))) widenTestWithStringPromotion( MapType(StringType, IntegerType, valueContainsNull = false), MapType(StringType, DecimalType.IntDecimal, valueContainsNull = false), Some(MapType(StringType, DecimalType.IntDecimal, valueContainsNull = true))) widenTestWithStringPromotion( MapType(IntegerType, StringType, valueContainsNull = false), MapType(DecimalType.IntDecimal, StringType, valueContainsNull = false), None) // StructType widenTestWithStringPromotion( new StructType() .add("num", ShortType, nullable = true).add("ts", StringType, nullable = false), new StructType() .add("num", DoubleType, nullable = false).add("ts", TimestampType, nullable = true), Some(new StructType() .add("num", DoubleType, 
nullable = true).add("ts", StringType, nullable = true))) widenTestWithStringPromotion( new StructType() .add("arr", ArrayType(ShortType, containsNull = false), nullable = false), new StructType() .add("arr", ArrayType(DoubleType, containsNull = true), nullable = false), Some(new StructType() .add("arr", ArrayType(DoubleType, containsNull = true), nullable = false))) widenTestWithStringPromotion( new StructType() .add("map", MapType(ShortType, TimestampType, valueContainsNull = true), nullable = false), new StructType() .add("map", MapType(DoubleType, StringType, valueContainsNull = false), nullable = false), Some(new StructType() .add("map", MapType(DoubleType, StringType, valueContainsNull = true), nullable = false))) widenTestWithStringPromotion( new StructType().add("num", IntegerType, nullable = false), new StructType().add("num", DecimalType.IntDecimal, nullable = false), Some(new StructType().add("num", DecimalType.IntDecimal, nullable = true))) widenTestWithStringPromotion( new StructType().add("num", IntegerType), new StructType().add("num", LongType).add("str", StringType), None) widenTestWithoutStringPromotion( new StructType().add("num", IntegerType), new StructType().add("num", LongType).add("str", StringType), None) withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") { widenTestWithStringPromotion( new StructType().add("a", IntegerType), new StructType().add("A", LongType), None) widenTestWithoutStringPromotion( new StructType().add("a", IntegerType), new StructType().add("A", LongType), None) } withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") { widenTestWithStringPromotion( new StructType().add("a", IntegerType), new StructType().add("A", LongType), Some(new StructType().add("a", LongType)), isSymmetric = false) widenTestWithoutStringPromotion( new StructType().add("a", IntegerType), new StructType().add("A", LongType), Some(new StructType().add("a", LongType)), isSymmetric = false) } // Without string promotion 
widenTestWithoutStringPromotion(IntegerType, StringType, None) widenTestWithoutStringPromotion(StringType, TimestampType, None) widenTestWithoutStringPromotion(ArrayType(LongType), ArrayType(StringType), None) widenTestWithoutStringPromotion(ArrayType(StringType), ArrayType(TimestampType), None) widenTestWithoutStringPromotion( MapType(LongType, IntegerType), MapType(StringType, IntegerType), None) widenTestWithoutStringPromotion( MapType(IntegerType, LongType), MapType(IntegerType, StringType), None) widenTestWithoutStringPromotion( MapType(StringType, IntegerType), MapType(TimestampType, IntegerType), None) widenTestWithoutStringPromotion( MapType(IntegerType, StringType), MapType(IntegerType, TimestampType), None) widenTestWithoutStringPromotion( new StructType().add("a", IntegerType), new StructType().add("a", StringType), None) widenTestWithoutStringPromotion( new StructType().add("a", StringType), new StructType().add("a", IntegerType), None) // String promotion widenTestWithStringPromotion(IntegerType, StringType, Some(StringType)) widenTestWithStringPromotion(StringType, TimestampType, Some(StringType)) widenTestWithStringPromotion( ArrayType(LongType), ArrayType(StringType), Some(ArrayType(StringType))) widenTestWithStringPromotion( ArrayType(StringType), ArrayType(TimestampType), Some(ArrayType(StringType))) widenTestWithStringPromotion( MapType(LongType, IntegerType), MapType(StringType, IntegerType), Some(MapType(StringType, IntegerType))) widenTestWithStringPromotion( MapType(IntegerType, LongType), MapType(IntegerType, StringType), Some(MapType(IntegerType, StringType))) widenTestWithStringPromotion( MapType(StringType, IntegerType), MapType(TimestampType, IntegerType), Some(MapType(StringType, IntegerType))) widenTestWithStringPromotion( MapType(IntegerType, StringType), MapType(IntegerType, TimestampType), Some(MapType(IntegerType, StringType))) widenTestWithStringPromotion( new StructType().add("a", IntegerType), new StructType().add("a", 
StringType), Some(new StructType().add("a", StringType))) widenTestWithStringPromotion( new StructType().add("a", StringType), new StructType().add("a", IntegerType), Some(new StructType().add("a", StringType))) } private def ruleTest(rule: Rule[LogicalPlan], initial: Expression, transformed: Expression) { ruleTest(Seq(rule), initial, transformed) } private def ruleTest( rules: Seq[Rule[LogicalPlan]], initial: Expression, transformed: Expression): Unit = { val testRelation = LocalRelation(AttributeReference("a", IntegerType)()) val analyzer = new RuleExecutor[LogicalPlan] { override val batches = Seq(Batch("Resolution", FixedPoint(3), rules: _*)) } comparePlans( analyzer.execute(Project(Seq(Alias(initial, "a")()), testRelation)), Project(Seq(Alias(transformed, "a")()), testRelation)) } test("cast NullType for expressions that implement ExpectsInputTypes") { import TypeCoercionSuite._ ruleTest(new TypeCoercion.ImplicitTypeCasts(conf), AnyTypeUnaryExpression(Literal.create(null, NullType)), AnyTypeUnaryExpression(Literal.create(null, NullType))) ruleTest(new TypeCoercion.ImplicitTypeCasts(conf), NumericTypeUnaryExpression(Literal.create(null, NullType)), NumericTypeUnaryExpression(Literal.create(null, DoubleType))) } test("cast NullType for binary operators") { import TypeCoercionSuite._ ruleTest(new TypeCoercion.ImplicitTypeCasts(conf), AnyTypeBinaryOperator(Literal.create(null, NullType), Literal.create(null, NullType)), AnyTypeBinaryOperator(Literal.create(null, NullType), Literal.create(null, NullType))) ruleTest(new TypeCoercion.ImplicitTypeCasts(conf), NumericTypeBinaryOperator(Literal.create(null, NullType), Literal.create(null, NullType)), NumericTypeBinaryOperator(Literal.create(null, DoubleType), Literal.create(null, DoubleType))) } test("coalesce casts") { val rule = TypeCoercion.FunctionArgumentConversion val intLit = Literal(1) val longLit = Literal.create(1L) val doubleLit = Literal(1.0) val stringLit = Literal.create("c", StringType) val nullLit = 
Literal.create(null, NullType) val floatNullLit = Literal.create(null, FloatType) val floatLit = Literal.create(1.0f, FloatType) val timestampLit = Literal.create("2017-04-12", TimestampType) val decimalLit = Literal(new java.math.BigDecimal("1000000000000000000000")) val tsArrayLit = Literal(Array(new Timestamp(System.currentTimeMillis()))) val strArrayLit = Literal(Array("c")) val intArrayLit = Literal(Array(1)) ruleTest(rule, Coalesce(Seq(doubleLit, intLit, floatLit)), Coalesce(Seq(doubleLit, Cast(intLit, DoubleType), Cast(floatLit, DoubleType)))) ruleTest(rule, Coalesce(Seq(longLit, intLit, decimalLit)), Coalesce(Seq(Cast(longLit, DecimalType(22, 0)), Cast(intLit, DecimalType(22, 0)), decimalLit))) ruleTest(rule, Coalesce(Seq(nullLit, intLit)), Coalesce(Seq(Cast(nullLit, IntegerType), intLit))) ruleTest(rule, Coalesce(Seq(timestampLit, stringLit)), Coalesce(Seq(Cast(timestampLit, StringType), stringLit))) ruleTest(rule, Coalesce(Seq(nullLit, floatNullLit, intLit)), Coalesce(Seq(Cast(nullLit, FloatType), floatNullLit, Cast(intLit, FloatType)))) ruleTest(rule, Coalesce(Seq(nullLit, intLit, decimalLit, doubleLit)), Coalesce(Seq(Cast(nullLit, DoubleType), Cast(intLit, DoubleType), Cast(decimalLit, DoubleType), doubleLit))) ruleTest(rule, Coalesce(Seq(nullLit, floatNullLit, doubleLit, stringLit)), Coalesce(Seq(Cast(nullLit, StringType), Cast(floatNullLit, StringType), Cast(doubleLit, StringType), stringLit))) ruleTest(rule, Coalesce(Seq(timestampLit, intLit, stringLit)), Coalesce(Seq(Cast(timestampLit, StringType), Cast(intLit, StringType), stringLit))) ruleTest(rule, Coalesce(Seq(tsArrayLit, intArrayLit, strArrayLit)), Coalesce(Seq(Cast(tsArrayLit, ArrayType(StringType)), Cast(intArrayLit, ArrayType(StringType)), strArrayLit))) } test("CreateArray casts") { ruleTest(TypeCoercion.FunctionArgumentConversion, CreateArray(Literal(1.0) :: Literal(1) :: Literal.create(1.0, FloatType) :: Nil), CreateArray(Literal(1.0) :: Cast(Literal(1), DoubleType) :: 
Cast(Literal.create(1.0, FloatType), DoubleType) :: Nil)) ruleTest(TypeCoercion.FunctionArgumentConversion, CreateArray(Literal(1.0) :: Literal(1) :: Literal("a") :: Nil), CreateArray(Cast(Literal(1.0), StringType) :: Cast(Literal(1), StringType) :: Literal("a") :: Nil)) ruleTest(TypeCoercion.FunctionArgumentConversion, CreateArray(Literal.create(null, DecimalType(5, 3)) :: Literal(1) :: Nil), CreateArray(Literal.create(null, DecimalType(5, 3)).cast(DecimalType(13, 3)) :: Literal(1).cast(DecimalType(13, 3)) :: Nil)) ruleTest(TypeCoercion.FunctionArgumentConversion, CreateArray(Literal.create(null, DecimalType(5, 3)) :: Literal.create(null, DecimalType(22, 10)) :: Literal.create(null, DecimalType(38, 38)) :: Nil), CreateArray(Literal.create(null, DecimalType(5, 3)).cast(DecimalType(38, 38)) :: Literal.create(null, DecimalType(22, 10)).cast(DecimalType(38, 38)) :: Literal.create(null, DecimalType(38, 38)) :: Nil)) } test("CreateMap casts") { // type coercion for map keys ruleTest(TypeCoercion.FunctionArgumentConversion, CreateMap(Literal(1) :: Literal("a") :: Literal.create(2.0, FloatType) :: Literal("b") :: Nil), CreateMap(Cast(Literal(1), FloatType) :: Literal("a") :: Literal.create(2.0, FloatType) :: Literal("b") :: Nil)) ruleTest(TypeCoercion.FunctionArgumentConversion, CreateMap(Literal.create(null, DecimalType(5, 3)) :: Literal("a") :: Literal.create(2.0, FloatType) :: Literal("b") :: Nil), CreateMap(Literal.create(null, DecimalType(5, 3)).cast(DoubleType) :: Literal("a") :: Literal.create(2.0, FloatType).cast(DoubleType) :: Literal("b") :: Nil)) // type coercion for map values ruleTest(TypeCoercion.FunctionArgumentConversion, CreateMap(Literal(1) :: Literal("a") :: Literal(2) :: Literal(3.0) :: Nil), CreateMap(Literal(1) :: Literal("a") :: Literal(2) :: Cast(Literal(3.0), StringType) :: Nil)) ruleTest(TypeCoercion.FunctionArgumentConversion, CreateMap(Literal(1) :: Literal.create(null, DecimalType(38, 0)) :: Literal(2) :: Literal.create(null, DecimalType(38, 
38)) :: Nil), CreateMap(Literal(1) :: Literal.create(null, DecimalType(38, 0)).cast(DecimalType(38, 38)) :: Literal(2) :: Literal.create(null, DecimalType(38, 38)) :: Nil)) // type coercion for both map keys and values ruleTest(TypeCoercion.FunctionArgumentConversion, CreateMap(Literal(1) :: Literal("a") :: Literal(2.0) :: Literal(3.0) :: Nil), CreateMap(Cast(Literal(1), DoubleType) :: Literal("a") :: Literal(2.0) :: Cast(Literal(3.0), StringType) :: Nil)) } test("greatest/least cast") { for (operator <- Seq[(Seq[Expression] => Expression)](Greatest, Least)) { ruleTest(TypeCoercion.FunctionArgumentConversion, operator(Literal(1.0) :: Literal(1) :: Literal.create(1.0, FloatType) :: Nil), operator(Literal(1.0) :: Cast(Literal(1), DoubleType) :: Cast(Literal.create(1.0, FloatType), DoubleType) :: Nil)) ruleTest(TypeCoercion.FunctionArgumentConversion, operator(Literal(1L) :: Literal(1) :: Literal(new java.math.BigDecimal("1000000000000000000000")) :: Nil), operator(Cast(Literal(1L), DecimalType(22, 0)) :: Cast(Literal(1), DecimalType(22, 0)) :: Literal(new java.math.BigDecimal("1000000000000000000000")) :: Nil)) ruleTest(TypeCoercion.FunctionArgumentConversion, operator(Literal(1.0) :: Literal.create(null, DecimalType(10, 5)) :: Literal(1) :: Nil), operator(Literal(1.0) :: Literal.create(null, DecimalType(10, 5)).cast(DoubleType) :: Literal(1).cast(DoubleType) :: Nil)) ruleTest(TypeCoercion.FunctionArgumentConversion, operator(Literal.create(null, DecimalType(15, 0)) :: Literal.create(null, DecimalType(10, 5)) :: Literal(1) :: Nil), operator(Literal.create(null, DecimalType(15, 0)).cast(DecimalType(20, 5)) :: Literal.create(null, DecimalType(10, 5)).cast(DecimalType(20, 5)) :: Literal(1).cast(DecimalType(20, 5)) :: Nil)) ruleTest(TypeCoercion.FunctionArgumentConversion, operator(Literal.create(2L, LongType) :: Literal(1) :: Literal.create(null, DecimalType(10, 5)) :: Nil), operator(Literal.create(2L, LongType).cast(DecimalType(25, 5)) :: 
Literal(1).cast(DecimalType(25, 5)) :: Literal.create(null, DecimalType(10, 5)).cast(DecimalType(25, 5)) :: Nil)) } } test("nanvl casts") { ruleTest(TypeCoercion.FunctionArgumentConversion, NaNvl(Literal.create(1.0f, FloatType), Literal.create(1.0, DoubleType)), NaNvl(Cast(Literal.create(1.0f, FloatType), DoubleType), Literal.create(1.0, DoubleType))) ruleTest(TypeCoercion.FunctionArgumentConversion, NaNvl(Literal.create(1.0, DoubleType), Literal.create(1.0f, FloatType)), NaNvl(Literal.create(1.0, DoubleType), Cast(Literal.create(1.0f, FloatType), DoubleType))) ruleTest(TypeCoercion.FunctionArgumentConversion, NaNvl(Literal.create(1.0, DoubleType), Literal.create(1.0, DoubleType)), NaNvl(Literal.create(1.0, DoubleType), Literal.create(1.0, DoubleType))) ruleTest(TypeCoercion.FunctionArgumentConversion, NaNvl(Literal.create(1.0f, FloatType), Literal.create(null, NullType)), NaNvl(Literal.create(1.0f, FloatType), Cast(Literal.create(null, NullType), FloatType))) ruleTest(TypeCoercion.FunctionArgumentConversion, NaNvl(Literal.create(1.0, DoubleType), Literal.create(null, NullType)), NaNvl(Literal.create(1.0, DoubleType), Cast(Literal.create(null, NullType), DoubleType))) } test("type coercion for If") { val rule = TypeCoercion.IfCoercion val intLit = Literal(1) val doubleLit = Literal(1.0) val trueLit = Literal.create(true, BooleanType) val falseLit = Literal.create(false, BooleanType) val stringLit = Literal.create("c", StringType) val floatLit = Literal.create(1.0f, FloatType) val timestampLit = Literal.create("2017-04-12", TimestampType) val decimalLit = Literal(new java.math.BigDecimal("1000000000000000000000")) ruleTest(rule, If(Literal(true), Literal(1), Literal(1L)), If(Literal(true), Cast(Literal(1), LongType), Literal(1L))) ruleTest(rule, If(Literal.create(null, NullType), Literal(1), Literal(1)), If(Literal.create(null, BooleanType), Literal(1), Literal(1))) ruleTest(rule, If(AssertTrue(trueLit), Literal(1), Literal(2)), If(Cast(AssertTrue(trueLit), 
BooleanType), Literal(1), Literal(2))) ruleTest(rule, If(AssertTrue(falseLit), Literal(1), Literal(2)), If(Cast(AssertTrue(falseLit), BooleanType), Literal(1), Literal(2))) ruleTest(rule, If(trueLit, intLit, doubleLit), If(trueLit, Cast(intLit, DoubleType), doubleLit)) ruleTest(rule, If(trueLit, floatLit, doubleLit), If(trueLit, Cast(floatLit, DoubleType), doubleLit)) ruleTest(rule, If(trueLit, floatLit, decimalLit), If(trueLit, Cast(floatLit, DoubleType), Cast(decimalLit, DoubleType))) ruleTest(rule, If(falseLit, stringLit, doubleLit), If(falseLit, stringLit, Cast(doubleLit, StringType))) ruleTest(rule, If(trueLit, timestampLit, stringLit), If(trueLit, Cast(timestampLit, StringType), stringLit)) } test("type coercion for CaseKeyWhen") { ruleTest(new TypeCoercion.ImplicitTypeCasts(conf), CaseKeyWhen(Literal(1.toShort), Seq(Literal(1), Literal("a"))), CaseKeyWhen(Cast(Literal(1.toShort), IntegerType), Seq(Literal(1), Literal("a"))) ) ruleTest(TypeCoercion.CaseWhenCoercion, CaseKeyWhen(Literal(true), Seq(Literal(1), Literal("a"))), CaseKeyWhen(Literal(true), Seq(Literal(1), Literal("a"))) ) ruleTest(TypeCoercion.CaseWhenCoercion, CaseWhen(Seq((Literal(true), Literal(1.2))), Literal.create(1, DecimalType(7, 2))), CaseWhen(Seq((Literal(true), Literal(1.2))), Cast(Literal.create(1, DecimalType(7, 2)), DoubleType)) ) ruleTest(TypeCoercion.CaseWhenCoercion, CaseWhen(Seq((Literal(true), Literal(100L))), Literal.create(1, DecimalType(7, 2))), CaseWhen(Seq((Literal(true), Cast(Literal(100L), DecimalType(22, 2)))), Cast(Literal.create(1, DecimalType(7, 2)), DecimalType(22, 2))) ) } test("type coercion for Stack") { val rule = TypeCoercion.StackCoercion ruleTest(rule, Stack(Seq(Literal(3), Literal(1), Literal(2), Literal(null))), Stack(Seq(Literal(3), Literal(1), Literal(2), Literal.create(null, IntegerType)))) ruleTest(rule, Stack(Seq(Literal(3), Literal(1.0), Literal(null), Literal(3.0))), Stack(Seq(Literal(3), Literal(1.0), Literal.create(null, DoubleType), Literal(3.0)))) 
ruleTest(rule, Stack(Seq(Literal(3), Literal(null), Literal("2"), Literal("3"))), Stack(Seq(Literal(3), Literal.create(null, StringType), Literal("2"), Literal("3")))) ruleTest(rule, Stack(Seq(Literal(3), Literal(null), Literal(null), Literal(null))), Stack(Seq(Literal(3), Literal(null), Literal(null), Literal(null)))) ruleTest(rule, Stack(Seq(Literal(2), Literal(1), Literal("2"), Literal(null), Literal(null))), Stack(Seq(Literal(2), Literal(1), Literal("2"), Literal.create(null, IntegerType), Literal.create(null, StringType)))) ruleTest(rule, Stack(Seq(Literal(2), Literal(1), Literal(null), Literal(null), Literal("2"))), Stack(Seq(Literal(2), Literal(1), Literal.create(null, StringType), Literal.create(null, IntegerType), Literal("2")))) ruleTest(rule, Stack(Seq(Literal(2), Literal(null), Literal(1), Literal("2"), Literal(null))), Stack(Seq(Literal(2), Literal.create(null, StringType), Literal(1), Literal("2"), Literal.create(null, IntegerType)))) ruleTest(rule, Stack(Seq(Literal(2), Literal(null), Literal(null), Literal(1), Literal("2"))), Stack(Seq(Literal(2), Literal.create(null, IntegerType), Literal.create(null, StringType), Literal(1), Literal("2")))) ruleTest(rule, Stack(Seq(Subtract(Literal(3), Literal(1)), Literal(1), Literal("2"), Literal(null), Literal(null))), Stack(Seq(Subtract(Literal(3), Literal(1)), Literal(1), Literal("2"), Literal.create(null, IntegerType), Literal.create(null, StringType)))) } test("type coercion for Concat") { val rule = TypeCoercion.ConcatCoercion(conf) ruleTest(rule, Concat(Seq(Literal("ab"), Literal("cde"))), Concat(Seq(Literal("ab"), Literal("cde")))) ruleTest(rule, Concat(Seq(Literal(null), Literal("abc"))), Concat(Seq(Cast(Literal(null), StringType), Literal("abc")))) ruleTest(rule, Concat(Seq(Literal(1), Literal("234"))), Concat(Seq(Cast(Literal(1), StringType), Literal("234")))) ruleTest(rule, Concat(Seq(Literal("1"), Literal("234".getBytes()))), Concat(Seq(Literal("1"), Cast(Literal("234".getBytes()), StringType)))) 
ruleTest(rule, Concat(Seq(Literal(1L), Literal(2.toByte), Literal(0.1))), Concat(Seq(Cast(Literal(1L), StringType), Cast(Literal(2.toByte), StringType), Cast(Literal(0.1), StringType)))) ruleTest(rule, Concat(Seq(Literal(true), Literal(0.1f), Literal(3.toShort))), Concat(Seq(Cast(Literal(true), StringType), Cast(Literal(0.1f), StringType), Cast(Literal(3.toShort), StringType)))) ruleTest(rule, Concat(Seq(Literal(1L), Literal(0.1))), Concat(Seq(Cast(Literal(1L), StringType), Cast(Literal(0.1), StringType)))) ruleTest(rule, Concat(Seq(Literal(Decimal(10)))), Concat(Seq(Cast(Literal(Decimal(10)), StringType)))) ruleTest(rule, Concat(Seq(Literal(BigDecimal.valueOf(10)))), Concat(Seq(Cast(Literal(BigDecimal.valueOf(10)), StringType)))) ruleTest(rule, Concat(Seq(Literal(java.math.BigDecimal.valueOf(10)))), Concat(Seq(Cast(Literal(java.math.BigDecimal.valueOf(10)), StringType)))) ruleTest(rule, Concat(Seq(Literal(new java.sql.Date(0)), Literal(new Timestamp(0)))), Concat(Seq(Cast(Literal(new java.sql.Date(0)), StringType), Cast(Literal(new Timestamp(0)), StringType)))) withSQLConf("spark.sql.function.concatBinaryAsString" -> "true") { ruleTest(rule, Concat(Seq(Literal("123".getBytes), Literal("456".getBytes))), Concat(Seq(Cast(Literal("123".getBytes), StringType), Cast(Literal("456".getBytes), StringType)))) } withSQLConf("spark.sql.function.concatBinaryAsString" -> "false") { ruleTest(rule, Concat(Seq(Literal("123".getBytes), Literal("456".getBytes))), Concat(Seq(Literal("123".getBytes), Literal("456".getBytes)))) } } test("type coercion for Elt") { val rule = TypeCoercion.EltCoercion(conf) ruleTest(rule, Elt(Seq(Literal(1), Literal("ab"), Literal("cde"))), Elt(Seq(Literal(1), Literal("ab"), Literal("cde")))) ruleTest(rule, Elt(Seq(Literal(1.toShort), Literal("ab"), Literal("cde"))), Elt(Seq(Cast(Literal(1.toShort), IntegerType), Literal("ab"), Literal("cde")))) ruleTest(rule, Elt(Seq(Literal(2), Literal(null), Literal("abc"))), Elt(Seq(Literal(2), Cast(Literal(null), 
StringType), Literal("abc")))) ruleTest(rule, Elt(Seq(Literal(2), Literal(1), Literal("234"))), Elt(Seq(Literal(2), Cast(Literal(1), StringType), Literal("234")))) ruleTest(rule, Elt(Seq(Literal(3), Literal(1L), Literal(2.toByte), Literal(0.1))), Elt(Seq(Literal(3), Cast(Literal(1L), StringType), Cast(Literal(2.toByte), StringType), Cast(Literal(0.1), StringType)))) ruleTest(rule, Elt(Seq(Literal(2), Literal(true), Literal(0.1f), Literal(3.toShort))), Elt(Seq(Literal(2), Cast(Literal(true), StringType), Cast(Literal(0.1f), StringType), Cast(Literal(3.toShort), StringType)))) ruleTest(rule, Elt(Seq(Literal(1), Literal(1L), Literal(0.1))), Elt(Seq(Literal(1), Cast(Literal(1L), StringType), Cast(Literal(0.1), StringType)))) ruleTest(rule, Elt(Seq(Literal(1), Literal(Decimal(10)))), Elt(Seq(Literal(1), Cast(Literal(Decimal(10)), StringType)))) ruleTest(rule, Elt(Seq(Literal(1), Literal(BigDecimal.valueOf(10)))), Elt(Seq(Literal(1), Cast(Literal(BigDecimal.valueOf(10)), StringType)))) ruleTest(rule, Elt(Seq(Literal(1), Literal(java.math.BigDecimal.valueOf(10)))), Elt(Seq(Literal(1), Cast(Literal(java.math.BigDecimal.valueOf(10)), StringType)))) ruleTest(rule, Elt(Seq(Literal(2), Literal(new java.sql.Date(0)), Literal(new Timestamp(0)))), Elt(Seq(Literal(2), Cast(Literal(new java.sql.Date(0)), StringType), Cast(Literal(new Timestamp(0)), StringType)))) withSQLConf("spark.sql.function.eltOutputAsString" -> "true") { ruleTest(rule, Elt(Seq(Literal(1), Literal("123".getBytes), Literal("456".getBytes))), Elt(Seq(Literal(1), Cast(Literal("123".getBytes), StringType), Cast(Literal("456".getBytes), StringType)))) } withSQLConf("spark.sql.function.eltOutputAsString" -> "false") { ruleTest(rule, Elt(Seq(Literal(1), Literal("123".getBytes), Literal("456".getBytes))), Elt(Seq(Literal(1), Literal("123".getBytes), Literal("456".getBytes)))) } } test("BooleanEquality type cast") { val be = TypeCoercion.BooleanEquality // Use something more than a literal to avoid triggering the 
// simplification rules.
val one = Add(Literal(Decimal(1)), Literal(Decimal(0)))

// Boolean vs. non-literal numeric: the boolean side gets cast to the
// numeric operand's type (no folding happens because `one` is not a literal).
ruleTest(be,
  EqualTo(Literal(true), one),
  EqualTo(Cast(Literal(true), one.dataType), one)
)
ruleTest(be,
  EqualTo(one, Literal(true)),
  EqualTo(one, Cast(Literal(true), one.dataType))
)
ruleTest(be,
  EqualNullSafe(Literal(true), one),
  EqualNullSafe(Cast(Literal(true), one.dataType), one)
)
ruleTest(be,
  EqualNullSafe(one, Literal(true)),
  EqualNullSafe(one, Cast(Literal(true), one.dataType))
)
}

// Boolean vs. numeric *literal*: BooleanEquality folds the comparison
// instead of inserting a cast — `true = 1` becomes `true`, `true = 0`
// becomes `Not(true)`, and the null-safe variants are additionally
// guarded with IsNotNull on the boolean side.
test("BooleanEquality simplification") {
  val be = TypeCoercion.BooleanEquality

  ruleTest(be,
    EqualTo(Literal(true), Literal(1)),
    Literal(true)
  )
  ruleTest(be,
    EqualTo(Literal(true), Literal(0)),
    Not(Literal(true))
  )
  ruleTest(be,
    EqualNullSafe(Literal(true), Literal(1)),
    And(IsNotNull(Literal(true)), Literal(true))
  )
  ruleTest(be,
    EqualNullSafe(Literal(true), Literal(0)),
    And(IsNotNull(Literal(true)), Not(Literal(true)))
  )
  // Same folding applies across the numeric literal types: long,
  // java BigDecimal, scala BigDecimal, and Catalyst Decimal.
  ruleTest(be,
    EqualTo(Literal(true), Literal(1L)),
    Literal(true)
  )
  ruleTest(be,
    EqualTo(Literal(new java.math.BigDecimal(1)), Literal(true)),
    Literal(true)
  )
  ruleTest(be,
    EqualTo(Literal(BigDecimal(0)), Literal(true)),
    Not(Literal(true))
  )
  ruleTest(be,
    EqualTo(Literal(Decimal(1)), Literal(true)),
    Literal(true)
  )
  ruleTest(be,
    EqualTo(Literal.create(Decimal(1), DecimalType(8, 0)), Literal(true)),
    Literal(true)
  )
}

// Shared helper: assert that the plan's output attributes carry exactly
// the expected data types, position by position.
private def checkOutput(logical: LogicalPlan, expectTypes: Seq[DataType]): Unit = {
  logical.output.zip(expectTypes).foreach { case (attr, dt) =>
    assert(attr.dataType === dt)
  }
}

private val timeZoneResolver = ResolveTimeZone(new SQLConf)

// Runs WidenSetOperationTypes and then ResolveTimeZone over the result
// (presumably so that casts inserted by the widening rule get their time
// zone filled in and become resolvable — confirm against the rule impl).
private def widenSetOperationTypes(plan: LogicalPlan): LogicalPlan = {
  timeZoneResolver(TypeCoercion.WidenSetOperationTypes(plan))
}

test("WidenSetOperationTypes for except and intersect") {
  val firstTable = LocalRelation(
    AttributeReference("i", IntegerType)(),
    AttributeReference("u", DecimalType.SYSTEM_DEFAULT)(),
    AttributeReference("b", ByteType)(),
    AttributeReference("d", DoubleType)())
  val secondTable = LocalRelation(
    AttributeReference("s",
StringType)(), AttributeReference("d", DecimalType(2, 1))(), AttributeReference("f", FloatType)(), AttributeReference("l", LongType)()) val expectedTypes = Seq(StringType, DecimalType.SYSTEM_DEFAULT, FloatType, DoubleType) val r1 = widenSetOperationTypes( Except(firstTable, secondTable, isAll = false)).asInstanceOf[Except] val r2 = widenSetOperationTypes( Intersect(firstTable, secondTable, isAll = false)).asInstanceOf[Intersect] checkOutput(r1.left, expectedTypes) checkOutput(r1.right, expectedTypes) checkOutput(r2.left, expectedTypes) checkOutput(r2.right, expectedTypes) // Check if a Project is added assert(r1.left.isInstanceOf[Project]) assert(r1.right.isInstanceOf[Project]) assert(r2.left.isInstanceOf[Project]) assert(r2.right.isInstanceOf[Project]) } test("WidenSetOperationTypes for union") { val firstTable = LocalRelation( AttributeReference("i", IntegerType)(), AttributeReference("u", DecimalType.SYSTEM_DEFAULT)(), AttributeReference("b", ByteType)(), AttributeReference("d", DoubleType)()) val secondTable = LocalRelation( AttributeReference("s", StringType)(), AttributeReference("d", DecimalType(2, 1))(), AttributeReference("f", FloatType)(), AttributeReference("l", LongType)()) val thirdTable = LocalRelation( AttributeReference("m", StringType)(), AttributeReference("n", DecimalType.SYSTEM_DEFAULT)(), AttributeReference("p", FloatType)(), AttributeReference("q", DoubleType)()) val forthTable = LocalRelation( AttributeReference("m", StringType)(), AttributeReference("n", DecimalType.SYSTEM_DEFAULT)(), AttributeReference("p", ByteType)(), AttributeReference("q", DoubleType)()) val expectedTypes = Seq(StringType, DecimalType.SYSTEM_DEFAULT, FloatType, DoubleType) val unionRelation = widenSetOperationTypes( Union(firstTable :: secondTable :: thirdTable :: forthTable :: Nil)).asInstanceOf[Union] assert(unionRelation.children.length == 4) checkOutput(unionRelation.children.head, expectedTypes) checkOutput(unionRelation.children(1), expectedTypes) 
checkOutput(unionRelation.children(2), expectedTypes) checkOutput(unionRelation.children(3), expectedTypes) assert(unionRelation.children.head.isInstanceOf[Project]) assert(unionRelation.children(1).isInstanceOf[Project]) assert(unionRelation.children(2).isInstanceOf[Project]) assert(unionRelation.children(3).isInstanceOf[Project]) } test("Transform Decimal precision/scale for union except and intersect") { def checkOutput(logical: LogicalPlan, expectTypes: Seq[DataType]): Unit = { logical.output.zip(expectTypes).foreach { case (attr, dt) => assert(attr.dataType === dt) } } val left1 = LocalRelation( AttributeReference("l", DecimalType(10, 8))()) val right1 = LocalRelation( AttributeReference("r", DecimalType(5, 5))()) val expectedType1 = Seq(DecimalType(10, 8)) val r1 = widenSetOperationTypes(Union(left1, right1)).asInstanceOf[Union] val r2 = widenSetOperationTypes( Except(left1, right1, isAll = false)).asInstanceOf[Except] val r3 = widenSetOperationTypes( Intersect(left1, right1, isAll = false)).asInstanceOf[Intersect] checkOutput(r1.children.head, expectedType1) checkOutput(r1.children.last, expectedType1) checkOutput(r2.left, expectedType1) checkOutput(r2.right, expectedType1) checkOutput(r3.left, expectedType1) checkOutput(r3.right, expectedType1) val plan1 = LocalRelation(AttributeReference("l", DecimalType(10, 5))()) val rightTypes = Seq(ByteType, ShortType, IntegerType, LongType, FloatType, DoubleType) val expectedTypes = Seq(DecimalType(10, 5), DecimalType(10, 5), DecimalType(15, 5), DecimalType(25, 5), DoubleType, DoubleType) rightTypes.zip(expectedTypes).foreach { case (rType, expectedType) => val plan2 = LocalRelation( AttributeReference("r", rType)()) val r1 = widenSetOperationTypes(Union(plan1, plan2)).asInstanceOf[Union] val r2 = widenSetOperationTypes( Except(plan1, plan2, isAll = false)).asInstanceOf[Except] val r3 = widenSetOperationTypes( Intersect(plan1, plan2, isAll = false)).asInstanceOf[Intersect] checkOutput(r1.children.last, 
Seq(expectedType))
      checkOutput(r2.right, Seq(expectedType))
      checkOutput(r3.right, Seq(expectedType))

      // The same widening must apply when the operand order is flipped.
      val r4 = widenSetOperationTypes(Union(plan2, plan1)).asInstanceOf[Union]
      val r5 = widenSetOperationTypes(
        Except(plan2, plan1, isAll = false)).asInstanceOf[Except]
      val r6 = widenSetOperationTypes(
        Intersect(plan2, plan1, isAll = false)).asInstanceOf[Intersect]
      checkOutput(r4.children.last, Seq(expectedType))
      checkOutput(r5.left, Seq(expectedType))
      checkOutput(r6.left, Seq(expectedType))
    }
  }

  // DateTimeOperations rewrites date/timestamp/string +- interval into
  // TimeAdd/TimeSub wrapped in a Cast back to the non-interval operand's
  // type; interval-only arithmetic is left untouched.
  test("rule for date/timestamp operations") {
    val dateTimeOperations = TypeCoercion.DateTimeOperations
    val date = Literal(new java.sql.Date(0L))
    val timestamp = Literal(new Timestamp(0L))
    val interval = Literal(new CalendarInterval(0, 0))
    val str = Literal("2015-01-01")

    // Addition is commutative: interval may appear on either side.
    ruleTest(dateTimeOperations, Add(date, interval),
      Cast(TimeAdd(date, interval), DateType))
    ruleTest(dateTimeOperations, Add(interval, date),
      Cast(TimeAdd(date, interval), DateType))
    ruleTest(dateTimeOperations, Add(timestamp, interval),
      Cast(TimeAdd(timestamp, interval), TimestampType))
    ruleTest(dateTimeOperations, Add(interval, timestamp),
      Cast(TimeAdd(timestamp, interval), TimestampType))
    ruleTest(dateTimeOperations, Add(str, interval),
      Cast(TimeAdd(str, interval), StringType))
    ruleTest(dateTimeOperations, Add(interval, str),
      Cast(TimeAdd(str, interval), StringType))

    // Subtraction only supports the interval on the right-hand side.
    ruleTest(dateTimeOperations, Subtract(date, interval),
      Cast(TimeSub(date, interval), DateType))
    ruleTest(dateTimeOperations, Subtract(timestamp, interval),
      Cast(TimeSub(timestamp, interval), TimestampType))
    ruleTest(dateTimeOperations, Subtract(str, interval),
      Cast(TimeSub(str, interval), StringType))

    // interval operations should not be affected
    ruleTest(dateTimeOperations, Add(interval, interval),
      Add(interval, interval))
    ruleTest(dateTimeOperations, Subtract(interval, interval),
      Subtract(interval, interval))
  }

  /**
   * There are rules that need to not fire before child expressions get resolved.
* We use this test to make sure those rules do not fire early. */ test("make sure rules do not fire early") { // InConversion val inConversion = TypeCoercion.InConversion(conf) ruleTest(inConversion, In(UnresolvedAttribute("a"), Seq(Literal(1))), In(UnresolvedAttribute("a"), Seq(Literal(1))) ) ruleTest(inConversion, In(Literal("test"), Seq(UnresolvedAttribute("a"), Literal(1))), In(Literal("test"), Seq(UnresolvedAttribute("a"), Literal(1))) ) ruleTest(inConversion, In(Literal("a"), Seq(Literal(1), Literal("b"))), In(Cast(Literal("a"), StringType), Seq(Cast(Literal(1), StringType), Cast(Literal("b"), StringType))) ) } test("SPARK-15776 Divide expression's dataType should be casted to Double or Decimal " + "in aggregation function like sum") { val rules = Seq(FunctionArgumentConversion, Division) // Casts Integer to Double ruleTest(rules, sum(Divide(4, 3)), sum(Divide(Cast(4, DoubleType), Cast(3, DoubleType)))) // Left expression is Double, right expression is Int. Another rule ImplicitTypeCasts will // cast the right expression to Double. ruleTest(rules, sum(Divide(4.0, 3)), sum(Divide(4.0, 3))) // Left expression is Int, right expression is Double ruleTest(rules, sum(Divide(4, 3.0)), sum(Divide(Cast(4, DoubleType), Cast(3.0, DoubleType)))) // Casts Float to Double ruleTest( rules, sum(Divide(4.0f, 3)), sum(Divide(Cast(4.0f, DoubleType), Cast(3, DoubleType)))) // Left expression is Decimal, right expression is Int. Another rule DecimalPrecision will cast // the right expression to Decimal. 
ruleTest(rules, sum(Divide(Decimal(4.0), 3)), sum(Divide(Decimal(4.0), 3))) } test("SPARK-17117 null type coercion in divide") { val rules = Seq(FunctionArgumentConversion, Division, new ImplicitTypeCasts(conf)) val nullLit = Literal.create(null, NullType) ruleTest(rules, Divide(1L, nullLit), Divide(Cast(1L, DoubleType), Cast(nullLit, DoubleType))) ruleTest(rules, Divide(nullLit, 1L), Divide(Cast(nullLit, DoubleType), Cast(1L, DoubleType))) } test("binary comparison with string promotion") { val rule = TypeCoercion.PromoteStrings(conf) ruleTest(rule, GreaterThan(Literal("123"), Literal(1)), GreaterThan(Cast(Literal("123"), IntegerType), Literal(1))) ruleTest(rule, LessThan(Literal(true), Literal("123")), LessThan(Literal(true), Cast(Literal("123"), BooleanType))) ruleTest(rule, EqualTo(Literal(Array(1, 2)), Literal("123")), EqualTo(Literal(Array(1, 2)), Literal("123"))) ruleTest(rule, GreaterThan(Literal("1.5"), Literal(BigDecimal("0.5"))), GreaterThan(Cast(Literal("1.5"), DoubleType), Cast(Literal(BigDecimal("0.5")), DoubleType))) Seq(true, false).foreach { convertToTS => withSQLConf( "spark.sql.typeCoercion.compareDateTimestampInTimestamp" -> convertToTS.toString) { val date0301 = Literal(java.sql.Date.valueOf("2017-03-01")) val timestamp0301000000 = Literal(Timestamp.valueOf("2017-03-01 00:00:00")) val timestamp0301000001 = Literal(Timestamp.valueOf("2017-03-01 00:00:01")) if (convertToTS) { // `Date` should be treated as timestamp at 00:00:00 See SPARK-23549 ruleTest(rule, EqualTo(date0301, timestamp0301000000), EqualTo(Cast(date0301, TimestampType), timestamp0301000000)) ruleTest(rule, LessThan(date0301, timestamp0301000001), LessThan(Cast(date0301, TimestampType), timestamp0301000001)) } else { ruleTest(rule, LessThan(date0301, timestamp0301000000), LessThan(Cast(date0301, StringType), Cast(timestamp0301000000, StringType))) ruleTest(rule, LessThan(date0301, timestamp0301000001), LessThan(Cast(date0301, StringType), Cast(timestamp0301000001, StringType))) } } 
}
  }

  // WindowFrameCoercion should cast RANGE frame boundaries to the type of
  // the (single) ORDER BY expression when such a cast exists, and must
  // leave boundaries alone when it does not.
  test("cast WindowFrame boundaries to the type they operate upon") {
    // Can cast frame boundaries to order dataType.
    ruleTest(WindowFrameCoercion,
      windowSpec(
        Seq(UnresolvedAttribute("a")),
        Seq(SortOrder(Literal(1L), Ascending)),
        SpecifiedWindowFrame(RangeFrame, Literal(3), Literal(2147483648L))),
      windowSpec(
        Seq(UnresolvedAttribute("a")),
        Seq(SortOrder(Literal(1L), Ascending)),
        SpecifiedWindowFrame(RangeFrame, Cast(3, LongType), Literal(2147483648L)))
    )
    // Cannot cast frame boundaries to order dataType.
    ruleTest(WindowFrameCoercion,
      windowSpec(
        Seq(UnresolvedAttribute("a")),
        Seq(SortOrder(Literal.default(DateType), Ascending)),
        SpecifiedWindowFrame(RangeFrame, Literal(10.0), Literal(2147483648L))),
      windowSpec(
        Seq(UnresolvedAttribute("a")),
        Seq(SortOrder(Literal.default(DateType), Ascending)),
        SpecifiedWindowFrame(RangeFrame, Literal(10.0), Literal(2147483648L)))
    )
    // Should not cast SpecialFrameBoundary.
    ruleTest(WindowFrameCoercion,
      windowSpec(
        Seq(UnresolvedAttribute("a")),
        Seq(SortOrder(Literal(1L), Ascending)),
        SpecifiedWindowFrame(RangeFrame, CurrentRow, UnboundedFollowing)),
      windowSpec(
        Seq(UnresolvedAttribute("a")),
        Seq(SortOrder(Literal(1L), Ascending)),
        SpecifiedWindowFrame(RangeFrame, CurrentRow, UnboundedFollowing))
    )
  }
}

/**
 * Unevaluable expression stubs with fixed input-type expectations.
 * They declare an expected input type but no evaluation logic, so the
 * suite can observe where coercion rules insert implicit casts
 * (presumably used by tests earlier in this file; none are referenced
 * in this part of it).
 */
object TypeCoercionSuite {

  // Accepts any input type; coercion should never insert a cast for it.
  case class AnyTypeUnaryExpression(child: Expression)
    extends UnaryExpression with ExpectsInputTypes with Unevaluable {
    override def inputTypes: Seq[AbstractDataType] = Seq(AnyDataType)
    override def dataType: DataType = NullType
  }

  // Requires a numeric input; non-numeric children must be cast.
  case class NumericTypeUnaryExpression(child: Expression)
    extends UnaryExpression with ExpectsInputTypes with Unevaluable {
    override def inputTypes: Seq[AbstractDataType] = Seq(NumericType)
    override def dataType: DataType = NullType
  }

  // Binary operator accepting operands of any (common) type.
  case class AnyTypeBinaryOperator(left: Expression, right: Expression)
    extends BinaryOperator with Unevaluable {
    override def dataType: DataType = NullType
    override def inputType: AbstractDataType = AnyDataType
    override def symbol: String = "anytype"
  }

  // Binary operator requiring numeric operands on both sides.
  case class NumericTypeBinaryOperator(left: Expression, right: Expression)
    extends BinaryOperator with Unevaluable {
    override def dataType: DataType = NullType
    override def inputType: AbstractDataType = NumericType
    override def symbol: String = "numerictype"
  }
}
rikima/spark
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala
Scala
apache-2.0
70,039
package IFDS class BiDiTestHelper { val a = Node("a") val a1 = Node("a1") val a2 = Node("a2") val b = Node("b") val c = Node("c") val cs = Node("cs") val d = Node("d") val e = Node("e") val f = Node("f") val g = Node("g") val h = Node("h") val i = Node("i") val x = Node("x") val y = Node("y") val y1 = Node("y1") val y2 = Node("y2") val z = Node("z") val foo = Method("foo") val bar = Method("bar") def flow(source: String, targets: String*) = FlowFunc(1, source, targets: _*) def flow(times: Int, source: String, targets: String*) = FlowFunc(times, source, targets: _*) def happyPathBW: SimpleIFDSProblem = { // This demonstrates the flow // This test is copied from BiDiIFDSSolverTest.happyPath from Heros. val norCB = NormalEdge(c, b, Set()) val norBA = NormalEdge(b, a, Set(flow("0", "2"))) val retANull = ReturnEdge(None, a, None, foo, Set(flow("2"))) //////////// val normalEdges: Set[NormalEdge] = Set(norCB, norBA) val callEdges: Set[CallEdge] = Set() val callToReturnEdges: Set[CallToReturnEdge] = Set() val returnEdges: Set[ReturnEdge] = Set(retANull) val methodToStartPoints: Map[Method, Set[Node]] = Map( foo -> Set(c) ) val stmtToMethod: Map[Node, Method] = Map( a -> foo, b -> foo, c -> foo ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, returnEdges, callEdges, callToReturnEdges, stmtToMethod, Set(b), true ) } def happyPathFW: SimpleIFDSProblem = { // This demonstrates the flow // This test is copied from BiDiIFDSSolverTest.happyPath from Heros. 
val norAB = NormalEdge(a, b, Set()) val norBC = NormalEdge(b, c, Set(flow("0", "1"))) val retCNull = ReturnEdge(None, c, None, foo, Set(flow("1"))) //////////// val normalEdges: Set[NormalEdge] = Set(norAB, norBC) val callEdges: Set[CallEdge] = Set() val callToReturnEdges: Set[CallToReturnEdge] = Set() val returnEdges: Set[ReturnEdge] = Set(retCNull) val methodToStartPoints: Map[Method, Set[Node]] = Map( foo -> Set(a) ) val stmtToMethod: Map[Node, Method] = Map( a -> foo, b -> foo, c -> foo ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, returnEdges, callEdges, callToReturnEdges, stmtToMethod, Set(b), true ) } def unbalancedReturnsBothDirectionsFW: SimpleIFDSProblem = { // This demonstrates the flow // This test is copied from BiDiIFDSSolverTest from Heros. val norAB = NormalEdge(a, b, Set()) val norBC = NormalEdge(b, c, Set(flow("0", "1"))) val retCZ = ReturnEdge(Some(y), c, Some(z), foo, Set(flow("1", "2"))) val retZNull = ReturnEdge(None, z, None, bar, Set(flow("2"))) //////////// val normalEdges: Set[NormalEdge] = Set(norAB, norBC) val callEdges: Set[CallEdge] = Set() val callToReturnEdges: Set[CallToReturnEdge] = Set() val returnEdges: Set[ReturnEdge] = Set(retCZ, retZNull) val methodToStartPoints: Map[Method, Set[Node]] = Map( foo -> Set(a) ) val stmtToMethod: Map[Node, Method] = Map( a -> foo, b -> foo, c -> foo, z -> bar ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, returnEdges, callEdges, callToReturnEdges, stmtToMethod, Set(b), true ) } def unbalancedReturnsBothDirectionsBW: SimpleIFDSProblem = { // This demonstrates the flow // This test is copied from BiDiIFDSSolverTest from Heros. 
val norCB = NormalEdge(c, b, Set()) val norBA = NormalEdge(b, a, Set(flow("0", "2"))) val retAX = ReturnEdge(Some(y), a, Some(x), foo, Set(flow("2", "3"))) val retXNull = ReturnEdge(None, x, None, bar, Set(flow("3"))) //////////// val normalEdges: Set[NormalEdge] = Set(norCB, norBA) val callEdges: Set[CallEdge] = Set() val callToReturnEdges: Set[CallToReturnEdge] = Set() val returnEdges: Set[ReturnEdge] = Set(retAX, retXNull) val methodToStartPoints: Map[Method, Set[Node]] = Map( foo -> Set(c) ) val stmtToMethod: Map[Node, Method] = Map( a -> foo, b -> foo, c -> foo, x -> bar ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, returnEdges, callEdges, callToReturnEdges, stmtToMethod, Set(b), true ) } def unbalancedReturnsNonMatchingCallSitesFW: SimpleIFDSProblem = { // This demonstrates the flow // This test is copied from BiDiIFDSSolverTest from Heros. val norAB = NormalEdge(a, b, Set()) val norBC = NormalEdge(b, c, Set(flow("0", "1"))) val retCZ = ReturnEdge(Some(y1), c, Some(z), foo, Set(flow("1", "2"))) val retZNull = ReturnEdge(None, z, None, bar, Set()) //////////// val normalEdges: Set[NormalEdge] = Set(norAB, norBC) val callEdges: Set[CallEdge] = Set() val callToReturnEdges: Set[CallToReturnEdge] = Set() val returnEdges: Set[ReturnEdge] = Set(retCZ, retZNull) val methodToStartPoints: Map[Method, Set[Node]] = Map( foo -> Set(a) ) val stmtToMethod: Map[Node, Method] = Map( a -> foo, b -> foo, c -> foo, z -> bar ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, returnEdges, callEdges, callToReturnEdges, stmtToMethod, Set(b), true ) } def unbalancedReturnsNonMatchingCallSitesBW: SimpleIFDSProblem = { // This demonstrates the flow // This test is copied from BiDiIFDSSolverTest from Heros. 
val norCB = NormalEdge(c, b, Set()) val norBA = NormalEdge(b, a, Set(flow("0", "2"))) val retAX = ReturnEdge(Some(y2), a, Some(x), foo, Set(flow("2", "3"))) val retXNull = ReturnEdge(None, x, None, bar, Set()) //////////// val normalEdges: Set[NormalEdge] = Set(norCB, norBA) val callEdges: Set[CallEdge] = Set() val callToReturnEdges: Set[CallToReturnEdge] = Set() val returnEdges: Set[ReturnEdge] = Set(retAX, retXNull) val methodToStartPoints: Map[Method, Set[Node]] = Map( foo -> Set(c) ) val stmtToMethod: Map[Node, Method] = Map( a -> foo, b -> foo, c -> foo, x -> bar ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, returnEdges, callEdges, callToReturnEdges, stmtToMethod, Set(b), true ) } def returnsOnlyOneDirectionAndStopsFW: SimpleIFDSProblem = { // This demonstrates the flow // This test is copied from BiDiIFDSSolverTest from Heros. val norAB = NormalEdge(a, b, Set()) val norBC = NormalEdge(b, c, Set(flow("0", "1"))) val retCZ = ReturnEdge(Some(y), c, Some(z), foo, Set(flow("1", "2"))) val retZNull = ReturnEdge(None, z, None, bar, Set()) //////////// val normalEdges: Set[NormalEdge] = Set(norAB, norBC) val callEdges: Set[CallEdge] = Set() val callToReturnEdges: Set[CallToReturnEdge] = Set() val returnEdges: Set[ReturnEdge] = Set(retCZ, retZNull) val methodToStartPoints: Map[Method, Set[Node]] = Map( foo -> Set(a) ) val stmtToMethod: Map[Node, Method] = Map( a -> foo, b -> foo, c -> foo, z -> bar ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, returnEdges, callEdges, callToReturnEdges, stmtToMethod, Set(b), true ) } def returnsOnlyOneDirectionAndStopsBW: SimpleIFDSProblem = { // This demonstrates the flow // This test is copied from BiDiIFDSSolverTest from Heros. 
val norCB = NormalEdge(c, b, Set()) val norBA = NormalEdge(b, a, Set(flow("0"))) val retAX = ReturnEdge(Some(y), a, Some(x), foo, Set()) val retXNull = ReturnEdge(None, x, None, bar, Set()) //////////// val normalEdges: Set[NormalEdge] = Set(norCB, norBA) val callEdges: Set[CallEdge] = Set() val callToReturnEdges: Set[CallToReturnEdge] = Set() val returnEdges: Set[ReturnEdge] = Set(retAX, retXNull) val methodToStartPoints: Map[Method, Set[Node]] = Map( foo -> Set(c) ) val stmtToMethod: Map[Node, Method] = Map( a -> foo, b -> foo, c -> foo, x -> bar ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, returnEdges, callEdges, callToReturnEdges, stmtToMethod, Set(b), true ) } def reuseSummaryFW: SimpleIFDSProblem = { // This demonstrates the flow // This test is copied from BiDiIFDSSolverTest from Heros. val norAB = NormalEdge(a, b, Set(flow("0", "1"))) val calBBar = CallEdge(b, bar, Set(flow("1", "2"))) val ctrBC = CallToReturnEdge(b, c, Set(flow("1"))) val calCBar = CallEdge(c, bar, Set(flow("1", "2"))) val ctrCD = CallToReturnEdge(c, d, Set(flow("1"))) val retD = ReturnEdge(None, d, None, foo, Set(flow("1"))) val norXY = NormalEdge(x, y, Set(flow("2", "2"))) val retYC = ReturnEdge(Some(b), y, Some(c), bar, Set(flow("2", "1"))) val retYD = ReturnEdge(Some(c), y, Some(d), bar, Set(flow("2", "1"))) //////////// val normalEdges: Set[NormalEdge] = Set(norAB, norXY) val callEdges: Set[CallEdge] = Set(calBBar, calCBar) val callToReturnEdges: Set[CallToReturnEdge] = Set(ctrBC, ctrCD) val returnEdges: Set[ReturnEdge] = Set(retD, retYC, retYD) val methodToStartPoints: Map[Method, Set[Node]] = Map( bar -> Set(x) ) val stmtToMethod: Map[Node, Method] = Map( a -> foo, b -> foo, c -> foo, d -> foo, x -> bar, y -> bar ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, returnEdges, callEdges, callToReturnEdges, stmtToMethod, Set(a), true ) } def reuseSummaryBW: SimpleIFDSProblem = { // This demonstrates the flow // This test is copied from BiDiIFDSSolverTest from 
Heros. val retANull = ReturnEdge(None, a, None, foo, Set(flow("0"))) //////////// val normalEdges: Set[NormalEdge] = Set() val callEdges: Set[CallEdge] = Set() val callToReturnEdges: Set[CallToReturnEdge] = Set() val returnEdges: Set[ReturnEdge] = Set(retANull) val methodToStartPoints: Map[Method, Set[Node]] = Map( ) val stmtToMethod: Map[Node, Method] = Map( a -> foo ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, returnEdges, callEdges, callToReturnEdges, stmtToMethod, Set(a), true ) } def dontResumeIfReturnFlowIsKilledFW: SimpleIFDSProblem = { // forwardHelper.method("foo", // startPoints(), // normalStmt("a", flow("0", "1")).succ("b"), // exitStmt("b").returns(over("cs"), to("y"), kill("1"))); // // forwardHelper.method("bar", // startPoints(), // normalStmt("y").succ("z" /* none */)); // This demonstrates the flow // This test is copied from BiDiIFDSSolverTest from Heros. val norAB = NormalEdge(a, b, Set(flow("0", "1"))) val retBY = ReturnEdge(Some(cs), b, Some(y), foo, Set(flow("1"))) val norYZ = NormalEdge(y, z, Set()) //////////// val normalEdges: Set[NormalEdge] = Set(norAB, norYZ) val callEdges: Set[CallEdge] = Set() val callToReturnEdges: Set[CallToReturnEdge] = Set() val returnEdges: Set[ReturnEdge] = Set(retBY) val methodToStartPoints: Map[Method, Set[Node]] = Map( ) val stmtToMethod: Map[Node, Method] = Map( a -> foo, b -> foo, y -> bar, z -> bar ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, returnEdges, callEdges, callToReturnEdges, stmtToMethod, Set(a), true ) } def dontResumeIfReturnFlowIsKilledBW: SimpleIFDSProblem = { // backwardHelper.method("foo", // startPoints(), // normalStmt("a", flow("0", "1")).succ("c"), // exitStmt("c").returns(over("cs"), to("x"), flow("1", "2"))); // // backwardHelper.method("bar", // startPoints(), // normalStmt("x").succ("z" /*none*/)); // This demonstrates the flow // This test is copied from BiDiIFDSSolverTest from Heros. 
val norAC = NormalEdge(a, c, Set(flow("0", "1"))) val retCX = ReturnEdge(Some(cs), c, Some(x), foo, Set(flow("1", "2"))) val norXZ = NormalEdge(x, z, Set()) //////////// val normalEdges: Set[NormalEdge] = Set(norAC, norXZ) val callEdges: Set[CallEdge] = Set() val callToReturnEdges: Set[CallToReturnEdge] = Set() val returnEdges: Set[ReturnEdge] = Set(retCX) val methodToStartPoints: Map[Method, Set[Node]] = Map( ) val stmtToMethod: Map[Node, Method] = Map( a -> foo, c -> foo, x -> bar, z -> bar ) new SimpleIFDSProblem( methodToStartPoints, normalEdges, returnEdges, callEdges, callToReturnEdges, stmtToMethod, Set(a), true ) } // def multipleSeedsFW: SimpleIFDSProblem = { // // This demonstrates the flow // // This test is copied from BiDiIFDSSolverTest from Heros. // val norA1B = NormalEdge(a1, b, Set(flow("0", "1"))) // val norA2B = NormalEdge(a2, b, Set(flow("0", "1"))) // val calBBar = CallEdge(b, bar, Set(flow(2, "1", "2"))) // val ctrBC = CallToReturnEdge(b, c, Set(flow(2, "1"))) // val calCBar = CallEdge(c, bar, Set(flow(2, "1", "2"))) // val ctrCD = CallToReturnEdge(c, d, Set(flow(2, "1"))) // val retDNull = ReturnEdge(None, d, None, foo, Set(flow(2, "1"))) // val norXY = NormalEdge(x, y, Set(flow("2", "2"))) // val retYC = ReturnEdge(Some(b), y, Some(c), bar, Set(flow(2, "2", "1"))) // val retYD = ReturnEdge(Some(c), y, Some(d), bar, Set(flow(2, "2", "1"))) // //////////// // val normalEdges: Set[NormalEdge] = Set(norA1B, norA2B, norXY) // val callEdges: Set[CallEdge] = Set(calBBar, calCBar) // val callToReturnEdges: Set[CallToReturnEdge] = Set(ctrBC, ctrCD) // val returnEdges: Set[ReturnEdge] = Set(retDNull, retYC, retYD) // val methodToStartPoints: Map[Method, Set[Node]] = Map( // bar -> Set(x) // ) // val stmtToMethod: Map[Node, Method] = Map( // a1 -> foo, // a2 -> foo, // b -> foo, // c -> foo, // d -> foo, // x -> bar, // y -> bar // ) // new SimpleIFDSProblem( // methodToStartPoints, // normalEdges, // returnEdges, // callEdges, // callToReturnEdges, // 
stmtToMethod, // Set(a1, a2), // true // ) // } // def multipleSeedsBW: SimpleIFDSProblem = { // // This demonstrates the flow // // This test is copied from BiDiIFDSSolverTest from Heros. // val retA1Null = ReturnEdge(None, a1, None, foo, Set(flow("0"))) // val retA2Null = ReturnEdge(None, a2, None, foo, Set(flow("0"))) // //////////// // val normalEdges: Set[NormalEdge] = Set() // val callEdges: Set[CallEdge] = Set() // val callToReturnEdges: Set[CallToReturnEdge] = Set() // val returnEdges: Set[ReturnEdge] = Set(retA1Null, retA2Null) // val methodToStartPoints: Map[Method, Set[Node]] = Map( // ) // val stmtToMethod: Map[Node, Method] = Map( // a1 -> foo, // a2 -> foo // ) // new SimpleIFDSProblem( // methodToStartPoints, // normalEdges, // returnEdges, // callEdges, // callToReturnEdges, // stmtToMethod, // Set(a1, a2), // true // ) // } }
packlnd/IFDS-RA
src/test/scala/IFDS/BiDiTestHelper.scala
Scala
mit
15,516
package rx.lang.scala.observables import org.junit.Test import org.junit.Assert._ import org.scalatestplus.junit.JUnitSuite import rx.lang.scala.observers.TestSubscriber import rx.lang.scala.{Notification, Observable} class AsyncOnSubscribeTests extends JUnitSuite { @Test def testStateful(): Unit = { val last = 2000L val o = Observable.create(AsyncOnSubscribe(() => 0L)((count,demand) => if(count > last) (Notification.OnCompleted, count) else { val to = math.min(count+demand, last+1) val next = Observable.from(count until to) (Notification.OnNext(next), to) } )) assertEquals((0L to last).toList, o.toBlocking.toList) } @Test def testStateless(): Unit = { val o = Observable.create(AsyncOnSubscribe.stateless(r => Notification.OnNext(Observable.just(42).repeat(r)))) assertEquals(List(42,42,42,42), o.take(4).toBlocking.toList) } @Test def testSingleState(): Unit = { val random = math.random val o = Observable.create(AsyncOnSubscribe.singleState(() => random)((s,r) => Notification.OnNext(Observable.just(random.toString).repeat(r)))) assertEquals(List(random.toString, random.toString), o.take(2).toBlocking.toList) } @Test def testUnsubscribe(): Unit = { val sideEffect = new java.util.concurrent.atomic.AtomicBoolean(false) val o = Observable.create(AsyncOnSubscribe(() => ())((s,r) => (Notification.OnCompleted, s), onUnsubscribe = s => sideEffect.set(true))) o.foreach(_ => ()) assertEquals(true, sideEffect.get()) } @Test def testError(): Unit = { val e = new IllegalStateException("Oh noes") val o = Observable.create(AsyncOnSubscribe(() => 0)((s,_) => (if(s>2) Notification.OnNext(Observable.just(s)) else Notification.OnError(e), s+1))) val testSubscriber = TestSubscriber[Int]() o.subscribe(testSubscriber) testSubscriber.assertError(e) } @Test // Ensure that the generator is executed for each subscription def testGenerator(): Unit = { val sideEffectCount = new java.util.concurrent.atomic.AtomicInteger(0) val o = Observable.create(AsyncOnSubscribe(() => 
sideEffectCount.incrementAndGet())((s, _) => (Notification.OnCompleted, s))) o.toBlocking.toList o.toBlocking.toList assertEquals(sideEffectCount.get(), 2) } }
ReactiveX/RxScala
src/test/scala/rx/lang/scala/observables/AsyncOnSubscribeTests.scala
Scala
apache-2.0
2,323
/* * Copyright 2019 Spotify AB. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.spotify.scio.smb.syntax import com.spotify.scio.annotations.experimental import com.spotify.scio.io.{ClosedTap, EmptyTap} import com.spotify.scio.values._ import org.apache.beam.sdk.coders.KvCoder import org.apache.beam.sdk.extensions.smb.SortedBucketIO import org.apache.beam.sdk.values.KV trait SortMergeBucketSCollectionSyntax { implicit def toSortMergeBucketKeyedSCollection[K, V]( data: SCollection[KV[K, V]] ): SortedBucketPairSCollection[K, V] = new SortedBucketPairSCollection(data) implicit def toSortMergeBucketSCollection[T]( data: SCollection[T] ): SortedBucketSCollection[T] = new SortedBucketSCollection(data) } final class SortedBucketSCollection[T](private val self: SCollection[T]) { /** * Save an `SCollection[T]` to a filesystem, where each file represents a bucket whose records are * lexicographically sorted by some key specified in the * [[org.apache.beam.sdk.extensions.smb.BucketMetadata]] corresponding to the provided * [[SortedBucketSink]] transform. * * @param write * the [[PTransform]] that applies a [[SortedBucketSink]] transform to the input data. It * contains information about key function, bucket and shard size, etc. 
*/ @experimental def saveAsSortedBucket(write: SortedBucketIO.Write[_, T]): ClosedTap[Nothing] = { self.applyInternal(write) // @Todo: Implement taps for metadata/bucket elements ClosedTap[Nothing](EmptyTap) } } final class SortedBucketPairSCollection[K, V](private val self: SCollection[KV[K, V]]) { /** * Save an `SCollection[(K, V)]` to a filesystem, where each file represents a bucket whose * records are lexicographically sorted by some key specified in the * [[org.apache.beam.sdk.extensions.smb.BucketMetadata]] corresponding to the provided * [[SortedBucketSink]] transform and to the key K of each KV pair in this `SCollection`. * * @param write * the [[PTransform]] that applies a [[SortedBucketSink]] transform to the input data. It * contains information about key function, bucket and shard size, etc. * @param verifyKeyExtraction * if set, the SMB Sink will add two additional nodes to the job graph to sample this * SCollection and verify that each key K in the collection matches the result of the given * [[org.apache.beam.sdk.extensions.smb.BucketMetadata]] 's `extractKey` function. */ @experimental def saveAsPreKeyedSortedBucket( write: SortedBucketIO.Write[K, V], verifyKeyExtraction: Boolean = true ): ClosedTap[Nothing] = { val vCoder = self.internal.getCoder.asInstanceOf[KvCoder[K, V]].getValueCoder self.applyInternal(write.onKeyedCollection(vCoder, verifyKeyExtraction)) // @Todo: Implement taps for metadata/bucket elements ClosedTap[Nothing](EmptyTap) } }
spotify/scio
scio-smb/src/main/scala/com/spotify/scio/smb/syntax/SortMergeBucketSCollectionSyntax.scala
Scala
apache-2.0
3,414
package jellies.client

/** Contract for a single entry in the client's menu UI. */
trait MenuButton {
  /** Label rendered on the button. */
  def text: String
  /** Whether the button is currently shown at all. */
  def isVisible: Boolean
  /** Whether the button currently accepts clicks. */
  def isClickable: Boolean
  /** Reaction invoked when the button is clicked. */
  def onClick(): Unit
}

/** Placeholder button: reports itself as not visible, and every other
  * member is deliberately unimplemented — `???` throws
  * `scala.NotImplementedError` if ever called.
  */
object DummyMenuButton extends MenuButton {
  override def text: String = ???
  override def isVisible: Boolean = false
  override def isClickable: Boolean = ???
  override def onClick(): Unit = ???
}
zxqfl/jellies
js/src/main/scala/jellies/client/MenuButton.scala
Scala
mit
279
/** * Copyright (C) 2015 DANS - Data Archiving and Networked Services (info@dans.knaw.nl) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package nl.knaw.dans.easy.stage import com.yourmediashelf.fedora.client.FedoraCredentials import nl.knaw.dans.easy.stage.lib.Fedora import org.joda.time.DateTime import java.io.File import java.net.URI import java.nio.file.Path /** * * @param ownerId * @param submissionTimestamp * @param bagitDir * @param sdoSetDir empty directory * @param urn * @param doi * @param otherAccessDoi * @param fileUris ignored when skipPayload is true * @param state * @param archive * @param disciplines * @param databaseUrl * @param databaseUser * @param databasePassword * @param licenses * @param includeBagMetadata * @param skipPayload when no doi is provided payloads are skipped anyway * @param extraDescription */ case class Settings(ownerId: String, submissionTimestamp: DateTime = DateTime.now, bagitDir: File, sdoSetDir: File, urn: Option[String] = None, doi: Option[String] = None, otherAccessDoi: Boolean = false, fileUris: Map[Path, URI] = Map(), state: String, archive: String, disciplines: Map[String, String], databaseUrl: String, databaseUser: String, databasePassword: String, licenses: Map[String, File], includeBagMetadata: Boolean, skipPayload: Boolean, extraDescription: Option[String] = None, ) { override def toString: String = { s"Stage-Dataset.Settings(ownerId = $ownerId, submissionTimestamp = $submissionTimestamp, " + s"bagitDir = $bagitDir, sdoSetDir 
= $sdoSetDir, urn = ${ urn.getOrElse("<not defined>") }, " + s"doi = ${ doi.getOrElse("<not defined>") }, otherAccessDoi = $otherAccessDoi, " + s"fileUris = $fileUris, state = $state, archive = $archive, " + s"Database($databaseUrl, $databaseUser, ****), licenses = $licenses, includeBagMetadata = $includeBagMetadata, " + s"skipPayload = $skipPayload, extraDescription = $extraDescription)" } } object Settings { /** for EasyIngestFlow */ def apply(depositorId: String, submissionTimestamp: DateTime, bagDir: File, sdoSetDir: File, urn: Option[String], doi: Option[String], otherAccessDoi: Boolean, fileUris: Map[Path, URI], state: String, archive: String, credentials: FedoraCredentials, databaseUrl: String, databaseUser: String, databasePassword: String, licenses: Map[String, File], includeBagMetadata: Boolean, skipPayload: Boolean, extraDescription: Option[String], ): Settings = { Fedora.setFedoraConnectionSettings( credentials.getBaseUrl.toString, credentials.getUsername, credentials.getPassword ) new Settings( ownerId = depositorId, submissionTimestamp = submissionTimestamp, bagitDir = bagDir, sdoSetDir = sdoSetDir, urn = urn, doi = doi, otherAccessDoi = otherAccessDoi, fileUris = fileUris, state = state, archive = archive, disciplines = Fedora.disciplines, databaseUrl = databaseUrl, databaseUser = databaseUser, databasePassword = databasePassword, licenses = licenses, includeBagMetadata = includeBagMetadata, skipPayload = skipPayload, extraDescription = extraDescription, ) } }
DANS-KNAW/easy-stage-dataset
lib/src/main/scala/nl.knaw.dans.easy.stage/Settings.scala
Scala
apache-2.0
4,299
package com.codacy.client.stash.client.auth /** scalaj.http Copyright 2010 Jonathan Hoffman Modified by Rodrigo Fernandes (@rtfpessoa) to support OAuth1 with SHA1withRSA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ import java.net.{URI, URL} import java.security._ import java.security.spec.PKCS8EncodedKeySpec import java.util.UUID import scalaj.http._ /** Utility methods used by [[scalaj.http.HttpRequest]] */ object OAuth1 { def sign(req: HttpRequest, consumer: Token, token: Option[Token], verifier: Option[String]): HttpRequest = { req.option(conn => { val baseParams: Seq[(String, String)] = Seq( ("oauth_timestamp", (System.currentTimeMillis / 1000).toString), ("oauth_nonce", UUID.randomUUID().toString) ) var (oauthParams, signature) = getSig(baseParams, req, consumer, token, verifier) oauthParams +:= (("oauth_signature", signature)) conn.setRequestProperty("Authorization", s"OAuth ${oauthParams .map { case (k, v) => s"""$k="${percentEncode(v)}"""" } .mkString(", ")}") }) } private def getSig( baseParams: Seq[(String, String)], req: HttpRequest, consumer: Token, token: Option[Token], verifier: Option[String] ): (Seq[(String, String)], String) = { var oauthParams = ("oauth_version", "1.0") +: ("oauth_consumer_key", consumer.key) +: ( "oauth_signature_method", "RSA-SHA1" ) +: baseParams token.foreach(t => oauthParams +:= (("oauth_token", t.key))) verifier.foreach(v => oauthParams +:= (("oauth_verifier", v))) // OAuth1.0 specifies that only querystring and x-www-form-urlencoded body parameters should 
be included in signature // req.params from multi-part requests are included in the multi-part request body and should NOT be included val allTheParams = if (req.connectFunc.isInstanceOf[MultiPartConnectFunc]) { oauthParams } else { req.params ++ oauthParams } val baseString = Seq(req.method.toUpperCase, normalizeUrl(new URL(req.url)), normalizeParams(allTheParams)) .map(percentEncode) .mkString("&") val privatekey = loadPrivateKey(consumer) val signature = signBytes(baseString, privatekey) (oauthParams, signature) } private def normalizeParams(params: Seq[(String, String)]): String = { percentEncode(params).sortWith(_ < _).mkString("&") } private def normalizeUrl(url: URL): String = { val uri = new URI(url.toString) val scheme = uri.getScheme.toLowerCase() var authority = uri.getAuthority.toLowerCase() val dropPort = (scheme.equals("http") && uri.getPort == 80) || (scheme .equals("https") && uri.getPort == 443) if (dropPort) { // Find the last : in the authority val index = authority.lastIndexOf(":") if (index >= 0) { authority = authority.substring(0, index) } } var path = uri.getRawPath if (path == null || path.length() <= 0) { path = "/" // Conforms to RFC 2616 section 3.2.2 } // We know that there is no query and no fragment here. 
s"$scheme://$authority$path" } private def percentEncode(params: Seq[(String, String)]): Seq[String] = { params.map { case (key, value) => s"${percentEncode(key)}=${percentEncode(value)}" } } private def percentEncode(s: String): String = { if (s == null) { "" } else { HttpConstants .urlEncode(s, HttpConstants.utf8) .replace("+", "%20") .replace("*", "%2A") .replace("%7E", "~") } } private def signBytes(text: String, privateKey: PrivateKey): String = { val textBytes = text.getBytes(HttpConstants.utf8) val signer = Signature.getInstance("SHA1withRSA") signer.initSign(privateKey) signer.update(textBytes) val signedBytes = signer.sign HttpConstants.base64(signedBytes).trim } private def loadPrivateKey(consumer: Token): PrivateKey = { val privateKeyBytes = Base64.decode(consumer.secret) val privateKeyFactory = KeyFactory.getInstance("RSA") val privateKeySpec = new PKCS8EncodedKeySpec(privateKeyBytes) privateKeyFactory.generatePrivate(privateKeySpec) } }
codacy/stash-scala-client
src/main/scala/com/codacy/client/stash/client/auth/OAuth1.scala
Scala
apache-2.0
4,675
// Negative compilation test: references to the @experimental definition `x`
// are legal only from within a lexically enclosing @experimental definition.
// The single expected failure is marked with the harness-read `// error` comment.
import scala.annotation.experimental

@experimental def x = 2

@experimental class A {
  def f = x // ok because A is experimental
}

@experimental class B {
  def f = x // ok because B is experimental
}

@experimental object C {
  def f = x // ok because C is experimental
}

@experimental class D {
  def f = {
    object B {
      x // ok because enclosing D is experimental
    }
  }
}

@experimental class E {
  def f = {
    def g = {
      x // ok because enclosing E is experimental
    }
  }
}

class F {
  def f = {
    def g = {
      x // error
    }
  }
}
dotty-staging/dotty
tests/neg-custom-args/no-experimental/experimentalMembers.scala
Scala
apache-2.0
549
/*
 * Copyright (c) 2015 Snowplow Analytics Ltd. All rights reserved.
 *
 * This program is licensed to you under the Apache License Version 2.0, and
 * you may not use this file except in compliance with the Apache License
 * Version 2.0. You may obtain a copy of the Apache License Version 2.0 at
 * http://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the Apache License Version 2.0 is distributed on an "AS
 * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the Apache License Version 2.0 for the specific language
 * governing permissions and limitations there under.
 */
package com.snowplowanalytics.snowplow
package collectors

// Snowplow
import scalastream.sinks._

package object scalastream {

  /**
   * Kind of stream a sink handles: Good rows, Bad rows, or Map rows
   * (the three values mirror the fields of [[CollectorSinks]] and
   * [[EventSerializeResult]] below).
   */
  object InputType extends Enumeration {
    type InputType = Value
    val Good, Bad, Map = Value
  }

  /**
   * Holds the three sinks used by the Stream Collector, one per
   * [[InputType]] value.
   *
   * @param good sink receiving successfully processed rows
   * @param bad sink receiving rows that failed processing
   * @param map sink for the Map input type
   *            (exact row semantics not visible here — confirm against callers)
   */
  case class CollectorSinks(good: AbstractSink, bad: AbstractSink, map: AbstractSink)

  /**
   * Result of splitAndSerializePayload: serialized byte arrays grouped
   * by [[InputType]].
   *
   * @param good all good results, already serialized
   * @param bad all bad results, already serialized
   * @param map all map results, already serialized
   */
  case class EventSerializeResult(good: List[Array[Byte]], bad: List[Array[Byte]], map: List[Array[Byte]])

  /**
   * Result of splitting a too-large array of events in the body of a
   * POST request.
   *
   * @param goodBatches batches of events, each small enough to emit
   * @param failedBigEvents events that were individually too large to batch
   */
  case class SplitBatchResult(goodBatches: List[List[String]], failedBigEvents: List[String])
}
ClaraVista-IT/snowplow
2-collectors/scala-stream-collector/src/main/scala/com.snowplowanalytics.snowplow.collectors/package.scala
Scala
apache-2.0
1,831
/* * Copyright (c) 2015-2022 "Neo Technology," * Network Engine for Objects in Lund AB [http://neotechnology.com] * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Attribution Notice under the terms of the Apache License 2.0 * * This work was created by the collective efforts of the openCypher community. * Without limiting the terms of Section 6, any Derivative Work that is not * approved by the public consensus process of the openCypher Implementers Group * should not be described as โ€œCypherโ€ (and Cypherยฎ is a registered trademark of * Neo4j Inc.) or as "openCypher". Extensions by implementers or prototypes or * proposals for change that have been documented or implemented should only be * described as "implementation extensions to Cypher" or as "proposed changes to * Cypher that are not yet approved by the openCypher community". 
*/ package org.opencypher.tools.tck.inspection.browser.web import org.opencypher.tools.tck.api.Pickle import org.opencypher.tools.tck.api.Scenario import org.opencypher.tools.tck.api.groups.ExampleItem import org.opencypher.tools.tck.api.groups.ScenarioItem import org.opencypher.tools.tck.api.groups.ScenarioOutline import org.opencypher.tools.tck.api.groups.TckTree import org.opencypher.tools.tck.api.groups.Total import scalatags.Text import scalatags.Text.all._ case class BrowserPages(browserModel: BrowserModel, browserRoutes: BrowserRoutes) extends PageBasic { def browserReportPage(): Text.all.doctype = { page( pageTitle("Browse"), div(code(browserModel.path)), sectionTitle("Counts"), browserCountsFrag(browserModel.tckTree) ) } def browserCountsFrag(tckTree: TckTree): Text.TypedTag[String] = { val groupsFiltered = tckTree.groupsOrderedDepthFirst filter { case _:ScenarioItem | _:ScenarioOutline | _:ExampleItem => false case _ => true } val totalCount = tckTree.groupedScenarios(Total).size val tableRows = groupsFiltered.map( group => tr( td(textIndent:=group.indent.em)( group.name ), td(textAlign.right)( a(href:=browserRoutes.listScenariosURL(this, group))(tckTree.groupedScenarios(group).size) ), td(textAlign.right)({ val pct = (tckTree.groupedScenarios(group).size * 100).toDouble / totalCount frag(f"$pct%3.1f %%") }) ) ) //output header val header = tr( th("Group"), th("Count"), th("of Total"), ) table(CSS.hoverTable)(header +: tableRows) } def scenarioPage(scenario: Scenario, withLocation: Boolean = true): Text.all.doctype = { page( // location if(withLocation) frag( div(CSS.locationLine)(scenarioLocationFrag(scenario)), blankLink(browserRoutes.openScenarioInEditorURL(this, scenario), div(CSS.fileLocation)( scenario.sourceFile.toAbsolutePath.toString + ":" + Pickle(scenario.source, withLocation = true).location.map(_.line).getOrElse(0) ) ) ) else frag(), // title div(CSS.scenarioTitleBox, CSS.scenarioTitleBig)(scenarioTitle(scenario)), // tags 
if(scenario.tags.isEmpty) frag() else div(CSS.tagLine)( div("Tags:"), scenario.tags.toSeq.sorted.map(tag => div(CSS.tag)(tag)) ), // steps scenario.steps.map(stepFrag) ) } }
opencypher/openCypher
tools/tck-inspection/src/main/scala_2.13/org/opencypher/tools/tck/inspection/browser/web/BrowserPages.scala
Scala
apache-2.0
3,964
package org.sgine.ui import com.badlogic.gdx.{Gdx, ApplicationListener} import com.badlogic.gdx.graphics.{FPSLogger, OrthographicCamera, Texture, GL10} import render.{ArrayBuffer, Vertex, TextureCoordinates} object RawTest extends ApplicationListener { lazy val camera = new OrthographicCamera(1024, 768) lazy val texture = new Texture("sgine.png") lazy val framerate = new FPSLogger() lazy val coords = TextureCoordinates.rect() lazy val vertices = Vertex.rect(texture.getWidth, texture.getHeight) lazy val verticesLength = vertices.length lazy val buffer = new ArrayBuffer(vertices ::: coords) val deltas = new Array[Float](60) var position = 0 var previous = 0.0f def create() = { Gdx.gl11.glShadeModel(GL10.GL_SMOOTH) Gdx.gl11.glClearColor(0.0f, 0.0f, 0.0f, 1.0f) Gdx.gl11.glClearDepthf(1.0f) Gdx.gl11.glEnable(GL10.GL_BLEND) Gdx.gl11.glEnable(GL10.GL_DEPTH_TEST) Gdx.gl11.glDepthFunc(GL10.GL_LEQUAL) Gdx.gl11.glEnable(GL10.GL_TEXTURE_2D) Gdx.gl11.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA) camera.update() } def resize(width: Int, height: Int) {} def render() = { Gdx.gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT) // val delta = 1.0f / 60.0f val delta = Gdx.graphics.getDeltaTime camera.rotate(delta * 150.0f, 0.0f, 0.0f, 1.0f) camera.update() camera(Gdx.gl11) texture.bind() buffer.bind() buffer.bindTextureCoordinates(verticesLength) buffer.drawVertices(0, verticesLength / 3) // deltas(position) = Gdx.graphics.getDeltaTime // position += 1 // if (position == deltas.length) { // position = 0 // val total = deltas.foldLeft(0.0f)((total, current) => current + total) // val average = total / 60.0f // val deviation = total - previous // previous = total // println("Time: %s (%s) - Deviation: %s".format(total, average, deviation)) // } framerate.log() } def pause() {} def resume() {} def dispose() {} def main(args: Array[String]): Unit = { Texture.setEnforcePotImages(false) // No need to enforce power-of-two images val configClass = 
Class.forName("com.badlogic.gdx.backends.lwjgl.LwjglApplicationConfiguration") val config = configClass.newInstance().asInstanceOf[AnyRef] configClass.getField("width").set(config, 1024) configClass.getField("height").set(config, 768) configClass.getField("useGL20").set(config, false) configClass.getField("title").set(config, "Raw Test") val clazz = Class.forName("com.badlogic.gdx.backends.lwjgl.LwjglApplication") val constructor = clazz.getConstructor(classOf[ApplicationListener], configClass) constructor.newInstance(List[AnyRef](this, config): _*).asInstanceOf[com.badlogic.gdx.Application] } }
Axiometry/sgine
ui/src/test/scala/org/sgine/ui/RawTest.scala
Scala
bsd-3-clause
2,801
/* This file is part of Static Web Gallery (SWG). MathMaster is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. MathMaster is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with SWG. If not, see <http://www.gnu.org/licenses/>. */ package eu.lateral.swg.db import eu.lateral.swg.utils._ import org.scalatest.Assertions import org.testng.Assert._ import org.testng.annotations.{Test,BeforeSuite,AfterSuite} import org.squeryl.PrimitiveTypeMode._ class DatabaseSuite extends Assertions{ implicit var implicitProject:Project = null @BeforeSuite def setUp(){ SessionManager.initializeDatabase("swg_test") implicitProject=Project.default } @AfterSuite def tearDown(){ delete("swg_test.h2.db") } @Test def testTrivial() { assertTrue(1 == 1) } @Test def testDefaultProject() { transaction{ assertEquals(SWGSchema.projects.lookup(1L).get.projectName,"default") } delete("swg_test") } @Test def testDefaultLanguages() { SessionManager.initializeDatabase("swg_test") transaction{ assertEquals(SWGSchema.languages.where(l => l.languageCode==="en").single.languageName,"English") assertEquals(from(SWGSchema.languages)(l => where(l.languageCode==="en") select(l.languageName)).head,"English") } } @Test def testLanguageNameForCode() { assertEquals(Languages.languageForCode("en"),Some("English")) assertEquals(Languages.languageForCode("undefined language"),None) } @Test def testProjectLanguages() { transaction{ Project.default.languages.exists(_.languageCode=="en") assertEquals(Project.default.languages.toSeq.length,1) Project.default.addLanguageByCode("sk") 
assertEquals(Project.default.languages.toSeq.length,2) Project.default.languages.exists(_.languageCode=="sk") Project.default.removeLanguageByCode("sk") assertEquals(Project.default.languages.toSeq.length,1) Project.default.languages.exists(_.languageCode=="en") } } }
orest-d/swg
swg/src/test/scala/eu/lateral/swg/db/DatabaseSuite.scala
Scala
gpl-3.0
2,431
/* * Copyright 2009-2010 LinkedIn, Inc * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.linkedin.norbert.cluster object NorbertClusterClientMain { private var cluster: ClusterClient = _ def main(args: Array[String]) { cluster = ClusterClient(args(0), args(1), 30000) loop } private def loop { print("> ") var line = Console.in.readLine.trim while (line != null) { try { if (line.length > 0) processCommand(line) } catch { case ex: Exception => println("Error: %s".format(ex)) } print("> ") line = Console.in.readLine.trim } } private def processCommand(line: String) { val command :: args = line.split(" ").toList.map(_.trim).filter(_.length > 0) command match { case "nodes" => val ts = System.currentTimeMillis val nodes = cluster.nodes if (nodes.size > 0) println(nodes.mkString("\\n")) else println("The cluster has no nodes") case "join" => args match { case nodeId :: url :: Nil => cluster.addNode(nodeId.toInt, url) println("Joined Norbert cluster") case nodeId :: url :: partitions => cluster.addNode(nodeId.toInt, url, Set() ++ partitions.map(_.toInt)) println("Joined Norbert cluster") case _ => println("Error: Invalid syntax: join nodeId url partition1 partition2...") } println("Joined Norbert cluster") case "leave" => if (args.length < 1) { println("Invalid syntax: leave nodeId") } else { cluster.removeNode(args.head.toInt) println("Left Norbert cluster") } case "exit" => exit case "quit" => exit case msg => "Unknown command: " + msg } } private def exit { 
cluster.shutdown System.exit(0) } }
rhavyn/norbert
examples/src/main/scala/com/linkedin/norbert/cluster/NorbertClusterClientMain.scala
Scala
apache-2.0
2,372
/* * Copyright 2016 Dennis Vriend * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.dnvriend.serializer.json import akka.serialization.Serializer import com.github.dnvriend.domain.OrderDomain import org.json4s.native.JsonMethods._ import org.json4s.native.Serialization import org.json4s.native.Serialization._ import org.json4s.{ DefaultFormats, Formats, NoTypeHints } case class EventWrapper(manifest: String, payload: String) class JsonSerializer extends Serializer { implicit val formats: Formats = DefaultFormats + OrderDomain.DirectDebitTypeSerializer override def identifier: Int = Int.MaxValue override def includeManifest: Boolean = true override def toBinary(o: AnyRef): Array[Byte] = write(EventWrapper(o.getClass.getName, write(o))).getBytes() override def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = { val wrapper: EventWrapper = parse(new String(bytes)).extract[EventWrapper] implicit val mf = Manifest.classType(Class.forName(wrapper.manifest)) read(wrapper.payload) } }
dnvriend/akka-serialization-test
src/main/scala/com/github/dnvriend/serializer/json/JsonSerializer.scala
Scala
apache-2.0
1,580
package com.github.diegopacheco.sandbox.scripts.scala.basic.func object FlatMapFunMain { object Time{ def bench(m:String)(f: => Unit){ val init = System.currentTimeMillis() f val end = System.currentTimeMillis() printf("{%s} execution in %d\\n", m,(end - init)) } } def main(args: Array[String]) { import Time._ val names = List("Diego","Andre","Alexandre","Jackson","Jose","Jeferson") val surnames = List("on","dre") bench("flatmap"){ val result = names.par.flatMap { n => surnames.par .filter(s => n endsWith s) .map(s => (n, s)) } println(result) } } }
diegopacheco/scala-playground
scala-playground/src/com/github/diegopacheco/sandbox/scripts/scala/basic/func/FlatMapFun.scala
Scala
unlicense
669
/* * Copyright 2014 Alan Rodas Bonjour * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.alanrodas.scaland.cli.runtime import com.alanrodas.scaland.cli._ /** * This trait is shared by all parameters of a command that can be tested to see if * they were passed by the user. */ sealed trait Definable { def isDefined : Boolean } /** * This is an abstract implementation of any kind of parameter, * weather flags or arguments. * * This class defines things such as the long and short name * of the argument, and weather or not the parameter has been user defined. * * There are two known subclasses of [[Parameter]]: [[Flag]] and [[Argument]]. * * @param name The name of the argument or flag, weather short or long * @param alias An additional optional name for the argument or flag. Should always * be the short name in case that it is defined. * @param isDefined ''true'' if it was defined by the user, ''false'' otherwise */ abstract class Parameter(val name : String, val alias : Option[String], val isDefined : Boolean) extends Definable { /** Returns the long name if this AbstractArgument has one, or None otherwise. */ val longName = if (name.length > 1) Some(name) else alias /** Returns ''true'' if it has a long name, ''false'' otherwise. */ val hasLongName = longName.nonEmpty /** Returns the short name if this AbstractArgument has one, or None otherwise. */ val shortName = if (name.length == 1) Some(name) else alias /** Returns ''true'' if it has a short name, ''false'' otherwise. 
*/ val hasShortName = shortName.nonEmpty /** ''true'' if this parameter is a flag, ''false'' otherwise */ def isFlag : Boolean /** ''true'' if this parameter is an argument, ''false'' otherwise */ def isArgument : Boolean } /** * This class represents a flag call. * * Note that flags are always ''false'' when they are not defined by the user, * and ''true'' when they are defined. So if you want to test the value of a * flag, you should ask weather is defined or not. * * You can also make use of the [[implicits]] object that provides conversions between * a [[Flag]] and a [[Boolean]], so you can use the element as the value it represents. * * Usually you will not create instances of this class yourself. They will be created by * a [[CommandManager]] when processing the flags of a command execution. * * @param name The name of the argument or flag, weather short or long * @param alias An additional optional name for the argument or flag. Should always be * the short name in case that it is defined. * @param isDefined ''true'' if it was defined by the user, ''false'' otherwise */ case class Flag(override val name : String, override val alias : Option[String], override val isDefined : Boolean) extends Parameter(name, alias, isDefined) { def isFlag = true def isArgument = false } /** * This class represents an argument call. * * Note that argument provide one or more arguments as ''values''. This class also provides * ways to access those values in an easy fashion. * * You can also make use of the [[implicits]] object that provides conversions between * a [[Argument]] and a [[Boolean]], so you can use the element to test if it was defined or not. * * Usually you will not create instances of this class yourself. They will be created by * a [[CommandManager]] when processing the arguments of a command execution. * * @param name The name of the argument or flag, weather short or long * @param alias An additional optional name for the argument or flag. 
short if name was long, and long if it was short * @param values The sequence of values of the current argument * @param isDefined ''true'' if it was defined by the user, ''false'' otherwise */ case class Argument[T](override val name : String, override val alias : Option[String], values : Seq[T], override val isDefined : Boolean) extends Parameter(name, alias, isDefined) { def isFlag = false def isArgument = true /** * Returns the value with the given index. * * The returned element is an instance of [[Any]]. If the element has * been passed to the user, then, it's always a [[String]]. If not, then * the actual type depends on the default value. To retrieve the value as * an instance of a particular type, use the [[valueAs]] method. * * Note that the index ''i'' should be a valid index given the passed values, * or an [[IllegalArgumentException]] will be thrown. * * @param i The index of the element to fetch */ def value(i : Int) = { require(i >= 0, s"The index $i should be greater or equal to zero") require(i < numberOfValues, s"The index $i should be lower than $numberOfValues") values(i) } /** Return the first element of all the values. */ val value = values(0) /** * Returns the value with the given index as an instance of [[T]]. * * The returned element is an instance of [[T]]. If the i-th value is an instance of * [[T]], that means, it was not defined by the user, it will return the value as is. * If the value has been defined by the user, then, it is converted to a [[T]] from * a [[String]] using the provided converter. * * Note that the index ''i'' should be a valid index given the passed values, * or an [[IllegalArgumentException]] will be thrown. * Additionally, if the value is not of type [[T]] nor a [[String]], then a * [[ClassCastException]] is thrown. * * @param i: The index of the value to retrieve. * @param converter The function called to transform the value as an instance of ''R''. 
*/ def valueAs[R](i : Int)(implicit converter : String => R) = convert(value(i))(converter) /** Returns the number of values passed to the argument. */ val numberOfValues = values.length } /** * Represents a value passed to a command. * * This class provides properties for the name of the value and * a boolean stating if the value was passed as an argument or if it * is the default value. * * @param name The name of the value passed to the command * @param value The value passed to the command * @param isDefined ''true'' if it was defined by the user, ''false'' otherwise */ case class Value[T](name : String, value : T, isDefined : Boolean) extends Definable { /** * Return the value of this element as an instance of [[R]]. * * If the value is already an instance of [[R]], then it is returned * as is. If it is a [[String]] (As in when it was passed by the user) * the converter function passed will be used to transform the value * to an [[R]]. This method throws a [[ClassCastException]] if the type * of the value of this element is not a [[String]] nor an [[R]] * * @tparam R The type that the value will be tried to cast to before returning it. * @param converter The function called to transform the value as an instance of ''R''. */ def valueAs[R](implicit converter : String => R) : R = convert(value)(converter) }
alanrodas/scaland
cli/src/main/scala/com/alanrodas/scaland/cli/runtime/parameters.scala
Scala
apache-2.0
7,536
package ch.wsl.box.cache.redis import akka.actor.ActorSystem import ch.wsl.box.services.file.{FileCacheKey, FileId, ImageCacheStorage} import com.typesafe.config.{Config, ConfigFactory} import net.ceedubs.ficus.Ficus._ import scredis._ import scredis.protocol.AuthConfig import scala.concurrent.{ExecutionContext, Future} class RedisImageCacheStorage extends ImageCacheStorage { implicit val system = ActorSystem("redis-actor-system") val redisConf = ConfigFactory.load().as[Config]("redis") val auth = for{ username <- redisConf.as[Option[String]]("username") password <- redisConf.as[Option[String]]("password") } yield AuthConfig(Some(username),password) val client = Client( host = redisConf.getString("host"), port = redisConf.getInt("port"), authOpt = auth ) override def clearField(id: FileId)(implicit ex: ExecutionContext): Future[Boolean] = { for{ keys <- client.keys(id.asString("")+"*") _ <- if(keys.nonEmpty) client.del({keys.toSeq}:_*) else Future.successful(0L) //del on empty list fails } yield true } override def save(fileId: FileCacheKey, data: Array[Byte])(implicit ex: ExecutionContext): Future[Boolean] = { client.set(fileId.asString,data) } override def delete(fileId: FileCacheKey)(implicit ex: ExecutionContext): Future[Boolean] = { client.del(fileId.asString()).map(_ > 0) } override def get(fileId: FileCacheKey)(implicit ex: ExecutionContext): Future[Option[Array[Byte]]] = { import scredis.serialization.Implicits.bytesReader client.get[Array[Byte]](fileId.asString()) } }
Insubric/box
server-cache-redis/src/main/scala/ch/wsl/box/cache/redis/RedisImageCacheStorage.scala
Scala
apache-2.0
1,599
class X(val elem: Int) { def foo(y: String): Int = y.length + elem } object X { implicit class BarDeco(x: X) { def bar: String = "!" } } object Implicits { implicit val impl: X = new X(0) implicit def conv(x: Int): X = new X(x) class Xdecorator(x: X) extends Object { def foo(cond: Boolean): Int = if (cond) x.foo("abc") else 0 } implicit def XDecorator(x: X): Xdecorator = new Xdecorator(x) val a: Object = "abc" val b: Any = "abc" def foo(x: Int)(implicit y: X): Int = { println(y) x } val y: Int = foo(1) val z: X = 3 val c: Int = y.elem val d: Int = z.foo("abc") val x: X = Byte.MinValue //import X.BarDeco println(z.bar) val e: Int = z.foo(true) // Haoyi Li's example on scala-user: trait Modifier implicit def stringNode(v: String): Modifier = new Modifier {} val s: Modifier = Some("rd").getOrElse("") val xx: Int = (1: Byte) }
densh/dotty
tests/pos/implicits1.scala
Scala
bsd-3-clause
923
package org.http4s import cats.effect.Sync import io.circe.{Json, Printer} package object circe extends CirceInstances { override val defaultPrinter: Printer = Printer.noSpaces override def jsonDecoder[F[_]: Sync]: EntityDecoder[F, Json] = CirceInstances.defaultJsonDecoder }
reactormonk/http4s
circe/src/main/scala/org/http4s/circe/package.scala
Scala
apache-2.0
291
package org.scalawiki.wlx import org.scalawiki.dto.Image import org.scalawiki.wlx.dto.{Contest, Monument} import org.scalawiki.wlx.stat.Output import org.specs2.mutable.Specification class GallerySpec extends Specification { val contest = Contest.WLMUkraine(2015) val uploadConfig = contest.uploadConfigs.head val listConfig = uploadConfig.listConfig // "Gallery" should { // "by monument id" in { // val monument1 = Monument(id = "01-111-1111", name = "[[name1]]", photo = Some("Img1.jpg"), listConfig = Some(listConfig)) // val monument2 = Monument(id = "05-111-1111", name = "[[article2|name2]]", listConfig = Some(listConfig)) // val monument3 = Monument(id = "05-111-1112", name = "name3", listConfig = Some(listConfig)) // val monuments = Seq(monument1, monument2, monument3) // val text = "header\\n" + monuments.map(_.asWiki()).mkString + "\\nfooter" // // val images = Seq( // Image("File:Img1.jpg", size = Some(10 ^ 6), width = Some(2048), height = Some(1024), monumentIds = List("01-111-1111")), // Image("File:Img2.jpg", size = Some(10 ^ 6), width = Some(1280), height = Some(1024), monumentIds = List("05-111-1111")), // Image("File:Img2sm.jpg", size = Some(10 ^ 6), width = Some(1024), height = Some(768), monumentIds = List("05-111-1111")) // ) // val monumentDb = new MonumentDB(contest, monuments) // val imageDb = new ImageDB(contest, images, Some(monumentDb)) // // val expected = // """== [[:uk:ะ’ั–ะบั–ะฟะตะดั–ั:ะ’ั–ะบั– ะปัŽะฑะธั‚ัŒ ะ—ะตะผะปัŽ/ะะฒั‚ะพะฝะพะผะฝะฐ ะ ะตัะฟัƒะฑะปั–ะบะฐ ะšั€ะธะผ|ะะฒั‚ะพะฝะพะผะฝะฐ ะ ะตัะฟัƒะฑะปั–ะบะฐ ะšั€ะธะผ]] == // |Rating: '''1''' = '''1''' old for author ids // |=== ะะฒั‚ะพะฝะพะผะฝะฐ ะ ะตัะฟัƒะฑะปั–ะบะฐ ะšั€ะธะผ old ids: 1 === // |==== 01-111-1111 ==== // |[[:uk:name1]] // |<gallery> // |File:Img1.jpg // |</gallery> // |== [[:uk:ะ’ั–ะบั–ะฟะตะดั–ั:ะ’ั–ะบั– ะปัŽะฑะธั‚ัŒ ะ—ะตะผะปัŽ/ะ’ั–ะฝะฝะธั†ัŒะบะฐ ะพะฑะปะฐัั‚ัŒ|ะ’ั–ะฝะฝะธั†ัŒะบะฐ ะพะฑะปะฐัั‚ัŒ]] == // |Rating: '''1''' = '''1''' old for author ids // |=== ะ’ั–ะฝะฝะธั†ัŒะบะฐ 
ะพะฑะปะฐัั‚ัŒ old ids: 1 === // |==== 05-111-1111 ==== // |[[:uk:article2|name2]] // |<gallery> // |File:Img2.jpg // |File:Img2sm.jpg // |</gallery>""".stripMargin // // val actual = Output.galleryByRegionAndId(monumentDb, imageDb, imageDb) // // compare this way to work across different line endings // actual.linesIterator.toBuffer === expected.linesIterator.toBuffer // } // } }
intracer/scalawiki
scalawiki-wlx/src/test/scala/org/scalawiki/wlx/GallerySpec.scala
Scala
apache-2.0
2,634
package org.singingwizard.genetics.prographs import scala.annotation.tailrec import org.singingwizard.util.collections.RandomSelection._ import org.singingwizard.util.collections.MetricOperations._ case class InterfacedGraph(inputs: Set[PortOnBlock[_]], outputs: Set[PortOnBlock[_]], graph: Graph) { def interfaceDifference(o: InterfacedGraph): Double = { val Seq(inLen1, inLen2) = Seq(this, o).map(_.inputs.size) val Seq(outLen1, outLen2) = Seq(this, o).map(_.outputs.size) val Seq(inTypes1, inTypes2) = Seq(this, o).map(_.inputs.map(_.port.tpe)) val Seq(outTypes1, outTypes2) = Seq(this, o).map(_.outputs.map(_.port.tpe)) val inDiffs = inTypes1.differenceVector(inTypes2, 1.0) val outDiffs = outTypes1.differenceVector(outTypes2, 1.0) val diffVec = inDiffs ++ outDiffs // This addition to the above expression prefers interfaces with the same sizes. val sizeCoords = Seq((inLen1 - inLen2).abs.toDouble / (inLen1 + inLen2), (outLen1 - outLen2).abs.toDouble / (outLen1 + outLen2)) //println(diffVec) (diffVec).pNorm(2) } def interface = inputs ++ outputs def connect(o: InterfacedGraph): Graph = { def connectionsForTpe[T](tpe: Type[T]): Seq[Connection[T]] = { def ports(ps: Traversable[PortOnBlock[_]]) : Seq[PortOnBlock[T]] = { val ps1 = ps collect { case p if p.port.tpe == tpe => p.asInstanceOf[PortOnBlock[T]] } ps1.toSeq.shuffle } val pairings = (ports(this.outputs) zip ports(o.inputs)) ++ (ports(o.outputs) zip ports(this.inputs)) val connections = pairings map { case (out, in) => out --> in } connections } val tpes = (this.interface ++ o.interface).map(_.port.tpe).toSet val connections = tpes.flatMap((t: Type[_]) => connectionsForTpe(t): Seq[Connection[_]]) this.graph ++ o.graph addConnections connections } } object GraphCutting { type Node = Set[AnyBlock] case class Edge(a: Node, connections: Set[AnyConnection], b: Node) { override def hashCode() = a.hashCode() + b.hashCode() + (connections.hashCode() * 37) override def equals(o: Any) = o match { case Edge(`a`, `connections`, 
`b`) โ‡’ true case Edge(`b`, `connections`, `a`) โ‡’ true case _ โ‡’ false } override def toString() = s"Edge($a, $b)" def isIncident(n: Node) = a == n || b == n } object EdgeList extends ((Set[Edge]) โ‡’ EdgeList) { def apply(edges: Set[Edge]): EdgeList = { val edges1 = edges.groupBy(e โ‡’ Set(e.a, e.b)) map { case (s, es) โ‡’ val Seq(a, b) = if (s.size == 2) s.toSeq else Seq(s.head, s.head) //pprint.pprintln((a, b, es.size, es)) Edge(a, es.flatMap(_.connections), b) } new EdgeList(edges1.toSet) } def unapply(e: EdgeList): Option[Set[Edge]] = Some(e.edges) override def toString = getClass.getName.split("""\\$""").reverse.dropWhile(x โ‡’ { val char = x.take(1).head; !((char == '_') || char.isLetter) }).head } class EdgeList private (val edges: Set[Edge]) { assert { edges.forall(x โ‡’ edges.forall({ y โ‡’ val b = x.copy(connections = Set()) != y.copy(connections = Set()) || x == y assert(b, s"$x and $y have the same end points.") b })) } def incidentEdges(n: Node) = edges.filter(_.isIncident(n)) def contract(e: Edge) = { val a = e.a val b = e.b val n = a ++ b EdgeList(edges flatMap { case `e` โ‡’ None case Edge(`a` | `b`, cons, b1) โ‡’ Some(Edge(n, cons, b1)) case Edge(a1, cons, `a` | `b`) โ‡’ Some(Edge(a1, cons, n)) case e โ‡’ Some(e) }) } override def toString() = edges.mkString("EdgeList(", ", ", ")") override def hashCode() = edges.hashCode() override def equals(o: Any) = o match { case EdgeList(`edges`) โ‡’ true case _ โ‡’ false } } def normal(mu: Double, delta: Double)(x: Double) = { val x_mu = (x - mu) math.exp(-x_mu * x_mu / (2 * delta * delta)) } } trait GraphCutting { this: Graph โ‡’ import GraphCutting._ /** Cut this graph into two graphs with specified interfaces. * * An interface is a set of input ports and a set of output ports. * * The cut is selected randomly. 
*/ def randomCut(): (InterfacedGraph, Set[AnyConnection], InterfacedGraph) = { @tailrec def contractToTwo(edgeList: EdgeList): EdgeList = { if (edgeList.edges.size == 1) edgeList else contractToTwo(edgeList.contract(edgeList.edges.random())) } val edges = connections.map(c โ‡’ Edge(Set(c.src.block), Set(c), Set(c.dst.block))) val edgeList = EdgeList(edges) val res = contractToTwo(edgeList) assert(res.edges.size == 1) val Edge(s1, cs, s2) = res.edges.head val g1 = this removeBlocks s2 val g2 = this removeBlocks s1 def computeInterface(g: Graph): InterfacedGraph = { val ins = cs.map(_.dst).filter(p โ‡’ g.blocks contains p.block) val outs = cs.map(_.src).filter(p โ‡’ g.blocks contains p.block) InterfacedGraph(ins, outs, g) } (computeInterface(g1), cs, computeInterface(g2)) } private val N_TO_GENERATE = 10 def randomBestInterfacedSubgraph(): InterfacedGraph = { val graphs = (0 to N_TO_GENERATE).flatMap { _ โ‡’ val (g1, cs, g2) = this.randomCut() Seq(g1, g2) } val graphSizeCurve = normal(this.blocks.size * 0.25, this.blocks.size * 0.10) _ val interfaceSizeCurve = normal(this.blocks.map(_.ports.size).pNorm(2) * 1.2 + 1, 3) _ graphs maxBy { g โ‡’ val interfaceSize = g.inputs.size + g.outputs.size val graphSize = g.graph.blocks.size graphSizeCurve(graphSize) + interfaceSizeCurve(interfaceSize) } } @tailrec final def randomInterfacedSubgraph(n: Int = N_TO_GENERATE*10): (InterfacedGraph, InterfacedGraph) = { val idealSize = this.blocks.size * 0.25 val g = { val (g1, _, g2) = this.randomCut() (g1, g2) } def acceptable(gg: InterfacedGraph): Boolean = { val interfaceSize = gg.inputs.size + gg.outputs.size val graphSize = gg.graph.blocks.size interfaceSize >= 2 && graphSize >= 2 } if (n <= 0 || (acceptable(g._1) && acceptable(g._2))) g else randomInterfacedSubgraph(n - 1) } def randomMatchingSubgraphs(o: Graph) = { val Seq(g1Cuts, g2Cuts) = Seq(this, o).map(g โ‡’ (0 to N_TO_GENERATE).map { i โ‡’ val gr = g.randomInterfacedSubgraph() // println(s"========= Cut $i (graph size 
${gr.graph.blocks.size})") // pprint.pprintln(gr.inputs) // pprint.pprintln(gr.outputs) // pprint.pprintln(gr.graph) gr }) val cross = for (a โ† g1Cuts ++ g1Cuts.map(_.swap); b โ† g2Cuts ++ g2Cuts.map(_.swap)) yield (a, b) val ((mA, _), (_, mB)) = cross minBy { case ((x1, x2), (y1, y2)) โ‡’ x1.interfaceDifference(y1) } (mA, mB) } }
arthurp/genetic-prographs
prographs/src/org/singingwizard/genetics/prographs/GraphCutting.scala
Scala
gpl-3.0
7,079
package de.kaufhof.pillar import java.util.Date import com.datastax.driver.core.Session import com.datastax.driver.core.querybuilder.QueryBuilder object Migration { def apply(description: String, authoredAt: Date, up: String): Migration = { new IrreversibleMigration(description, authoredAt, up) } def apply(description: String, authoredAt: Date, up: String, down: Option[String]): Migration = { down match { case Some(downStatement) => new ReversibleMigration(description, authoredAt, up, downStatement) case None => new ReversibleMigrationWithNoOpDown(description, authoredAt, up) } } } trait Migration { val description: String val authoredAt: Date val up: String def key: MigrationKey = MigrationKey(authoredAt, description) def authoredAfter(date: Date): Boolean = { authoredAt.after(date) } def authoredBefore(date: Date): Boolean = { authoredAt.compareTo(date) <= 0 } def executeUpStatement(session: Session) { session.execute(up) insertIntoAppliedMigrations(session) } def executeDownStatement(session: Session) protected def deleteFromAppliedMigrations(session: Session) { session.execute(QueryBuilder. delete(). from("applied_migrations"). where(QueryBuilder.eq("authored_at", authoredAt)). and(QueryBuilder.eq("description", description)) ) } private def insertIntoAppliedMigrations(session: Session) { session.execute(QueryBuilder. insertInto("applied_migrations"). value("authored_at", authoredAt). value("description", description). 
value("applied_at", System.currentTimeMillis()) ) } } class IrreversibleMigration(val description: String, val authoredAt: Date, val up: String) extends Migration { def executeDownStatement(session: Session) { throw new IrreversibleMigrationException(this) } } class ReversibleMigrationWithNoOpDown(val description: String, val authoredAt: Date, val up: String) extends Migration { def executeDownStatement(session: Session) { deleteFromAppliedMigrations(session) } } class ReversibleMigration(val description: String, val authoredAt: Date, val up: String, val down: String) extends Migration { def executeDownStatement(session: Session) { session.execute(down) deleteFromAppliedMigrations(session) } }
j-potts/pillar
src/main/scala/de/kaufhof/pillar/Migration.scala
Scala
mit
2,350
package org.openurp.edu.eams.teach.grade.transcript.service.impl

import org.beangle.commons.collection.Collections
import org.openurp.edu.base.Student
import org.openurp.edu.eams.teach.grade.service.CourseGradeProvider
import org.openurp.edu.eams.teach.grade.service.impl.GradeFilter
import org.openurp.edu.eams.teach.grade.service.impl.GradeFilterRegistry
import org.openurp.edu.eams.teach.grade.transcript.service.TranscriptDataProvider
import org.openurp.edu.teach.grade.CourseGrade

/**
 * Supplies published course grades as transcript data, optionally passing
 * them through a configurable chain of grade filters.
 *
 * Filters are looked up from the registry via the "grade.filters" option.
 * Dependencies are injected through the set* methods (Spring-style setter
 * injection, hence the mutable fields).
 */
class TranscriptPublishedGradeProvider extends TranscriptDataProvider {

  // Injected collaborators; populated via setter injection before use.
  private var gradeFilterRegistry: GradeFilterRegistry = _

  private var courseGradeProvider: CourseGradeProvider = _

  // Key under which this provider's data is exposed to transcript templates.
  def getDataName(): String = "grades"

  /**
   * Published grades for a single student, after applying any configured
   * filters in registration order.
   */
  def getData[T](std: Student, options: Map[String, String]): T = {
    var grades = courseGradeProvider.getPublished(std)
    val matched = getFilters(options)
    // Each filter narrows the grade list produced by the previous one.
    for (filter <- matched) grades = filter.filter(grades)
    grades.asInstanceOf[T]
  }

  /**
   * Published grades for a batch of students, keyed by student, with the
   * same filter chain applied per student.
   */
  def getDatas[T](stds: List[Student], options: Map[String, String]): Map[Student, T] = {
    // NOTE(review): datas is created as a map of Any and later returned as
    // Map[Student, T]; presumably Collections.newMap yields a beangle map
    // compatible with both — verify against the beangle-commons API.
    val datas = Collections.newMap[Any]
    val matched = getFilters(options)
    val gradeMap = courseGradeProvider.getPublished(stds)
    for (std <- stds) {
      // NOTE(review): if gradeMap is a scala.collection.Map this yields an
      // Option, not the grade list; looks like this code assumes a direct
      // (Java-style) lookup — confirm CourseGradeProvider's return type.
      var grades = gradeMap.get(std)
      for (filter <- matched) grades = filter.filter(grades)
      datas.put(std, grades.asInstanceOf[T])
    }
    datas
  }

  /**
   * Resolves the filter chain named by the "grade.filters" option; empty
   * when no options are supplied.
   */
  protected def getFilters(options: Map[String, String]): List[GradeFilter] = {
    if (null == options || options.isEmpty) return Collections.emptyList()
    // NOTE(review): on a Scala Map, options.get returns Option[String];
    // getFilters presumably accepts that (or a nullable string) — confirm
    // against GradeFilterRegistry.
    gradeFilterRegistry.getFilters(options.get("grade.filters"))
  }

  // Setter injection point for the filter registry.
  def setGradeFilterRegistry(gradeFilterRegistry: GradeFilterRegistry) {
    this.gradeFilterRegistry = gradeFilterRegistry
  }

  // Setter injection point for the grade provider.
  def setCourseGradeProvider(courseGradeProvider: CourseGradeProvider) {
    this.courseGradeProvider = courseGradeProvider
  }
}
openurp/edu-eams-webapp
grade/src/main/scala/org/openurp/edu/eams/teach/grade/transcript/service/impl/TranscriptPublishedGradeProvider.scala
Scala
gpl-3.0
1,874
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.execution

import java.util.NoSuchElementException

import org.apache.spark.sql.catalyst.InternalRow

/**
 * An internal iterator interface which presents a more restrictive API than
 * [[scala.collection.Iterator]].
 *
 * Unlike Scala's iterator, which lets callers test `hasNext()` without
 * consuming a row, this interface fuses testing and advancing into a single
 * [[advanceNext()]] call; the current row is then read via [[getRow]].
 */
abstract class RowIterator {
  /**
   * Moves to the next row. Returns `true` when a row is available (readable
   * via [[getRow]]) and `false` when the iterator is exhausted.
   */
  def advanceNext(): Boolean

  /**
   * The row positioned by the last successful [[advanceNext()]]. Idempotent;
   * must not be called after [[advanceNext()]] has returned `false`.
   */
  def getRow: InternalRow

  /** Adapts this RowIterator to a standard [[scala.collection.Iterator]]. */
  def toScala: Iterator[InternalRow] = new RowIteratorToScala(this)
}

object RowIterator {
  /**
   * Adapts a Scala iterator to a RowIterator, unwrapping rather than
   * re-wrapping when the iterator is itself an adapted RowIterator.
   */
  def fromScala(scalaIter: Iterator[InternalRow]): RowIterator = scalaIter match {
    case wrapped: RowIteratorToScala => wrapped.rowIter
    case other => new RowIteratorFromScala(other)
  }
}

private final class RowIteratorToScala(val rowIter: RowIterator) extends Iterator[InternalRow] {
  // True while an advance has been performed but its row not yet consumed;
  // makes hasNext idempotent despite the underlying fused advance/test API.
  private[this] var advancePending: Boolean = false
  // Result of the pending advance; only meaningful while advancePending.
  private[this] var advanceHadRow: Boolean = false

  override def hasNext: Boolean = {
    // Advance at most once per consumed row.
    if (!advancePending) {
      advanceHadRow = rowIter.advanceNext()
      advancePending = true
    }
    advanceHadRow
  }

  override def next(): InternalRow = {
    if (!hasNext) throw new NoSuchElementException
    // Consuming the row releases the pending advance.
    advancePending = false
    rowIter.getRow
  }
}

private final class RowIteratorFromScala(scalaIter: Iterator[InternalRow]) extends RowIterator {
  // Row positioned by the last successful advanceNext(); null when exhausted
  // or before the first advance.
  private[this] var currentRow: InternalRow = null

  override def advanceNext(): Boolean =
    if (scalaIter.hasNext) {
      currentRow = scalaIter.next()
      true
    } else {
      currentRow = null
      false
    }

  override def getRow: InternalRow = currentRow

  // Round-tripping back to Scala returns the original iterator unchanged.
  override def toScala: Iterator[InternalRow] = scalaIter
}
bravo-zhang/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/RowIterator.scala
Scala
apache-2.0
3,170
package net.scalax.ubw.database.test

import net.scalax.ubw.core.AtomicPathImpl
import net.scalax.ubw.json.operation.{ FDefaultAtomicHelper, FPropertyAtomicHelper }
import net.scalax.ubw.mix.helpers.{ Slick2JsonFsnImplicit, SlickCRUDImplicits }
import net.scalax.ubw.slick.helpers.{ FJsonAtomicHelper, FStrSelectExtAtomicHelper, StrFSSelectAtomicHelper }
import net.scalax.ubw.slick.model.{ ColumnOrder, JsonOut, JsonView }

import scala.concurrent.ExecutionContext.Implicits.global
import scala.language.implicitConversions
import slick.jdbc.H2Profile.api._

import scala.concurrent._

/**
 * Sample: builds a JSON projection over FriendTable with sortable columns,
 * then runs it twice against an in-memory H2 database — once with the
 * default ordering and once with explicit ColumnOrder overrides.
 *
 * NOTE(review): all queries run eagerly at object initialization via
 * Await.result(..., Duration.Inf); acceptable for a demo, but blocking
 * indefinitely in object init is not suitable for production code.
 */
object Sample05 extends SlickCRUDImplicits with StrFSSelectAtomicHelper with Slick2JsonFsnImplicit {

  // Enriches an atomic path with the JSON / select / property / default
  // helper mixins so the DSL methods below (.writeJ, .order, ...) resolve.
  implicit def fPilesOptionImplicit[D](path: AtomicPathImpl[D]): FJsonAtomicHelper[D] with FStrSelectExtAtomicHelper[D] with FPropertyAtomicHelper[D] with FDefaultAtomicHelper[D] = {
    val path1 = path
    new FJsonAtomicHelper[D] with FStrSelectExtAtomicHelper[D] with FPropertyAtomicHelper[D] with FDefaultAtomicHelper[D] {
      override val path = path1
    }
  }

  // Column projection: JSON field name -> table column, with ordering and
  // description metadata. Note "name" delegates its sort order to "nick"
  // via orderTarget.
  val fQuery = for {
    friend <- FriendTable.out
  } yield {
    List(
      "id" ofPile friend.id.out.order.describe("่‡ชๅขžไธป้”ฎ").writeJ,
      "name" ofPile friend.name.out.orderTarget("nick").describe("ๆ˜ต็งฐ").writeJ,
      "nick" ofPile friend.nick.out.order.describe("ๆ˜ต็งฐ").writeJ,
      "ageOpt" ofPile friend.age.out.writeJ
    )
  }

  // First run: seed the data, then render the default (unordered) view.
  val result1: JsonOut = fQuery.strResult

  val view1: DBIO[JsonView] = result1.toView

  Await.result(Helper.db.run {
    Helper.initData
      .flatMap { _ =>
        view1.map { s =>
          Helper.prettyPrint(s)
        }
      }
  }, duration.Duration.Inf)

  // Second run: same query with explicit multi-column ordering
  // (name asc, id desc, ageOpt desc), reusing the data seeded above.
  val view2: DBIO[JsonView] = fQuery.addOrders(List(ColumnOrder("name", true), ColumnOrder("id", false), ColumnOrder("ageOpt", false))).strResult.toView

  Await.result(Helper.db.run {
    view2.map { s =>
      Helper.prettyPrint(s)
    }
  }, duration.Duration.Inf)
}
scalax/fsn
sample/commonSlick/src/main/scala/Sample05.scala
Scala
mit
2,000
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.security

import java.net.{InetAddress, ServerSocket, Socket}

import scala.concurrent.Promise
import scala.concurrent.duration.Duration
import scala.language.existentials
import scala.util.Try

import org.apache.spark.SparkEnv
import org.apache.spark.network.util.JavaUtils
import org.apache.spark.util.ThreadUtils

/**
 * A single-use, in-JVM server for exchanging one batch of data with an
 * external process (e.g., Python or R), with client authentication and
 * error propagation.
 */
private[spark] abstract class SocketAuthServer[T](
    authHelper: SocketAuthHelper,
    threadName: String) {

  def this(env: SparkEnv, threadName: String) = this(new SocketAuthHelper(env.conf), threadName)
  def this(threadName: String) = this(SparkEnv.get, threadName)

  // Completed (successfully or with the thrown exception) once the single
  // connection has been handled.
  private val promise = Promise[T]()

  // Route the authenticated connection through handleConnection, capturing
  // its result or failure into the promise.
  private def serveConnection(sock: Socket): Unit =
    promise.complete(Try(handleConnection(sock)))

  val (port, secret) =
    SocketAuthServer.setupOneConnectionServer(authHelper, threadName)(serveConnection)

  /**
   * Handle a connection which has already been authenticated. Any exception
   * raised here tears down the connection and the server, and surfaces from
   * [[getResult]].
   */
  def handleConnection(sock: Socket): T

  /**
   * Waits (indefinitely) for [[handleConnection]] to finish and returns its
   * result; a failure in the handler is rethrown wrapped with the original
   * exception as the cause.
   */
  def getResult(): T = getResult(Duration.Inf)

  /** As [[getResult()]], but gives up after `wait`. */
  def getResult(wait: Duration): T = ThreadUtils.awaitResult(promise.future, wait)
}

private[spark] object SocketAuthServer {

  /**
   * Starts a loopback socket server on a daemon thread and runs `func` on
   * the first (and only) authenticated connection.
   *
   * The server accepts at most one connection and shuts down if none
   * arrives within 15 seconds. The thread exits after `func` returns or on
   * any exception. Use a [[SocketAuthServer]] subclass if the function's
   * result is needed.
   *
   * @return the server's local port and the authentication secret.
   */
  def setupOneConnectionServer(
      authHelper: SocketAuthHelper,
      threadName: String)
      (func: Socket => Unit): (Int, String) = {
    // Bind to loopback only, with a backlog of one pending connection.
    val server = new ServerSocket(0, 1, InetAddress.getByAddress(Array(127, 0, 0, 1)))
    // Abort if no client connects within 15 seconds.
    server.setSoTimeout(15000)

    val listener = new Thread(threadName) {
      setDaemon(true)
      override def run(): Unit = {
        var client: Socket = null
        try {
          client = server.accept()
          authHelper.authClient(client)
          func(client)
        } finally {
          // Always release both sockets, even when accept/auth/func fails.
          JavaUtils.closeQuietly(server)
          JavaUtils.closeQuietly(client)
        }
      }
    }
    listener.start()

    (server.getLocalPort, authHelper.secret)
  }
}
Aegeaner/spark
core/src/main/scala/org/apache/spark/security/SocketAuthServer.scala
Scala
apache-2.0
3,706
package utils

import models.daos.UserDAOImpl
import models.services.UserServiceImpl

/**
 * Wiring for the user service layer: supplies a lazily constructed user DAO
 * and a user service built on top of it (manual, mix-in style dependency
 * injection).
 */
trait UserServiceModule {

  /** Data-access object for users; instantiated on first use. */
  lazy val userDAO: UserDAOImpl = new UserDAOImpl

  /** User service backed by [[userDAO]]; instantiated on first use. */
  lazy val userService: UserServiceImpl = new UserServiceImpl(userDAO)
}
yzernik/office-ladder
server/app/utils/UserServiceModule.scala
Scala
mit
206
package latis.reader.tsml

import scala.collection.immutable.{ Range => _ }
import scala.collection.mutable
import scala.collection.Searching._

import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.nio.channels.FileChannel
import java.nio.file._

import latis.data.Data
import latis.data.seq.DataSeq
import latis.dm._
import latis.ops.Operation
import latis.ops.filter._
import latis.reader.tsml.ml._
import latis.time._
import latis.util.LatisServiceException
import latis.util.StringUtils

import ColumnarBinaryAdapter2._

/**
 * Combines separate binary files representing individual Variables
 * into a single Dataset.
 *
 * Each variable's samples live in "<name>.bin" under the dataset URL as
 * contiguous 8-byte values read with the configured byte order.
 */
class ColumnarBinaryAdapter2(tsml: Tsml) extends TsmlAdapter(tsml) {

  // An in-memory copy of a domain variable's values, used to translate
  // selections into index ranges.
  type Index = Array[Double]

  // Byte order of the binary files, from the "byteOrder" tsml property
  // (defaults to big-endian).
  private lazy val order: ByteOrder =
    getProperty("byteOrder", "big-endian") match {
      case "big-endian" => ByteOrder.BIG_ENDIAN
      case "little-endian" => ByteOrder.LITTLE_ENDIAN
    }

  private lazy val domainVars: Seq[VariableMl] = getDomainVars(tsml.dataset)

  // Lazily built indexes, keyed by domain variable name.
  private val indexMap: mutable.Map[String, Index] = mutable.Map()

  // Selected range per domain variable, in domain order; starts as All and
  // is narrowed by applyOperations.
  private val ranges: mutable.LinkedHashMap[String, Range] = {
    val zero = mutable.LinkedHashMap[String, Range]()
    domainVars.foldLeft(zero)(_ += _.getName -> All)
  }

  // Operations claimed by handleOperation, applied in makeOrigDataset.
  private val operations: mutable.ArrayBuffer[Operation] =
    mutable.ArrayBuffer[Operation]()

  // Unconstrained sample count per domain variable: file size divided by 8
  // bytes per sample.
  private val origLength: mutable.LinkedHashMap[String, Int] = {
    val zero = mutable.LinkedHashMap[String, Int]()
    domainVars.foldLeft(zero) { (m, v) =>
      val vname = v.getName
      m += vname -> (getFileLength(vname)/8)
    }
  }

  // Opens "<name>.bin" read-only, applies f, and closes the channel.
  // NOTE(review): the channel is not closed if f throws — consider
  // wrapping in try/finally.
  private def withChannel[A](name: String)(f: FileChannel => A): A = {
    val root = getUrlFile.toPath
    val path = root.resolve(s"${name}.bin")
    val chan = FileChannel.open(path, StandardOpenOption.READ)
    val res = f(chan)
    chan.close()
    res
  }

  // File size in bytes (assumes it fits in an Int).
  private def getFileLength(vname: String): Int =
    withChannel(vname)(_.size().toInt)

  /**
   * Claims Selections and NearestNeighborFilters on domain variables, and
   * FirstFilters; ISO time values are converted to the variable's numeric
   * time scale before being recorded. Everything else is left for the
   * framework to apply.
   */
  override def handleOperation(op: Operation): Boolean = op match {
    case Selection(vname, o, v) if isDomainVar(vname) =>
      val newOp = if (vname == "time" && !StringUtils.isNumeric(v)) {
        val nt = convertTime(vname, v)
        new Selection(vname, o, nt)
      } else { op }
      operations += newOp
      true
    case NearestNeighborFilter(vname, v) if isDomainVar(vname) =>
      val newOp = if (vname == "time" && !StringUtils.isNumeric(v)) {
        val nt = convertTime(vname, v)
        new NearestNeighborFilter(vname, nt)
      } else { op }
      operations += newOp
      true
    case _: FirstFilter =>
      operations += op
      true
    case _ => false
  }

  private def isDomainVar(vname: String): Boolean =
    domainVars.exists(_.hasName(vname))

  // Converts an ISO 8601 time string to a numeric value on the domain
  // variable's time scale (taken from its "units" metadata).
  private def convertTime(vname: String, value: String): String = {
    val domainVar = domainVars.find(_.hasName(vname)).get
    val units = domainVar.getMetadataAttributes.get("units").getOrElse {
      val msg = "Time variable must have units."
      throw new UnsupportedOperationException(msg)
    }
    val ts = TimeScale(units)
    Time.fromIso(value).convert(ts).getValue.toString
  }

  // Reads the full column for vname into indexMap.
  private def buildIndex(vname: String): Unit =
    indexMap += vname -> readData(vname)

  // Narrows `ranges` by evaluating each recorded operation against the
  // corresponding index.
  private def applyOperations: Unit = operations.foreach {
    case Selection(vname, op, value) =>
      indexMap.get(vname).foreach { index =>
        val range = queryIndex(index, op, value.toDouble)
        /*
         * This lookup should never fail (the only Selections
         * allowed are ones with names that come from the set of
         * domain variables used for these keys) but we can't
         * statically prove this.
         */
        ranges += vname -> range.intersect(ranges(vname))
      }
    case NearestNeighborFilter(vname, value) =>
      indexMap.get(vname).foreach { index =>
        val range = queryIndex(index, "~", value.toDouble)
        /*
         * This lookup should never fail (the only
         * NearestNeighborFilters allowed are ones with names that
         * come from the set of domain variables used for these
         * keys) but we can't statically prove this.
         */
        ranges += vname -> range.intersect(ranges(vname))
      }
    case _: FirstFilter =>
      // "First" restricts the outermost domain variable to its first sample.
      val range = Bounds(0, 0)
      val vname = ranges.head._1
      ranges += vname -> ranges(vname).compose(range)
  }

  /*
   * We need to set the "length" of the inner function, which is
   * defined in the TSML, to the correct value based on the selections
   * we were given. This means that we need to have read the indices
   * and applied our operations before we can return the original
   * Dataset, because we cannot update metadata after that.
   */
  override def makeOrigDataset: Dataset = {
    val ds = super.makeOrigDataset

    // Build indices for domain variables that have some sort of
    // selection on them.
    operations.collect {
      case Selection(vname, _, _) => vname
      case NearestNeighborFilter(vname, _) => vname
    }.distinct.foreach(buildIndex(_))

    applyOperations

    // Collect the length of each variable.
    val rs: Seq[Int] = ranges.toSeq.collect {
      case (_, r: Bounds) => r.length
      case (k, _) => getFileLength(k)/8
    }

    // This is effectively treating the dataset as though it were
    // uncurried and limiting the number of samples in the uncurried
    // dataset.
    getProperty("limit").foreach { limit =>
      if (rs.product > limit.toInt) {
        throw new LatisServiceException(
          s"Limit exceeded: requested ${rs.product} samples, allowed ${limit.toInt}"
        )
      }
    }

    replaceLengthMetadata(ds, rs)
  }

  // Rewrites each nested Function's "length" metadata with the
  // corresponding entry of rs (outermost first).
  // TODO: Copied from NetcdfAdapter3.
  private def replaceLengthMetadata(ds: Dataset, rs: Seq[Int]): Dataset = {
    def go(v: Variable, rs: Seq[Int]): Variable = v match {
      case f: Function =>
        val md = if (rs.head > 0) {
          f.getMetadata + ("length", s"${rs.head}")
        } else {
          f.getMetadata
        }
        Function(f.getDomain, go(f.getRange, rs.tail), md)
      case t @ Tuple(vs) =>
        Tuple(vs.map(v => go(v, rs)), t.getMetadata)
      case _ => v
    }
    ds match {
      case Dataset(v) => Dataset(go(v, rs), ds.getMetadata)
    }
  }

  // Convenience method for reading data to build the index.
  private def readData(vname: String): Array[Double] =
    readData(vname, Seq(All))

  // Reads vname's samples restricted to the given ranges (one per
  // dimension); only 1D and 2D reads are supported.
  private def readData(vname: String, rs: Seq[Range]): Array[Double] =
    rs.length match {
      case 0 => Array[Double]()
      case 1 => rs(0) match {
        case Empty => Array[Double]()
        case All => readData1D(vname, Bounds(0, origLength(vname) - 1))
        case r: Bounds => readData1D(vname, r)
      }
      case 2 => (rs(0), rs(1)) match {
        case (Empty, _) | (_, Empty) => Array[Double]()
        case (All, r) =>
          // Expand All to the full extent of the first dimension.
          val len = origLength.toSeq(0)._2 - 1
          val r2 = Bounds(0, len)
          readData(vname, Seq(r2, r))
        case (r, All) =>
          // Expand All to the full extent of the second dimension.
          val len = origLength.toSeq(1)._2 - 1
          val r2 = Bounds(0, len)
          readData(vname, Seq(r, r2))
        case (r1: Bounds, r2: Bounds) =>
          readData2D(vname, Seq(r1, r2))
      }
      case _ =>
        val msg = "Can only read data up to 2D."
        throw new UnsupportedOperationException(msg)
    }

  // Reads a contiguous run of 8-byte samples [r.lower, r.upper].
  def readData1D(vname: String, r: Bounds): Array[Double] =
    withChannel(vname) { channel =>
      val nSamples = r.length
      val arr: Array[Double] = Array.ofDim(nSamples)
      val bytes = ByteBuffer.allocate(nSamples * 8).order(order)

      // Skip to where we will start reading.
      channel.position(r.lower * 8)
      channel.read(bytes)
      bytes.rewind()
      bytes.asDoubleBuffer.get(arr)
      arr
    }

  // Reads a rectangular sub-block of a row-major 2D variable, one row
  // segment per outer index.
  private def readData2D(name: String, rs: Seq[Bounds]): Array[Double] =
    withChannel(name) { channel =>
      val outer = rs(0)
      val inner = rs(1)
      val nSamples = outer.length * inner.length
      val arr: Array[Double] = Array.ofDim(nSamples)

      // To read this we need to wrap a byte array.
      val bytes: Array[Byte] = Array.ofDim(nSamples * 8)
      for (i <- outer.lower to outer.upper) {
        val col = inner.lower
        val row = i
        // Flat (row-major) sample offset of this row's first column.
        val ind = {
          val origLen = origLength.toSeq(1)._2
          (row * origLen) + col
        }
        val len = inner.length * 8
        val off = (i - outer.lower) * len
        val bb = ByteBuffer.wrap(bytes, off, len).order(order)
        channel.position(ind * 8)
        channel.read(bb)
      }
      val bb = ByteBuffer.wrap(bytes).order(order)
      bb.asDoubleBuffer.get(arr)
      arr
    }

  // Reads one variable (restricted by the computed ranges) into the
  // adapter cache.
  private def readIntoCache(vname: String): Unit = {
    /*
     * Making the following assumption: If the variable we're reading
     * isn't a domain variable, we assume the variable is a function
     * of all the domain variables.
     */
    val rs: Seq[Range] = ranges.get(vname) match {
      case Some(r) => Seq(r)
      case None => ranges.toSeq.map { case (_, v) => v }
    }
    val data = readData(vname, rs).map(Data(_))
    cache(vname, DataSeq(data))
  }

  // Eagerly reads every scalar into the cache when the adapter starts.
  override def init: Unit =
    getOrigScalars.foreach(s => readIntoCache(s.getName))

  // Channels are opened and closed per read, so nothing to release here.
  override def close: Unit = ()
}

object ColumnarBinaryAdapter2 {

  // Assuming that the data are ordered in ascending order.
  //
  // TODO: Copied from NetcdfAdapter3 but uses a different range type.
  //
  // Translates a comparison (op, v) against a sorted index into the Range
  // of positions that satisfy it, via binary search.
  def queryIndex(index: Array[Double], op: String, v: Double): Range = {
    val len = index.length
    if (len > 0) {
      index.search(v) match {
        case Found(i) => op match {
          case ">" =>
            if (i+1 < len) { Bounds(i+1, len-1) } else { Empty }
          case ">=" => Bounds(i, len-1)
          case "=" | "~" => Bounds(i, i)
          case "<=" => Bounds(0, i)
          case "<" =>
            if (i-1 >= 0) { Bounds(0, i-1) } else { Empty }
        }
        case InsertionPoint(i) => op match {
          case ">" | ">=" =>
            if (i < len) { Bounds(i, len-1) } else { Empty }
          case "=" => Empty
          case "~" =>
            if (i == 0) {
              // i = 0 implies our query is smaller than the smallest
              // value in the index
              Bounds(0, 0)
            } else if (i == len) {
              // i = len implies our query is larger than the largest
              // value in the index
              Bounds(len-1, len-1)
            } else {
              // Here we must determine the value in the index nearest
              // to the queried value.

              // We've already handled the i = 0 case, so i-1 should
              // be safe to access.
              val a = index(i-1)
              val b = index(i)

              // a < v < b
              // If v is equidistant from a and b (v - a = b - v), we
              // will round down. This is to be consistent with the
              // NearestNeighborInterpolation strategy.
              if (v - a <= b - v) {
                Bounds(i-1, i-1)
              } else {
                Bounds(i, i)
              }
            }
          case "<" | "<=" =>
            if (i > 0) { Bounds(0, i-1) } else { Empty }
        }
      }
    } else {
      Empty
    }
  }

  /**
   * Return a sequence of VariableMl corresponding to the domain
   * variables for this DatasetMl.
   */
  // TODO: This is shared with NetcdfAdapter3.
  def getDomainVars(ds: DatasetMl): Seq[VariableMl] = {
    def go(vml: VariableMl, acc: Seq[VariableMl]): Seq[VariableMl] = {
      vml match {
        case f: FunctionMl => go(f.range, acc :+ f.domain)
        //TODO: consider tuple domain
        case t: TupleMl => t.variables.map(go(_,acc)).flatten
        case _ => acc
      }
    }
    go(ds.getVariableMl, Seq.empty)
  }

  // A (possibly degenerate) inclusive index interval: everything, nothing,
  // or explicit bounds.
  sealed abstract class Range {
    // Intersection of two ranges; Empty dominates, All is the identity.
    def intersect(r2: Range): Range = (this, r2) match {
      case (Empty, _) => Empty
      case (_, Empty) => Empty
      case (r1, All) => r1
      case (All, r2) => r2
      case (Bounds(l1, u1), Bounds(l2, u2)) =>
        Bounds(Math.max(l1, l2), Math.min(u1, u2))
    }

    // Applies r2 as a relative slice within this range (r2's bounds are
    // positions inside this range, not absolute indexes).
    def compose(r2: Range): Range = (this, r2) match {
      case (Empty, _) => Empty
      case (_, Empty) => Empty
      case (r1, All) => r1
      case (All, r2) => r2
      case (Bounds(l1, u1), Bounds(l2, u2)) =>
        val vec = Vector.range(l1, u1 + 1).slice(l2, u2 + 1)
        if (vec.length > 0) {
          Bounds(vec.head, vec.last)
        } else {
          Empty
        }
    }
  }

  final case object All extends Range

  final case object Empty extends Range

  // Inclusive [lower, upper] index interval.
  final case class Bounds(lower: Int, upper: Int) extends Range {
    def length: Int = (upper - lower) + 1
  }
}
dlindhol/LaTiS
src/main/scala/latis/reader/tsml/ColumnarBinaryAdapter2.scala
Scala
epl-1.0
13,063
package com.github.mdr.mash.view.model

import com.github.mdr.mash.classes.MashClass
import com.github.mdr.mash.functions.MashFunction
import com.github.mdr.mash.ns.core.help.{ FieldHelpClass, MethodHelpClass }
import com.github.mdr.mash.view.model.TwoDTableModelCreator.isSuitableForTwoDTable
import com.github.mdr.mash.runtime.{ MashList, MashObject, MashValue }
import com.github.mdr.mash.utils.Dimensions
import com.github.mdr.mash.view.ViewConfig

/** Marker trait for models that can be rendered on screen. */
trait DisplayModel

object DisplayModel {

  /**
   * Chooses and builds the display model appropriate for a runtime value:
   * help views for method/field help objects, classes and functions; a 2D
   * table for tabular values; a single-object table for non-empty objects;
   * text lines for lists; and a plain value view for everything else.
   * Match order matters — earlier, more specific cases win.
   */
  def getDisplayModel(value: MashValue, viewConfig: ViewConfig, terminalSize: Dimensions): DisplayModel = {
    // Built on demand so nothing is constructed for non-help values.
    def helpCreator = new HelpModelCreator(terminalSize, viewConfig)
    value match {
      case obj: MashObject if obj.classOpt contains MethodHelpClass =>
        helpCreator.createForMethod(obj)
      case obj: MashObject if obj.classOpt contains FieldHelpClass =>
        helpCreator.createForField(obj)
      case klass: MashClass =>
        helpCreator.createForClass(klass)
      case f: MashFunction =>
        helpCreator.createForFunction(f)
      case _ if isSuitableForTwoDTable(value) =>
        new TwoDTableModelCreator(terminalSize, supportMarking = true, viewConfig).create(value)
      case obj: MashObject if obj.nonEmpty =>
        new SingleObjectTableModelCreator(terminalSize, supportMarking = true, viewConfig).create(obj)
      case xs: MashList =>
        new TextLinesModelCreator(viewConfig).create(xs)
      case other =>
        new ValueModelCreator(viewConfig).create(other)
    }
  }
}
mdr/mash
src/main/scala/com/github/mdr/mash/view/model/DisplayModel.scala
Scala
mit
1,821