repo_name
stringlengths 6
97
| path
stringlengths 3
341
| text
stringlengths 8
1.02M
|
|---|---|---|
gaborh-da/daml
|
ledger/sandbox/src/main/scala/com/digitalasset/platform/sandbox/services/transaction/SandboxTransactionService.scala
|
// Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.digitalasset.platform.sandbox.services.transaction
import java.util.concurrent.atomic.AtomicLong
import akka.NotUsed
import akka.stream.Materializer
import akka.stream.scaladsl.Source
import com.digitalasset.api.util.TimestampConversion._
import com.digitalasset.daml.lf.data.Ref.Party
import com.digitalasset.grpc.adapter.ExecutionSequencerFactory
import com.digitalasset.ledger.api.domain._
import com.digitalasset.ledger.api.messages.transaction._
import com.digitalasset.ledger.api.v1.transaction.{Transaction => PTransaction}
import com.digitalasset.ledger.api.v1.transaction_service.{
GetTransactionsResponse,
TransactionServiceLogging
}
import com.digitalasset.ledger.api.validation.PartyNameChecker
import com.digitalasset.ledger.backend.api.v1.LedgerBackend
import com.digitalasset.ledger.backend.api.v1.LedgerSyncEvent.AcceptedTransaction
import com.digitalasset.platform.participant.util.EventFilter
import com.digitalasset.platform.participant.util.EventFilter.TemplateAwareFilter
import com.digitalasset.platform.sandbox.services.transaction.SandboxEventIdFormatter.TransactionIdWithIndex
import com.digitalasset.platform.server.api._
import com.digitalasset.platform.server.api.services.domain.TransactionService
import com.digitalasset.platform.server.api.services.grpc.GrpcTransactionService
import com.digitalasset.platform.server.api.validation.{ErrorFactories, IdentifierResolver}
import com.digitalasset.platform.server.services.transaction._
import io.grpc._
import org.slf4j.LoggerFactory
import scalaz.Tag
import scalaz.syntax.tag._
import scala.collection.breakOut
import scala.concurrent.{ExecutionContext, Future}
/** Factory for the sandbox implementation of the gRPC transaction service. */
object SandboxTransactionService {

  /** Wires a [[SandboxTransactionService]] into the gRPC transport layer.
    *
    * The returned service logs all calls (via `TransactionServiceLogging`) and performs
    * no party-name validation (`PartyNameChecker.AllowAllParties`).
    */
  def createApiService(ledgerBackend: LedgerBackend, identifierResolver: IdentifierResolver)(
      implicit ec: ExecutionContext,
      mat: Materializer,
      esf: ExecutionSequencerFactory)
    : GrpcTransactionService with BindableService with TransactionServiceLogging =
    new GrpcTransactionService(
      new SandboxTransactionService(ledgerBackend),
      ledgerBackend.ledgerId,
      PartyNameChecker.AllowAllParties,
      identifierResolver) with TransactionServiceLogging
}
/** Sandbox-side domain implementation of the ledger API transaction service.
  *
  * Streams flat transactions and transaction trees out of the [[LedgerBackend]] through a
  * [[TransactionPipeline]], and answers point-wise lookups by transaction id or event id.
  * Instances are created via [[SandboxTransactionService.createApiService]].
  */
class SandboxTransactionService private (val ledgerBackend: LedgerBackend, parallelism: Int = 4)(
    implicit executionContext: ExecutionContext,
    materializer: Materializer,
    esf: ExecutionSequencerFactory)
    extends TransactionService
    with AutoCloseable
    with ErrorFactories {

  private val logger = LoggerFactory.getLogger(this.getClass)

  // Monotonically increasing id used only to correlate the log lines of one subscription;
  // it is never exposed to clients.
  private val subscriptionIdCounter = new AtomicLong()

  // NOTE(review): the `parallelism` constructor parameter is not used anywhere in this
  // class as visible here — confirm whether it is dead or consumed elsewhere.
  private val transactionPipeline = TransactionPipeline(ledgerBackend)

  /** Streams flat transactions between the requested offsets, filtered per party/template.
    * Transactions with no events visible to the subscriber are dropped entirely
    * (see [[acceptedToFlat]]).
    */
  @SuppressWarnings(Array("org.wartremover.warts.Option2Iterable"))
  override def getTransactions(
      request: GetTransactionsRequest): Source[GetTransactionsResponse, NotUsed] = {
    val subscriptionId = subscriptionIdCounter.incrementAndGet().toString
    logger.debug(
      "Received request for transaction subscription {}: {}",
      subscriptionId: Any, // upcast selects the varargs SLF4J overload unambiguously
      request)
    val eventFilter = EventFilter.byTemplates(request.filter)
    transactionPipeline
      .run(request.begin, request.end)
      .mapConcat { trans =>
        acceptedToFlat(trans, request.verbose, eventFilter) match {
          case Some(transaction) =>
            val response = GetTransactionsResponse(Seq(transaction))
            logger.debug(
              "Serving item {} (offset: {}) in transaction subscription {} to client",
              transaction.transactionId,
              transaction.offset,
              subscriptionId)
            List(response)
          case None =>
            logger.trace(
              "Not serving item {} for transaction subscription {} as no events are visible",
              trans.transactionId,
              subscriptionId: Any)
            Nil
        }
      }
  }

  /** Converts an accepted ledger event into a flat API transaction.
    *
    * Returns `None` when the filter leaves no visible events AND the subscriber is not the
    * submitter; such transactions are not served at all. The command id is disclosed only
    * to the submitting party (empty string otherwise).
    */
  private def acceptedToFlat(
      trans: AcceptedTransaction,
      verbose: Boolean,
      eventFilter: TemplateAwareFilter): Option[PTransaction] = {
    val events =
      TransactionConversion
        .genToFlatTransaction(
          trans.transaction,
          trans.explicitDisclosure.mapValues(set => set.toSet[String]),
          verbose)
        .flatMap(eventFilter.filterEvent(_).toList)
    val submitterIsSubscriber =
      trans.submitter
        .map(Party.assertFromString)
        .fold(false)(eventFilter.isSubmitterSubscriber)
    if (events.nonEmpty || submitterIsSubscriber) {
      Some(
        PTransaction(
          transactionId = trans.transactionId,
          commandId = if (submitterIsSubscriber) trans.commandId.getOrElse("") else "",
          workflowId = trans.workflowId,
          effectiveAt = Some(fromInstant(trans.recordTime)),
          events = events,
          offset = trans.offset
        ))
    } else None
  }

  /** Streams transaction trees visible to the requesting parties, paired with their offsets.
    * Transactions that are entirely invisible to the parties are skipped.
    */
  override def getTransactionTrees(request: GetTransactionTreesRequest)
    : Source[WithOffset[String, VisibleTransaction], NotUsed] = {
    logger.debug("Received {}", request)
    transactionPipeline
      .run(
        request.begin,
        request.end
      )
      .mapConcat { trans =>
        toResponseIfVisible(request.parties, trans)
          .fold(List.empty[WithOffset[String, VisibleTransaction]])(e =>
            List(WithOffset(trans.offset, e)))
      }
  }

  // Applies a wildcard (no template restriction) filter for the subscribing parties and
  // projects the transaction down to what those parties may see.
  private def toResponseIfVisible(subscribingParties: Set[Party], trans: AcceptedTransaction) = {
    val eventFilter = TransactionFilter(subscribingParties.map(_ -> Filters.noFilter)(breakOut))
    val withMeta = toTransactionWithMeta(trans)
    VisibleTransaction.toVisibleTransaction(eventFilter, withMeta)
  }

  /** Looks up the transaction tree containing the given event id.
    *
    * Fails with INVALID_ARGUMENT when the event id cannot be parsed.
    * NOTE(review): the event index extracted from the id is ignored — the whole transaction
    * identified by the id's transaction part is returned, not narrowed to that event.
    */
  override def getTransactionByEventId(
      request: GetTransactionByEventIdRequest): Future[Option[VisibleTransaction]] = {
    logger.debug("Received {}", request)
    SandboxEventIdFormatter
      .split(request.eventId.unwrap)
      .fold(
        Future.failed[Option[VisibleTransaction]](
          Status.INVALID_ARGUMENT
            .withDescription(s"invalid eventId: ${request.eventId}")
            .asRuntimeException())) {
        case TransactionIdWithIndex(transactionId, index) =>
          lookUpTreeByTransactionId(
            TransactionId(transactionId.toString),
            request.requestingParties)
      }
  }

  /** Looks up a transaction tree by its id, restricted to the requesting parties. */
  override def getTransactionById(
      request: GetTransactionByIdRequest): Future[Option[VisibleTransaction]] = {
    logger.debug("Received {}", request)
    lookUpTreeByTransactionId(request.transactionId, request.requestingParties)
  }

  /** Flat-transaction analogue of [[getTransactionByEventId]]; the event index is
    * likewise ignored.
    */
  override def getFlatTransactionByEventId(
      request: GetTransactionByEventIdRequest): Future[Option[PTransaction]] = {
    SandboxEventIdFormatter
      .split(request.eventId.unwrap)
      .fold(
        Future.failed[Option[PTransaction]](
          Status.INVALID_ARGUMENT
            .withDescription(s"invalid eventId: ${request.eventId}")
            .asRuntimeException())) {
        case TransactionIdWithIndex(transactionId, index) =>
          lookUpFlatByTransactionId(
            TransactionId(transactionId.toString),
            request.requestingParties)
      }
  }

  /** Looks up a flat transaction by its id, restricted to the requesting parties. */
  override def getFlatTransactionById(
      request: GetTransactionByIdRequest): Future[Option[PTransaction]] = {
    lookUpFlatByTransactionId(request.transactionId, request.requestingParties)
  }

  /** Current ledger end as an absolute offset. The `ledgerId` argument is unused here. */
  override def getLedgerEnd(ledgerId: String): Future[LedgerOffset.Absolute] =
    ledgerBackend.getCurrentLedgerEnd.map(LedgerOffset.Absolute)

  // Offsets are numeric strings; compare them numerically, not lexicographically.
  override lazy val offsetOrdering: Ordering[LedgerOffset.Absolute] =
    Ordering.by(abs => BigInt(abs.value))

  // Fails with INVALID_ARGUMENT when the id is unknown.
  // NOTE(review): gRPC conventions would suggest NOT_FOUND for a missing entity — confirm
  // before changing, as clients may depend on the current status code.
  private def lookUpTreeByTransactionId(
      transactionId: TransactionId,
      requestingParties: Set[Party]): Future[Option[VisibleTransaction]] =
    transactionPipeline
      .getTransactionById(transactionId.unwrap)
      .flatMap {
        case Some(trans) =>
          Future.successful(toResponseIfVisible(requestingParties, trans))
        case None =>
          Future.failed(
            Status.INVALID_ARGUMENT
              .withDescription(s"$transactionId could not be found")
              .asRuntimeException())
      }

  // Flat-transaction variant of the lookup above; always renders values verbosely.
  private def lookUpFlatByTransactionId(
      transactionId: TransactionId,
      requestingParties: Set[Party]): Future[Option[PTransaction]] =
    transactionPipeline
      .getTransactionById(transactionId.unwrap)
      .flatMap {
        case Some(trans) =>
          val eventFilter = EventFilter.byTemplates(
            TransactionFilter(requestingParties.map(_ -> Filters.noFilter)(breakOut)))
          val result = acceptedToFlat(trans, verbose = true, eventFilter)
          Future.successful(result)
        case None =>
          Future.failed(
            Status.INVALID_ARGUMENT
              .withDescription(s"$transactionId could not be found")
              .asRuntimeException())
      }

  private def toTransactionWithMeta(trans: AcceptedTransaction) =
    TransactionWithMeta(
      trans.transaction,
      extractMeta(trans)
    )

  // Repackages the event's metadata into the domain representation, tagging raw strings.
  private def extractMeta(trans: AcceptedTransaction): TransactionMeta =
    TransactionMeta(
      TransactionId(trans.transactionId),
      Tag.subst(trans.commandId),
      Tag.subst(trans.applicationId),
      trans.submitter.map(Party.assertFromString),
      WorkflowId(trans.workflowId),
      trans.recordTime,
      None
    )

  // Nothing to release: streams are owned by the materializer passed in implicitly.
  override def close(): Unit = ()
}
|
gaborh-da/daml
|
ledger/sandbox/src/main/scala/com/digitalasset/platform/sandbox/SandboxServer.scala
|
<gh_stars>0
// Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.digitalasset.platform.sandbox
import java.time.Instant
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, Source}
import com.digitalasset.api.util.TimeProvider
import com.digitalasset.daml.lf.data.ImmArray
import com.digitalasset.daml.lf.engine.Engine
import com.digitalasset.grpc.adapter.ExecutionSequencerFactory
import com.digitalasset.ledger.server.LedgerApiServer.{ApiServices, LedgerApiServer}
import com.digitalasset.platform.common.LedgerIdMode
import com.digitalasset.platform.sandbox.SandboxServer.{
asyncTolerance,
createInitialState,
logger,
scheduleHeartbeats
}
import com.digitalasset.platform.sandbox.banner.Banner
import com.digitalasset.platform.sandbox.config.{SandboxConfig, SandboxContext}
import com.digitalasset.platform.sandbox.metrics.MetricsManager
import com.digitalasset.platform.sandbox.services.SandboxResetService
import com.digitalasset.platform.sandbox.stores.ActiveContractsInMemory
import com.digitalasset.platform.sandbox.stores.ledger.ScenarioLoader.LedgerEntryWithLedgerEndIncrement
import com.digitalasset.platform.sandbox.stores.ledger._
import com.digitalasset.platform.sandbox.stores.ledger.sql.SqlStartMode
import com.digitalasset.platform.server.services.testing.TimeServiceBackend
import com.digitalasset.platform.services.time.TimeProviderType
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext, Future}
import scala.util.Try
/** Companion holding process-wide sandbox resources and start-up helpers. */
object SandboxServer {
  private val logger = LoggerFactory.getLogger(this.getClass)

  // Maximum time we block on start-up/shutdown of the persistence layer and actor system.
  private val asyncTolerance = 30.seconds

  /** Creates a sandbox server named "sandbox". `config` is by-name so it is re-evaluated
    * on each (re)start of the API server.
    */
  def apply(config: => SandboxConfig): SandboxServer =
    new SandboxServer(
      "sandbox",
      config
    )

  // We memoize the engine between resets so we avoid the expensive
  // repeated validation of the same packages after each reset
  private val engine = Engine()

  /** Schedules heartbeats and returns a cancel thunk.
    *
    * For wall-clock time (`TimeProvider.UTC`) a tick fires every second (first tick
    * immediately) and pushes the current time through `onTimeChange`; the returned
    * function cancels the tick source. For any other time provider nothing is scheduled
    * and the returned function is a no-op.
    */
  private def scheduleHeartbeats(timeProvider: TimeProvider, onTimeChange: Instant => Future[Unit])(
      implicit mat: ActorMaterializer,
      ec: ExecutionContext) =
    timeProvider match {
      case timeProvider: TimeProvider.UTC.type =>
        val interval = 1.seconds
        logger.debug(s"Scheduling heartbeats in intervals of {}", interval)
        val cancelable = Source
          .tick(0.seconds, interval, ())
          .mapAsync[Unit](1)(
            _ => onTimeChange(timeProvider.getCurrentTime)
          )
          .to(Sink.ignore)
          .run()
        () =>
          val _ = cancelable.cancel()
      case _ =>
        () =>
          ()
    }

  // if requested, initialize the ledger state with the given scenario
  private def createInitialState(config: SandboxConfig, context: SandboxContext)
    : (ActiveContractsInMemory, ImmArray[LedgerEntryWithLedgerEndIncrement], Option[Instant]) =
    config.scenario match {
      case None => (ActiveContractsInMemory.empty, ImmArray.empty, None)
      case Some(scenario) =>
        val (acs, records, ledgerTime) =
          ScenarioLoader.fromScenario(context.packageContainer, scenario)
        (acs, records, Some(ledgerTime))
    }
}
/** A standalone sandbox ledger server whose API-server incarnation can be torn down and
  * rebuilt in place by the reset service.
  *
  * Fix: the side-effecting overrides of `AutoCloseable.close()` in [[ApiServerState]] and
  * [[Infrastructure]] were declared without parentheses (`def close: Unit`), against the
  * convention that side-effecting 0-arity methods keep `()`; declarations and the one
  * parenless call site now use `close()`.
  */
class SandboxServer(actorSystemName: String, config: => SandboxConfig) extends AutoCloseable {

  /** State owned by one incarnation of the API server; replaced wholesale on reset. */
  case class ApiServerState(
      ledgerId: String,
      apiServer: LedgerApiServer,
      ledger: Ledger,
      stopHeartbeats: () => Unit
  ) extends AutoCloseable {
    def port: Int = apiServer.port

    override def close(): Unit = {
      stopHeartbeats()
      apiServer.close() //fully tear down the old server.
      ledger.close()
    }
  }

  /** Long-lived infrastructure that survives resets (actor system, materializer, metrics). */
  case class Infrastructure(
      actorSystem: ActorSystem,
      materializer: ActorMaterializer,
      metricsManager: MetricsManager)
      extends AutoCloseable {
    def executionContext: ExecutionContext = materializer.executionContext

    override def close(): Unit = {
      materializer.shutdown()
      // Block until the actor system is fully terminated before releasing metrics.
      Await.result(actorSystem.terminate(), asyncTolerance)
      metricsManager.close()
    }
  }

  // Mutated only on start and reset; volatile so the reset service thread sees the swap.
  @volatile private var sandboxState: SandboxState = _

  case class SandboxState(apiServerState: ApiServerState, infra: Infrastructure)
      extends AutoCloseable {
    override def close(): Unit = {
      apiServerState.close()
      infra.close()
    }

    /** Tears down the API server and rebuilds it (resetting SQL state), reusing the
      * infrastructure. The returned future completes when the OLD services have closed,
      * guaranteeing no further calls reach the old incarnation.
      */
    def resetAndRestartServer(): Future[Unit] = {
      implicit val ec: ExecutionContext = sandboxState.infra.executionContext
      val apiServicesClosed = apiServerState.apiServer.servicesClosed()
      //need to run this async otherwise the callback kills the server under the in-flight reset service request!
      Future {
        apiServerState.close() // fully tear down the old server
        //TODO: eliminate the state mutation somehow
        //yes, it's horrible that we mutate the state here, but believe me, it's still an improvement to what we had before!
        sandboxState =
          copy(apiServerState = buildAndStartApiServer(infra, SqlStartMode.AlwaysReset))
      }(infra.executionContext)
      // waits for the services to be closed, so we can guarantee that future API calls after finishing the reset will never be handled by the old one
      apiServicesClosed
    }
  }

  def port: Int = sandboxState.apiServerState.port

  /** the reset service is special, since it triggers a server shutdown */
  private val resetService: SandboxResetService = new SandboxResetService(
    () => sandboxState.apiServerState.ledgerId,
    () => sandboxState.infra.executionContext,
    () => sandboxState.resetAndRestartServer()
  )

  sandboxState = start()

  /** Builds the ledger (in-memory or SQL per config), schedules heartbeats, and starts the
    * gRPC API server, returning the new per-incarnation state.
    */
  @SuppressWarnings(Array("org.wartremover.warts.ExplicitImplicitTypes"))
  private def buildAndStartApiServer(
      infra: Infrastructure,
      startMode: SqlStartMode = SqlStartMode.ContinueIfExists): ApiServerState = {
    implicit val mat = infra.materializer
    implicit val ec: ExecutionContext = infra.executionContext
    implicit val mm: MetricsManager = infra.metricsManager

    val ledgerId = config.ledgerIdMode match {
      case LedgerIdMode.Static(id) => id
      case LedgerIdMode.Dynamic() => LedgerIdGenerator.generateRandomId()
    }

    val context = SandboxContext.fromConfig(config)
    val (acs, records, mbLedgerTime) = createInitialState(config, context)

    // Wall-clock mode has no controllable backend; any scenario-provided ledger time forces
    // a static time service seeded with that time (EPOCH otherwise).
    val (timeProvider, timeServiceBackendO: Option[TimeServiceBackend]) =
      (mbLedgerTime, config.timeProviderType) match {
        case (None, TimeProviderType.WallClock) => (TimeProvider.UTC, None)
        case (None, _) =>
          val ts = TimeServiceBackend.simple(Instant.EPOCH)
          (ts, Some(ts))
        case (Some(ledgerTime), _) =>
          val ts = TimeServiceBackend.simple(ledgerTime)
          (ts, Some(ts))
      }

    val (ledgerType, ledger) = config.jdbcUrl match {
      case None =>
        ("in-memory", Ledger.metered(Ledger.inMemory(ledgerId, timeProvider, acs, records)))
      case Some(jdbcUrl) =>
        val ledgerF = Ledger.postgres(
          jdbcUrl,
          ledgerId,
          timeProvider,
          acs,
          records,
          config.commandConfig.maxCommandsInFlight * 2, // we can get commands directly as well on the submission service
          startMode
        )
        // Persistence start-up is fatal on failure: abort the whole server.
        val ledger = Try(Await.result(ledgerF, asyncTolerance)).fold(t => {
          val msg = "Could not start PostgreSQL persistence layer"
          logger.error(msg, t)
          sys.error(msg)
        }, identity)
        ("sql", Ledger.metered(ledger))
    }

    val ledgerBackend = new SandboxLedgerBackend(ledger)

    val stopHeartbeats = scheduleHeartbeats(timeProvider, ledger.publishHeartbeat)

    val apiServer = LedgerApiServer(
      (am: ActorMaterializer, esf: ExecutionSequencerFactory) =>
        ApiServices
          .create(
            config,
            ledgerBackend,
            SandboxServer.engine,
            timeProvider,
            timeServiceBackendO
              .map(
                TimeServiceBackend.withObserver(
                  _,
                  ledger.publishHeartbeat
                )))(am, esf)
          .withServices(List(resetService)),
      // NOTE(JM): Re-use the same port after reset.
      Option(sandboxState).fold(config.port)(_.apiServerState.port),
      config.address,
      config.tlsConfig.flatMap(_.server)
    )

    val newState = ApiServerState(
      ledgerId,
      apiServer,
      ledger,
      stopHeartbeats
    )

    Banner.show(Console.out)
    logger.info(
      "Initialized sandbox version {} with ledger-id = {}, port = {}, dar file = {}, time mode = {}, ledger = {}, daml-engine = {}",
      BuildInfo.Version,
      ledgerId,
      newState.port.toString,
      config.damlPackageContainer: AnyRef,
      config.timeProviderType,
      ledgerType
    )
    newState
  }

  // Builds the long-lived infrastructure and the first API-server incarnation.
  private def start(): SandboxState = {
    val actorSystem = ActorSystem(actorSystemName)
    val infrastructure =
      Infrastructure(actorSystem, ActorMaterializer()(actorSystem), MetricsManager())
    val apiState = buildAndStartApiServer(infrastructure)
    SandboxState(apiState, infrastructure)
  }

  override def close(): Unit = sandboxState.close()
}
|
gaborh-da/daml
|
daml-lf/interface/src/test/scala/com/digitalasset/daml/lf/iface/TypeSpec.scala
|
// Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.digitalasset.daml.lf.iface
import com.digitalasset.daml.lf.data.ImmArray.ImmArraySeq
import com.digitalasset.daml.lf.data.Ref.{Identifier, QualifiedName, PackageId}
import com.digitalasset.daml.lf.data.BackStack
import org.scalatest.{Matchers, WordSpec}
import com.digitalasset.daml.lf.testing.parser.Implicits._
import com.digitalasset.daml.lf.lfpackage.{Ast => Pkg}
import scala.language.implicitConversions
/** Tests for interface [[Type]] conversion and instantiation.
  *
  * Fix: the `Pkg.BTContractId` case of the AST-to-interface conversion produced
  * `TypePrim(PrimTypeBool, …)` — a copy-paste error from the `BTBool` case; a contract id
  * is its own primitive type, `PrimTypeContractId`.
  */
class TypeSpec extends WordSpec with Matchers {
  implicit def packageId(s: String): PackageId = PackageId.assertFromString(s)
  implicit def qualifiedName(s: String): QualifiedName = QualifiedName.assertFromString(s)

  /** Converts a DAML-LF package AST type into an interface [[Type]], un-currying type
    * applications into an argument stack; fails on types with no interface counterpart
    * (update, scenario, arrow, tuple, forall).
    */
  implicit def fromLfPackageType(pkgTyp00: Pkg.Type): Type = {
    // Exactly one collected type argument must be present; returns it.
    // (`pop` yields the remaining stack plus the last element.)
    def assertOneArg(typs: BackStack[Type]): Type = typs.pop match {
      case Some((init, last)) if init.isEmpty => last
      case _ => sys.error(s"expected 1 argument, got ${typs.toImmArray.length}")
    }
    // No collected type arguments may be present.
    def assertZeroArgs(typs: BackStack[Type]): Unit = if (!typs.isEmpty) {
      sys.error(s"expected 0 arguments, got ${typs.toImmArray.length}")
    }
    def go(typ0: Pkg.Type, args: BackStack[Type]): Type = typ0 match {
      case Pkg.TVar(v) =>
        assertZeroArgs(args)
        TypeVar(v)
      // Push the converted argument and recurse into the head of the application.
      case Pkg.TApp(fun, arg) => go(fun, args :+ fromLfPackageType(arg))
      case Pkg.TBuiltin(bltin) =>
        bltin match {
          case Pkg.BTInt64 =>
            assertZeroArgs(args)
            TypePrim(PrimTypeInt64, ImmArraySeq.empty)
          case Pkg.BTDecimal =>
            assertZeroArgs(args)
            TypePrim(PrimTypeDecimal, ImmArraySeq.empty)
          case Pkg.BTText =>
            assertZeroArgs(args)
            TypePrim(PrimTypeText, ImmArraySeq.empty)
          case Pkg.BTTimestamp =>
            assertZeroArgs(args)
            TypePrim(PrimTypeTimestamp, ImmArraySeq.empty)
          case Pkg.BTParty =>
            assertZeroArgs(args)
            TypePrim(PrimTypeParty, ImmArraySeq.empty)
          case Pkg.BTUnit =>
            assertZeroArgs(args)
            TypePrim(PrimTypeUnit, ImmArraySeq.empty)
          case Pkg.BTBool =>
            assertZeroArgs(args)
            TypePrim(PrimTypeBool, ImmArraySeq.empty)
          case Pkg.BTList =>
            TypePrim(PrimTypeList, ImmArraySeq(assertOneArg(args)))
          case Pkg.BTMap =>
            TypePrim(PrimTypeMap, ImmArraySeq(assertOneArg(args)))
          case Pkg.BTUpdate =>
            sys.error("cannot use update in interface type")
          case Pkg.BTScenario =>
            sys.error("cannot use scenario in interface type")
          case Pkg.BTDate =>
            assertZeroArgs(args)
            TypePrim(PrimTypeDate, ImmArraySeq.empty)
          case Pkg.BTContractId =>
            // Was PrimTypeBool; ContractId takes one argument (the template type).
            TypePrim(PrimTypeContractId, ImmArraySeq(assertOneArg(args)))
          case Pkg.BTOptional => TypePrim(PrimTypeOptional, ImmArraySeq(assertOneArg(args)))
          case Pkg.BTArrow => sys.error("cannot use arrow in interface type")
        }
      case Pkg.TTyCon(tycon) => TypeCon(TypeConName(tycon), args.toImmArray.toSeq)
      case _: Pkg.TTuple => sys.error("cannot use tuples in interface type")
      case _: Pkg.TForall => sys.error("cannot use forall in interface type")
    }
    go(pkgTyp00, BackStack.empty)
  }

  "instantiate type arguments correctly" in {
    val tyCon = TypeCon(
      TypeConName(Identifier("dummyPkg", "Mod:R")),
      ImmArraySeq(t"Int64", t"Text"),
    )
    val inst = tyCon.instantiate(
      DefDataType(
        ImmArraySeq(n"a", n"b"),
        Record(ImmArraySeq(n"fld1" -> t"List a", n"fld2" -> t"Mod:V b"))
      )
    )
    inst shouldBe Record[Type](ImmArraySeq(n"fld1" -> t"List Int64", n"fld2" -> t"Mod:V Text"))
  }

  "mapTypeVars should replace all type variables in List(List a)" in {
    val result: Type = t"List (List a)".mapTypeVars(v => t"Text")
    val expected: Type = t"List (List Text)"
    result shouldBe expected
  }

  "instantiate should work for a nested record" in {
    val id1 = TypeConName(Identifier("P", "M:T1"))
    val id2 = TypeConName(Identifier("P", "M:T2"))
    val tc = TypeCon(id1, ImmArraySeq(t"Text"))
    val ddt = DefDataType(
      ImmArraySeq(n"a"),
      Record(
        ImmArraySeq(
          n"f" -> TypeCon(id2, ImmArraySeq(t"a"))
        ))
    )
    val result = tc.instantiate(ddt)
    result shouldBe Record(ImmArraySeq(n"f" -> TypeCon(id2, ImmArraySeq(t"Text"))))
  }
}
|
gaborh-da/daml
|
ledger/sandbox/src/test/lib/scala/com/digitalasset/platform/sandbox/services/SandboxFixture.scala
|
<reponame>gaborh-da/daml<filename>ledger/sandbox/src/test/lib/scala/com/digitalasset/platform/sandbox/services/SandboxFixture.scala
// Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.digitalasset.platform.sandbox.services
import java.io.File
import akka.stream.Materializer
import com.digitalasset.api.util.TimeProvider
import com.digitalasset.grpc.adapter.ExecutionSequencerFactory
import com.digitalasset.ledger.api.testing.utils.{Resource, SuiteResource}
import com.digitalasset.ledger.api.v1.ledger_identity_service.{
GetLedgerIdentityRequest,
LedgerIdentityServiceGrpc
}
import com.digitalasset.ledger.api.v1.testing.time_service.TimeServiceGrpc
import com.digitalasset.ledger.client.services.testing.time.StaticTime
import com.digitalasset.platform.common.LedgerIdMode
import com.digitalasset.platform.sandbox.config.{DamlPackageContainer, SandboxConfig}
import com.digitalasset.platform.services.time.{TimeModel, TimeProviderType}
import io.grpc.Channel
import org.scalatest.Suite
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.Try
/** Test fixture that starts one sandbox server per suite and exposes a gRPC channel to it. */
trait SandboxFixture extends SuiteResource[Channel] {
  self: Suite =>

  // NOTE(review): path is relative to the test working directory — confirm it resolves
  // outside the bazel runner.
  protected def darFile = new File("ledger/sandbox/Test.dar")

  protected def channel: Channel = suiteResource.value

  // Asks the running server for its ledger id instead of assuming the configured one.
  protected def ledgerIdOnServer: String =
    LedgerIdentityServiceGrpc
      .blockingStub(channel)
      .getLedgerIdentity(GetLedgerIdentityRequest())
      .ledgerId

  /** Static-time provider kept in sync with the server's time service; falls back to
    * UTC wall-clock if the stub cannot be created. Blocks up to 30s for the initial sync.
    */
  protected def getTimeProviderForClient(
      implicit mat: Materializer,
      esf: ExecutionSequencerFactory): TimeProvider = {
    Try(TimeServiceGrpc.stub(channel))
      .map(StaticTime.updatedVia(_, ledgerIdOnServer)(mat, esf))
      .fold[TimeProvider](_ => TimeProvider.UTC, Await.result(_, 30.seconds))
  }

  // Per-suite server configuration; override the defs below to customize.
  protected def config: SandboxConfig =
    SandboxConfig.default
      .copy(
        port = 0, //dynamic port allocation
        damlPackageContainer = DamlPackageContainer(files = packageFiles),
        timeProviderType = TimeProviderType.Static,
        timeModel = TimeModel.reasonableDefault,
        scenario = scenario,
        ledgerIdMode = LedgerIdMode.Static("sandbox server")
      )

  protected def packageFiles: List[File] = List(darFile)

  // Scenario to preload into the ledger, if any.
  protected def scenario: Option[String] = None

  protected def ledgerId: String = ledgerIdOnServer

  // lazy: the server must not start before the suite resource is first demanded.
  private lazy val sandboxResource = new SandboxServerResource(config)

  protected override lazy val suiteResource: Resource[Channel] = sandboxResource

  def getSandboxPort: Int = sandboxResource.getPort
}
|
gaborh-da/daml
|
ledger/sandbox/src/main/scala/com/digitalasset/platform/sandbox/stores/ledger/inmemory/InMemoryLedger.scala
|
<reponame>gaborh-da/daml
// Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.digitalasset.platform.sandbox.stores.ledger.inmemory
import java.time.Instant
import akka.NotUsed
import akka.stream.scaladsl.Source
import com.digitalasset.api.util.TimeProvider
import com.digitalasset.daml.lf.data.ImmArray
import com.digitalasset.daml.lf.transaction.Node
import com.digitalasset.daml.lf.value.Value.{AbsoluteContractId, ContractId}
import com.digitalasset.ledger.api.domain.{ApplicationId, CommandId}
import com.digitalasset.ledger.backend.api.v1.{
RejectionReason,
SubmissionResult,
TransactionId,
TransactionSubmission
}
import com.digitalasset.platform.sandbox.services.transaction.SandboxEventIdFormatter
import com.digitalasset.platform.sandbox.stores.{ActiveContracts, ActiveContractsInMemory}
import com.digitalasset.platform.sandbox.stores.deduplicator.Deduplicator
import com.digitalasset.platform.sandbox.stores.ledger.LedgerEntry.{Checkpoint, Rejection}
import com.digitalasset.platform.sandbox.stores.ledger.ScenarioLoader.LedgerEntryWithLedgerEndIncrement
import com.digitalasset.platform.sandbox.stores.ledger.{Ledger, LedgerEntry, LedgerSnapshot}
import org.slf4j.LoggerFactory
import scala.concurrent.Future
/** This stores all the mutable data that we need to run a ledger: the PCS, the ACS, and the deduplicator.
*
*/
/** This stores all the mutable data that we need to run a ledger: the PCS, the ACS, and the
  * deduplicator. All mutable state is guarded by `this.synchronized`.
  *
  * Fixes: (1) rejection-message typo "MaxiumRecordTime" → "MaximumRecordTime";
  * (2) [[lookupTransaction]] no longer throws `NumberFormatException` synchronously out of
  * a `Future`-returning method — the failure is now captured in the returned Future.
  */
class InMemoryLedger(
    val ledgerId: String,
    timeProvider: TimeProvider,
    acs0: ActiveContractsInMemory,
    ledgerEntries: ImmArray[LedgerEntryWithLedgerEndIncrement])
    extends Ledger {

  private val logger = LoggerFactory.getLogger(this.getClass)

  // Pre-populates the entry log (e.g. from a loaded scenario), advancing the ledger end
  // by each entry's declared increment.
  private val entries = {
    val l = new LedgerEntries[LedgerEntry](_.toString)
    ledgerEntries.foreach {
      case LedgerEntryWithLedgerEndIncrement(entry, increment) =>
        l.publishWithLedgerEndIncrement(entry, increment)
        ()
    }
    l
  }

  override def ledgerEntries(offset: Option[Long]): Source[(Long, LedgerEntry), NotUsed] =
    entries.getSource(offset)

  // mutable state
  private var acs = acs0
  private var deduplicator = Deduplicator()

  override def ledgerEnd: Long = entries.ledgerEnd

  // need to take the lock to make sure the two pieces of data are consistent.
  override def snapshot(): Future[LedgerSnapshot] =
    Future.successful(this.synchronized {
      LedgerSnapshot(entries.ledgerEnd, Source(acs.contracts))
    })

  override def lookupContract(
      contractId: AbsoluteContractId): Future[Option[ActiveContracts.ActiveContract]] =
    Future.successful(this.synchronized {
      acs.contracts.get(contractId)
    })

  override def lookupKey(key: Node.GlobalKey): Future[Option[AbsoluteContractId]] =
    Future.successful(this.synchronized {
      acs.keys.get(key)
    })

  override def publishHeartbeat(time: Instant): Future[Unit] =
    Future.successful(this.synchronized[Unit] {
      entries.publish(Checkpoint(time))
      ()
    })

  /** Accepts a submission, deduplicating on (applicationId, commandId). Always
    * acknowledges: failures become rejection entries on the ledger, not returned errors.
    */
  override def publishTransaction(tx: TransactionSubmission): Future[SubmissionResult] =
    Future.successful(
      this.synchronized[SubmissionResult] {
        val (newDeduplicator, isDuplicate) =
          deduplicator.checkAndAdd(ApplicationId(tx.applicationId), CommandId(tx.commandId))
        deduplicator = newDeduplicator
        if (isDuplicate)
          logger.warn(
            "Ignoring duplicate submission for applicationId {}, commandId {}",
            tx.applicationId: Any,
            tx.commandId)
        else
          handleSuccessfulTx(entries.ledgerEnd.toString, tx)
        SubmissionResult.Acknowledged
      }
    )

  // Checks record time against MRT, updates the ACS, and publishes either the transaction
  // or a rejection. The transaction id is the current ledger end rendered as a string.
  private def handleSuccessfulTx(transactionId: String, tx: TransactionSubmission): Unit = {
    val recordTime = timeProvider.getCurrentTime
    if (recordTime.isAfter(tx.maximumRecordTime)) {
      // This can happen if the DAML-LF computation (i.e. exercise of a choice) takes longer
      // than the time window between LET and MRT allows for.
      // See https://github.com/digital-asset/daml/issues/987
      handleError(
        tx,
        RejectionReason.TimedOut(
          s"RecordTime $recordTime is after MaximumRecordTime ${tx.maximumRecordTime}"))
    } else {
      val toAbsCoid: ContractId => AbsoluteContractId =
        SandboxEventIdFormatter.makeAbsCoid(transactionId)
      val mappedTx = tx.transaction.mapContractIdAndValue(toAbsCoid, _.mapContractId(toAbsCoid))
      // 5b. modify the ActiveContracts, while checking that we do not have double
      // spends or timing issues
      val acsRes = acs.addTransaction(
        let = tx.ledgerEffectiveTime,
        workflowId = tx.workflowId,
        transactionId = transactionId,
        transaction = mappedTx,
        explicitDisclosure = tx.blindingInfo.explicitDisclosure,
        localImplicitDisclosure = tx.blindingInfo.localImplicitDisclosure,
        globalImplicitDisclosure = tx.blindingInfo.globalImplicitDisclosure,
      )
      acsRes match {
        case Left(err) =>
          handleError(tx, RejectionReason.Inconsistent(s"Reason: ${err.mkString("[", ", ", "]")}"))
        case Right(newAcs) =>
          acs = newAcs
          // Rewrite node ids and disclosure keys into globally unique event ids.
          val recordTx = mappedTx
            .mapNodeId(SandboxEventIdFormatter.fromTransactionId(transactionId, _))
          val recordBlinding =
            tx.blindingInfo.explicitDisclosure.map {
              case (nid, parties) =>
                (SandboxEventIdFormatter.fromTransactionId(transactionId, nid), parties)
            }
          val entry = LedgerEntry
            .Transaction(
              tx.commandId,
              transactionId,
              tx.applicationId,
              tx.submitter,
              tx.workflowId,
              tx.ledgerEffectiveTime,
              recordTime,
              recordTx,
              recordBlinding.transform((_, v) => v.toSet[String])
            )
          entries.publish(entry)
          ()
      }
    }
  }

  // Publishes a rejection entry; the submitter observes it through the completion stream.
  private def handleError(tx: TransactionSubmission, reason: RejectionReason): Unit = {
    logger.warn(s"Publishing error to ledger: ${reason.description}")
    entries.publish(
      Rejection(timeProvider.getCurrentTime, tx.commandId, tx.applicationId, tx.submitter, reason)
    )
    ()
  }

  override def close(): Unit = ()

  /** Looks up a transaction by id; in this ledger the transaction id doubles as the offset.
    * A non-numeric id yields a failed Future (previously thrown synchronously).
    */
  override def lookupTransaction(
      transactionId: TransactionId): Future[Option[(Long, LedgerEntry.Transaction)]] =
    try {
      val offset = transactionId.toLong
      Future.successful(
        entries
          .getEntryAt(offset)
          .collect[(Long, LedgerEntry.Transaction)] {
            case t: LedgerEntry.Transaction =>
              (offset, t) // the transaction id is also the offset
          })
    } catch {
      case e: NumberFormatException => Future.failed(e)
    }
}
|
gaborh-da/daml
|
ledger/sandbox/src/main/scala/com/digitalasset/platform/sandbox/LedgerIdGenerator.scala
|
// Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.digitalasset.platform.sandbox
import java.util.UUID
/** Generates fresh, unique ledger identifiers for sandbox instances. */
object LedgerIdGenerator {

  /** Returns a new ledger id of the form `sandbox-<random UUID>`. */
  def generateRandomId(): String = {
    val uuid = UUID.randomUUID()
    "sandbox-" + uuid.toString
  }
}
|
gaborh-da/daml
|
bazel_tools/runfiles/src/main/scala/com/digitalasset/daml/bazeltools/BazelRunfiles.scala
|
<gh_stars>0
// Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.digitalasset.daml.bazeltools
import com.google.devtools.build.runfiles.Runfiles
/** Mixin that resolves workspace-relative paths through the Bazel runfiles mechanism. */
trait BazelRunfiles {

  // Name of the main Bazel workspace all runfile paths are rooted under.
  private val MainWorkspace = "com_github_digital_asset_daml"

  /** Resolves `path` (relative to the main workspace root) to its runfiles location. */
  def rlocation(path: String): String =
    Runfiles.create.rlocation(s"$MainWorkspace/$path")
}
|
gaborh-da/daml
|
navigator/backend/src/main/scala/com/digitalasset/navigator/model/converter/LedgerApiV1.scala
|
// Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.digitalasset.navigator.model.converter
import java.time.{Instant, LocalDate}
import java.time.format.DateTimeFormatter
import java.time.temporal.ChronoUnit
import com.digitalasset.daml.lf.data.{ImmArray, SortedLookupList}
import com.digitalasset.ledger.api.{v1 => V1}
import com.digitalasset.ledger.api.refinements.ApiTypes
import com.digitalasset.navigator.{model => Model}
import com.digitalasset.navigator.model.{IdentifierApiConversions, IdentifierDamlConversions}
import com.google.protobuf.timestamp.Timestamp
import com.google.rpc.code.Code
import scalaz.Tag
import scala.util.Try
/** Converts between Ledger API V1 protobuf messages and the Navigator model.
  *
  * "Read" methods translate V1 -> Model (transaction trees, events, argument
  * values, completions); "write" methods translate Model -> V1 (commands and
  * argument values). All conversions are total and report failures through
  * `Result[X] = Either[ConversionError, X]` instead of throwing.
  *
  * NOTE(review): declared as `case object` rather than plain `object`; this only
  * adds synthetic members (toString, etc.) — confirm it is intentional.
  */
case object LedgerApiV1 {
  // ------------------------------------------------------------------------------------------------------------------
  // Types
  // ------------------------------------------------------------------------------------------------------------------
  /** Conversion context: the party on whose behalf data is read, and the
    * package registry used to resolve template and type identifiers. */
  case class Context(party: ApiTypes.Party, templates: Model.PackageRegistry)
  // Either a conversion error or the successfully converted value.
  private type Result[X] = Either[ConversionError, X]
  // ------------------------------------------------------------------------------------------------------------------
  // Read methods (V1 -> Model)
  // ------------------------------------------------------------------------------------------------------------------
  /*
  def readTransaction(tx: V1.transaction.Transaction, ctx: Context): Result[Model.Transaction] = {
    for {
      events <- Converter.sequence(tx.events.map(ev =>
        readEvent(ev, ApiTypes.TransactionId(tx.transactionId), ctx, List.empty, ApiTypes.WorkflowId(tx.workflowId), None)))
      effectiveAt <- Converter.checkExists("Transaction.effectiveAt", tx.effectiveAt)
      offset <- readLedgerOffset(tx.offset)
    } yield {
      Model.Transaction(
        id = ApiTypes.TransactionId(tx.transactionId),
        commandId = if (tx.commandId.isEmpty) None else Some(ApiTypes.CommandId(tx.commandId)),
        effectiveAt = Instant.ofEpochSecond(effectiveAt.seconds, effectiveAt.nanos),
        offset = offset,
        events = events
      )
    }
  }
  private def readEvent(
      event: V1.event.Event,
      transactionId: ApiTypes.TransactionId,
      ctx: Context,
      parentWitnessParties: List[ApiTypes.Party],
      workflowId: ApiTypes.WorkflowId,
      parentId: Option[ApiTypes.EventId] = None
  ): Result[Model.Event] = {
    event match {
      case V1.event.Event(V1.event.Event.Event.Created(ev)) =>
        readEventCreated(ev, transactionId, parentWitnessParties, workflowId, parentId, ctx)
      case V1.event.Event(V1.event.Event.Event.Exercised(ev)) =>
        // This case should be removed from the protobuf, Transactions never contain Exercised events
        Left(GenericConversionError("Exercised event found in GetTransactions"))
      case V1.event.Event(V1.event.Event.Event.Archived(ev)) =>
        readEventArchived(ev, transactionId, parentWitnessParties, workflowId, parentId, ctx)
      case V1.event.Event(V1.event.Event.Event.Empty) =>
        Left(RequiredFieldDoesNotExistError("Event.value"))
    }
  }
  private def readEventArchived(
      event: V1.event.ArchivedEvent,
      transactionId: ApiTypes.TransactionId,
      parentWitnessParties: List[ApiTypes.Party],
      workflowId: ApiTypes.WorkflowId,
      parentId: Option[ApiTypes.EventId],
      ctx: Context
  ): Result[Model.Event] = {
    val witnessParties = parentWitnessParties ++ ApiTypes.Party.subst(event.witnessParties)
    Right(
      Model.ContractArchived(
        id = ApiTypes.EventId(event.eventId),
        parentId = parentId,
        transactionId = transactionId,
        witnessParties = witnessParties,
        workflowId = workflowId,
        contractId = ApiTypes.ContractId(event.contractId)
      )
    )
  }
  */
  /** Converts a V1 transaction tree into a flat Model.Transaction.
    *
    * Walks the tree starting at `rootEventIds`, flattening every event (and the
    * children of exercised events) into a single list. Fails if `effectiveAt`
    * is missing or any event cannot be converted.
    */
  def readTransactionTree(
      tx: V1.transaction.TransactionTree,
      ctx: Context
  ): Result[Model.Transaction] = {
    for {
      events <- Converter
        .sequence(
          tx.rootEventIds
            .map(
              evid =>
                readTreeEvent(
                  tx.eventsById(evid),
                  ApiTypes.TransactionId(tx.transactionId),
                  tx.eventsById,
                  ctx,
                  ApiTypes.WorkflowId(tx.workflowId),
                  None))
        )
        .map(_.flatten)
      effectiveAt <- Converter.checkExists("Transaction.effectiveAt", tx.effectiveAt)
      offset <- readLedgerOffset(tx.offset)
    } yield {
      Model.Transaction(
        id = ApiTypes.TransactionId(tx.transactionId),
        // An empty protobuf string field means "not set".
        commandId = if (tx.commandId.isEmpty) None else Some(ApiTypes.CommandId(tx.commandId)),
        effectiveAt = Instant.ofEpochSecond(effectiveAt.seconds, effectiveAt.nanos.toLong),
        offset = offset,
        events = events
      )
    }
  }
  /** Dispatches a single tree event to the created/exercised readers.
    * Returns a list because an exercised event expands to itself plus all of
    * its (recursively read) child events. */
  private def readTreeEvent(
      event: V1.transaction.TreeEvent,
      transactionId: ApiTypes.TransactionId,
      eventsById: Map[String, V1.transaction.TreeEvent],
      ctx: Context,
      workflowId: ApiTypes.WorkflowId,
      parentId: Option[ApiTypes.EventId] = None
  ): Result[List[Model.Event]] = {
    event match {
      case V1.transaction.TreeEvent(V1.transaction.TreeEvent.Kind.Created(ev)) =>
        readEventCreated(ev, transactionId, workflowId, parentId, ctx).map(List(_))
      case V1.transaction.TreeEvent(V1.transaction.TreeEvent.Kind.Exercised(ev)) =>
        readEventExercised(ev, transactionId, eventsById, workflowId, parentId, ctx)
      case V1.transaction.TreeEvent(V1.transaction.TreeEvent.Kind.Empty) =>
        Left(RequiredFieldDoesNotExistError("TreeEvent.value"))
    }
  }
  /** Looks up a template in the registry, failing with TypeNotFoundError if absent. */
  private def getTemplate(
      id: Model.DamlLfIdentifier,
      ctx: Context
  ): Result[Model.Template] =
    ctx.templates
      .template(id)
      .map(Right(_))
      .getOrElse(Left(TypeNotFoundError(id)))
  /** Currently a pass-through; kept as a Result so the signature is stable if
    * offsets ever need real parsing (see commented-out numeric parse below). */
  private def readLedgerOffset(offset: String): Result[String] = {
    // Ledger offset may change to become a number in the future
    // Try(BigInt(offset)).toEither
    //  .left.map(t => GenericConversionError(s"Could not parse ledger offset '$offset'"))
    Right(offset)
  }
  /** Converts a created event, resolving its template and decoding the create
    * arguments against the template's record type. */
  private def readEventCreated(
      event: V1.event.CreatedEvent,
      transactionId: ApiTypes.TransactionId,
      workflowId: ApiTypes.WorkflowId,
      parentId: Option[ApiTypes.EventId],
      ctx: Context
  ): Result[Model.Event] = {
    val witnessParties = ApiTypes.Party.subst(event.witnessParties.toList)
    for {
      templateId <- Converter.checkExists("CreatedEvent.templateId", event.templateId)
      templateIdentifier = templateId.asDaml
      // NOTE(review): `template` is bound but unused below; the lookup only serves
      // to fail early when the template is unknown.
      template <- getTemplate(templateIdentifier, ctx)
      arguments <- Converter.checkExists("CreatedEvent.arguments", event.createArguments)
      arg <- readRecordArgument(arguments, templateIdentifier, ctx)
    } yield
      Model.ContractCreated(
        id = ApiTypes.EventId(event.eventId),
        parentId = parentId,
        transactionId = transactionId,
        witnessParties = witnessParties,
        workflowId = workflowId,
        contractId = ApiTypes.ContractId(event.contractId),
        templateId = templateIdentifier,
        argument = arg
      )
  }
  /** Converts an exercised event plus, recursively, all of its child events.
    * The choice argument is decoded against the choice's declared parameter
    * type; the converted exercise is prepended to the flattened children. */
  private def readEventExercised(
      event: V1.event.ExercisedEvent,
      transactionId: ApiTypes.TransactionId,
      eventsById: Map[String, V1.transaction.TreeEvent],
      workflowId: ApiTypes.WorkflowId,
      parentId: Option[ApiTypes.EventId],
      ctx: Context
  ): Result[List[Model.Event]] = {
    val witnessParties = ApiTypes.Party.subst(event.witnessParties.toList)
    for {
      templateId <- Converter.checkExists("ExercisedEvent.templateId", event.templateId)
      templateIdentifier = templateId.asDaml
      template <- getTemplate(templateId.asDaml, ctx)
      argument <- Converter.checkExists("ExercisedEvent.arguments", event.choiceArgument)
      // The exercised choice must be declared on the template.
      choice <- Converter.checkExists(
        template.choices.find(c => ApiTypes.Choice.unwrap(c.name) == event.choice),
        GenericConversionError(s"Choice '${event.choice}' not found"))
      modelArgument <- readArgument(argument, choice.parameter, ctx)
      // Recursively convert consequences; child events carry this event's id as parent.
      children <- Converter
        .sequence(
          event.childEventIds
            .map(
              childId =>
                readTreeEvent(
                  eventsById(childId),
                  transactionId,
                  eventsById,
                  ctx,
                  workflowId,
                  Some(ApiTypes.EventId(event.eventId))))
        )
        .map(_.flatten)
    } yield
      Model.ChoiceExercised(
        id = ApiTypes.EventId(event.eventId),
        parentId = parentId,
        transactionId = transactionId,
        witnessParties = witnessParties,
        workflowId = workflowId,
        contractId = ApiTypes.ContractId(event.contractId),
        contractCreateEvent = ApiTypes.EventId(event.contractCreatingEventId),
        templateId = templateIdentifier,
        choice = ApiTypes.Choice(event.choice),
        argument = modelArgument,
        consuming = event.consuming,
        actingParties = event.actingParties.map(ApiTypes.Party(_)).toList
      ) :: children
  }
  /** Decodes a record against a template/record type given only its identifier
    * (no type arguments); delegates to the typed overload below. */
  private def readRecordArgument(
      value: V1.value.Record,
      typId: Model.DamlLfIdentifier,
      ctx: Context
  ): Result[Model.ApiRecord] =
    readRecordArgument(
      value,
      Model.DamlLfTypeCon(Model.DamlLfTypeConName(typId), Model.DamlLfImmArraySeq()),
      ctx)
  /** Decodes a record value against the record's DAML-LF type, pairing each
    * protobuf field with the declared field of the instantiated data type.
    * NOTE(review): `zip` silently truncates if the protobuf field count differs
    * from the declared field count — confirm an arity mismatch cannot occur here. */
  private def readRecordArgument(
      value: V1.value.Record,
      typ: Model.DamlLfType,
      ctx: Context
  ): Result[Model.ApiRecord] = {
    for {
      typeCon <- typ match {
        case t @ Model.DamlLfTypeCon(_, _) => Right(t)
        case _ => Left(GenericConversionError(s"Cannot read $value as $typ"))
      }
      ddt <- ctx.templates
        .damlLfDefDataType(typeCon.name.identifier)
        .toRight(GenericConversionError(s"Unknown type ${typeCon.name.identifier}"))
      // Instantiate type parameters; only the record case is accepted here.
      dt <- typeCon
        .instantiate(ddt)
        .fold(Right(_), _ => Left(GenericConversionError(s"Variant expected")))
      fields <- Converter.sequence(
        value.fields.toList
          .zip(dt.fields.toList)
          .map(
            p =>
              Converter
                .checkExists("RecordField.value", p._1.value)
                .flatMap(value => readArgument(value, p._2._2, ctx))
                .map(a => Model.ApiRecordField(p._2._1, a))))
    } yield Model.ApiRecord(Some(typeCon.name.identifier), fields)
  }
  /** Decodes a list value; the element type is the single type parameter of
    * the DAML-LF List primitive. */
  private def readListArgument(
      list: V1.value.List,
      typ: Model.DamlLfType,
      ctx: Context
  ): Result[Model.ApiList] = {
    for {
      elementType <- typ match {
        case Model.DamlLfTypePrim(Model.DamlLfPrimType.List, t) =>
          t.headOption.toRight(GenericConversionError("List type parameter missing"))
        case _ => Left(GenericConversionError(s"Cannot read $list as $typ"))
      }
      values <- Converter.sequence(
        list.elements.map(value => readArgument(value, elementType, ctx)))
    } yield {
      Model.ApiList(values)
    }
  }
  /** Returns the first key occurring more than once, if any.
    * NOTE(review): not referenced anywhere in this object — confirm whether it
    * is still needed or can be removed. */
  private def duplicateKey[X, Y](list: List[(X, Y)]): Option[X] =
    list.groupBy(_._1).collectFirst { case (k, l) if l.size > 1 => k }
  /** Decodes a map value; the value type is the single type parameter of the
    * DAML-LF Map primitive.
    * NOTE(review): `SortedLookupList.fromSortedImmArray` presumably requires the
    * entries to arrive sorted (and unique) by key — confirm the server guarantees
    * this ordering. */
  private def readMapArgument(
      list: V1.value.Map,
      typ: Model.DamlLfType,
      ctx: Context
  ): Result[Model.ApiMap] = {
    for {
      elementType <- typ match {
        case Model.DamlLfTypePrim(Model.DamlLfPrimType.Map, t) =>
          t.headOption.toRight(GenericConversionError("Map type parameter missing"))
        case _ => Left(GenericConversionError(s"Cannot read $list as $typ"))
      }
      values <- Converter.sequence(list.entries.map {
        case entry @ V1.value.Map.Entry(key, optValue) =>
          for {
            valueValue <- optValue.toRight(
              GenericConversionError(s"Field 'value' required in $entry"))
            value <- readArgument(valueValue, elementType, ctx)
          } yield key -> value
      })
      map <- SortedLookupList.fromSortedImmArray(ImmArray(values)).left.map(GenericConversionError)
    } yield Model.ApiMap(map)
  }
  /** Decodes an optional value; the inner type is the single type parameter of
    * the DAML-LF Optional primitive. */
  private def readOptionalArgument(
      opt: V1.value.Optional,
      typ: Model.DamlLfType,
      ctx: Context
  ): Result[Model.ApiOptional] = {
    for {
      optType <- typ match {
        case Model.DamlLfTypePrim(Model.DamlLfPrimType.Optional, t) =>
          t.headOption.toRight(GenericConversionError("Optional type parameter missing"))
        case _ => Left(GenericConversionError(s"Cannot read $opt as $typ"))
      }
      value <- opt.value match {
        case None => Right(None)
        case Some(o) => readArgument(o, optType, ctx).map(Some(_))
      }
    } yield {
      Model.ApiOptional(value)
    }
  }
  /** Decodes a variant value: resolves the variant's data type, finds the field
    * matching the constructor, and decodes the payload against that field's type. */
  private def readVariantArgument(
      variant: V1.value.Variant,
      typ: Model.DamlLfType,
      ctx: Context
  ): Result[Model.ApiVariant] = {
    for {
      value <- Converter.checkExists("Variant.value", variant.value)
      typeCon <- typ match {
        case t @ Model.DamlLfTypeCon(_, _) => Right(t)
        case _ => Left(GenericConversionError(s"Cannot read $variant as $typ"))
      }
      ddt <- ctx.templates
        .damlLfDefDataType(typeCon.name.identifier)
        .toRight(GenericConversionError(s"Unknown type ${typeCon.name.identifier}"))
      // Here only the variant case is accepted (mirror image of readRecordArgument).
      dt <- typeCon
        .instantiate(ddt)
        .fold(_ => Left(GenericConversionError(s"Variant expected")), Right(_))
      choice <- dt.fields
        .find(f => f._1 == variant.constructor)
        .toRight(GenericConversionError(s"Unknown choice ${variant.constructor}"))
      argument <- readArgument(value, choice._2, ctx)
    } yield {
      Model.ApiVariant(Some(typeCon.name.identifier), variant.constructor, argument)
    }
  }
  /** Decodes an arbitrary V1 value against its expected DAML-LF type.
    * Primitive leaves are converted directly; containers (optional, list, map,
    * record, variant) recurse via the dedicated readers above. */
  private def readArgument(
      value: V1.value.Value,
      typ: Model.DamlLfType,
      ctx: Context
  ): Result[Model.ApiValue] = {
    import V1.value.Value.{Sum => VS}
    (value.sum, typ) match {
      case (VS.Int64(v), _) => Right(Model.ApiInt64(v))
      case (VS.Decimal(v), _) => Right(Model.ApiDecimal(v))
      case (VS.Text(v), _) => Right(Model.ApiText(v))
      case (VS.Unit(v), _) => Right(Model.ApiUnit())
      case (VS.Bool(v), _) => Right(Model.ApiBool(v))
      case (VS.Party(v), _) => Right(Model.ApiParty(v))
      case (VS.Timestamp(v), _) => Right(Model.ApiTimestamp(v))
      case (VS.Date(v), _) => Right(Model.ApiDate(v))
      case (VS.ContractId(v), _) => Right(Model.ApiContractId(v))
      case (VS.Optional(v), t) => readOptionalArgument(v, t, ctx)
      case (VS.List(v), t) => readListArgument(v, t, ctx)
      case (VS.Map(v), t) => readMapArgument(v, t, ctx)
      case (VS.Record(v), t) => readRecordArgument(v, t, ctx)
      case (VS.Variant(v), t) => readVariantArgument(v, t, ctx)
      case (VS.Empty, _) => Left(GenericConversionError("Argument value is empty"))
      case (_, _) => Left(GenericConversionError(s"Cannot read argument $value as $typ"))
    }
  }
  /** Converts a command completion into an optional command status:
    * None for success (the result arrives via the transaction stream),
    * Some(error) for a failed command. */
  def readCompletion(completion: V1.completion.Completion): Result[Option[Model.CommandStatus]] = {
    for {
      status <- Converter.checkExists("Completion.status", completion.status)
    } yield {
      val code = Code.fromValue(status.code)
      if (code == Code.OK)
        // The completion does not contain the new transaction created by this command.
        // Do not report completion, the command result will be updated from the transaction stream.
        None
      else
        Some(Model.CommandStatusError(code.toString(), status.message))
    }
  }
  // ------------------------------------------------------------------------------------------------------------------
  // Write methods (Model -> V1)
  // ------------------------------------------------------------------------------------------------------------------
  /** Encodes a model value as a V1 protobuf value. Exhaustive over all
    * Model.ApiValue variants; container cases recurse via the writers below. */
  def writeArgument(value: Model.ApiValue): Result[V1.value.Value] = {
    import V1.value.Value
    value match {
      case arg: Model.ApiRecord => writeRecordArgument(arg).map(a => Value(Value.Sum.Record(a)))
      case arg: Model.ApiVariant => writeVariantArgument(arg).map(a => Value(Value.Sum.Variant(a)))
      case arg: Model.ApiList => writeListArgument(arg).map(a => Value(Value.Sum.List(a)))
      case Model.ApiBool(v) => Right(Value(Value.Sum.Bool(v)))
      case Model.ApiInt64(v) => Right(Value(Value.Sum.Int64(v)))
      case Model.ApiDecimal(v) => Right(Value(Value.Sum.Decimal(v)))
      case Model.ApiParty(v) => Right(Value(Value.Sum.Party(v)))
      case Model.ApiText(v) => Right(Value(Value.Sum.Text(v)))
      case Model.ApiTimestamp(v) => Right(Value(Value.Sum.Timestamp(v)))
      case Model.ApiDate(v) => Right(Value(Value.Sum.Date(v)))
      case Model.ApiContractId(v) => Right(Value(Value.Sum.ContractId(v)))
      case Model.ApiUnit() => Right(Value(Value.Sum.Unit(com.google.protobuf.empty.Empty())))
      case Model.ApiOptional(None) => Right(Value(Value.Sum.Optional(V1.value.Optional(None))))
      case Model.ApiOptional(Some(v)) =>
        writeArgument(v).map(a => Value(Value.Sum.Optional(V1.value.Optional(Some(a)))))
      case arg: Model.ApiMap =>
        writeMapArgument(arg).map(a => Value(Value.Sum.Map(a)))
    }
  }
  /** Encodes a model record, preserving the (optional) record type identifier. */
  def writeRecordArgument(value: Model.ApiRecord): Result[V1.value.Record] = {
    for {
      fields <- Converter
        .sequence(value.fields.map(f =>
          writeArgument(f.value).map(v => V1.value.RecordField(f.label, Some(v)))))
    } yield {
      V1.value.Record(value.recordId.map(_.asApi), fields)
    }
  }
  /** Encodes a model variant, preserving the (optional) variant type identifier. */
  def writeVariantArgument(value: Model.ApiVariant): Result[V1.value.Variant] = {
    for {
      arg <- writeArgument(value.value)
    } yield {
      V1.value.Variant(value.variantId.map(_.asApi), value.constructor, Some(arg))
    }
  }
  /** Encodes a model list element-wise. */
  def writeListArgument(value: Model.ApiList): Result[V1.value.List] = {
    for {
      values <- Converter.sequence(value.elements.map(e => writeArgument(e)))
    } yield {
      V1.value.List(values)
    }
  }
  /** Encodes a model map as a sequence of key/value entries. */
  def writeMapArgument(value: Model.ApiMap): Result[V1.value.Map] = {
    for {
      values <- Converter.sequence(
        value.value.toImmArray.toList.map { case (k, v) => writeArgument(v).map(k -> _) }
      )
    } yield {
      V1.value.Map(values.map {
        case (k, v) => V1.value.Map.Entry(k, Some(v))
      })
    }
  }
  /** Write a composite command consisting of just the given command.
    *
    * Ledger-effective time is taken from the command's platform time; the
    * maximum record time is the ledger-effective time plus `maxRecordDelay`
    * seconds.
    */
  def writeCommands(
      party: Model.PartyState,
      command: Model.Command,
      maxRecordDelay: Long,
      ledgerId: String,
      applicationId: String
  ): Result[V1.commands.Commands] = {
    for {
      ledgerCommand <- writeCommand(party, command)
    } yield {
      val ledgerEffectiveTime =
        new Timestamp(command.platformTime.getEpochSecond, command.platformTime.getNano)
      val maximumRecordTime =
        ledgerEffectiveTime.copy(seconds = ledgerEffectiveTime.seconds + maxRecordDelay)
      V1.commands.Commands(
        ledgerId,
        Tag.unwrap(command.workflowId),
        applicationId,
        Tag.unwrap(command.id),
        Tag.unwrap(party.name),
        Some(ledgerEffectiveTime),
        Some(maximumRecordTime),
        List(ledgerCommand)
      )
    }
  }
  /** Dispatches a model command to the create/exercise writers. */
  def writeCommand(
      party: Model.PartyState,
      command: Model.Command
  ): Result[V1.commands.Command] = {
    command match {
      case cmd: Model.CreateCommand => writeCreateContract(party, cmd.template, cmd.argument)
      case cmd: Model.ExerciseCommand =>
        writeExerciseChoice(party, cmd.contract, cmd.choice, cmd.argument)
    }
  }
  /** Builds a Create command; fails if the template is not in the party's
    * package registry or the argument record cannot be encoded. */
  def writeCreateContract(
      party: Model.PartyState,
      templateId: Model.DamlLfIdentifier,
      value: Model.ApiRecord
  ): Result[V1.commands.Command] = {
    for {
      template <- Converter.checkExists(
        party.packageRegistry.template(templateId),
        GenericConversionError(s"Template '$templateId' not found"))
      argument <- writeRecordArgument(value)
    } yield {
      V1.commands.Command(
        V1.commands.Command.Command.Create(
          V1.commands.CreateCommand(
            Some(template.id.asApi),
            Some(argument)
          )
        )
      )
    }
  }
  /** Builds an Exercise command; fails if the contract is unknown to the
    * party's ledger view or the choice is not declared on its template. */
  def writeExerciseChoice(
      party: Model.PartyState,
      contractId: ApiTypes.ContractId,
      choiceId: ApiTypes.Choice,
      value: Model.ApiValue
  ): Result[V1.commands.Command] = {
    for {
      contract <- Converter.checkExists(
        party.ledger.contract(contractId, party.packageRegistry),
        GenericConversionError(s"Contract '${Tag.unwrap(contractId)}' not found"))
      choice <- Converter.checkExists(
        contract.template.choices.find(c => c.name == choiceId),
        GenericConversionError(s"Choice '${Tag.unwrap(choiceId)}' not found"))
      argument <- writeArgument(value)
    } yield {
      V1.commands.Command(
        V1.commands.Command.Command.Exercise(
          V1.commands.ExerciseCommand(
            Some(contract.template.id.asApi),
            Tag.unwrap(contractId),
            Tag.unwrap(choiceId),
            Some(argument)
          )
        )
      )
    }
  }
  // ------------------------------------------------------------------------------------------------------------------
  // Helpers
  // ------------------------------------------------------------------------------------------------------------------
  // NOTE(review): the four time helpers below are private and not referenced
  // anywhere in this object — confirm whether they are still needed.
  /** Formats epoch microseconds as an ISO-8601 instant string.
    * NOTE(review): the underlying exception `e` is discarded from the error message. */
  private def epochMicrosToString(time: Long): Result[String] = {
    val micro: Long = 1000000
    val seconds: Long = time / micro
    val nanos: Long = (time % micro) * 1000
    (for {
      instant <- Try(Instant.ofEpochSecond(seconds, nanos)).toEither
      result <- Try(DateTimeFormatter.ISO_INSTANT.format(instant)).toEither
    } yield {
      result
    }).left.map(e => GenericConversionError(s"Could not convert timestamp '$time' to a string"))
  }
  /** Parses an ISO-8601 instant string into epoch microseconds. */
  private def stringToEpochMicros(time: String): Result[Long] = {
    (for {
      ta <- Try(DateTimeFormatter.ISO_INSTANT.parse(time)).toEither
      instant <- Try(Instant.from(ta)).toEither
    } yield {
      val micro: Long = 1000000
      instant.getEpochSecond * micro + instant.getNano / 1000
    }).left.map(e => GenericConversionError(s"Could not convert string '$time' to a TimeStamp: $e"))
  }
  /** Formats an epoch-day count as an ISO local date (yyyy-MM-dd). */
  private def epochDaysToString(time: Int): Result[String] = {
    (for {
      ta <- Try(LocalDate.ofEpochDay(time.toLong)).toEither
      result <- Try(DateTimeFormatter.ISO_LOCAL_DATE.format(ta)).toEither
    } yield {
      result
    }).left.map(e => GenericConversionError(s"Could not convert date '$time' to a Date: $e"))
  }
  /** Parses a date string into an epoch-day count.
    * NOTE(review): parses with ISO_INSTANT while epochDaysToString formats with
    * ISO_LOCAL_DATE, so the two do not round-trip ("2019-01-01" would fail
    * here) — confirm whether that asymmetry is intended. */
  private def stringToEpochDays(time: String): Result[Int] = {
    (for {
      ta <- Try(DateTimeFormatter.ISO_INSTANT.parse(time)).toEither
      instant <- Try(Instant.from(ta)).toEither
    } yield {
      val epoch = Instant.EPOCH
      epoch.until(instant, ChronoUnit.DAYS).toInt
    }).left.map(e => GenericConversionError(s"Could not convert string '$time' to a Date: $e"))
  }
}
|
gaborh-da/daml
|
ledger/ledger-api-common/src/main/scala/com/digitalasset/ledger/api/validation/TransactionServiceRequestValidator.scala
|
// Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.digitalasset.ledger.api.validation
import brave.propagation.TraceContext
import com.digitalasset.daml.lf.data.Ref.Party
import com.digitalasset.ledger.api.domain
import com.digitalasset.ledger.api.domain.{LedgerId, LedgerIdTag, LedgerOffset}
import com.digitalasset.ledger.api.messages.transaction
import com.digitalasset.ledger.api.messages.transaction.GetTransactionTreesRequest
import com.digitalasset.ledger.api.v1.transaction_filter.{Filters, TransactionFilter}
import com.digitalasset.ledger.api.v1.transaction_service.{
GetLedgerEndRequest,
GetTransactionByEventIdRequest,
GetTransactionByIdRequest,
GetTransactionsRequest
}
import com.digitalasset.platform.server.api.validation.ErrorFactories._
import com.digitalasset.platform.server.api.validation.FieldValidations._
import com.digitalasset.platform.server.api.validation.IdentifierResolver
import com.digitalasset.platform.server.util.context.TraceContextConversions._
import io.grpc.StatusRuntimeException
import scalaz.Tag
/** Validates raw Ledger API transaction-service requests and converts them
  * into typed domain request messages.
  *
  * @param ledgerId           the ledger id every incoming request must match
  * @param partyNameChecker   used to reject requests mentioning unknown parties
  * @param identifierResolver resolves template identifiers inside transaction filters
  */
class TransactionServiceRequestValidator(
    ledgerId: String,
    partyNameChecker: PartyNameChecker,
    identifierResolver: IdentifierResolver) {
  // Either a gRPC error to return to the client, or the validated value.
  private type Result[X] = Either[StatusRuntimeException, X]
  private val filterValidator = new TransactionFilterValidator(identifierResolver)
  /** Checks that the request's ledger id matches the configured one. */
  private def matchId(input: String): Result[LedgerId] =
    Tag.subst[String, Result[?], LedgerIdTag](matchLedgerId(ledgerId)(input))
  // Shared success value to avoid re-allocating Right(None) per request.
  private val rightNone = Right(None)
  /** Fields shared by flat- and tree-transaction stream requests, already validated. */
  case class PartialValidation(
      ledgerId: domain.LedgerId,
      transactionFilter: TransactionFilter,
      begin: domain.LedgerOffset,
      end: Option[domain.LedgerOffset],
      traceContext: Option[TraceContext])
  /** Validations shared by [[validate]] and [[validateTree]]: ledger id,
    * presence of filter and begin offset, offset conversion, and the
    * known-party check on the filter's parties. */
  private def commonValidations(req: GetTransactionsRequest): Result[PartialValidation] = {
    for {
      ledgerId <- matchId(req.ledgerId)
      filter <- requirePresence(req.filter, "filter")
      requiredBegin <- requirePresence(req.begin, "begin")
      convertedBegin <- LedgerOffsetValidator.validate(requiredBegin, "begin")
      convertedEnd <- req.end
        .fold[Result[Option[domain.LedgerOffset]]](rightNone)(end =>
          LedgerOffsetValidator.validate(end, "end").map(Some(_)))
      _ <- requireKnownParties(req.getFilter)
    } yield {
      PartialValidation(
        ledgerId,
        filter,
        convertedBegin,
        convertedEnd,
        req.traceContext.map(toBrave))
    }
  }
  /** Rejects absolute offsets lying after the current ledger end; other offset
    * kinds (boundaries) always pass. */
  private def offsetIsBeforeEndIfAbsolute(
      offsetType: String,
      ledgerOffset: LedgerOffset,
      ledgerEnd: LedgerOffset.Absolute,
      offsetOrdering: Ordering[LedgerOffset.Absolute]): Result[Unit] = {
    ledgerOffset match {
      case abs: LedgerOffset.Absolute if offsetOrdering.gt(abs, ledgerEnd) =>
        Left(
          invalidArgument(
            s"$offsetType offset ${abs.value} is after ledger end ${ledgerEnd.value}"))
      case _ => Right(())
    }
  }
  /** Parses every party name, failing on the first invalid one. */
  private def requireParties(parties: Traversable[String]): Result[Set[Party]] =
    parties.foldLeft[Result[Set[Party]]](Right(Set.empty)) { (acc, partyTxt) =>
      for {
        parties <- acc
        party <- requireParty(partyTxt)
      } yield parties + party
    }
  /** Checks that every party mentioned in the filter is syntactically valid and known. */
  private def requireKnownParties(transactionFilter: TransactionFilter): Result[Unit] =
    requireParties(transactionFilter.filtersByParty.keys).flatMap(requireKnownParties)
  /** Rejects the request if any of the given parties is unknown to the ledger. */
  private def requireKnownParties(partiesInRequest: Iterable[Party]): Result[Unit] = {
    val unknownParties = partiesInRequest.filterNot(partyNameChecker.isKnownParty)
    if (unknownParties.nonEmpty)
      Left(invalidArgument(s"Unknown parties: ${unknownParties.mkString("[", ", ", "]")}"))
    else Right(())
  }
  /** Validates a flat-transaction stream request: common validations, offset
    * bounds against the current ledger end, and the transaction filter itself.
    * (Known parties are already checked by [[commonValidations]].) */
  def validate(
      req: GetTransactionsRequest,
      ledgerEnd: LedgerOffset.Absolute,
      offsetOrdering: Ordering[LedgerOffset.Absolute])
    : Result[transaction.GetTransactionsRequest] = {
    for {
      partial <- commonValidations(req)
      _ <- offsetIsBeforeEndIfAbsolute("Begin", partial.begin, ledgerEnd, offsetOrdering)
      _ <- partial.end.fold[Result[Unit]](Right(()))(
        offsetIsBeforeEndIfAbsolute("End", _, ledgerEnd, offsetOrdering))
      convertedFilter <- filterValidator.validate(
        partial.transactionFilter,
        "filter.filters_by_party")
    } yield {
      transaction.GetTransactionsRequest(
        partial.ledgerId,
        partial.begin,
        partial.end,
        convertedFilter,
        req.verbose,
        req.traceContext.map(toBrave))
    }
  }
  /** Validates a transaction-tree stream request. Unlike [[validate]], the
    * filter is reduced to a plain party set because template filtration is not
    * supported on GetTransactionTrees. */
  def validateTree(
      req: GetTransactionsRequest,
      ledgerEnd: LedgerOffset.Absolute,
      offsetOrdering: Ordering[LedgerOffset.Absolute]): Result[GetTransactionTreesRequest] = {
    for {
      partial <- commonValidations(req)
      _ <- offsetIsBeforeEndIfAbsolute("Begin", partial.begin, ledgerEnd, offsetOrdering)
      _ <- partial.end.fold[Result[Unit]](Right(()))(
        offsetIsBeforeEndIfAbsolute("End", _, ledgerEnd, offsetOrdering))
      convertedFilter <- transactionFilterToPartySet(
        partial.transactionFilter,
        "filter.filters_by_party")
    } yield {
      transaction.GetTransactionTreesRequest(
        partial.ledgerId,
        partial.begin,
        partial.end,
        convertedFilter,
        req.verbose,
        req.traceContext.map(toBrave))
    }
  }
  /** Validates a ledger-end request (only the ledger id is checked). */
  def validateLedgerEnd(req: GetLedgerEndRequest): Result[transaction.GetLedgerEndRequest] = {
    for {
      ledgerId <- matchId(req.ledgerId)
    } yield {
      transaction.GetLedgerEndRequest(ledgerId, req.traceContext.map(toBrave))
    }
  }
  /** Validates a lookup by transaction id. The id must be numeric (sandbox
    * convention) and the requesting parties must be non-empty and known. */
  def validateTransactionById(
      req: GetTransactionByIdRequest): Result[transaction.GetTransactionByIdRequest] = {
    for {
      ledgerId <- matchId(req.ledgerId)
      trId <- requireNumber(req.transactionId, "transaction_id")
      _ <- requireNonEmpty(req.requestingParties, "requesting_parties")
      parties <- requireParties(req.requestingParties)
      _ <- requireKnownParties(parties)
    } yield {
      transaction.GetTransactionByIdRequest(
        ledgerId,
        domain.TransactionId(trId.toString),
        parties,
        req.traceContext.map(toBrave))
    }
  }
  /** Validates a lookup by event id: non-empty event id, non-empty and known
    * requesting parties. */
  def validateTransactionByEventId(
      req: GetTransactionByEventIdRequest): Result[transaction.GetTransactionByEventIdRequest] = {
    for {
      ledgerId <- matchId(req.ledgerId)
      _ <- requireNonEmptyString(req.eventId, "event_id")
      _ <- requireNonEmpty(req.requestingParties, "requesting_parties")
      parties <- requireParties(req.requestingParties)
      _ <- requireKnownParties(parties)
    } yield {
      transaction.GetTransactionByEventIdRequest(
        ledgerId,
        domain.EventId(req.eventId),
        parties,
        req.traceContext.map(toBrave))
    }
  }
  /** Reduces a transaction filter to the set of its parties, rejecting any
    * per-party inclusive template filter (unsupported on GetTransactionTrees). */
  private def transactionFilterToPartySet(
      transactionFilter: TransactionFilter,
      fieldName: String
  ) =
    transactionFilter.filtersByParty
      .collectFirst {
        case (party, Filters(Some(inclusive))) =>
          invalidArgument(
            s"$party attempted subscription for templates ${inclusive.templateIds.mkString("[", ", ", "]")}. Template filtration is not supported on GetTransactionTrees RPC. To get filtered data, use the GetTransactions RPC.")
      }
      .fold(requireParties(transactionFilter.filtersByParty.keys))(Left(_))
}
|
gaborh-da/daml
|
ledger/ledger-api-integration-tests/src/test/itsuite/scala/com/digitalasset/platform/tests/integration/ledger/api/identity/LedgerIdentityServiceGivenIT.scala
|
// Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.digitalasset.platform.tests.integration.ledger.api.identity
import com.digitalasset.platform.common.LedgerIdMode
/** Integration test: when the sandbox is started with a statically configured
  * ledger id, the identity service must report exactly that id. */
@SuppressWarnings(Array("org.wartremover.warts.Any"))
class LedgerIdentityServiceGivenIT extends LedgerIdentityServiceITBase {

  // Run the platform with a fixed (static) ledger id instead of a generated one.
  override protected def config: Config =
    Config.default.withLedgerIdMode(LedgerIdMode.Static(givenId))

  "A platform" when {
    "started" should {
      "have a given ledger id" in allFixtures { context =>
        getLedgerId(context.ledgerIdentityService).map { ledgerId =>
          ledgerId should not be empty
          ledgerId shouldEqual givenId
        }
      }
    }
  }
}
|
gaborh-da/daml
|
ledger/sandbox/src/test/suite/scala/com/digitalasset/platform/sandbox/services/transaction/EventFilterSpec.scala
|
// Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.digitalasset.platform.sandbox.services.transaction
import com.digitalasset.ledger.api.v1.event.Event.Event.{Archived, Created}
import com.digitalasset.ledger.api.v1.event._
import com.digitalasset.ledger.api.v1.transaction_filter.{
Filters,
InclusiveFilters,
TransactionFilter
}
import com.digitalasset.ledger.api.v1.value.Identifier
import com.digitalasset.ledger.api.validation.TransactionFilterValidator
import com.digitalasset.platform.api.v1.event.EventOps._
import com.digitalasset.platform.participant.util.EventFilter
import com.digitalasset.platform.server.api.validation.IdentifierResolver
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{Matchers, OptionValues, WordSpec}
import scala.concurrent.Future
/** Unit tests for [[EventFilter]]: an event passes the filter only when one of
  * its witness parties subscribed to the event's template, and the witnesses of
  * a disclosed event are restricted to the requesting parties. */
@SuppressWarnings(Array("org.wartremover.warts.Any"))
class EventFilterSpec extends WordSpec with Matchers with ScalaFutures with OptionValues {
  // A party that witnesses every test event but subscribes to nothing; it must
  // be stripped from the witnesses of any disclosed event.
  private val otherPartyWhoSeesEvents = "otherParty"
  private val packageId = "myPackage"
  private val eventId = "someEventId"
  private val contractId = "someContractId"
  private val party1 = "party1"
  private val party2 = "party2"
  // NOTE(review): party3 is never used in this suite.
  private val party3 = "party3"
  private val module1 = "module1"
  private val module2 = "module2"
  private val template1 = "template1"
  private val template2 = "template2"
  private val templateId1 = Identifier(packageId, moduleName = module1, entityName = template1)
  private val templateId2 = Identifier(packageId, moduleName = module2, entityName = template2)
  // Validator with a stub identifier resolver that never resolves anything.
  private val filterValidator = new TransactionFilterValidator(
    IdentifierResolver(_ => Future.successful(None)))
  // Subscriptions under test: party1 listens to template1 only, party2 to both.
  private val mapping = Map(
    party1 -> getFilter(Seq(module1 -> template1)),
    party2 -> getFilter(Seq(module1 -> template1, module2 -> template2))
  )
  // Validates the filter above and applies it to one event; yields
  // Some(filtered event) when disclosed, None when filtered out.
  private val filter = (event: Event) =>
    filterValidator
      .validate(TransactionFilter(mapping), "filter")
      .toOption
      .flatMap(
        EventFilter
          .TemplateAwareFilter(_)
          .filterEvent(event))
  /** Builds an inclusive Filters value for the given (module, entity) template names. */
  def getFilter(templateIds: Seq[(String, String)]) =
    Filters(Some(InclusiveFilters(templateIds.map {
      case (mod, ent) => Identifier(packageId, moduleName = mod, entityName = ent)
    })))
  "EventFilter" when {
    "filtered by TemplateIds" should {
      // Same assertions for created and archived events.
      runTemplateFilterAssertions("CreatedEvent")(createdEvent)
      runTemplateFilterAssertions("ArchivedEvent")(archivedEvent)
      "remove non-requesting witnesses from the disclosed event" in {
        val resultO = filter(createdEvent(party1, templateId1)).map(_.witnesses)
        resultO should not be empty
        val result = resultO.get
        result should not contain otherPartyWhoSeesEvents
        result should contain theSameElementsAs List(party1)
      }
    }
  }
  /** Registers the shared template-filtering assertions for one event type.
    * NOTE(review): `isExercised` is only true for eventType == "ExercisedEvent",
    * which no current call site passes, so the exercised branches are dead here. */
  def runTemplateFilterAssertions(eventType: String)(createEvent: (String, Identifier) => Event) = {
    val isExercised = eventType == "ExercisedEvent"
    val negateIfRequired = if (isExercised) "not " else ""
    s"${negateIfRequired}let $eventType through when both party and templateId matches" in {
      filter(createEvent(party1, templateId1)) should (if (isExercised) be(empty) else not be empty)
    }
    s"${negateIfRequired}let $eventType through when interested in multiple templateIds" in {
      filter(createEvent(party2, templateId1)) should (if (isExercised) be(empty) else not be empty)
      filter(createEvent(party2, templateId2)) should (if (isExercised) be(empty) else not be empty)
    }
    s"not let $eventType through when party is not listened to" in {
      filter(createEvent("unknownParty", templateId1)) shouldEqual None
    }
    s"not let $eventType through when packageId does not match" in {
      filter(createEvent(
        party1,
        Identifier("someOtherPackageId", moduleName = module1, entityName = template1))) shouldEqual None
    }
    s"not let $eventType through when templateId is not listened to" in {
      filter(createEvent(party1, templateId2)) shouldEqual None
    }
  }
  /** Builds a Created event witnessed by the given party plus the always-seeing other party. */
  private def createdEvent(party: String, templateId: Identifier) =
    Event(
      Created(
        CreatedEvent(
          eventId = eventId,
          contractId = contractId,
          templateId = Some(templateId),
          witnessParties = Seq(party, otherPartyWhoSeesEvents)
        )
      ))
  /** Builds an Archived event witnessed by the given party plus the always-seeing other party. */
  private def archivedEvent(party: String, templateId: Identifier) =
    Event(
      Archived(
        ArchivedEvent(
          eventId = eventId,
          contractId = contractId,
          templateId = Some(templateId),
          witnessParties = Seq(party, otherPartyWhoSeesEvents)
        )
      ))
}
|
gaborh-da/daml
|
ledger/ledger-api-integration-tests/src/main/scala/com/digitalasset/platform/semantictest/StandaloneSemanticTestRunner.scala
|
<filename>ledger/ledger-api-integration-tests/src/main/scala/com/digitalasset/platform/semantictest/StandaloneSemanticTestRunner.scala
// Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.digitalasset.platform.semantictest
import java.io.{BufferedInputStream, File, FileInputStream}
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import com.digitalasset.daml.lf.data.Ref.PackageId
import com.digitalasset.daml.lf.engine.testing.SemanticTester
import com.digitalasset.daml.lf.lfpackage.{Ast, Decode}
import com.digitalasset.grpc.adapter.AkkaExecutionSequencerPool
import com.digitalasset.platform.apitesting.{LedgerContext, PlatformChannels, RemoteServerResource}
import com.digitalasset.platform.common.LedgerIdMode
import com.digitalasset.platform.sandbox.config.{DamlPackageContainer, SandboxConfig}
import scala.concurrent.{Await, ExecutionContext}
import scala.concurrent.duration._
/** Command-line runner that executes all DAML scenarios found in the given
  * DAML-LF archives as semantic tests against a running (remote) ledger. */
object StandaloneSemanticTestRunner {
  /** Entry point: parses arguments, connects to the ledger, optionally resets
    * it, runs every scenario of every package, then tears everything down.
    * Blocking `Await` calls are acceptable here — this is the edge of the program. */
  def main(args: Array[String]): Unit = {
    implicit val system: ActorSystem = ActorSystem("SemanticTestRunner")
    implicit val mat: ActorMaterializer = ActorMaterializer()(system)
    implicit val ec: ExecutionContext = mat.executionContext
    implicit val esf: AkkaExecutionSequencerPool =
      new AkkaExecutionSequencerPool("esf-" + this.getClass.getSimpleName)(system)
    // Exit with a non-zero code on invalid arguments (scopt already printed usage).
    val config = argParser
      .parse(args, defaultConfig)
      .getOrElse(sys.exit(1))
    val packages: Map[PackageId, Ast.Package] = config.packageContainer.packages
    val scenarios = SemanticTester.scenarios(packages)
    // Total scenario count across all packages, for the progress message only.
    val nScenarios: Int = scenarios.foldLeft(0)((c, xs) => c + xs._2.size)
    println(s"Running ${nScenarios} scenarios against ${config.host}:${config.port}...")
    val ledgerResource = RemoteServerResource(config.host, config.port, None)
      .map {
        case PlatformChannels(channel) =>
          LedgerContext.SingleChannelContext(channel, LedgerIdMode.Dynamic(), packages.keys)
      }
    ledgerResource.setup()
    val ledger = ledgerResource.value
    if (config.performReset) {
      Await.result(ledger.reset(), 10.seconds)
    }
    // Run scenarios package by package, sequentially; each scenario gets 10s.
    scenarios.foreach {
      case (pkgId, names) =>
        val tester = new SemanticTester(
          parties => new SemanticTestAdapter(ledger, packages, parties),
          pkgId,
          packages)
        names.foreach { name =>
          println(s"Testing scenario: $name")
          val _ = Await.result(
            tester.testScenario(name),
            10.seconds
          )
        }
    }
    println("All scenarios completed.")
    ledgerResource.close()
    mat.shutdown()
    val _ = Await.result(system.terminate(), 5.seconds)
  }
  /** Decodes a single DAML-LF archive file, always closing the stream.
    * NOTE(review): private and not referenced anywhere in this object —
    * confirm whether it is still needed or can be removed. */
  private def readPackage(f: File): (PackageId, Ast.Package) = {
    val is = new BufferedInputStream(new FileInputStream(f))
    try {
      Decode.decodeArchiveFromInputStream(is)
    } finally {
      is.close()
    }
  }
  /** Parsed command-line configuration for the runner. */
  final case class Config(
      host: String,
      port: Int,
      packageContainer: DamlPackageContainer,
      performReset: Boolean)
  private val defaultConfig = Config(
    host = "localhost",
    port = SandboxConfig.DefaultPort,
    packageContainer = DamlPackageContainer(),
    performReset = false,
  )
  // Command-line schema. NOTE(review): both "-p/--port" and "--target-port"
  // set the same `port` field; the latter appears to be an alias — confirm.
  private val argParser = new scopt.OptionParser[Config]("semantic-test-runner") {
    head("Semantic test runner")
    opt[Int]('p', "port")
      .action((x, c) => c.copy(port = x))
      .text(s"Ledger API server port. Defaults to ${SandboxConfig.DefaultPort}.")
    opt[Int]("target-port")
      .action((x, c) => c.copy(port = x))
      .text(s"Ledger API server port. Defaults to ${SandboxConfig.DefaultPort}.")
    opt[String]('h', "host")
      .action((x, c) => c.copy(host = x))
      .text("Ledger API server host. Defaults to localhost.")
    opt[Unit]('r', "reset")
      .action((_, c) => c.copy(performReset = true))
      .text("Perform a ledger reset before running the tests. Defaults to false.")
    arg[File]("<archive>...")
      .unbounded()
      .action((f, c) => c.copy(packageContainer = c.packageContainer.withFile(f)))
      .text("DAML-LF Archives to load and run all scenarios from.")
  }
}
|
gaborh-da/daml
|
ledger/ledger-api-integration-tests/src/test/itsuite/scala/com/digitalasset/platform/tests/integration/ledger/api/WitnessesIT.scala
|
// Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.digitalasset.platform.tests.integration.ledger.api
import scala.concurrent.Future
import org.scalatest.{AsyncFreeSpec, Matchers}
import com.digitalasset.ledger.api.testing.utils.{
SuiteResourceManagementAroundEach,
AkkaBeforeAndAfterAll
}
import org.scalatest.concurrent.{AsyncTimeLimitedTests, ScalaFutures}
import com.digitalasset.platform.apitesting.TestTemplateIds
import com.digitalasset.platform.apitesting.{MultiLedgerFixture, LedgerContext}
import com.digitalasset.ledger.client.services.commands.SynchronousCommandClient
import com.digitalasset.ledger.api.v1.transaction_filter.{Filters, TransactionFilter}
import com.digitalasset.platform.apitesting.LedgerContextExtensions._
import com.digitalasset.ledger.api.v1.commands.{CreateCommand, ExerciseCommand}
import com.digitalasset.ledger.api.v1.value.{Record, RecordField, Value}
import com.digitalasset.platform.participant.util.ValueConversions._
import com.digitalasset.ledger.api.v1.event.{ExercisedEvent}
import com.digitalasset.ledger.api.v1.transaction.TreeEvent
/** Integration test checking that the witness parties reported on events obey
  * the DAML disclosure rules: stakeholders witness the create, a non-consuming
  * exercise is witnessed by signatories and actors, and a consuming exercise by
  * stakeholders and actors.
  */
@SuppressWarnings(Array("org.wartremover.warts.Any"))
class WitnessesIT
    extends AsyncFreeSpec
    with AkkaBeforeAndAfterAll
    with MultiLedgerFixture
    with SuiteResourceManagementAroundEach
    with ScalaFutures
    with AsyncTimeLimitedTests
    with Matchers
    with TestTemplateIds {
  override protected def config: Config = Config.default

  private def commandClient(ctx: LedgerContext): SynchronousCommandClient =
    new SynchronousCommandClient(ctx.commandService)

  // Subscribe for all three parties so every disclosed event is observable.
  private val filter = TransactionFilter(
    Map(
      "alice" -> Filters.defaultInstance,
      "bob" -> Filters.defaultInstance,
      "charlie" -> Filters.defaultInstance,
    ))

  "disclosure rules are respected" in allFixtures { ctx =>
    // alice: signatory, bob: observer, charlie: actor (not a stakeholder).
    val createArg = Record(
      fields = List(
        RecordField("p_signatory", "alice".asParty),
        RecordField("p_observer", "bob".asParty),
        RecordField("p_actor", "charlie".asParty),
      ))
    val exerciseArg = Value(Value.Sum.Record(Record()))

    // Exercises `choice` on contract `cid` as charlie and returns the resulting
    // exercised event, taken from the first root of the transaction tree.
    def exercise(cid: String, choice: String): Future[ExercisedEvent] =
      ctx.testingHelpers
        .submitAndListenForSingleTreeResultOfCommand(
          ctx.testingHelpers
            .submitRequestWithId(s"$choice-exercise")
            .update(
              _.commands.commands :=
                List(
                  ExerciseCommand(Some(templateIds.witnesses), cid, choice, Some(exerciseArg)).wrap),
              _.commands.party := "charlie"
            ),
          filter,
          false
        )
        .map { tx =>
          tx.eventsById(tx.rootEventIds(0)).kind match {
            case TreeEvent.Kind.Exercised(e) => e
            case _ => fail("unexpected event")
          }
        }

    for {
      // Create Witnesses contract
      createTx <- ctx.testingHelpers.submitAndListenForSingleResultOfCommand(
        ctx.testingHelpers
          .submitRequestWithId("create")
          .update(
            _.commands.commands :=
              List(CreateCommand(Some(templateIds.witnesses), Some(createArg)).wrap),
            _.commands.party := "alice"
          ),
        filter
      )
      createdEv = ctx.testingHelpers.getHead(ctx.testingHelpers.createdEventsIn(createTx))
      // Divulge Witnesses contract to charlie, who's just an actor and thus cannot
      // see it by default.
      divulgeCreatedEv <- ctx.testingHelpers.simpleCreate(
        "create-divulge",
        "charlie",
        templateIds.divulgeWitnesses,
        Record(
          fields =
            List(RecordField(value = "alice".asParty), RecordField(value = "charlie".asParty)))
      )
      _ <- ctx.testingHelpers.simpleExercise(
        "exercise-divulge",
        "alice",
        templateIds.divulgeWitnesses,
        divulgeCreatedEv.contractId,
        "Divulge",
        Value(
          Value.Sum.Record(
            Record(fields = List(RecordField(value = createdEv.contractId.asContractId)))))
      )
      // Now, first try the non-consuming choice
      nonConsumingExerciseEv <- exercise(createdEv.contractId, "WitnessesNonConsumingChoice")
      // And then the consuming one
      consumingExerciseEv <- exercise(createdEv.contractId, "WitnessesChoice")
    } yield {
      createdEv.witnessParties should contain theSameElementsAs List("alice", "bob") // stakeholders = signatories \cup observers
      nonConsumingExerciseEv.witnessParties should contain theSameElementsAs List(
        "alice",
        "charlie") // signatories \cup actors
      consumingExerciseEv.witnessParties should contain theSameElementsAs List(
        "alice",
        "bob",
        "charlie") // stakeholders \cup actors
    }
  }
}
|
gaborh-da/daml
|
ledger/ledger-api-test-tool/src/main/scala/com/daml/ledger/api/testtool/LedgerApiTestTool.scala
|
<filename>ledger/ledger-api-test-tool/src/main/scala/com/daml/ledger/api/testtool/LedgerApiTestTool.scala
// Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.daml.ledger.api.testtool
import java.io.{File, PrintWriter, StringWriter}
import java.nio.file.{Files, Path, Paths, StandardCopyOption}
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import com.digitalasset.daml.lf.UniversalArchiveReader
import com.digitalasset.daml.lf.types.{Ledger => L}
import com.digitalasset.daml.lf.data.Ref.{PackageId, QualifiedName}
import com.digitalasset.daml.lf.engine.testing.SemanticTester
import com.digitalasset.daml.lf.lfpackage.{Ast, Decode}
import com.digitalasset.grpc.adapter.AkkaExecutionSequencerPool
import com.digitalasset.platform.apitesting.{LedgerContext, PlatformChannels, RemoteServerResource}
import com.digitalasset.platform.common.LedgerIdMode
import com.digitalasset.platform.semantictest.SemanticTestAdapter
import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext}
import scala.collection.breakOut
import scala.util.Random
/** Command-line tool that runs the bundled DAML-LF semantic test scenarios
  * against a live ledger, with optional extraction of the bundled DARs and an
  * optional "must fail" mode that inverts the success criterion.
  */
object LedgerApiTestTool {
  def main(args: Array[String]): Unit = {
    // Akka / gRPC plumbing required by the ledger client machinery.
    implicit val system: ActorSystem = ActorSystem("LedgerApiTestTool")
    implicit val mat: ActorMaterializer = ActorMaterializer()(system)
    implicit val ec: ExecutionContext = mat.executionContext
    implicit val esf: AkkaExecutionSequencerPool =
      new AkkaExecutionSequencerPool("esf-" + this.getClass.getSimpleName)(system)

    // DAR files bundled on the classpath that contain the semantic test scenarios.
    val testResources = List("/ledger/ledger-api-integration-tests/SemanticTests.dar")

    // Exit with a non-zero status when the arguments do not parse.
    val config = Cli
      .parse(args)
      .getOrElse(sys.exit(1))

    // --extract mode: dump the bundled DARs to disk and exit without testing.
    if (config.extract) {
      extractTestFiles(testResources)
      System.exit(0)
    }

    val packages: Map[PackageId, Ast.Package] = testResources
      .flatMap(loadAllPackagesFromResource)(breakOut)

    val scenarios = SemanticTester.scenarios(packages)
    // Total scenario count across all packages (xs._2 is a package's scenario set).
    val nScenarios: Int = scenarios.foldLeft(0)((c, xs) => c + xs._2.size)

    println(s"Running $nScenarios scenarios against ${config.host}:${config.port}...")

    val ledgerResource = RemoteServerResource(config.host, config.port, config.tlsConfig)
      .map {
        case PlatformChannels(channel) =>
          LedgerContext.SingleChannelContext(channel, LedgerIdMode.Dynamic(), packages.keys)
      }
    ledgerResource.setup()
    val ledger = ledgerResource.value

    if (config.performReset) {
      Await.result(ledger.reset(), 10.seconds)
    }

    var failed = false

    // Mangle party names and command ids with a random suffix so repeated runs
    // against the same (non-reset) ledger do not collide.
    val runSuffix = "-" + Random.alphanumeric.take(10).mkString
    val partyNameMangler = (partyText: String) => partyText + runSuffix
    val commandIdMangler: ((QualifiedName, Int, L.NodeId) => String) = (scenario, stepId, nodeId) =>
      s"ledger-api-test-tool-$scenario-$stepId-$nodeId-$runSuffix"

    try {
      // Run every scenario of every package sequentially; timeouts scale with
      // the configured factor.
      scenarios.foreach {
        case (pkgId, names) =>
          val tester = new SemanticTester(
            parties =>
              new SemanticTestAdapter(
                ledger,
                packages,
                parties,
                timeoutScaleFactor = config.timeoutScaleFactor),
            pkgId,
            packages,
            partyNameMangler,
            commandIdMangler
          )
          names
            .foreach { name =>
              println(s"Testing scenario: $name")
              val _ = try {
                Await.result(
                  tester.testScenario(name),
                  (60 * config.timeoutScaleFactor).seconds
                )
              } catch {
                // Re-raise with the scenario name and the original stack trace
                // embedded in the message, so the failure is attributable.
                case (t: Throwable) =>
                  val sw = new StringWriter
                  t.printStackTrace(new PrintWriter(sw))
                  sys.error(
                    s"Running scenario $name failed with: " + t
                      .getMessage() + "\n\nWith stacktrace:\n" + sw
                      .toString() + "\n\nTesting tool own stacktrace is:")
              }
            }
      }
      println("All scenarios completed.")
    } catch {
      // With --must-fail a failure is the expected outcome, so only record it;
      // otherwise propagate after the finally-teardown runs.
      case (t: Throwable) =>
        failed = true
        if (!config.mustFail) throw t
    } finally {
      // Tear down in reverse order of creation.
      ledgerResource.close()
      mat.shutdown()
      val _ = Await.result(system.terminate(), 5.seconds)
    }

    // --must-fail inverts the exit condition: the run only succeeds if at least
    // one scenario failed.
    if (config.mustFail) {
      if (failed) println("One or more scenarios failed as expected.")
      else
        throw new RuntimeException(
          "None of the scenarios failed, yet the --must-fail flag was specified!")
    }
  }

  // Loads all DAML-LF packages contained in a DAR bundled on the classpath.
  private def loadAllPackagesFromResource(resource: String): Map[PackageId, Ast.Package] = {
    // TODO: replace with stream-supporting functions from UniversalArchiveReader when
    // https://github.com/digital-asset/daml/issues/547 is fixed
    val is = getClass.getResourceAsStream(resource)
    if (is == null) sys.error(s"Could not find $resource in classpath")
    // Copy the resource to a temporary file because the reader needs a File.
    val targetPath: Path = Files.createTempFile("ledger-api-test-tool-", "-test.dar")
    Files.copy(is, targetPath, StandardCopyOption.REPLACE_EXISTING);
    val f: File = targetPath.toFile
    if (f == null) sys.error(s"Could not open $targetPath")
    val packages = UniversalArchiveReader().readFile(f).get
    Map(packages.all.map {
      case (pkgId, pkgArchive) => Decode.readArchivePayloadAndVersion(pkgId, pkgArchive)._1
    }: _*)
  }

  // Copies the bundled test DARs into the current working directory.
  private def extractTestFiles(testResources: List[String]): Unit = {
    val pwd = Paths.get(".").toAbsolutePath
    println(s"Extracting all DAML resources necessary to run the tests into $pwd.")
    testResources
      .foreach { n =>
        val is = getClass.getResourceAsStream(n)
        if (is == null) sys.error(s"Could not find $n in classpath")
        val targetFile = new File(new File(n).getName)
        Files.copy(is, targetFile.toPath, StandardCopyOption.REPLACE_EXISTING)
        println(s"Extracted $n to $targetFile")
      }
  }
}
|
gaborh-da/daml
|
daml-lf/data/src/main/scala/com/digitalasset/daml/lf/data/Utf8.scala
|
// Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.digitalasset.daml.lf.data
import java.nio.charset.StandardCharsets
import java.security.MessageDigest
import scala.annotation.tailrec
// The DAML-LF strings are supposed to be UTF-8 while standard java strings are UTF16
// Note number of UTF16 operations are not Utf8 equivalent (for instance length, charAt, ordering ...)
// This module provide UTF8 emulation functions.
// The DAML-LF strings are supposed to be UTF-8 while standard java strings are UTF16
// Note number of UTF16 operations are not Utf8 equivalent (for instance length, charAt, ordering ...)
// This module provide UTF8 emulation functions.
object Utf8 {

  // The DAML-LF strings are supposed to be UTF-8.
  // However standard "exploding" java/scala methods like
  // _.toList split in Character which are not Unicode codepoint.
  /** Splits `s` into one String per Unicode code point, so surrogate pairs are
    * kept together instead of being split into two chars.
    */
  def explode(s: String): ImmArray[String] = {
    val len = s.length
    val arr = ImmArray.newBuilder[String]
    var i = 0
    // Fix: dropped the dead local `j` that was incremented but never read.
    while (i < len) {
      // if s(i) is a high surrogate the current codepoint uses 2 chars
      val next = if (s(i).isHighSurrogate) i + 2 else i + 1
      arr += s.substring(i, next)
      i = next
    }
    arr.result()
  }

  /** UTF-8 encoding of `s`. */
  def getBytes(s: String): Array[Byte] =
    s.getBytes(StandardCharsets.UTF_8)

  /** Lowercase-hex SHA-256 digest of the UTF-8 encoding of `s`. */
  def sha256(s: String): String = {
    val digest = MessageDigest.getInstance("SHA-256")
    val array = digest.digest(getBytes(s))
    array.map("%02x" format _).mkString
  }

  /** Inverse of [[explode]]: concatenates the fragments back into one String. */
  def implode(ts: ImmArray[String]): String =
    ts.toSeq.mkString

  /** Ordering by Unicode code point (not by UTF-16 char, which would sort
    * surrogate pairs incorrectly relative to high code points).
    */
  val Ordering: Ordering[String] = (xs: String, ys: String) => {
    val lim = xs.length min ys.length

    @tailrec
    def lp(i: Int): Int =
      if (i < lim) {
        val x = xs(i)
        val y = ys(i)
        if (x != y) {
          // If x is a low surrogate, then the current codepoint starts at the
          // previous char, otherwise the codepoint starts at the current char.
          val j = if (x.isLowSurrogate) i - 1 else i
          xs.codePointAt(j) - ys.codePointAt(j)
        } else lp(i + 1)
      } else xs.length - ys.length

    lp(0)
  }
}
|
gaborh-da/daml
|
daml-lf/data/src/main/scala/com/digitalasset/daml/lf/data/MatchingStringModule.scala
|
<filename>daml-lf/data/src/main/scala/com/digitalasset/daml/lf/data/MatchingStringModule.scala
// Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.digitalasset.daml.lf.data
import scalaz.Equal
import scala.util.matching.Regex
/** A string-backed subtype `T` whose values are exactly the strings accepted by
  * [[fromString]]; see the companion object for a regex-based factory.
  */
sealed abstract class MatchingStringModule {
  type T <: String

  /** Validates `s`, returning `Left` with an error message when rejected. */
  def fromString(s: String): Either[String, T]

  /** Like [[fromString]] but throws on invalid input.
    * NOTE(review): `assert` here is a helper in scope (not `Predef.assert`,
    * which returns Unit) that presumably unwraps the Either — confirm.
    */
  @throws[IllegalArgumentException]
  final def assertFromString(s: String): T =
    assert(fromString(s))

  def equalInstance: Equal[T]

  // We provide the following array factory instead of a ClassTag
  // because the latter lets people easily reinterpret any string as a T.
  // See
  // * https://github.com/digital-asset/daml/pull/983#discussion_r282513324
  // * https://github.com/scala/bug/issues/9565
  val Array: ArrayFactory[T]
}
/** Factory building a [[MatchingStringModule]] whose values are exactly the
  * strings matching the given regular expression.
  */
object MatchingStringModule extends (Regex => MatchingStringModule) {
  override def apply(regex: Regex): MatchingStringModule = new MatchingStringModule {
    type T = String

    // Compile once; reused for every validation.
    private val compiled = regex.pattern

    def fromString(s: String): Either[String, T] =
      if (compiled.matcher(s).matches()) Right(s)
      else Left(s"""string "$s" does not match regex "$regex"""")

    def equalInstance: Equal[T] = scalaz.std.string.stringInstance

    val Array: ArrayFactory[T] = new ArrayFactory[T]
  }
}
|
gaborh-da/daml
|
ledger/backend-api/src/main/scala/com/digitalasset/ledger/backend/api/v1/LedgerBackend.scala
|
<filename>ledger/backend-api/src/main/scala/com/digitalasset/ledger/backend/api/v1/LedgerBackend.scala
// Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
package com.digitalasset.ledger.backend.api.v1
import akka.NotUsed
import akka.stream.scaladsl.Source
import com.digitalasset.ledger.backend.api.v1.LedgerSyncEvent.AcceptedTransaction
import scala.concurrent.Future
/** An instance of the [[LedgerBackend]] trait allows to read from and write
* to a DAML Ledger.
*
* The purpose of the [[LedgerBackend]] trait is to decouple the
* implementation of the ledger-api-server, which provides the user-facing
* DAML Ledger API, from the implementation of the underlying storage and
* synchronization mechanism. The ledger-api-server can thereby be used to
* offer the DAML Ledger API for many different distributed ledger and
* database technologies.
*
* The semantics of DAML Ledgers are specified as part of the DA Ledger
* model: https://docs.daml.com/concepts/ledger-model/index.html. We assume
* knowledge of the concepts explained therein for the remainder of this
* documentation. We also recommend going through the Getting Started Guide
* in the DAML SDK to get a feel for how a DAML Application works:
* https://docs.daml.com/getting-started/installation.html
*
* The [[LedgerBackend]] trait is structured such that it can also be
* implemented for distributed ledgers where each node has its own local view
* on the state of the ledger and on what transactions have been accepted.
*
* The [[LedgerBackend]] trait exposes changes to a Participant node's state
* as a stream of [[LedgerSyncEvent]]s. Not all changes are required to be
* exposed. Only the ones observable at the level of the DAML Ledger API. Use
* [[LedgerBackend.ledgerSyncEvents]] to consume this stream. Every
* [[LedgerSyncEvent]] has an associated [[LedgerSyncOffset]], which serves
* as a Participant-node-local address of that event. This allows consuming
* the [[LedgerBackend.ledgerSyncEvents]] stream starting after a specific
* offset.
*
* Participant nodes often host the data for multiple DAML Parties; i.e.,
* they run in a multi-tenancy setup. The privacy model for DAML
* (https://docs.daml.com/concepts/ledger-model/ledger-privacy.html) is
* though defined on a per-party basis. This is why all methods that read
* privacy-sensitive data from a Participant node take the requesting
* party(ies) as an argument; see e.g., [[LedgerBackend.ledgerSyncEvents]].
* The implementors of [[LedgerBackend]] need to take care to properly filter
* the returned data to only include the requesting parties view.
*
* TODO (SM): move the per-party filtering into the domain of the
* ledger-api-server as part of the V2 version of the LedgerBackend API
*
* In principle, all reads from a Participant node's state can be performed
* using [[LedgerBackend.ledgerSyncEvents]] and computing the desired result
* over the returned stream. However this would not perform well, which is why
* we added extra methods to the [[LedgerBackend]] where desirable.
*
* All writes to the ledger happen by submitting a transaction using
* [[LedgerBackend.beginSubmission]] and calling [[SubmissionHandle.submit]]
* on the returned [[SubmissionHandle]]. It is expected that a Participant
* node coordinates with the other nodes constituting the ledger to commit
* the submitted transaction and inform the relevant Participant nodes about
* the newly committed transaction.
*
*/
trait LedgerBackend extends AutoCloseable {

  /** Return the identifier of the Participant node's state that this
    * [[LedgerBackend]] reads from and writes to.
    *
    * This identifier is used by consumers of the DAML Ledger API to check
    * on reconnects to the Ledger API that they are connected to the same
    * ledger and can therefore expect to receive the same data on calls that
    * return append-only data. It is expected to be:
    * (1) immutable over the lifetime of a [[LedgerBackend]] instance,
    * (2) globally unique with high-probability,
    * (3) matching the regexp [a-zA-Z0-9]+.
    *
    * Implementations where Participant nodes share a global view on all
    * transactions in the ledger (e.g, via a blockchain) are expected to use
    * the same ledger-id on all Participant nodes. Implementations where
    * Participant nodes do not share a global view should ensure that the
    * different participant nodes use different ledger-ids.
    *
    * TODO(SM): find a better name than 'ledger-id'.
    */
  def ledgerId: String

  /** Begin the submission of a transaction to the ledger.
    *
    * Every write to the ledger is initiated with its own call to this
    * method. The returned [[SubmissionHandle]] is used by the DAML
    * interpreter to read from the ledger and construct a transaction. See
    * [[SubmissionHandle]] for details on its methods.
    *
    * This method SHOULD be light-weight on average. Implementors might
    * for example use a connection pool to avoid high setup costs for
    * connecting to its Participant node.
    */
  def beginSubmission(): Future[SubmissionHandle]

  /** Return the stream of ledger events starting from and including the given offset.
    *
    * @param offset : the ledger offset starting from which events should be streamed.
    *                 When `None`, the stream starts from the beginning.
    *
    * The stream only terminates if there was an error.
    *
    * Two calls to this method with the same arguments are related such
    * that
    * (1) all events are delivered in the same order, but
    * (2) [[LedgerSyncEvent.RejectedCommand]] and [[LedgerSyncEvent.Heartbeat]] events can be elided if their
    * recordTime is equal to the preceding event.
    * This rule provides implementors with the freedom to not persist
    * [[LedgerSyncEvent.RejectedCommand]] and [[LedgerSyncEvent.Heartbeat]] events.
    *
    */
  def ledgerSyncEvents(offset: Option[LedgerSyncOffset] = None): Source[LedgerSyncEvent, NotUsed]

  /** Return a recent snapshot of the active contracts.
    *
    * It is up to the implementation to decide on what 'recent' means.
    * Consumers typically follow up on a call to this method with a call to
    * [[ledgerSyncEvents]] starting from the snapshot's offset to track
    * changes to that snapshot.
    *
    * TODO (SM): as part of the V2 API fix the problem that this will result in the create events
    * at an accepted-transaction at the latest offset being returned twice: once as part of the active-contract
    * snapshot and once as part of the first ledger-event returned by [[ledgerSyncEvents]].
    *
    * Semantically the method MUST return exactly the contracts for which
    * there was a 'Create' event and no
    * consuming 'Exercise' event in an [[AcceptedTransaction]] in the
    * [[ledgerSyncEvents]] for the 'requestingParties' starting from the
    * beginning until and including the offset at which the snapshot is
    * computed.
    *
    * Implementations are expected to serve this stream in time proportional
    * to its size.
    *
    */
  def activeContractSetSnapshot(): Future[(LedgerSyncOffset, Source[ActiveContract, NotUsed])]

  /** Return the current [[LedgerSyncOffset]].
    *
    * Implementations are expected to return an offset whose associated
    * [[LedgerSyncOffset]] has a 'recordTime' that is close to the wall-clock
    * time of the Participant node. This is what 'current' means for this
    * method.
    *
    * The typical use-case for this method is to use the returned offset as the
    * starting offset for [[ledgerSyncEvents]] stream to subscribe to recent
    * changes of the ledger.
    *
    */
  def getCurrentLedgerEnd: Future[LedgerSyncOffset]

  /** Looks up a transaction by its id; the result is `None` when no
    * transaction with that id is found.
    */
  def getTransactionById(transactionId: TransactionId): Future[Option[AcceptedTransaction]]
}
|
igieon/sbt-assembly
|
build.sbt
|
// Settings shared by every project in this build (applied build-wide via ThisBuild).
lazy val commonSettings: Seq[Setting[_]] = Seq(
  version in ThisBuild := "0.14.7-SNAPSHOT",
  organization in ThisBuild := "com.eed3si9n"
)
// Root (and only) project: the sbt-assembly plugin itself.
lazy val root = (project in file(".")).
  // enablePlugins(GitVersioning).
  settings(commonSettings: _*).
  settings(
    sbtPlugin := true,
    name := "sbt-assembly",
    description := "sbt plugin to create a single fat jar",
    licenses := Seq("MIT License" -> url("https://github.com/sbt/sbt-assembly/blob/master/LICENSE")),
    scalacOptions := Seq("-deprecation", "-unchecked", "-Dscalac.patmat.analysisBudget=1024", "-Xfuture"),
    libraryDependencies ++= Seq(
      "org.scalactic" %% "scalactic" % "3.0.1",
      "org.pantsbuild" % "jarjar" % "1.6.5"
    ),
    // Runs only the scripted tests whose project/build.properties requests an
    // sbt version binary-compatible with the version this build is currently
    // cross-building for; incompatible tests are skipped with a warning.
    TaskKey[Unit]("runScriptedTest") := Def.taskDyn {
      val sbtBinVersion = (sbtBinaryVersion in pluginCrossBuild).value
      val base = sbtTestDirectory.value
      // A test directory is compatible when its build.properties names a
      // binary-compatible sbt version, or declares no sbt.version at all.
      def isCompatible(directory: File): Boolean = {
        val buildProps = new java.util.Properties()
        IO.load(buildProps, directory / "project" / "build.properties")
        Option(buildProps.getProperty("sbt.version"))
          .map { version =>
            val requiredBinVersion = CrossVersion.binarySbtVersion(version)
            val compatible = requiredBinVersion == sbtBinVersion
            if (!compatible) {
              val testName = directory.relativeTo(base).getOrElse(directory)
              streams.value.log.warn(s"Skipping $testName since it requires sbt $requiredBinVersion")
            }
            compatible
          }
          .getOrElse(true)
      }
      // Scripted tests live two directory levels below sbtTestDirectory (group/name).
      val testDirectoryFinder = base * AllPassFilter * AllPassFilter filter { _.isDirectory }
      val tests = for {
        test <- testDirectoryFinder.get
        if isCompatible(test)
        path <- Path.relativeTo(base)(test)
      } yield path.replace('\\', '/')
      if (tests.nonEmpty)
        Def.task(scripted.toTask(tests.mkString(" ", " ", "")).value)
      else
        Def.task(streams.value.log.warn("No tests can be run for this sbt version"))
    }.value,
    publishArtifact in (Compile, packageBin) := true,
    publishArtifact in (Test, packageBin) := false,
    publishArtifact in (Compile, packageDoc) := false,
    publishArtifact in (Compile, packageSrc) := true
  )
|
rise-lang/2021-CGO-artifact
|
lib/harris-rise-and-shine/build.sbt
|
scalaVersion := "2.12.10"

// Root project; depends on the sibling `shine` build referenced by path below.
lazy val root = (project in file("."))
  .dependsOn(shine)
  .settings(
    name := "harris-rise-and-shine",
    // Larger thread stack and heap. NOTE(review): sbt applies javaOptions only
    // to forked JVMs — confirm `fork` is enabled where this matters.
    javaOptions ++= Seq("-Xss20m", "-Xms512m", "-Xmx4G")
  )

lazy val shine = ProjectRef(file("../shine"), "shine")
|
rise-lang/2021-CGO-artifact
|
lib/harris-rise-and-shine/Main.scala
|
package imgproc_rise_and_shine
import apps.harrisCornerDetectionHalide.harris
import apps.harrisCornerDetectionHalide.ocl._
import apps.harrisCornerDetectionHalideRewrite.{ocl => rewrite}
/** Entry point generating OpenCL kernels for the Harris corner detection
  * benchmark at the vector width supplied on the command line.
  */
object Main {
  /** Lowers `e` (type inference + dot-unrolling rewrite), compiles it to an
    * OpenCL kernel named `name`, and writes the kernel source to `path`.
    */
  def genKernel(e: rise.core.Expr, name: String, path: String): Unit = {
    val lowered = rewrite.unrollDots(rise.core.types.infer(e))
    val kernel = util.gen.OpenCLKernel(lowered, name)
    util.writeToPath(path, kernel.code)
  }

  /** args(0) is the vector width; the commented-out calls below are kept as a
    * record of previously explored kernel variants.
    */
  def main(args: Array[String]): Unit = {
    val strip = 32
    val vWidth = args(0).toInt
    val highLevel = rise.core.types.infer(harris(strip, vWidth))
    // genKernel(rewrite.harrisBufferedSplitPar(strip)(highLevel),
    //   "harris", "gen/harrisB3SPRW.cl")
    // genKernel(harrisBufferedVecUnaligned(3, vWidth), "harris", "gen/harrisBVU.cl")
    // genKernel(harrisBufferedVecAligned(3, vWidth), "harris", "gen/harrisBVA.cl")
    // genKernel(harrisSplitPar(strip, vWidth, harrisBufferedVecUnaligned(3, vWidth)),
    //   "harris", "gen/harrisB3VUSP.cl")
    // genKernel(harrisSplitPar(strip, vWidth, harrisBufferedVecUnaligned(4, vWidth)),
    //   "harris", "gen/harrisB4VUSP.cl")
    // genKernel(rewrite.harrisBufferedVecUnalignedSplitPar(vWidth, strip)(highLevel),
    //   "harris", "gen/harrisB3VUSPRW.cl")
    //genKernel(harrisSplitPar(strip, vWidth, harrisBufferedVecAligned(3, vWidth)),
    //  "harris", "gen/harrisB3VASP.cl")
    //genKernel(harrisSplitPar(strip, vWidth, harrisBufferedVecAligned(4, vWidth)),
    //  "harris", "gen/harrisB4VASP.cl")
    // Circular-buffer variant.
    genKernel(rewrite.harrisBufferedVecAlignedSplitPar(vWidth, strip)(highLevel),
      "harris", s"gen/vec${vWidth}/cbuf.cl")
    //genKernel(harrisSplitPar(strip, vWidth, harrisBufferedRegRotVecAligned(3, vWidth)),
    //  "harris", "gen/harrisB3VASPRR.cl")
    //genKernel(harrisSplitPar(strip, vWidth, harrisBufferedRegRotVecAligned(4, vWidth)),
    //  "harris", "gen/harrisB4VASPRR.cl")
    // Circular-buffer + register-rotation variant.
    genKernel(rewrite.harrisBufferedRegRotVecAlignedSplitPar(vWidth, strip)(highLevel),
      "harris", s"gen/vec${vWidth}/cbuf+rrot.cl")
  }
}
|
fehu/opentracing-scala
|
scala/src/main/scala/com/github/fehu/opentracing/internal/TracedTFunctions.scala
|
<reponame>fehu/opentracing-scala
package com.github.fehu.opentracing.internal
import cats.data.StateT
import cats.effect.{ IO, LiftIO, Sync }
import cats.syntax.functor._
import cats.{ Applicative, ApplicativeError, Functor, ~> }
import io.opentracing.propagation.Format
import com.github.fehu.opentracing.Traced
import com.github.fehu.opentracing.transformer.TracedT
import com.github.fehu.opentracing.transformer.TracedT.AutoConvert._
/** Constructors and natural transformations for [[TracedT]], generic in the
  * effect `F[_]`. Mirrors [[TracedTFunctionsForSync]], which fixes `F` once.
  */
private[opentracing] trait TracedTFunctions {
  def pure[F[_]: Applicative, A](a: A): TracedT[F, A] = TracedT(StateT.pure(a))
  def liftF[F[_]: Applicative, A](fa: F[A]): TracedT[F, A] = TracedT(StateT.liftF(fa))
  def liftIO[F[_]: Applicative: LiftIO, A](io: IO[A]): TracedT[F, A] = TracedT(StateT.liftF(LiftIO[F].liftIO(io)))
  def raiseError[F[_], A](err: Throwable)(implicit A: ApplicativeError[F, Throwable]): TracedT[F, A] = liftF(A.raiseError[A](err))

  // Lazy constructors: the argument is only evaluated when the effect runs.
  def defer[F[_]: Sync, A](tfa: => TracedT[F, A]): TracedT[F, A] = traced.defer(tfa)
  def deferIO[F[_]: Sync: LiftIO, A](io: => IO[A]): TracedT[F, A] = defer(liftIO(io))
  def delay[F[_]: Sync, A](a: => A): TracedT[F, A] = defer(pure(a))

  def currentSpan[F[_]: Sync]: Traced.SpanInterface[TracedT[F, *]] = traced.currentSpan

  def extractContext[F[_]: Sync, C0 <: C, C](carrier: C0, format: Format[C]): TracedT[F, Option[C0]] =
    traced.extractContext(carrier, format)

  // Natural transformations to/from TracedT[F, *].
  def liftK[F[_]: Applicative]: F ~> TracedT[F, *] = λ[F ~> TracedT[F, *]](liftF(_))
  def mapK[F[_]: Functor, G[_]](fk: F ~> G): TracedT[F, *] ~> TracedT[G, *] = λ[TracedT[F, *] ~> TracedT[G, *]](_.stateT.mapK(fk))
  // runK discards the final tracing state and keeps only the result value.
  def runK[F[_]: Sync](params: Traced.RunParams): TracedT[F, *] ~> F = λ[TracedT[F, *] ~> F](_.stateT.run(toState(params)).map(_._2))
  def traceK[F[_]: Sync](operation: String, tags: Traced.Tag*): F ~> TracedT[F, *] =
    λ[F ~> TracedT[F, *]](fa => traced.apply(operation, tags: _*)(liftF(fa)))

  private def traced[F[_]: Sync]: Traced[TracedT[F, *]] = TracedT.tracedTTracedInstance[F]

  // Converts the public run parameters into the internal State carried by StateT.
  private def toState[F[_]: Functor](params: Traced.RunParams) =
    State(params.tracer, params.hooks, params.activeSpan.maybe, params.logError)
}
/** [[TracedT]] constructors specialised to one fixed `Sync` effect `F`; see
  * [[TracedTFunctions]] for the version generic in `F`.
  */
abstract class TracedTFunctionsForSync[F[_]: Sync] {
  def pure[A](a: A): TracedT[F, A] = TracedT(StateT.pure(a))
  lazy val unit: TracedT[F, Unit] = pure(())
  def liftF[A](fa: F[A]): TracedT[F, A] = TracedT(StateT.liftF(fa))
  def liftIO[A](io: IO[A])(implicit lift: LiftIO[F]): TracedT[F, A] = TracedT(StateT.liftF(lift.liftIO(io)))
  def raiseError[A](err: Throwable): TracedT[F, A] = liftF(Sync[F].raiseError[A](err))

  // Lazy constructors: the argument is only evaluated when the effect runs.
  def defer[A](tfa: => TracedT[F, A]): TracedT[F, A] = traced.defer(tfa)
  def deferIO[A](io: => IO[A])(implicit lift: LiftIO[F]): TracedT[F, A] = defer(liftIO(io))
  def delay[A](a: => A): TracedT[F, A] = defer(pure(a))

  def currentSpan: Traced.SpanInterface[TracedT[F, *]] = traced.currentSpan

  def extractContext[C0 <: C, C](carrier: C0, format: Format[C]): TracedT[F, Option[C0]] =
    traced.extractContext(carrier, format)

  // Natural transformations to/from TracedT[F, *].
  def liftK: F ~> TracedT[F, *] = λ[F ~> TracedT[F, *]](liftF(_))
  def mapK[G[_]](fk: F ~> G): TracedT[F, *] ~> TracedT[G, *] = λ[TracedT[F, *] ~> TracedT[G, *]](_.stateT.mapK(fk))
  def runK(params: Traced.RunParams): TracedT[F, *] ~> F = TracedT.runK(params)
  def traceK(operation: String, tags: Traced.Tag*): F ~> TracedT[F, *] =
    λ[F ~> TracedT[F, *]](fa => traced(operation, tags: _*)(liftF(fa)))

  // The Traced instance for TracedT[F, *], exposed implicitly to users.
  implicit lazy val traced: Traced[TracedT[F, *]] = TracedT.tracedTTracedInstance[F]
}
|
fehu/opentracing-scala
|
akka/src/main/scala/akka/fehu/MessageInterceptingActor.scala
|
package akka.fehu
import scala.util.control.NonFatal
import akka.actor.Actor
/** Actor mixin that lets subclasses transform each incoming message before it
  * is processed and observe the outcome of processing afterwards.
  */
trait MessageInterceptingActor extends Actor {

  /** Runs `interceptIncoming` on the message, delegates to the normal receive
    * pipeline, and then invokes `afterReceive` exactly once with the error (if
    * any).
    *
    * Fix: previously a caught error triggered `afterReceive(Some(err))` and
    * then fell through to `afterReceive(None)` as well, so the callback fired
    * twice for a single failed message. Non-fatal errors are still swallowed,
    * as before.
    */
  override protected[akka] def aroundReceive(receive: Receive, msg: Any): Unit = {
    val outcome: Option[Throwable] =
      try {
        super.aroundReceive(receive, interceptIncoming(msg))
        None
      } catch {
        case NonFatal(err) => Some(err)
      }
    // Called outside the try so an error thrown by the callback itself is not
    // misreported as a processing failure (matching the original structure).
    afterReceive(outcome)
  }

  /** Transforms (or just inspects) an incoming message before processing. */
  protected def interceptIncoming(message: Any): Any

  /** Invoked once per message; `maybeError` carries the non-fatal error, if any. */
  protected def afterReceive(maybeError: Option[Throwable]): Unit
}
|
fehu/opentracing-scala
|
scala/src/main/scala/com/github/fehu/opentracing/util/FunctionK2.scala
|
<filename>scala/src/main/scala/com/github/fehu/opentracing/util/FunctionK2.scala
package com.github.fehu.opentracing.util
trait FunctionK2[F[*[_]], G[*[_]]] extends Serializable { self =>
def apply[A[_]](fa: F[A]): G[A]
def compose[E[*[_]]](f: FunctionK2[E, F]): FunctionK2[E, G] =
new FunctionK2[E, G] {
def apply[A[_]](fa: E[A]): G[A] = self(f(fa))
}
def andThen[H[*[_]]](f: FunctionK2[G, H]): FunctionK2[F, H] = f.compose(self)
}
object FunctionK2 {
type ~~>[F[*[_]], G[*[_]]] = FunctionK2[F, G]
}
|
fehu/opentracing-scala
|
scala/src/main/scala/com/github/fehu/opentracing/propagation/TextMapPropagation.scala
|
package com.github.fehu.opentracing.propagation
import java.util.{ Iterator => JIterator, Map => JMap }
import scala.collection.mutable
import scala.collection.JavaConverters._
import io.opentracing.propagation.{ Format, TextMap }
/** [[Propagation]] backed by an OpenTracing `TextMap` carrier; entries live in
  * a sorted mutable map, so iteration order is deterministic (sorted by key).
  */
final class TextMapPropagation private (map: mutable.SortedMap[String, String]) extends Propagation {
  type Underlying = TextMap

  // Adapter exposing the backing map through the OpenTracing TextMap interface;
  // writes via `put` mutate the same underlying map that `repr` snapshots.
  val underlying: TextMap = new TextMap {
    def iterator(): JIterator[JMap.Entry[String, String]] = map.asJava.entrySet().iterator()
    def put(key: String, value: String): Unit = map.update(key, value)
  }

  def format: Format[TextMap] = TextMapPropagation.format

  type Repr = Map[String, String]
  // Immutable snapshot of the current contents.
  def repr: Map[String, String] = map.toMap
}
/** Factories for [[TextMapPropagation]] carriers. */
object TextMapPropagation extends PropagationCompanion[TextMapPropagation] {
  /** An initially empty carrier. */
  def apply(): TextMapPropagation = new TextMapPropagation(mutable.SortedMap.empty)

  /** A carrier pre-populated with the entries of `repr`. */
  def apply(repr: Map[String, String]): TextMapPropagation = {
    val backing = mutable.SortedMap.empty[String, String]
    backing ++= repr
    new TextMapPropagation(backing)
  }

  def format: Format[TextMap] = Format.Builtin.TEXT_MAP
}
|
fehu/opentracing-scala
|
scala/src/main/scala/com/github/fehu/opentracing/propagation/BinaryPropagation.scala
|
package com.github.fehu.opentracing.propagation
import java.nio.ByteBuffer
import io.opentracing.propagation.{ Binary, Format }
/** [[Propagation]] backed by an OpenTracing `Binary` carrier over a ByteBuffer. */
final class BinaryPropagation private (private var buff: ByteBuffer) extends Propagation {
  type Underlying = Binary

  val underlying: Binary = new Binary {
    // Extraction reads whatever buffer is current.
    def extractionBuffer(): ByteBuffer = buff
    // Injection replaces the buffer with a fresh one of the requested size
    // (the previous buffer is discarded).
    def injectionBuffer(length: Int): ByteBuffer = {
      buff = ByteBuffer.allocate(length)
      buff
    }
  }

  def format: Format[Binary] = BinaryPropagation.format

  type Repr = Array[Byte]
  // NOTE(review): returns the buffer's backing array; for a carrier built via
  // `wrap` this aliases the caller's array rather than copying.
  def repr: Array[Byte] = buff.array()
}
/** Factory for [[BinaryPropagation]] carriers. */
object BinaryPropagation extends PropagationCompanion[BinaryPropagation] {
// Empty carrier: injection later replaces the zero-length buffer with a sized one.
def apply(): BinaryPropagation = new BinaryPropagation(ByteBuffer.allocate(0))
// Wraps the given bytes without copying.
def apply(bytes: Array[Byte]): BinaryPropagation = new BinaryPropagation(ByteBuffer.wrap(bytes))
val format: Format[Binary] = Format.Builtin.BINARY
}
|
fehu/opentracing-scala
|
scala/src/main/scala/com/github/fehu/opentracing/util/ErrorLogger.scala
|
<filename>scala/src/main/scala/com/github/fehu/opentracing/util/ErrorLogger.scala
package com.github.fehu.opentracing.util
import cats.{ Applicative, Defer }
/** Strategy for reporting internal tracing errors as a suspended effect in any `F`. */
trait ErrorLogger {
def apply[F[_]: Applicative: Defer](message: String, error: Throwable): F[Unit]
}
object ErrorLogger {
  /** Logs to standard output. Evaluation is suspended through `Defer`, so the
    * `println` side effect happens on each run of the returned `F[Unit]`.
    */
  lazy val stdout: ErrorLogger = new ErrorLogger {
    def apply[F[_]: Applicative: Defer](message: String, error: Throwable): F[Unit] = {
      val A = Applicative[F]
      val D = Defer[F]
      D.defer {
        A.pure(println(s"[ERROR] $message\nCaused by: $error"))
      }
    }
  }
}
|
fehu/opentracing-scala
|
scala/src/main/scala/com/github/fehu/opentracing/transformer/TracedT.scala
|
package com.github.fehu.opentracing.transformer
import cats.{ FlatMap, Functor, ~> }
import cats.data.StateT
import cats.effect.{ Effect, IO, LiftIO }
import com.github.fehu.opentracing.internal.{ State, TracedTFunctions, TracedTFunctionsForSync, TracedTInstances }
/** Monad transformer carrying tracing `State` through an underlying `F`,
  * implemented as a thin `AnyVal` wrapper over `StateT[F, State, *]`.
  */
final case class TracedT[F[_], A](stateT: StateT[F, State, A]) extends AnyVal {
// Base combinator: apply `f` to the wrapped StateT and re-wrap the result.
def transform[G[_], B](f: StateT[F, State, A] => StateT[G, State, B]): TracedT[G, B] = copy(f(stateT))
def map[B](f: A => B)(implicit F: Functor[F]): TracedT[F, B] = transform(_.map(f))
def flatMap[B](f: A => TracedT[F, B])(implicit F: FlatMap[F]): TracedT[F, B] = transform(_.flatMap(f andThen (_.stateT)))
def flatMapF[B](f: A => F[B])(implicit F: FlatMap[F]): TracedT[F, B] = transform(_.flatMapF(f))
def mapK[G[_]](fk: F ~> G)(implicit F: Functor[F]): TracedT[G, A] = transform(_.mapK(fk))
}
object TracedT extends TracedTInstances with TracedTFunctions {
/** The concrete representation a `TracedT` wraps. */
type Underlying[F[_], A] = StateT[F, State, A]
// Internal zero-cost conversions letting implementation code treat TracedT and
// its underlying StateT interchangeably.
private[opentracing] object AutoConvert {
import scala.language.implicitConversions
@inline implicit def autoToStateT[F[_], A](tt: TracedT[F, A]): Underlying[F, A] = tt.stateT
@inline implicit def autoFromStateT[F[_], A](st: Underlying[F, A]): TracedT[F, A] = new TracedT(st)
}
}
/** Convenience conversions for `TracedT` specialized to `cats.effect.IO`. */
object TracedIO extends TracedTFunctionsForSync[IO] {
// Lift any effect F into TracedIO by first converting it to IO.
def liftEffectK[F[_]: Effect]: F ~> TracedIO = liftK compose Effect.toIOK
// Widen TracedIO into TracedT over any F that can lift IO.
def mapIOK[F[_]: LiftIO]: TracedIO ~> TracedT[F, *] = mapK(LiftIO.liftK)
// Narrow TracedT[F, *] back to TracedIO by running F as IO.
def comapIOK[F[_]: Effect]: TracedT[F, *] ~> TracedIO = λ[TracedT[F, *] ~> TracedIO](t => TracedT(t.stateT.mapK(Effect.toIOK)))
}
|
fehu/opentracing-scala
|
scala/src/test/scala/com/github/fehu/opentracing/propagation/PropagationSpec.scala
|
<reponame>fehu/opentracing-scala
package com.github.fehu.opentracing.propagation
import cats.effect.Effect
import cats.effect.syntax.effect._
import cats.syntax.flatMap._
import cats.syntax.functor._
import org.scalatest.Ignore
import org.scalatest.freespec.AnyFreeSpec
import com.github.fehu.opentracing.syntax._
import com.github.fehu.opentracing.{ Spec, Traced }
// Round-trip tests for span-context propagation carriers. `@Ignore`d here:
// concrete subclasses bind `F` and provide the `Effect` instance.
@Ignore
abstract class PropagationSpec[F[_]: Traced] extends AnyFreeSpec with Spec {
implicit val effect: Effect[F]
"Serialize and deserialize span context through `TextMap` built-in format" in {
for {
// Start a span "A" so there is a context to extract.
_ <- Effect[F].pure(()).trace("A")
carrier0 <- Traced.extractContext[F].to[TextMapPropagation]
// Serialize to an immutable map, then rebuild a fresh carrier from it.
repr = carrier0.fold(Map.empty[String, String])(_.repr)
carrier1 = TextMapPropagation(repr)
_ <- Effect[F].pure(()).injectPropagated(carrier1)("B")
} yield {
// "B" must be recorded as a child of "A" (parentId = A's spanId).
finishedSpans() shouldBe Seq(
TestedSpan(traceId = 1, spanId = 2, parentId = 0, operationName = "A"),
TestedSpan(traceId = 1, spanId = 3, parentId = 2, operationName = "B")
)
}
}.toIO.unsafeRunSync()
"Serialize and deserialize span context through `Binary` built-in format" in {
cancel("MockTracer only supports `TextMap` format.")
}
}
|
fehu/opentracing-scala
|
scala/src/main/scala/com/github/fehu/opentracing/transformer/package.scala
|
package com.github.fehu.opentracing
import cats.effect.IO
package object transformer {
/** `TracedT` specialized to `cats.effect.IO`. */
type TracedIO[A] = TracedT[IO, A]
}
|
fehu/opentracing-scala
|
build.sbt
|
<gh_stars>1-10
// The scalac compiler plugin has its own version (see `compilerPlugin` below).
val scala212 = "2.12.14"
val scala213 = "2.13.6"

ThisBuild / crossScalaVersions := List(scala212, scala213)
ThisBuild / scalaVersion := scala213
ThisBuild / version := "0.6.0"
ThisBuild / organization := "com.github.fehu"

inThisBuild(Seq(
  addCompilerPlugin(Dependencies.`kind-projector`),
  addCompilerPlugin(Dependencies.`monadic-for`),
  Compile / scalacOptions ++= Seq("-feature", "-deprecation", "-language:higherKinds"),
  Test / parallelExecution := false
))

// Aggregating root project; never published.
lazy val root = (project in file("."))
  .settings(
    name := "opentracing",
    // Fixed: was `skip in publish := true` — deprecated sbt 0.13-style scoping,
    // inconsistent with the slash syntax used everywhere else in this build.
    publish / skip := true
  )
  .aggregate(scala, akka, fs2)

lazy val scala = (project in file("scala"))
  .settings(
    name := "opentracing-scala",
    libraryDependencies ++= Seq(
      Dependencies.`opentracing-api`,
      Dependencies.`cats-core`,
      Dependencies.`cats-effect`
    ),
    libraryDependencies ++= testDependencies,
    // `-Ypartial-unification` only exists on 2.12 (it is the default on 2.13).
    Compile / scalacOptions ++= {
      CrossVersion.partialVersion(scalaVersion.value) match {
        case Some((2, 12)) => List("-Ypartial-unification")
        case _             => Nil
      }
    }
  )

lazy val akka = (project in file("akka"))
  .settings(
    name := "opentracing-akka",
    libraryDependencies += Dependencies.`akka-actor`,
    libraryDependencies ++= testDependencies
  )
  .dependsOn(scala % "compile->compile;test->test")

lazy val fs2 = (project in file("fs2"))
  .settings(
    name := "opentracing-fs2",
    libraryDependencies += Dependencies.`fs2-core`
  )
  .dependsOn(scala)

lazy val testDependencies = Seq(
  Dependencies.scalatest % Test,
  Dependencies.`opentracing-mock` % Test
)

// Has its own configuration file (and own version).
lazy val compilerPlugin = project in file("compiler-plugin") settings (
  crossScalaVersions := List(scala212, scala213)
)

addCommandAlias("fullDependencyUpdates", ";dependencyUpdates; reload plugins; dependencyUpdates; reload return")
|
fehu/opentracing-scala
|
scala/src/test/scala/com/github/fehu/opentracing/TraceSpec.scala
|
package com.github.fehu.opentracing
import scala.concurrent.duration._
import org.scalatest.freespec.AnyFreeSpec
import cats.effect.{ ContextShift, Effect, Timer }
import cats.effect.syntax.effect._
import cats.syntax.apply._
import cats.syntax.flatMap._
import cats.syntax.functor._
import org.scalatest.Ignore
import com.github.fehu.opentracing.syntax._
import com.github.fehu.opentracing.util.ErrorLogger
// Core tracing behavior tests. `@Ignore`d here: concrete subclasses bind `F`
// and supply Effect/ContextShift/Timer. Expected span/trace ids follow the
// MockTracer's sequential id assignment, so test order matters.
@Ignore
abstract class TraceSpec[F[_]: Traced] extends AnyFreeSpec with Spec {
implicit val effect: Effect[F]
implicit val cs: ContextShift[F]
implicit val timer: Timer[F]
implicit lazy val tracedRunParams: Traced.RunParams =
Traced.RunParams(mockTracer, Traced.Hooks(), Traced.ActiveSpan.empty, ErrorLogger.stdout)
"Trace nested defer / delay" in {
Effect[F].defer {
Effect[F].defer {
Effect[F].defer {
Effect[F].delay {
()
}.trace("last", "depth" -> 3)
}.trace("inner", "depth" -> 2)
}.trace("middle", "depth" -> 1)
}.trace("outer", "depth" -> 0)
.map { _ =>
// Spans finish innermost-first; each nested span is a child of the outer one.
finishedSpans() shouldBe Seq(
TestedSpan(traceId = 1, spanId = 5, parentId = 4, operationName = "last", tags = Map("depth" -> Int.box(3))),
TestedSpan(traceId = 1, spanId = 4, parentId = 3, operationName = "inner", tags = Map("depth" -> Int.box(2))),
TestedSpan(traceId = 1, spanId = 3, parentId = 2, operationName = "middle", tags = Map("depth" -> Int.box(1))),
TestedSpan(traceId = 1, spanId = 2, parentId = 0, operationName = "outer", tags = Map("depth" -> Int.box(0)))
)
}.toIO.unsafeRunSync()
}
"Trace nested map / flatMap" in {
for {
_ <- Effect[F].pure(()).trace("one")
_ <- Effect[F].pure(()).trace("two")
_ <- Effect[F].pure(()).trace("three")
_ <- Effect[F].pure(()).trace("four")
_ <- Effect[F].pure(()).trace("five")
} yield {
// Sequential spans chain: each becomes the child of the previous one.
finishedSpans() shouldBe Seq(
TestedSpan(traceId = 6, spanId = 7, parentId = 0, operationName = "one"),
TestedSpan(traceId = 6, spanId = 8, parentId = 7, operationName = "two"),
TestedSpan(traceId = 6, spanId = 9, parentId = 8, operationName = "three"),
TestedSpan(traceId = 6, spanId = 10, parentId = 9, operationName = "four"),
TestedSpan(traceId = 6, spanId = 11, parentId = 10, operationName = "five")
)
}
}.toIO.unsafeRunSync()
"Trace when context is shifted and timer is used" in {
// The active span must survive both Timer sleeps and a ContextShift.shift.
val f1 = Timer[F].sleep(500.millis) *> Effect[F].delay(()).trace("f1")
val f2 = (Timer[F].sleep(500.millis) *> Effect[F].delay(())).trace("f2")
val f = (f1 *> ContextShift[F].shift *> f2).trace("f")
f.map { _ =>
finishedSpans() shouldBe Seq(
TestedSpan(traceId = 12, spanId = 14, parentId = 13, operationName = "f1"),
TestedSpan(traceId = 12, spanId = 15, parentId = 14, operationName = "f2"),
TestedSpan(traceId = 12, spanId = 13, parentId = 0, operationName = "f")
)
}.toIO.unsafeRunSync()
}
}
|
fehu/opentracing-scala
|
compiler-plugin/build.sbt
|
name := "opentracing-jaeger-scalac-implicits"
version := "0.1.3"

libraryDependencies ++= Seq(
  Dependencies.`scala-compiler`.value,
  Dependencies.`jaeger-client`
  // [Assembly] slf4j implementation is not included despite the warnings that appear on plugin usage
)

// Assembly - dependencies aren't provided by sbt for compiler plugins, so the
// plugin jar must bundle them (fat jar), minus the Scala library itself.
enablePlugins(AssemblyPlugin)
// Fixed: replaced deprecated `x in y` scoping with sbt 1.x slash syntax,
// consistent with the root build definition.
assembly / assemblyOption := (assembly / assemblyOption).value.copy(includeScala = false)
// Publish the assembled fat jar as this module's binary artifact.
Compile / packageBin := (Compile / assembly).value
|
fehu/opentracing-scala
|
scala/src/main/scala/com/github/fehu/opentracing/internal/syntax/LowPrioritySyntax.scala
|
<gh_stars>1-10
package com.github.fehu.opentracing.internal.syntax
import io.opentracing.SpanContext
import io.opentracing.propagation.Format
import com.github.fehu.opentracing.{ Traced, Traced2 }
/** Lower-priority syntax: `trace`/`inject` ops for a plain `G[A]` that must
  * first be lifted into the traced transformer `F[G, *]` via `Traced2`.
  * Kept low priority so the direct `TracedOps` syntax wins when both apply.
  */
protected[opentracing] trait LowPrioritySyntax {
final implicit class Traced2WrapOps[G[_], A](ga: G[A]) {
def trace[F[_[*], *]](operation: String, tags: Traced.Tag*)(implicit traced: Traced2[F, G]): F[G, A] =
traced(operation, tags: _*)(traced.lift(ga))
def inject[F[_[*], *]](context: SpanContext)(operation: String, tags: Traced.Tag*)(implicit traced: Traced2[F, G]): F[G, A] =
traced.injectContext(context)(operation, tags: _*)(traced.lift(ga))
// With no context the value is merely lifted — no span is created.
def inject[F[_[*], *]](context: Option[SpanContext])(operation: String, tags: Traced.Tag*)(implicit traced: Traced2[F, G]): F[G, A] =
context.map(inject(_)(operation, tags: _*)).getOrElse(traced.lift(ga))
def injectFrom[F[_[*], *], C](format: Format[C])(carrier: C)(operation: String, tags: Traced.Tag*)(implicit traced: Traced2[F, G]): F[G, A] =
traced.injectContextFrom(format)(carrier)(operation, tags: _*)(traced.lift(ga))
// Optional-carrier variant: absent carrier means lift without tracing.
def injectFrom[F[_[*], *], C](carrier: Option[C], format: Format[C])(operation: String, tags: Traced.Tag*)(implicit traced: Traced2[F, G]): F[G, A] =
carrier.map(injectFrom(format)(_)(operation, tags: _*)).getOrElse(traced.lift(ga))
}
}
|
fehu/opentracing-scala
|
scala/src/main/scala/com/github/fehu/opentracing/internal/Stubs.scala
|
package com.github.fehu.opentracing.internal
import cats.effect.Resource
import cats.{Applicative, Defer, ~>}
import io.opentracing.SpanContext
import io.opentracing.propagation.Format
import com.github.fehu.opentracing.Traced
/** No-op `Traced` instance: tracing operations degrade to plain
  * `Applicative`/`Defer` behavior and never touch a tracer.
  */
protected[opentracing] class TracedStub[F[_]](implicit A: Applicative[F], D: Defer[F]) extends TracedInterfaceStub[F] with Traced[F] {
def pure[A](a: A): F[A] = A.pure(a)
def defer[A](fa: => F[A]): F[A] = D.defer(fa)
// A span interface whose mutations are unit and whose queries yield None.
def currentSpan: Traced.SpanInterface[F] = new SpanInterfaceStub(A.unit, λ[cats.Id ~> F](A.pure(_)))
def forceCurrentSpan(active: Traced.ActiveSpan): F[Traced.SpanInterface[F]] = A.pure(currentSpan)
def recoverCurrentSpan(active: Traced.ActiveSpan): F[Traced.SpanInterface[F]] = A.pure(currentSpan)
def injectContext(context: SpanContext): Traced.Interface[F] = new TracedInterfaceStub[F]
def injectContextFrom[C](format: Format[C])(carrier: C): Traced.Interface[F] = new TracedInterfaceStub[F]
def extractContext[C0 <: C, C](carrier: C0, format: Format[C]): F[Option[C0]] = A.pure(None)
}
/** No-op `Traced.Interface`: runs the wrapped effect untouched, yields an empty span. */
protected[opentracing] class TracedInterfaceStub[F[_]](implicit A: Applicative[F]) extends Traced.Interface[F] {
def apply[A](op: String, tags: Traced.Tag*)(fa: F[A]): F[A] = fa
def spanResource(op: String, tags: Traced.Tag*): Resource[F, Traced.ActiveSpan] = Resource.pure(Traced.ActiveSpan.empty)
def withParent(span: Traced.ActiveSpan): Traced.Interface[F] = this
def withParent(span: SpanContext): Traced.Interface[F] = this
def withoutParent: Traced.Interface[F] = this
}
/** No-op `Traced.SpanInterface`: every mutation is the given `unit`, every query
  * lifts `None` via the given `pure` transformation.
  */
protected[opentracing] class SpanInterfaceStub[F[_]](unit: F[Unit], pure: cats.Id ~> F) extends Traced.SpanInterface[F] {
def context: F[Option[SpanContext]] = pure(None)
def setOperation(op: String): F[Unit] = unit
def setTag(tag: Traced.Tag): F[Unit] = unit
def setTags(tags: Traced.Tag*): F[Unit] = unit
def log(fields: (String, Any)*): F[Unit] = unit
def log(event: String): F[Unit] = unit
def setBaggageItem(key: String, value: String): F[Unit] = unit
def getBaggageItem(key: String): F[Option[String]] = pure(None)
def noop: F[Unit] = unit
// Translate both channels into G to stay a stub in the new effect.
def mapK[G[_]](f: F ~> G): Traced.SpanInterface[G] = new SpanInterfaceStub[G](f(unit), f compose pure)
}
|
fehu/opentracing-scala
|
scala/src/main/scala/com/github/fehu/opentracing/syntax/package.scala
|
package com.github.fehu.opentracing
import scala.language.existentials
import cats.{ Applicative, Defer, FlatMap, Functor, Monad, ~> }
import cats.effect.{ Resource, Sync }
import cats.syntax.flatMap._
import cats.syntax.functor._
import io.opentracing.{ SpanContext, Tracer }
import io.opentracing.propagation.Format
import com.github.fehu.opentracing.internal.syntax.LowPrioritySyntax
import com.github.fehu.opentracing.propagation.{ Propagation, PropagationCompanion }
import com.github.fehu.opentracing.util.ErrorLogger
/** User-facing syntax: extension methods for tracing effects, plus the
  * `TracedFunctions` helpers mirrored onto the `Traced` companion.
  */
package object syntax extends LowPrioritySyntax {
/** Tracing ops for an effect `F[A]` that already has a [[Traced]] instance. */
final implicit class TracedOps[F[_], A](fa: F[A])(implicit traced: Traced[F]) {
def trace(operation: String, tags: Traced.Tag*): F[A] = traced(operation, tags: _*)(fa)
def inject(context: SpanContext)(operation: String, tags: Traced.Tag*): F[A] =
traced.injectContext(context)(operation, tags: _*)(fa)
// Absent context: the effect runs untraced.
def inject(context: Option[SpanContext])(operation: String, tags: Traced.Tag*): F[A] =
context.map(inject(_)(operation, tags: _*)).getOrElse(fa)
def injectFrom[C](format: Format[C])(carrier: C)(operation: String, tags: Traced.Tag*): F[A] =
traced.injectContextFrom(format)(carrier)(operation, tags: _*)(fa)
def injectFromOpt[C](format: Format[C])(carrier: Option[C])(operation: String, tags: Traced.Tag*): F[A] =
carrier.map(injectFrom(format)(_)(operation, tags: _*)).getOrElse(fa)
// Propagation carriers bundle their own format and underlying representation.
def injectPropagated[C <: Propagation](carrier: C)(operation: String, tags: Traced.Tag*): F[A] =
traced.injectContextFrom(carrier.format)(carrier.underlying)(operation, tags: _*)(fa)
def injectPropagatedOpt[C <: Propagation](carrier: Option[C])(operation: String, tags: Traced.Tag*): F[A] =
carrier.map(c => injectPropagated(c)(operation, tags: _*)).getOrElse(fa)
}
sealed trait TracedFunctions {
def currentSpan[F[_]](implicit traced: Traced[F]): Traced.SpanInterface[F] = traced.currentSpan
// The cached `*Instance` singletons below are cast to the requested F; this is
// safe because the helper classes never store an F-typed value.
def extractContext[F[_]]: TracedFunctions.Extract[F] = TracedFunctions.extractInstance.asInstanceOf[TracedFunctions.Extract[F]]
def mapK[T[_[*], *], F[_], G[_]: Functor](f: F ~> G)(implicit traced: Traced2[T, F]): T[F, *] ~> T[G, *] = traced.mapK(f)
def trace[F[_]](operation: String, tags: Traced.Tag*): TracedFunctions.Trace[F] = new TracedFunctions.Trace(operation, tags)
def traceK[F[_]](operation: String, tags: Traced.Tag*)(implicit traced: Traced[F]): F ~> F =
λ[F ~> F](f => traced(operation, tags: _*)(f))
def pure[T[_[*], *], F[_]]: TracedFunctions.Pure[F] = TracedFunctions.pureInstance.asInstanceOf[TracedFunctions.Pure[F]]
def defer[F[_]]: TracedFunctions.Defer[F] = TracedFunctions.deferInstance.asInstanceOf[TracedFunctions.Defer[F]]
def delay[F[_]]: TracedFunctions.Delay[F] = TracedFunctions.delayInstance.asInstanceOf[TracedFunctions.Delay[F]]
def liftK[T[_[*], *], F[_]: Applicative](implicit traced: Traced2[T, F]): F ~> T[F, *] =
λ[F ~> T[F, *]](f => traced.lift(f))
def runK[T[_[*], *], F[_]: FlatMap](params: Traced.RunParams)(implicit traced: Traced2[T, F]): T[F, *] ~> F =
λ[T[F, *] ~> F](t => traced.run(t, params))
}
object TracedFunctions extends TracedFunctions {
// Partially-applied helper: fixes `F` first, then takes the carrier.
final class Extract[F[_]] private[syntax] () {
def apply[C0 <: C, C](carrier: C0, format: Format[C])(implicit traced: Traced[F]): F[Option[C0]] =
traced.extractContext(carrier, format)
// Builds a fresh carrier of type C and fills it from the current context.
def to[C <: Propagation](implicit companion: PropagationCompanion[C], traced: Traced[F], sync: Sync[F]): F[Option[C]] =
for {
carrier <- sync.delay { companion() }
uOpt <- apply(carrier.underlying, companion.format)
} yield uOpt.as(carrier)
}
final class Trace[F[_]] private[syntax] (operation: String, tags: Seq[Traced.Tag]) {
def apply[A](a: => A)(implicit traced: Traced[F]): F[A] = traced(operation, tags: _*)(traced.defer(traced.pure(a)))
}
final class Pure[F[_]] private[syntax] () {
def apply[A](a: A)(implicit traced: Traced[F]): F[A] = traced.pure(a)
}
final class Defer[F[_]] private[syntax] () {
def apply[A](fa: => F[A])(implicit traced: Traced[F]): F[A] = traced.defer(fa)
}
final class Delay[F[_]] private[syntax] () {
def apply[A](a: => A)(implicit traced: Traced[F]): F[A] = traced.defer(traced.pure(a))
}
protected lazy val extractInstance = new TracedFunctions.Extract[cats.Id]
protected lazy val pureInstance = new TracedFunctions.Pure[cats.Id]
protected lazy val deferInstance = new TracedFunctions.Defer[cats.Id]
protected lazy val delayInstance = new TracedFunctions.Delay[cats.Id]
}
// Makes every TracedFunctions helper available as `Traced.xxx`.
final implicit class TracedObjOps(obj: Traced.type) extends TracedFunctions
final implicit class Traced2Ops[F[_[*], *], G[_], A](fa: F[G, A])(implicit traced: Traced2[F, G]) {
def runTracedP(params: Traced.RunParams): G[A] = traced.run(fa, params)
def runTraced(
tracer: Tracer,
hooks: Traced.Hooks = Traced.Hooks(),
parent: Traced.ActiveSpan = Traced.ActiveSpan.empty,
logError: ErrorLogger = ErrorLogger.stdout
): G[A] =
runTracedP(Traced.RunParams(tracer, hooks, parent, logError))
}
final implicit class TracedResourceOps[F[_]: Monad: Defer, A](resource: Resource[F, A])
(implicit t: Traced[F]) {
// Span wraps the resource: covers acquisition, use and release.
def traceLifetime(operation: String, tags: Traced.Tag*): Resource[F, A] =
t.spanResource(operation, tags: _*).flatMap(_ => resource)
// Span is acquired after the resource: covers only the use phase.
def traceUsage(operation: String, tags: Traced.Tag*): Resource[F, A] =
resource.flatTap(_ => t.spanResource(operation, tags: _*))
def traceUsage(trace: A => Traced.Operation.Builder): Resource[F, A] =
resource.flatTap { a => t.spanResource(trace(a))}
}
}
|
fehu/opentracing-scala
|
scala/src/main/scala/com/github/fehu/opentracing/internal/TracedTInstances.scala
|
<reponame>fehu/opentracing-scala
package com.github.fehu.opentracing.internal
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.{ FiniteDuration, TimeUnit }
import cats.{ Applicative, CommutativeApplicative, Monad, MonadError, Parallel, ~> }
import cats.data.{ IndexedStateT, StateT }
import cats.effect._
import cats.instances.list._
import cats.instances.option._
import cats.syntax.apply._
import cats.syntax.applicative._
import cats.syntax.applicativeError._
import cats.syntax.either._
import cats.syntax.flatMap._
import cats.syntax.foldable._
import cats.syntax.functor._
import cats.syntax.monadError._
import cats.syntax.traverse._
import io.opentracing.propagation.Format
import io.opentracing.{ Span, SpanContext }
import com.github.fehu.opentracing.{ Traced, Traced2 }
import com.github.fehu.opentracing.Traced.ActiveSpan
import com.github.fehu.opentracing.transformer.TracedT
import com.github.fehu.opentracing.transformer.TracedT.AutoConvert._
/**
* {{{
* ====================================================
* = Instances =
* =============
* +---------+ +----------+ +--------------+ +-------+
* | Traced2 | | Parallel | | ContextShift | | Timer |
* +---------+ +----------+ +--------------+ +-------+
*
* +------------------+
* | ConcurrentEffect |
* +------------------+
* ▲
* ============ | ================================
* +-------+-------+ = Lower 1 =
* | | ===========
* +--------+ +------------+
* | Effect | | Concurrent |
* +--------+ +------------+
* ▲ ▲
* | |
* +-------+-------+
* ============ | ================================
* | = Lower 2 =
* +-------+ ===========
* | Async |
* +-------+
* ▲
* ============ | ================================
* +-------+-------+ = Lower 3 =
* | | ===========
* +--------+ +--------+
* | Sync | | LiftIO |
* +--------+ +--------+
* ▲
* === | =========================================
* | = Lower 4 =
* +--------------+ ===========
* | MonadError |
* +--------------+
* ▲
* ====== | ======================================
* | = Lower 5 =
* +---------+ ===========
* | Monad |
* +---------+
* }}}
*/
private[opentracing] trait TracedTInstances extends TracedTLowPriorityInstances1 {
// Top-priority instances (see the diagram above): Traced2, Parallel,
// ContextShift, Timer and ConcurrentEffect.
implicit def tracedTTracedInstance[F[_]: Sync]: Traced2[TracedT, F] =
new TracedTTracedInstance
implicit def tracedTParallelInstance[F[_]](implicit par: Parallel[F]): Parallel.Aux[TracedT[F, *], TracedTParallelInstance.Par[par.F, *]] =
new TracedTParallelInstance[F, par.F]()(par)
/** Alias for [[ContextShift.deriveStateT]] */
implicit def tracedTContextShiftInstance[F[_]: ContextShift: Monad]: ContextShift[TracedT[F, *]] =
new ContextShift[TracedT[F, *]] {
private[this] val underlying = ContextShift.deriveStateT[F, State]
def shift: TracedT[F, Unit] = underlying.shift
def evalOn[A](ec: ExecutionContext)(fa: TracedT[F, A]): TracedT[F, A] = underlying.evalOn(ec)(fa.stateT)
}
/** Alias for [[Timer.deriveStateT]] */
implicit def tracedTTimerInstance[F[_]: Applicative: Timer]: Timer[TracedT[F, *]] =
new Timer[TracedT[F, *]] {
private[this] val underlying = Timer.deriveStateT[F, State]
def clock: Clock[TracedT[F, *]] = new Clock[TracedT[F, *]] {
private[this] val underlying = Clock.deriveStateT[F, State]
def realTime(unit: TimeUnit): TracedT[F, Long] = underlying.realTime(unit)
def monotonic(unit: TimeUnit): TracedT[F, Long] = underlying.monotonic(unit)
}
def sleep(duration: FiniteDuration): TracedT[F, Unit] = underlying.sleep(duration)
}
/** Requires implicit [[Traced.RunParams]] in scope. */
implicit def tracedTConcurrentEffectInstance[F[_]: ConcurrentEffect](implicit params: Traced.RunParams): ConcurrentEffect[TracedT[F, *]] =
new TracedTConcurrentEffectInstance
/** Requires implicit [[Traced.RunParams.Partial]] and [[ActiveSpan]] in scope. */
implicit def tracedTConcurrentEffectInstance2[F[_]: ConcurrentEffect]
(implicit partial: Traced.RunParams.Partial,
active: ActiveSpan
): ConcurrentEffect[TracedT[F, *]] =
tracedTConcurrentEffectInstance(ConcurrentEffect[F], partial(active))
}
/** Lower 1 (see diagram): Effect / Concurrent, shadowed by ConcurrentEffect above. */
private[opentracing] trait TracedTLowPriorityInstances1 extends TracedTLowPriorityInstances2 {
implicit def tracedTConcurrentInstance[F[_]: Concurrent]: Concurrent[TracedT[F, *]] = new TracedTConcurrentInstance
/** Requires implicit [[Traced.RunParams]] in scope. */
implicit def tracedTEffectInstance[F[_]: Effect](implicit params: Traced.RunParams): Effect[TracedT[F, *]] = new TracedTEffectInstance
/** Requires implicit [[Traced.RunParams.Partial]] and [[ActiveSpan]] in scope. */
implicit def tracedTEffectInstance2[F[_]: Effect](implicit partial: Traced.RunParams.Partial,
active: ActiveSpan
): Effect[TracedT[F, *]] =
tracedTEffectInstance(Effect[F], partial(active))
}
/** Lower 2 (see diagram): Async, shadowed by Effect/Concurrent above. */
private[opentracing] trait TracedTLowPriorityInstances2 extends TracedTLowPriorityInstances3 {
implicit def tracedTAsyncInstance[F[_]: Async]: Async[TracedT[F, *]] = new TracedTAsyncInstance
}
/** Lower 3 (see diagram): Sync and LiftIO, shadowed by Async above. */
private[opentracing] trait TracedTLowPriorityInstances3 extends TracedTLowPriorityInstances4 {
implicit def tracedTSyncInstance[F[_]: Sync]: Sync[TracedT[F, *]] = new TracedTSyncInstance
implicit def tracedTLiftIoInstance[F[_]: Applicative: LiftIO]: LiftIO[TracedT[F, *]] = new TracedTLiftIoInstance
}
/** Lower 4 (see diagram): MonadError, shadowed by Sync above. */
private[opentracing] trait TracedTLowPriorityInstances4 extends TracedTLowPriorityInstances5 {
implicit def tracedTMonadErrorInstance[F[_], E](implicit M: MonadError[F, E]): MonadError[TracedT[F, *], E] =
new TracedTMonadErrorProxy[F, E] { protected val MF: MonadError[F, E] = M }
}
/** Lower 5 (see diagram): plain Monad, the weakest instance offered. */
private[opentracing] trait TracedTLowPriorityInstances5 {
implicit def tracedTMonadInstance[F[_]](implicit M: Monad[F]): Monad[TracedT[F, *]] =
new TracedTMonadProxy[F] { protected val MF: Monad[F] = M }
}
/** The `Traced2` implementation for `TracedT`: tracing state (tracer, hooks,
  * current span, error logger) is threaded through the underlying `StateT`.
  */
private[opentracing] class TracedTTracedInstance[F[_]](implicit sync: Sync[F])
extends TracedTTracedInstance.TracedInterface[F] with Traced2[TracedT, F] { self =>
import sync.delay
import TracedTTracedInstance._
private def state = StateT.get[F, State]
def pure[A](a: A): TracedT[F, A] = TracedT(StateT.pure(a))
// `locally` is used as an identity to flatten the suspended TracedT.
def defer[A](fa: => TracedT[F, A]): TracedT[F, A] = StateT.liftF(delay(fa.stateT)).flatMap(locally)
def lift[A](fa: F[A]): TracedT[F, A] = TracedT(StateT.liftF(fa))
def currentSpan: Traced.SpanInterface[TracedT[F, *]] = new CurrentSpan[TracedT[F, *]](state.map(_.currentSpan))
// Unconditionally replaces the current span held in the state.
def forceCurrentSpan(active: ActiveSpan): TracedT[F, Traced.SpanInterface[TracedT[F, *]]] =
StateT.modify[F, State](_.copy(currentSpan = active.maybe))
.as(currentSpan)
// Sets `active` only when the state holds no current span.
def recoverCurrentSpan(active: ActiveSpan): TracedT[F, Traced.SpanInterface[TracedT[F, *]]] =
StateT.get[F, State].flatMap(
_.currentSpan
.map(_ => pure(currentSpan))
.getOrElse(forceCurrentSpan(active))
)
protected def spanParent: TracedT[F, Option[Either[Span, SpanContext]]] = TracedT(state.map(_.currentSpan.map(Left(_))))
def injectContext(context: SpanContext): Traced.Interface[TracedT[F, *]] = InterfaceProxy.pure(Some(Right(context)))
// Extraction failures are logged via the state's error logger and yield None
// (no parent) rather than failing the effect.
def injectContextFrom[C](format: Format[C])(carrier: C): Traced.Interface[TracedT[F, *]] =
new InterfaceProxy(
TracedT(
for {
s <- state
ce <- StateT liftF delay{ s.tracer.extract(format, carrier) }.attempt
_ <- StateT.liftF(ce.swap.traverse_(s.logError[F]("Failed to extract span context from carrier", _)))
} yield ce.toOption.map(_.asRight)
)
)
// An Interface whose parent is fixed by the given (possibly effectful) value.
private class InterfaceProxy(parent: TracedT[F, Option[Either[Span, SpanContext]]]) extends TracedInterface[F] {
protected def spanParent: TracedT[F, Option[Either[Span, SpanContext]]] = parent
def withParent(span: ActiveSpan): Traced.Interface[TracedT[F, *]] =
InterfaceProxy.pure(Option(span).flatMap(_.maybe).map(_.asLeft))
def withParent(span: SpanContext): Traced.Interface[TracedT[F, *]] =
InterfaceProxy.pure(Option(span).map(_.asRight))
def withoutParent: Traced.Interface[TracedT[F, *]] =
InterfaceProxy.pure(None)
}
private object InterfaceProxy {
def pure(opt: Option[Either[Span, SpanContext]]): InterfaceProxy = new InterfaceProxy(self.pure(opt))
}
// NOTE(review): despite the name, this serializes the *current* span's context
// into `carrier` via `tracer.inject`; returns None when there is no span.
def extractContext[C0 <: C, C](carrier: C0, format: Format[C]): TracedT[F, Option[C0]] =
for {
s <- state
o <- StateT liftF s.currentSpan.traverse(span => delay(s.tracer.inject(span.context(), format, carrier)))
} yield o.map(_ => carrier)
def currentRunParams: TracedT[F, Traced.RunParams] =
state.map(s => Traced.RunParams(s.tracer, s.hooks, ActiveSpan(s.currentSpan), s.logError))
// Runs the transformer with an initial State built from `params`, discarding
// the final state and keeping only the result.
def run[A](traced: TracedT[F, A], params: Traced.RunParams): F[A] =
traced.run(State(params.tracer, params.hooks, params.activeSpan.maybe, params.logError)).map(_._2)
def mapK[G[_]](f: F ~> G): TracedT[F, *] ~> TracedT[G, *] = λ[TracedT[F, *] ~> TracedT[G, *]](_.mapK(f))
def withParent(span: ActiveSpan): Traced.Interface[TracedT[F, *]] = withParent0(Option(span).flatMap(_.maybe).map(_.asLeft))
def withParent(span: SpanContext): Traced.Interface[TracedT[F, *]] = withParent0(Option(span).map(_.asRight))
def withoutParent: Traced.Interface[TracedT[F, *]] = withParent0(None)
private def withParent0(span: Option[Either[Span, SpanContext]]): Traced.Interface[TracedT[F, *]] =
new InterfaceProxy(pure(span))
}
object TracedTTracedInstance {
/** Span lifecycle for `TracedT`: creates a span as a `Resource`, sets it as the
  * current span for the duration of the wrapped effect, and finishes it on exit.
  */
abstract class TracedInterface[F[_]](implicit sync: Sync[F]) extends Traced.Interface[TracedT[F, *]] {
// How the parent of a new span is determined (current span, injected context, none).
protected def spanParent: TracedT[F, Option[Either[Span, SpanContext]]]
import sync.delay
private def state = StateT.get[F, State]
private def setState = StateT.set[F, State] _
def apply[A](op: String, tags: Traced.Tag*)(fa: TracedT[F, A]): TracedT[F, A] =
spanResource(op, tags: _*).use { activeSpan =>
for {
s <- state
// Make the new span current while `fa` runs.
_ <- setState(s.copy(currentSpan = activeSpan.maybe))
a <- fa
} yield a
}
def spanResource(op: String, tags: Traced.Tag*): Resource[TracedT[F, *], ActiveSpan] =
Resource.makeCase[TracedT[F, *], ActiveSpan](
for {
s <- state
p <- spanParent
// beforeStart hook runs inside newSpan; justAfterStart hooks run right after.
span <- StateT liftF Tools.newSpan(s.tracer, p, s.hooks.beforeStart, op, tags)
span1 = CurrentSpan(span)
_ <- StateT liftF s.hooks.justAfterStart(CurrentSpan(span)).traverse_(_(span1))
_ <- setState(s.copy(currentSpan = Some(span)))
} yield ActiveSpan(span)
) {
// Map the Resource exit case onto the error reported to the beforeStop hooks.
case (span, ExitCase.Completed) => finSpan(span, None)
case (span, ExitCase.Canceled) => finSpan(span, Some(new Exception("Canceled")))
case (span, ExitCase.Error(e)) => finSpan(span, Some(e))
}
// Runs beforeStop hooks, then finishes the span even if a hook fails.
private def finSpan(span: ActiveSpan, e: Option[Throwable]): TracedT[F, Unit] =
for {
s <- state
span1 = CurrentSpan(span.maybe)
_ <- StateT liftF s.hooks.beforeStop(CurrentSpan(span.maybe))(e).traverse_(_(span1))
.guarantee0(_ => delay{ span.maybe.foreach(_.finish()) })
} yield ()
// Like `guarantee`: runs `f` with the outcome, then rethrows any failure.
private implicit class GuaranteeOps[A](fa: F[A]) {
def guarantee0(f: Either[Throwable, A] => F[Unit]): F[A] =
for {
ea <- fa.attempt
_ <- f(ea)
a <- ea.pure[F].rethrow
} yield a
}
}
}
/** Monad for `TracedT`, delegating to the StateT monad of the underlying `F`. */
private[opentracing] trait TracedTMonadProxy[F[_]] extends Monad[TracedT[F, *]] {
protected val MF: Monad[F]
// lazy: MF is supplied by the implementing subclass after construction.
private lazy val M0 = IndexedStateT.catsDataMonadForIndexedStateT[F, State](MF)
override def map[A, B](fa: TracedT[F, A])(f: A => B): TracedT[F, B] = M0.map(fa)(f)
def pure[A](x: A): TracedT[F, A] = M0.pure(x)
def flatMap[A, B](fa: TracedT[F, A])(f: A => TracedT[F, B]): TracedT[F, B] = M0.flatMap(fa)(f.andThen(_.stateT))
def tailRecM[A, B](a: A)(f: A => TracedT[F, Either[A, B]]): TracedT[F, B] = M0.tailRecM(a)(f.andThen(_.stateT))
}
/** MonadError for `TracedT`, delegating to the StateT instance of the underlying `F`. */
private[opentracing] trait TracedTMonadErrorProxy[F[_], E] extends MonadError[TracedT[F, *], E] with TracedTMonadProxy[F] {
protected val MF: MonadError[F, E]
private lazy val M0 = IndexedStateT.catsDataMonadErrorForIndexedStateT[F, State, E](MF)
def raiseError[A](e: E): TracedT[F, A] = TracedT(M0.raiseError(e))
def handleErrorWith[A](fa: TracedT[F, A])(f: E => TracedT[F, A]): TracedT[F, A] = M0.handleErrorWith(fa)(f.andThen(_.stateT))
}
/** Sync for `TracedT`. `bracketCase` threads the tracing state manually:
  * acquire runs with the incoming state, use with the post-acquire state, and
  * release with the post-acquire state (release's state changes are discarded —
  * the final state `s1` comes from `use` alone).
  */
private[opentracing] class TracedTSyncInstance[F[_]](implicit sync: Sync[F])
extends Sync[TracedT[F, *]] with TracedTMonadErrorProxy[F, Throwable]
{
protected val MF: MonadError[F, Throwable] = sync
// `locally` is used as an identity to flatten the suspended TracedT.
def suspend[A](thunk: => TracedT[F, A]): TracedT[F, A] = StateT.liftF(sync.delay(thunk.stateT)).flatMap(locally)
def bracketCase[A, B](acquire: TracedT[F, A])
(use: A => TracedT[F, B])
(release: (A, ExitCase[Throwable]) => TracedT[F, Unit]): TracedT[F, B] =
for {
s0 <- StateT.get[F, State]
(s1, b) <- StateT liftF sync.bracketCase(
acquire.run(s0)
){
case (s, a) => use(a).run(s)
} {
case ((s, a), e) => release(a, e).run(s).void
}
_ <- StateT.set[F, State](s1)
} yield b
}
/** LiftIO for `TracedT`: the IO is lifted into `F`, then into StateT. */
private[opentracing] trait TracedTLiftIO[F[_]] extends LiftIO[TracedT[F, *]] {
protected val AF: Applicative[F]
protected val LIOF: LiftIO[F]
def liftIO[A](ioa: IO[A]): TracedT[F, A] = StateT.liftF[F, State, A](LIOF.liftIO(ioa))(AF)
}
/** Concrete standalone [[TracedTLiftIO]] instance. */
private[opentracing] class TracedTLiftIoInstance[F[_]](implicit protected val AF: Applicative[F],
protected val LIOF: LiftIO[F])
extends TracedTLiftIO[F]
/** Async for `TracedT`. */
private[opentracing] class TracedTAsyncInstance[F[_]](implicit A: Async[F])
extends TracedTSyncInstance[F]
with Async[TracedT[F, *]]
with TracedTLiftIO[F] {
protected val AF: Applicative[F] = A
protected val LIOF: LiftIO[F] = A
protected[this] def state = StateT.get[F, State]
protected[this] def setState = StateT.set[F, State] _
override def liftIO[A](ioa: IO[A]): TracedT[F, A] = super[TracedTLiftIO].liftIO(ioa)
def async[A](k: (Either[Throwable, A] => Unit) => Unit): TracedT[F, A] = StateT.liftF[F, State, A](A.async(k))
// The registration effect runs with a snapshot of the current state; any state
// changes it makes are discarded (see `runVoid`).
def asyncF[A](k: (Either[Throwable, A] => Unit) => TracedT[F, Unit]): TracedT[F, A] =
for {
s0 <- state
a <- StateT liftF A.asyncF[A](k andThen runVoid(s0))
} yield a
private def runVoid[A](s: State)(traced: TracedT[F, A]) = traced.run(s).void
}
/** `cats.effect.Effect` for `TracedT[F, *]`: runs the traced computation with
  * fixed [[Traced.RunParams]] and discards the final tracing state.
  */
private[opentracing] trait TracedTEffect[F[_]] extends Effect[TracedT[F, *]] {
  protected val EF: Effect[F]
  implicit private def ef0: Effect[F] = EF
  protected val params: Traced.RunParams
  // Build the initial State from `params` and keep only the result value.
  protected[this] def runP[A](traced: TracedT[F, A]) =
    traced.run(State(params.tracer, params.hooks, params.activeSpan.maybe, params.logError)).map(_._2)
  def runAsync[A](fa: TracedT[F, A])(cb: Either[Throwable, A] => IO[Unit]): SyncIO[Unit] =
    EF.runAsync[A](runP(fa))(cb)
}
/** Concrete [[TracedTEffect]] built from an implicit `Effect[F]` and run parameters. */
private[opentracing] class TracedTEffectInstance[F[_]](implicit protected val EF: Effect[F],
                                                       protected val params: Traced.RunParams)
  extends TracedTAsyncInstance[F] with TracedTEffect[F]
/** `cats.effect.Concurrent` instance for `TracedT[F, *]`. */
private[opentracing] class TracedTConcurrentInstance[F[_]](implicit C: Concurrent[F])
  extends TracedTAsyncInstance[F]
  with Concurrent[TracedT[F, *]] {
  // Run a traced computation from state `s`, keeping only its result.
  protected[this] def run[A](s: State)(traced: TracedT[F, A]) = traced.run(s).map(_._2)
  // The forked fiber gets a snapshot of the current state; `join` restores that
  // snapshot (`setState(s0)`), so state changes inside the fiber are dropped.
  def start[A](fa: TracedT[F, A]): TracedT[F, Fiber[TracedT[F, *], A]] =
    for {
      s0 <- state
      f <- StateT liftF C.start(run(s0)(fa))
    } yield f.mapK(TracedT.liftK[F] andThen λ[TracedT[F, *] ~> TracedT[F, *]](_.stateT <* setState(s0)))
  // TODO
  // Both racers run from the same state snapshot; the loser's fiber is lifted as-is.
  // NOTE(review): unlike `start`, the loser's fiber does not restore the caller's
  // state on join — confirm whether that asymmetry is intended.
  def racePair[A, B](fa: TracedT[F, A], fb: TracedT[F, B]): TracedT[F, Either[(A, Fiber[TracedT[F, *], B]), (Fiber[TracedT[F, *], A], B)]] =
    for {
      s0 <- state
      ef <- StateT liftF C.racePair(run(s0)(fa), run(s0)(fb))
    } yield ef.leftMap{ case (a, f) => a -> f.mapK(TracedT.liftK[F]) }
      .map { case (f, b) => f.mapK(TracedT.liftK[F]) -> b }
}
/** `ConcurrentEffect` for `TracedT[F, *]`: combines [[TracedTConcurrentInstance]]
  * with the run-parameters-based [[TracedTEffect]].
  */
class TracedTConcurrentEffectInstance[F[_]](
  implicit
  ce: ConcurrentEffect[F],
  protected val params: Traced.RunParams
) extends TracedTConcurrentInstance[F]
  with ConcurrentEffect[TracedT[F, *]]
  with TracedTEffect[F] {
  protected val EF: Effect[F] = ce
  // The underlying effect's cancel token is lifted back into TracedT.
  def runCancelable[A](fa: TracedT[F, A])(cb: Either[Throwable, A] => IO[Unit]): SyncIO[CancelToken[TracedT[F, *]]] =
    ce.runCancelable(runP(fa))(cb).map(StateT.liftF[F, State, Unit])
}
/** `cats.Parallel` instance for `TracedT[G, *]`, parallelising via `G`'s own `Parallel`. */
class TracedTParallelInstance[G[_], ParF[_]](implicit val par0: Parallel.Aux[G, ParF]) extends Parallel[TracedT[G, *]] {
  import TracedTParallelInstance.Par
  // The parallel interpretation is the Par newtype over ParF.
  type F[A] = Par[ParF, A]
  def applicative: Applicative[Par[ParF, *]] = Par.parCommutativeApplicative0(par0.applicative)
  def monad: Monad[TracedT[G, *]] = new TracedTMonadProxy[G] { protected val MF: Monad[G] = par0.monad }
  def sequential: Par[ParF, *] ~> TracedT[G, *] =
    λ[Par[ParF, *] ~> TracedT[G, *]](_.traced.mapK(par0.sequential)(par0.applicative))
  def parallel: TracedT[G, *] ~> Par[ParF, *] =
    λ[TracedT[G, *] ~> Par[ParF, *]](t => new Par(t.mapK(par0.parallel)(par0.monad)))
}
object TracedTParallelInstance {
  /** Zero-cost newtype marking the parallel interpretation of a traced computation. */
  class Par[F[_], A](val traced: TracedT.Underlying[F, A]) extends AnyVal
  object Par {
    implicit def parCommutativeApplicative[F[_]: CommutativeApplicative]: CommutativeApplicative[Par[F, *]] =
      parCommutativeApplicative0[F]
    // `ap` feeds both sides the same input state and yields that state unchanged,
    // i.e. state changes of `ff` / `fa` are discarded — this makes the instance commutative.
    protected[TracedTParallelInstance] def parCommutativeApplicative0[F[_]](implicit A: Applicative[F]): CommutativeApplicative[Par[F, *]] =
      new CommutativeApplicative[Par[F, *]] {
        def pure[A](x: A): Par[F, A] = new Par(StateT.pure(x))
        def ap[A, B](ff: Par[F, A => B])(fa: Par[F, A]): Par[F, B] =
          new Par(StateT.applyF {
            A.product(ff.traced.runF, fa.traced.runF)
              .map { case (rff, rfa) =>
                (s: State) =>
                  A.ap(rff(s).map(_._2))(rfa(s).map(_._2))
                    .map((s, _))
              }
          })
      }
  }
}
|
fehu/opentracing-scala
|
scala/src/main/scala/com/github/fehu/opentracing/internal/Tools.scala
|
package com.github.fehu.opentracing.internal
import cats.effect.Sync
import cats.Endo
import io.opentracing.{ Span, SpanContext, Tracer }
import com.github.fehu.opentracing.Traced
private[opentracing] object Tools {
  /** Builds and starts a new span.
    *
    * The builder ignores the tracer's active span; the parent, if given, is set
    * explicitly from either a `Span` or a `SpanContext`. Tags are applied in
    * order, `buildHook` is applied last, and starting the span is suspended in `F`.
    */
  def newSpan[F[_]: Sync](tracer: Tracer,
                          parent: Option[Either[Span, SpanContext]],
                          buildHook: Endo[Tracer.SpanBuilder],
                          op: String,
                          tags: Seq[Traced.Tag]): F[Span] = {
    val base = tracer.buildSpan(op).ignoreActiveSpan
    val withParent = parent match {
      case Some(Left(span)) => base.asChildOf(span)
      case Some(Right(ctx)) => base.asChildOf(ctx)
      case None             => base
    }
    val tagged = tags.foldLeft(withParent)((b, t) => t.apply(b))
    Sync[F].delay(buildHook(tagged).start())
  }
}
|
fehu/opentracing-scala
|
scala/src/main/scala/com/github/fehu/opentracing/internal/State.scala
|
<reponame>fehu/opentracing-scala
package com.github.fehu.opentracing.internal
import scala.collection.JavaConverters._
import cats.effect.Sync
import cats.~>
import cats.instances.option._
import cats.syntax.applicative._
import cats.syntax.flatMap._
import cats.syntax.functor._
import cats.syntax.traverse._
import io.opentracing.{ Span, SpanContext, Tracer }
import com.github.fehu.opentracing.Traced
import com.github.fehu.opentracing.util.ErrorLogger
/** Internal tracing state threaded through `TracedT`: the tracer, the user hooks,
  * the currently active span (if any) and the error logger.
  */
final case class State(
  private[opentracing] val tracer: Tracer,
  private[opentracing] val hooks: Traced.Hooks,
  private[opentracing] val currentSpan: Option[Span],
  private[opentracing] val logError: ErrorLogger
)
/** [[Traced.SpanInterface]] backed by an effectful optional span.
  * Every operation yields `None` / does nothing when no span is present.
  */
private[opentracing] class CurrentSpan[F[_]](private[opentracing] val fOpt: F[Option[Span]])(implicit sync: Sync[F])
  extends Traced.SpanInterface[F]
{ self =>
  // Apply `f` to the span, if any, suspending the side effect in `sync.delay`.
  private def delay[R](f: Span => R): F[Option[R]] = fOpt.flatMap(_.traverse{ span => sync.delay(f(span)) })
  def context: F[Option[SpanContext]] = delay(_.context())
  def setOperation(op: String): F[Unit] = delay(_.setOperationName(op)).void
  def setTag(tag: Traced.Tag): F[Unit] = delay(tag.apply(_)).void
  def setTags(tags: Traced.Tag*): F[Unit] =
    if (tags.nonEmpty) delay(tags.foldLeft(_)((s, t) => t.apply(s))).void
    else sync.unit
  def log(fields: (String, Any)*): F[Unit] = if (fields.nonEmpty) delay(_.log(fields.toMap.asJava)).void else sync.unit
  def log(event: String): F[Unit] = delay(_.log(event)).void
  def setBaggageItem(key: String, value: String): F[Unit] = delay(_.setBaggageItem(key, value)).void
  def getBaggageItem(key: String): F[Option[String]] = delay(_.getBaggageItem(key))
  // Change the effect type by delegating every operation back to `self` through `f`.
  def mapK[G[_]](f: F ~> G): Traced.SpanInterface[G] = new Traced.SpanInterface[G] {
    def context: G[Option[SpanContext]] = f(self.context)
    def setOperation(op: String): G[Unit] = f(self.setOperation(op))
    def setTag(tag: Traced.Tag): G[Unit] = f(self.setTag(tag))
    def setTags(tags: Traced.Tag*): G[Unit] = f(self.setTags(tags: _*))
    def log(fields: (String, Any)*): G[Unit] = f(self.log(fields: _*))
    def log(event: String): G[Unit] = f(self.log(event))
    def setBaggageItem(key: String, value: String): G[Unit] = f(self.setBaggageItem(key, value))
    def getBaggageItem(key: String): G[Option[String]] = f(self.getBaggageItem(key))
    def mapK[H[_]](g: G ~> H): Traced.SpanInterface[H] = self.mapK(g compose f)
    def noop: G[Unit] = f(sync.unit)
  }
  def noop: F[Unit] = sync.unit
}
private[opentracing] object CurrentSpan {
  /** Wrap an effect yielding a span; `Option(_)` makes a `null` result safe. */
  def apply[F[_]: Sync](span: F[Span]): CurrentSpan[F] = new CurrentSpan(span.map(s => Option(s)))
  /** Wrap an already-obtained (possibly `null`) span; delegates to the `Option` overload. */
  def apply[F[_]: Sync](span: Span): CurrentSpan[F] = apply(Option(span))
  /** Wrap an optional span. */
  def apply[F[_]: Sync](span: Option[Span]): CurrentSpan[F] = new CurrentSpan(span.pure[F])
}
|
fehu/opentracing-scala
|
akka/src/main/scala/com/github/fehu/opentracing/akka/AskTracing.scala
|
<reponame>fehu/opentracing-scala
package com.github.fehu.opentracing.akka
import akka.actor.ActorRef
import akka.pattern
import akka.util.Timeout
import cats.effect.{ Async, ContextShift }
import cats.syntax.flatMap._
import cats.syntax.functor._
import com.github.fehu.opentracing.Traced
import com.github.fehu.opentracing.syntax._
object AskTracing {
  /** Wraps an akka `ask`, attaching the current span's context to the message as a
    * `TracedMessage` so the receiving actor can continue the trace.
    */
  class Ops[F[_]: Async: ContextShift: Traced](ref: ActorRef, message: Any, sender: ActorRef)
                                              (implicit timeout: Timeout) {
    /** Perform the ask within the current trace, without opening a new span. */
    def traced: F[Any] = trace0
    /** Perform the ask inside a new span named `op` with the given tags. */
    def trace(op: String, tags: Traced.Tag*): F[Any] = trace0.trace(op, tags: _*)
    private def trace0: F[Any] =
      for {
        ctx <- Traced.currentSpan.context
        res <- Async.fromFuture(
          // Creating the Future is itself a side effect, hence the `delay`.
          Async[F].delay{ pattern.ask(ref, TracedMessage(message, ctx), sender) }
        )
      } yield res
  }
}
|
fehu/opentracing-scala
|
scala/src/main/scala/com/github/fehu/opentracing/propagation/Propagation.scala
|
package com.github.fehu.opentracing.propagation
import io.opentracing.propagation.Format
/** Abstracts over an OpenTracing propagation carrier. */
trait Propagation {
  /** The carrier type understood by the tracer's inject/extract API. */
  type Underlying
  def underlying: Underlying
  def format: Format[Underlying]
  /** A representation of the carrier's contents. */
  type Repr
  def repr: Repr
}
/** Companion / factory for a [[Propagation]] implementation `C`. */
trait PropagationCompanion[C <: Propagation] {
  /** Create an empty carrier. */
  def apply(): C
  /** Recreate a carrier from its representation. */
  def apply(repr: C#Repr): C
  def format: Format[C#Underlying]
  // Make the companion itself available implicitly.
  final implicit def companion: PropagationCompanion[C] = this
}
|
fehu/opentracing-scala
|
akka/src/main/scala/com/github/fehu/opentracing/akka/package.scala
|
package com.github.fehu.opentracing
import _root_.akka.actor.ActorRef
import _root_.akka.util.Timeout
import cats.effect.{ Async, ContextShift }
package object akka {
  /** Traced ask with an explicitly passed sender. */
  def ask[F[_]: Async: ContextShift: Traced](actorRef: ActorRef, message: Any, sender: ActorRef)
                                            (implicit timeout: Timeout): AskTracing.Ops[F] =
    new AskTracing.Ops[F](actorRef, message, sender)
  /** Traced ask resolving the sender implicitly; delegates to the explicit-sender overload. */
  def ask[F[_]: Async: ContextShift: Traced](actorRef: ActorRef, message: Any)
                                            (implicit timeout: Timeout, sender: ActorRef): AskTracing.Ops[F] =
    ask[F](actorRef, message, sender)
}
|
fehu/opentracing-scala
|
scala/src/test/scala/com/github/fehu/opentracing/propagation/io/PropagationTracedIOSpec.scala
|
package com.github.fehu.opentracing.propagation.io
import cats.effect.Effect
import com.github.fehu.opentracing.Traced
import com.github.fehu.opentracing.propagation.PropagationSpec
import com.github.fehu.opentracing.transformer._
import com.github.fehu.opentracing.util.ErrorLogger
/** Runs the shared [[PropagationSpec]] against the `TracedIO` transformer. */
class PropagationTracedIOSpec extends PropagationSpec[TracedIO] {
  // Parameters used to run traced computations in the tests.
  implicit lazy val tracedRunParams: Traced.RunParams =
    Traced.RunParams(mockTracer, Traced.Hooks(), Traced.ActiveSpan.empty, ErrorLogger.stdout)
  implicit lazy val effect: Effect[TracedIO] = TracedT.tracedTEffectInstance
}
|
fehu/opentracing-scala
|
fs2/src/main/scala/com/github/fehu/opentracing/syntax/FS2.scala
|
package com.github.fehu.opentracing.syntax
import _root_.fs2.Stream
import cats.~>
import cats.effect.Bracket
import cats.syntax.apply._
import io.opentracing.SpanContext
import io.opentracing.propagation.Format
import com.github.fehu.opentracing.Traced
import com.github.fehu.opentracing.propagation.{ Propagation, PropagationCompanion }
/** fs2 syntax for tracing: trace the whole stream's lifetime, trace the
  * processing of individual elements, or log elements to the current span.
  */
object FS2 {
  final implicit class TracedFs2StreamOps[F[_]: Bracket[*[_], Throwable], A](stream: Stream[F, A])(implicit t: Traced[F]) {
    // // // Trace Entire Stream // // //
    /** Open one span covering the whole lifetime of the stream. */
    def traceLifetime(operation: String, tags: Traced.Tag*): Stream[F, A] =
      tracingLifetime(t, operation, tags)
    /** Like [[traceLifetime]], parenting the span on the given context. */
    def traceLifetimeInjecting(ctx: SpanContext)
                              (operation: String, tags: Traced.Tag*): Stream[F, A] =
      tracingLifetime(t.injectContext(ctx), operation, tags)
    /** Like [[traceLifetimeInjecting]] with an optional context; `traceEmpty` /
      * `emptyOrphan` control what happens when the context is absent.
      */
    def traceLifetimeInjectingOpt(
      ctx: Option[SpanContext],
      traceEmpty: Boolean = true,
      emptyOrphan: Boolean = true
    )(
      operation: String,
      tags: Traced.Tag*
    ): Stream[F, A] =
      tracingLifetimeOpt(ctx.map(t.injectContext), operation, tags, traceEmpty, emptyOrphan)
    /** Like [[traceLifetimeInjecting]], extracting the context from a carrier. */
    def traceLifetimeInjectingFrom[C](format: Format[C])(carrier: C)
                                     (operation: String, tags: Traced.Tag*): Stream[F, A] =
      tracingLifetime(t.injectContextFrom(format)(carrier), operation, tags)
    /** Carrier-based variant with an optional carrier. */
    def traceLifetimeInjectingFromOpt[C](
      format: Format[C]
    )(
      carrier: Option[C],
      traceEmpty: Boolean = true,
      emptyOrphan: Boolean = true
    )(
      operation: String,
      tags: Traced.Tag*
    ): Stream[F, A] =
      tracingLifetimeOpt(carrier.map(t.injectContextFrom(format)), operation, tags, traceEmpty, emptyOrphan)
    /** Carrier-based variant using a [[Propagation]] carrier. */
    def traceLifetimeInjectingPropagated[C <: Propagation](carrier: C)
                                                          (operation: String, tags: Traced.Tag*)
                                                          (implicit companion: PropagationCompanion[C]): Stream[F, A] =
      traceLifetimeInjectingFrom(companion.format)(carrier.underlying)(operation, tags: _*)
    /** [[Propagation]]-based variant with an optional carrier. */
    def traceLifetimeInjectingPropagatedOpt[C <: Propagation](
      carrier: Option[C],
      traceEmpty: Boolean = true,
      emptyOrphan: Boolean = true
    )(
      operation: String,
      tags: Traced.Tag*
    )(implicit
      companion: PropagationCompanion[C]
    ): Stream[F, A] =
      traceLifetimeInjectingFromOpt(companion.format)(carrier.map(_.underlying), traceEmpty, emptyOrphan)(operation, tags: _*)
    // // // Trace Elements Usage // // //
    /** Open a span around the downstream processing of each element. */
    def traceUsage(operation: String, tags: Traced.Tag*): Stream[F, A] =
      tracingElems(_ => t, _ => _.span(operation, tags: _*))
    /** Per-element span whose name/tags are derived from the element. */
    def traceUsage(trace: A => Traced.Operation.Builder): Stream[F, A] =
      tracingElems(_ => t, trace)
    /** Per-element span parented on a context derived from the element. */
    def traceUsageInjecting(context: A => SpanContext, trace: A => Traced.Operation.Builder): Stream[F, A] =
      tracingElems(a => t.injectContext(context(a)), trace)
    /** Like [[traceUsageInjecting]] with an optional per-element context. */
    def traceUsageInjectingOpt(
      context: A => Option[SpanContext],
      trace: A => Traced.Operation.Builder,
      traceEmpty: Boolean = true,
      emptyOrphan: Boolean = true
    ): Stream[F, A] =
      tracingElemsOpt(context(_).map(t.injectContext), trace, traceEmpty, emptyOrphan)
    /** Per-element span with the parent context extracted from a carrier. */
    def traceUsageInjectingFrom[C](format: Format[C])(carrier: A => C, trace: A => Traced.Operation.Builder): Stream[F, A] =
      tracingElems(a => t.injectContextFrom(format)(carrier(a)), trace)
    /** Carrier-based per-element variant with an optional carrier. */
    def traceUsageInjectingFromOpt[C](format: Format[C])(
      carrier: A => Option[C],
      trace: A => Traced.Operation.Builder,
      traceEmpty: Boolean = true,
      emptyOrphan: Boolean = true
    ): Stream[F, A] =
      tracingElemsOpt(carrier(_).map(t.injectContextFrom(format)), trace, traceEmpty, emptyOrphan)
    /** [[Propagation]]-based per-element variant. */
    def traceUsageInjectingPropagated[C <: Propagation](carrier: A => C, trace: A => Traced.Operation.Builder)
                                                       (implicit companion: PropagationCompanion[C]): Stream[F, A] =
      traceUsageInjectingFrom(companion.format)(carrier andThen (_.underlying), trace)
    /** [[Propagation]]-based per-element variant with an optional carrier. */
    def traceUsageInjectingPropagatedOpt[C <: Propagation](
      carrier: A => Option[C],
      trace: A => Traced.Operation.Builder,
      traceEmpty: Boolean = true,
      emptyOrphan: Boolean = true
    )(implicit
      companion: PropagationCompanion[C]
    ): Stream[F, A] =
      traceUsageInjectingFromOpt(companion.format)(carrier andThen (_.map(_.underlying)), trace, traceEmpty, emptyOrphan)
    // // // Log Elements // // //
    /** Log fields derived from each element to the current span. */
    def tracedLog(f: A => Seq[(String, Any)]): Stream[F, A] =
      stream.evalTap(a => t.currentSpan.log(f(a): _*))
    /** Log each element's `toString` to the current span. */
    def tracedElemLog: Stream[F, A] = stream.evalTap(t.currentSpan log _.toString)
    // // // Helpers // // //
    // Allocate the span up front, install it as current for the whole stream,
    // and finish it when the stream terminates.
    private def tracingLifetime(i: Traced.Interface[F], op: String, tags: Seq[Traced.Tag]): Stream[F, A] =
      for {
        (span, finish) <- Stream eval i.spanResource(op, tags: _*).allocated
        a <- stream.translate(λ[F ~> F](t.recoverCurrentSpan(span) *> _))
          .onFinalize(finish)
      } yield a
    // Absent interface: optionally trace anyway (`traceEmpty`), orphaned or not.
    private def tracingLifetimeOpt(i: Option[Traced.Interface[F]], op: String, tags: Seq[Traced.Tag], traceEmpty: Boolean, emptyOrphan: Boolean): Stream[F, A] = {
      lazy val iw =
        if (traceEmpty) Some(if (emptyOrphan) t.withoutParent else t)
        else None
      i.orElse(iw).map(tracingLifetime(_, op, tags)).getOrElse(stream)
    }
    // One span resource per element, scoping the element's downstream usage.
    private def tracingElems(f: A => Traced.Interface[F], trace: A => Traced.Operation.Builder): Stream[F, A] =
      stream.flatMap(a => Stream.resource(f(a).spanResource(trace(a))).as(a))
    private def tracingElemsOpt(f: A => Option[Traced.Interface[F]], trace: A => Traced.Operation.Builder, traceEmpty: Boolean, emptyOrphan: Boolean): Stream[F, A] =
      stream.flatMap { a =>
        lazy val iw =
          if (traceEmpty) Some(if (emptyOrphan) t.withoutParent else t)
          else None
        f(a).orElse(iw).fold(
          Stream.emit(a).covary[F]
        )(
          i => Stream.resource(i.spanResource(trace(a))).as(a)
        )
      }
  }
}
|
fehu/opentracing-scala
|
scala/src/main/scala/com/github/fehu/opentracing/Traced.scala
|
package com.github.fehu.opentracing
import scala.language.{ existentials, implicitConversions }
import cats.{ Show, ~> }
import cats.effect.Resource
import cats.syntax.show._
import io.opentracing.propagation.Format
import io.opentracing.{ Span, SpanContext, Tracer, tag }
import com.github.fehu.opentracing.util.ErrorLogger
import com.github.fehu.opentracing.util.FunctionK2.~~>
/** A [[Traced]] for effect transformers `F[U, *]` over a base effect `U`. */
trait Traced2[F[_[*], _], U[_]] extends Traced[F[U, *]] {
  /** The run parameters of the current computation. */
  def currentRunParams: F[U, Traced.RunParams]
  /** Run the traced computation in the base effect with the given parameters. */
  def run[A](traced: F[U, A], params: Traced.RunParams): U[A]
  /** Lift a base-effect value into the transformer. */
  def lift[A](ua: U[A]): F[U, A]
  /** Transform the base effect. */
  def mapK[G[_]](f: U ~> G): F[U, *] ~> F[G, *]
}
/** Tracing capability for an effect `F`. */
trait Traced[F[_]] extends Traced.Interface[F] {
  def pure[A](a: A): F[A]
  def defer[A](fa: => F[A]): F[A]
  /** Interface to the span currently active in `F`, if any. */
  def currentSpan: Traced.SpanInterface[F]
  /** Unconditionally installs `active` as the current span. */
  def forceCurrentSpan(active: Traced.ActiveSpan): F[Traced.SpanInterface[F]]
  /** Sets `active` span if no other is set. */
  def recoverCurrentSpan(active: Traced.ActiveSpan): F[Traced.SpanInterface[F]]
  /** An [[Traced.Interface]] whose spans are attached to the given context. */
  def injectContext(context: SpanContext): Traced.Interface[F]
  /** Like [[injectContext]], with the context carried in `carrier` in the given format. */
  def injectContextFrom[C](format: Format[C])(carrier: C): Traced.Interface[F]
  def extractContext[C0 <: C, C](carrier: C0, format: Format[C]): F[Option[C0]]
}
object Traced {
  /** Summon the [[Traced]] instance for `F`. */
  def apply[F[_]](implicit traced: Traced[F]): Traced[F] = traced
  /** Operations for running code inside spans; final overloads reduce
    * [[Operation]] / builder forms to the primitive `(op, tags)` form.
    */
  trait Interface[F[_]] {
    def apply[A](op: String, tags: Traced.Tag*)(fa: F[A]): F[A]
    def spanResource(op: String, tags: Traced.Tag*): Resource[F, ActiveSpan]
    final def apply[A](op: Operation)(fa: F[A]): F[A] = apply(op.operation, op.tags: _*)(fa)
    final def spanResource[A](op: Operation): Resource[F, ActiveSpan] = spanResource(op.operation, op.tags: _*)
    final def apply[A](builder: Operation.Builder)(fa: F[A]): F[A] = apply(builder(Operation))(fa)
    final def spanResource[A](builder: Operation.Builder): Resource[F, ActiveSpan] = spanResource(builder(Operation))
    def withParent(span: ActiveSpan): Interface[F]
    def withParent(span: SpanContext): Interface[F]
    def withoutParent: Interface[F]
  }
  /** A span description: operation name plus tags. */
  final case class Operation(operation: String, tags: Seq[Traced.Tag])
  object Operation {
    def span(op: String, tags: Traced.Tag*): Operation = new Operation(op, tags)
    // A builder is a function from this companion, enabling `_.span("op", ...)` syntax.
    type Builder = Operation.type => Operation
  }
  /** A key/value tag, applicable to both span builders and live spans. */
  class Tag(val apply: Taggable.PartiallyApplied) extends AnyVal
  object Tag {
    // Tuples convert implicitly to tags, given a Taggable for the value type.
    implicit def stringPair[A](p: (String, A))(implicit t: Taggable[A]): Tag = new Tag(t(p._1, p._2))
    implicit def tagPair[A](p: (tag.Tag[A], A))(implicit t: Taggable[A]): Tag = new Tag(t(p._1.getKey, p._2))
  }
  /** Type class describing how a value of type `A` is written as a span tag. */
  trait Taggable[A] { self =>
    def apply(builder: Tracer.SpanBuilder, key: String, value: A): Tracer.SpanBuilder
    def apply(builder: Span, key: String, value: A): Span
    /** Fix `key` and `value`, leaving only the tag target to be supplied. */
    def apply(key: String, value: A): Taggable.PartiallyApplied =
      new Taggable.PartiallyApplied {
        def apply(builder: Tracer.SpanBuilder): Tracer.SpanBuilder = self(builder, key, value)
        def apply(builder: Span): Span = self(builder, key, value)
      }
    def contramap[B](f: B => A): Taggable[B] =
      new Taggable[B] {
        def apply(builder: Tracer.SpanBuilder, key: String, value: B): Tracer.SpanBuilder = self(builder, key, f(value))
        def apply(builder: Span, key: String, value: B): Span = self(builder, key, f(value))
      }
  }
  object Taggable {
    /** A tag with key and value already bound, awaiting its target. */
    trait PartiallyApplied {
      def apply(builder: Tracer.SpanBuilder): Tracer.SpanBuilder
      def apply(builder: Span): Span
    }
    // Primitive instances map onto the native OpenTracing tag overloads.
    implicit lazy val stringIsTaggable: Taggable[String] =
      new Taggable[String] {
        def apply(builder: Tracer.SpanBuilder, key: String, value: String): Tracer.SpanBuilder = builder.withTag(key, value)
        def apply(builder: Span, key: String, value: String): Span = builder.setTag(key, value)
      }
    implicit lazy val boolIsTaggable: Taggable[Boolean] =
      new Taggable[Boolean] {
        def apply(builder: Tracer.SpanBuilder, key: String, value: Boolean): Tracer.SpanBuilder = builder.withTag(key, value)
        def apply(builder: Span, key: String, value: Boolean): Span = builder.setTag(key, value)
      }
    implicit lazy val numberIsTaggable: Taggable[Number] = new Taggable[Number] {
      def apply(builder: Tracer.SpanBuilder, key: String, value: Number): Tracer.SpanBuilder = builder.withTag(key, value)
      def apply(builder: Span, key: String, value: Number): Span = builder.setTag(key, value)
    }
    // Numeric primitives are boxed and delegated to the Number instance.
    implicit lazy val intIsTaggable: Taggable[Int] = numberIsTaggable.contramap(Int.box)
    implicit lazy val longIsTaggable: Taggable[Long] = numberIsTaggable.contramap(Long.box)
    implicit lazy val doubleIsTaggable: Taggable[Double] = numberIsTaggable.contramap(Double.box)
    implicit lazy val floatIsTaggable: Taggable[Float] = numberIsTaggable.contramap(Float.box)
    implicit def shownIsTaggable[A: Show]: Taggable[A] = stringIsTaggable.contramap(_.show)
  }
  /** Effectful operations on the current span (all no-ops when there is none). */
  trait SpanInterface[F[_]] {
    def context: F[Option[SpanContext]]
    def setOperation(op: String): F[Unit]
    def setTag(tag: Traced.Tag): F[Unit]
    def setTags(tags: Traced.Tag*): F[Unit]
    def log(fields: (String, Any)*): F[Unit]
    def log(event: String): F[Unit]
    def setBaggageItem(key: String, value: String): F[Unit]
    def getBaggageItem(key: String): F[Option[String]]
    def mapK[G[_]](f: F ~> G): SpanInterface[G]
    def noop: F[Unit]
  }
  /** Records span operations (in reverse) instead of executing them; used by [[Hooks]]. */
  class AccumulativeSpanInterface[F[_]](i: SpanInterface[F], accRev: List[SpanInterface[F] => F[Unit]]) {
    def setTag(tag: Traced.Tag): AccumulativeSpanInterface[F] = accumulate(_.setTag(tag))
    def setTags(tags: Traced.Tag*): AccumulativeSpanInterface[F] = accumulate(_.setTags(tags: _*))
    def log(fields: (String, Any)*): AccumulativeSpanInterface[F] = accumulate(_.log(fields: _*))
    def log(event: String): AccumulativeSpanInterface[F] = accumulate(_.log(event))
    def noop: AccumulativeSpanInterface[F] = this
    /** The recorded operations, in the order they were added. */
    def accumulated: List[SpanInterface[F] => F[Unit]] = accRev.reverse
    private def accumulate(f: SpanInterface[F] => F[Unit]) = new AccumulativeSpanInterface[F](i, f :: accRev)
  }
  /** Effect-polymorphic builders of span operations, for use in [[Hooks]]. */
  trait SpanInterfaceK2 {
    def setTag(tag: Traced.Tag): AccumulativeSpanInterface ~~> AccumulativeSpanInterface =
      new (AccumulativeSpanInterface ~~> AccumulativeSpanInterface) {
        def apply[A[_]](fa: AccumulativeSpanInterface[A]): AccumulativeSpanInterface[A] = fa.setTag(tag)
      }
    def setTags(tags: Traced.Tag*): AccumulativeSpanInterface ~~> AccumulativeSpanInterface =
      new (AccumulativeSpanInterface ~~> AccumulativeSpanInterface) {
        def apply[A[_]](fa: AccumulativeSpanInterface[A]): AccumulativeSpanInterface[A] = fa.setTags(tags: _*)
      }
    def log(fields: (String, Any)*): AccumulativeSpanInterface ~~> AccumulativeSpanInterface =
      new (AccumulativeSpanInterface ~~> AccumulativeSpanInterface) {
        def apply[A[_]](fa: AccumulativeSpanInterface[A]): AccumulativeSpanInterface[A] = fa.log(fields: _*)
      }
    def log(event: String): AccumulativeSpanInterface ~~> AccumulativeSpanInterface =
      new (AccumulativeSpanInterface ~~> AccumulativeSpanInterface) {
        def apply[A[_]](fa: AccumulativeSpanInterface[A]): AccumulativeSpanInterface[A] = fa.log(event)
      }
    def noop: AccumulativeSpanInterface ~~> AccumulativeSpanInterface =
      new (AccumulativeSpanInterface ~~> AccumulativeSpanInterface) {
        def apply[A[_]](fa: AccumulativeSpanInterface[A]): AccumulativeSpanInterface[A] = fa.noop
      }
  }
  object SpanInterfaceK2 extends SpanInterfaceK2
  /** Everything needed to run a traced computation. */
  final case class RunParams(tracer: Tracer, hooks: Hooks, activeSpan: ActiveSpan, logError: ErrorLogger)
  object RunParams {
    def apply(tracer: Tracer, hooks: Hooks, logError: ErrorLogger): Partial = Partial(tracer, hooks, logError)
    /** [[RunParams]] still missing the active span. */
    final case class Partial(tracer: Tracer, hooks: Hooks, logError: ErrorLogger) {
      def apply(active: ActiveSpan): RunParams = RunParams(tracer, hooks, active, logError)
    }
    // Completes a Partial from an implicitly available ActiveSpan.
    implicit def fromPartial(p: Partial)(implicit active: ActiveSpan): RunParams = p(active)
  }
  /** Zero-cost wrapper for an optional current span. */
  final class ActiveSpan(val maybe: Option[Span]) extends AnyVal {
    override def toString: String = s"ActiveSpan(${maybe.toString})"
  }
  object ActiveSpan {
    def apply(span: Option[Span]): ActiveSpan = new ActiveSpan(span)
    // Null-safe: a null span becomes `empty`.
    def apply(span: Span): ActiveSpan = apply(Option(span))
    lazy val empty: ActiveSpan = new ActiveSpan(None)
    object Implicits {
      implicit val emptyActiveSpan: ActiveSpan = empty
    }
  }
  /** User callbacks applied around a span's lifecycle: before the builder starts,
    * just after start, and just before the span is finished (with the error, if any).
    */
  final class Hooks(
    val beforeStart: Tracer.SpanBuilder => Tracer.SpanBuilder,
    val justAfterStart: SpanInterface ~~> λ[F[_] => List[SpanInterface[F] => F[Unit]]],
    val beforeStop: SpanInterface ~~> λ[F[_] => Option[Throwable] => List[SpanInterface[F] => F[Unit]]]
  )
  object Hooks {
    /** Build [[Hooks]]; each argument may be `null`, meaning "do nothing". */
    def apply(
      beforeStart: Tracer.SpanBuilder => Tracer.SpanBuilder = null,
      justAfterStart: SpanInterfaceK2 => (AccumulativeSpanInterface ~~> AccumulativeSpanInterface) = null,
      beforeStop: SpanInterfaceK2 => Option[Throwable] => (AccumulativeSpanInterface ~~> AccumulativeSpanInterface) = null
    ): Hooks = {
      // Replace null callbacks with no-ops, then turn the accumulative builders
      // into plain lists of span operations.
      val justAfterStart1 = Option(justAfterStart).getOrElse((_: SpanInterfaceK2).noop).apply(SpanInterfaceK2)
      val beforeStop1 = Option(beforeStop).getOrElse((s: SpanInterfaceK2) => (_: Option[Throwable]) => s.noop).apply(SpanInterfaceK2)
      new Hooks(
        Option(beforeStart).getOrElse(locally),
        accumulateK2 compose justAfterStart1 compose accumulativeSpanInterfaceK2,
        new (SpanInterface ~~> λ[F[_] => Option[Throwable] => List[SpanInterface[F] => F[Unit]]]) {
          def apply[A[_]](fa: SpanInterface[A]): Option[Throwable] => List[SpanInterface[A] => A[Unit]] =
            e => beforeStop1(e)(new AccumulativeSpanInterface(fa, Nil)).accumulated
        }
      )
    }
    lazy val accumulativeSpanInterfaceK2: SpanInterface ~~> AccumulativeSpanInterface =
      new (SpanInterface ~~> AccumulativeSpanInterface) {
        def apply[A[_]](fa: SpanInterface[A]): AccumulativeSpanInterface[A] = new AccumulativeSpanInterface(fa, Nil)
      }
    lazy val accumulateK2: AccumulativeSpanInterface ~~> λ[F[_] => List[SpanInterface[F] => F[Unit]]] =
      new (AccumulativeSpanInterface ~~> λ[F[_] => List[SpanInterface[F] => F[Unit]]]) {
        def apply[A[_]](fa: AccumulativeSpanInterface[A]): List[SpanInterface[A] => A[Unit]] = fa.accumulated
      }
  }
}
|
fehu/opentracing-scala
|
scala/src/main/scala/com/github/fehu/opentracing/transformer/Untraced.scala
|
package com.github.fehu.opentracing.transformer
import cats.{Applicative, Defer}
import com.github.fehu.opentracing.Traced
import com.github.fehu.opentracing.internal.TracedStub
/** Entry point for running [[Traced]]-dependent code without an actual tracer. */
object Untraced {
  /** Get a stub [[Traced]] instance for `F[_]`. */
  def tracedStub[F[_]: Applicative: Defer]: Traced[F] = new TracedStub
}
|
fehu/opentracing-scala
|
scala/src/test/scala/com/github/fehu/opentracing/io/TracedIOSpec.scala
|
<gh_stars>1-10
package com.github.fehu.opentracing.io
import scala.concurrent.ExecutionContext
import cats.effect.{ ContextShift, Effect, IO, Timer }
import com.github.fehu.opentracing.transformer._
import com.github.fehu.opentracing.TraceSpec
/** Runs the shared [[TraceSpec]] against the `TracedIO` transformer. */
class TracedIOSpec extends TraceSpec[TracedIO] {
  implicit lazy val effect: Effect[TracedIO] = TracedT.tracedTEffectInstance
  // ContextShift / Timer for plain IO and their TracedIO counterparts.
  implicit lazy val csIO: ContextShift[IO] = IO.contextShift(ExecutionContext.global)
  implicit lazy val cs: ContextShift[TracedIO] = TracedT.tracedTContextShiftInstance
  implicit lazy val timerIO: Timer[IO] = IO.timer(ExecutionContext.global)
  implicit lazy val timer: Timer[TracedIO] = TracedT.tracedTTimerInstance
}
|
fehu/opentracing-scala
|
akka/src/main/scala/com/github/fehu/opentracing/akka/TracingActor.scala
|
package com.github.fehu.opentracing.akka
import akka.fehu.MessageInterceptingActor
import io.opentracing.{ Span, SpanContext, Tracer }
import com.github.fehu.opentracing.Traced
/** Envelope carrying a message together with the sender's span context, if any. */
final case class TracedMessage[A](message: A, spanContext: Option[SpanContext])
/** Actor that unwraps [[TracedMessage]] envelopes, exposing the received span
  * context for the duration of a single message receive.
  */
trait TracingActor extends MessageInterceptingActor {
  implicit val tracer: Tracer
  /** Span context of the message currently being processed, if any. */
  def actorSpanContext(): Option[SpanContext] = _spanContext
  private var _spanContext: Option[SpanContext] = None
  protected[TracingActor] def setSpanContext(ctx: SpanContext): Unit = _spanContext = Option(ctx)
  /** Hook invoked when a message arrives with a span context. */
  protected def onSpanReceived(message: Any, ctx: SpanContext): Unit = setSpanContext(ctx)
  /** Hook invoked when a message arrives without a span context. */
  protected def onNoSpanReceived(message: Any): Unit = {}
  // Unwrap TracedMessage envelopes; plain messages pass through unchanged.
  protected def interceptIncoming(message: Any): Any = message match {
    case TracedMessage(msg, Some(ctx)) =>
      _spanContext = Some(ctx)
      onSpanReceived(msg, ctx)
      msg
    case TracedMessage(msg, _) =>
      onNoSpanReceived(msg)
      msg
    case _ =>
      onNoSpanReceived(message)
      message
  }
  // Reset the context after every receive, whether or not it failed.
  protected def afterReceive(maybeError: Option[Throwable]): Unit = {
    _spanContext = None
  }
}
object TracingActor {
  /** A [[TracingActor]] that starts a child span of the received [[SpanContext]]
    * and finishes it after the message has been processed.
    */
  trait ChildSpan extends TracingActor {
    actor =>
    /** Defines how the child span for `message` is built. */
    def buildChildSpan(message: Any): Tracer.SpanBuilder
    /** The span opened for the message currently being processed, if any. */
    def actorSpan(): Option[Span] = _span
    private var _span: Option[Span] = None
    /** Record a started span so that [[afterReceive]] finishes it. */
    protected[TracingActor] def setSpan(span: Span): Unit = _span = Option(span)
    /** Convenience builder: ignores the tracer's active span and applies the tags. */
    object buildSpan {
      def apply[A](op: String, tags: Traced.Tag*): Tracer.SpanBuilder = {
        val b0 = tracer.buildSpan(op).ignoreActiveSpan
        tags.foldLeft(b0)((b, t) => t.apply(b))
      }
    }
    override protected def onSpanReceived(message: Any, ctx: SpanContext): Unit = {
      _span = Some(buildChildSpan(message).asChildOf(ctx).start())
      super.onSpanReceived(message, ctx)
    }
    /** Hook to adjust the span before it is finished.
      * NOTE(review): despite the name, it is applied on every receive, not only
      * on error (`maybeError` is not consulted) — confirm whether that is intended.
      */
    def modifySpanOnError(span: Span): Span = span
    override protected def afterReceive(maybeError: Option[Throwable]): Unit = {
      try _span.map(modifySpanOnError).foreach(_.finish())
      finally {
        // Fix: clear the span once finished; previously it was kept, so a later
        // message without a span context would finish the same span a second time.
        _span = None
        super.afterReceive(maybeError)
      }
    }
  }
  /** Like [[ChildSpan]], but also opens a span when no context was received. */
  trait AlwaysChildSpan extends ChildSpan {
    override protected def onNoSpanReceived(message: Any): Unit = {
      val span = buildChildSpan(message).start()
      // Fix: record the span so `afterReceive` finishes it; previously it was
      // started but never stored, hence never finished (leaked, never reported).
      setSpan(span)
      setSpanContext(span.context())
      super.onNoSpanReceived(message)
    }
  }
}
|
fehu/opentracing-scala
|
scala/src/test/scala/com/github/fehu/opentracing/Spec.scala
|
package com.github.fehu.opentracing
import java.util.concurrent.atomic.AtomicLong
import scala.collection.JavaConverters._
import _root_.io.opentracing.mock.{ MockSpan, MockTracer }
import org.scalatest.{ BeforeAndAfter, BeforeAndAfterAll, Suite }
import org.scalatest.matchers.should.Matchers
/** Base test mixin: provides a [[MockTracer]] that is reset before each test and
  * exposes finished spans with ids normalised relative to the suite's start.
  */
trait Spec extends Matchers with BeforeAndAfter with BeforeAndAfterAll {
  this: Suite =>
  implicit lazy val mockTracer: MockTracer = new MockTracer
  before {
    mockTracer.reset()
  }
  type TestedSpan = Spec.TestedSpan
  val TestedSpan: Spec.TestedSpan.type = Spec.TestedSpan
  /** Finished spans with trace/span/parent ids shifted by the suite's id offset. */
  def finishedSpans(): Seq[Spec.TestedSpan] = mockTracer.finishedSpans().asScala.toSeq.map(Spec.shiftedTestedSpan(_offset, _))
  private var _offset: Long = 0
  // Snapshot MockSpan's global id counter so ids in this suite start from a known base.
  override protected def beforeAll(): Unit = {
    _offset = Spec.mockSpanNextIdField.get()
  }
}
object Spec {
  /** Plain-data view of a finished `MockSpan`, convenient for assertions. */
  case class TestedSpan(
    traceId: Long,
    spanId: Long,
    parentId: Long,
    operationName: String,
    tags: Map[String, AnyRef] = Map(),
    logs: List[Map[String, Any]] = Nil
  )
  // Normalise the mock's globally increasing ids by subtracting `offset`;
  // parentId 0 means "no parent" and is kept as-is.
  private[Spec] def shiftedTestedSpan(offset: Long, mock: MockSpan): TestedSpan = TestedSpan(
    traceId = mock.context.traceId - offset,
    spanId = mock.context.spanId - offset,
    parentId = if (mock.parentId == 0) 0 else mock.parentId - offset,
    operationName = mock.operationName,
    tags = mock.tags.asScala.toMap,
    logs = mock.logEntries().asScala.map(_.fields().asScala.toMap).toList
  )
  // Reflective access to MockSpan's private static `nextId` counter.
  private[Spec] lazy val mockSpanNextIdField = {
    val f = classOf[MockSpan].getDeclaredField("nextId")
    f.setAccessible(true)
    f.get(null).asInstanceOf[AtomicLong]
  }
}
|
fehu/opentracing-scala
|
compiler-plugin/src/main/scala/com/github/fehu/opentracing/compile/ImplicitSearchTracingPlugin.scala
|
package com.github.fehu.opentracing.compile
import scala.collection.mutable
import scala.reflect.internal.util.NoSourceFile
import scala.tools.nsc.Global
import scala.tools.nsc.plugins.{ Plugin, PluginComponent }
import io.jaegertracing.Configuration
import io.jaegertracing.Configuration.SamplerConfiguration
import io.jaegertracing.internal.samplers.ConstSampler
import io.opentracing.{ Span, Tracer }
/** Scalac plugin that opens a span for every implicit search performed by the
  * compiler and reports the result (success, provider, substitutions) as tags.
  */
class ImplicitSearchTracingPlugin(val global: Global) extends Plugin {
  import ImplicitSearchTracingPlugin.tracer
  import global._
  val name: String = "TracingImplicitSearch"
  val description: String = "Traces implicit searches performed by scalac and reports them to local jaegertracing backend"
  val components: List[PluginComponent] = Nil
  // Nested implicit searches form a stack; the head is the innermost open span.
  private val spansStack = mutable.Stack.empty[Span]
  analyzer.addAnalyzerPlugin(new ImplicitsTracingAnalyzer)
  class ImplicitsTracingAnalyzer extends analyzer.AnalyzerPlugin {
    // Open a span when a search starts; nested searches become child spans.
    override def pluginsNotifyImplicitSearch(search: global.analyzer.ImplicitSearch): Unit = {
      val pos = search.pos
      val code = if (pos.source != NoSourceFile) pos.lineContent else "<NoSourceFile>"
      val span = tracer
        .buildSpan(showShort(search.pt))
        .asChildOf(spansStack.headOption.orNull)
        .withTag("type", search.pt.safeToString)
        .withTag("file", pos.source.path)
        .withTag("line", pos.line)
        .withTag("code", code)
        .withTag("pos", pos.toString)
        .start()
      spansStack.push(span)
      super.pluginsNotifyImplicitSearch(search)
    }
    // Close the innermost span when its search finishes.
    // NOTE(review): assumes search/result notifications are strictly paired;
    // an unpaired result would pop an empty stack — confirm the compiler guarantees this.
    override def pluginsNotifyImplicitSearchResult(result: global.analyzer.SearchResult): Unit = {
      val span = spansStack.pop()
      span.setTag("isSuccess", result.isSuccess)
      val symb = result.tree.symbol
      val providedBy = if (symb eq null) typeNames.NO_NAME.toString
      else {
        val rt = result.tree.tpe.resultType
        val targs = if (rt.typeArgs.nonEmpty) rt.typeArgs.mkString("[", ", ", "]") else ""
        s"${symb.kindString} ${symb.fullNameString}$targs"
      }
      span.setTag("provided by", providedBy)
      result.subst.from zip result.subst.to foreach { case (from, to) =>
        span.setTag(s"type subst ${from.name}", to.toLongString)
      }
      span.finish()
      if (spansStack.isEmpty) {
        // A workaround for `ClassNotFoundException`s on closing the tracer.
        // Found at [[https://github.com/jaegertracing/jaeger-client-java/issues/593]]
        Class.forName("io.jaegertracing.internal.reporters.RemoteReporter$CloseCommand")
        Class.forName("io.jaegertracing.agent.thrift.Agent$Client")
      }
      super.pluginsNotifyImplicitSearchResult(result)
    }
    // Shorten a fully-qualified name to its last segment, collapsing `Aux` helpers.
    private def showName(name0: String): String =
      name0.takeWhile(_ != '{').split('.').reverse match {
        case Array("Aux", name, _*) => name
        case Array(name, _*) => name
      }
    private def showShort(tpe: Type): String = showName(tpe.typeConstructor.toString)
  }
}
/**
 * Companion holding the process-wide Jaeger tracer used by the plugin.
 */
object ImplicitSearchTracingPlugin {
  val tracerServiceName = "implicit search"

  /**
   * Tracer configured from the environment, with a constant sampler so that
   * every implicit search is reported (sampling parameter 1 = sample all).
   */
  protected val tracer: Tracer = {
    val sampler = SamplerConfiguration.fromEnv().withType(ConstSampler.TYPE).withParam(1)
    val config = Configuration.fromEnv(tracerServiceName).withSampler(sampler)
    config.getTracer
  }
}
// ---------------------------------------------------------------------------
// File boundary (dataset artifact): src/main/scala/akka/japi/tuple/Tuples.scala
// from repo rorygraves/akka-scalaclean-test
// ---------------------------------------------------------------------------
// auto-generated by sbt-boilerplate
/*
* Copyright (C) 2015-2018 Lightbend Inc. <https://www.lightbend.com>
*/
package akka.japi.tuple
/**
 * Used to create tuples with 3 elements in Java.
 */
object Tuple3 {
  /** Builds a 3-element Java API tuple from the given values. */
  def create[T1, T2, T3](t1: T1, t2: T2, t3: T3) = Tuple3(t1, t2, t3)
}
/**
 * Java API Tuple container.
 */
@SerialVersionUID(1L)
final case class Tuple3[T1, T2, T3](t1: T1, t2: T2, t3: T3) {
  /** This container as a plain Scala 3-tuple. */
  val toScala: (T1, T2, T3) = scala.Tuple3(t1, t2, t3)
}
/**
 * Used to create tuples with 4 elements in Java.
 */
object Tuple4 {
  /** Builds a 4-element Java API tuple from the given values. */
  def create[T1, T2, T3, T4](t1: T1, t2: T2, t3: T3, t4: T4) = Tuple4(t1, t2, t3, t4)
}
/**
 * Java API Tuple container.
 */
@SerialVersionUID(1L)
final case class Tuple4[T1, T2, T3, T4](t1: T1, t2: T2, t3: T3, t4: T4) {
  /** This container as a plain Scala 4-tuple. */
  val toScala: (T1, T2, T3, T4) = scala.Tuple4(t1, t2, t3, t4)
}
/**
* Used to create tuples with 5 elements in Java.
*/
object Tuple5 {
def create[T1, T2, T3, T4, T5](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5) = new Tuple5(t1, t2, t3, t4, t5)
}
/**
* Java API Tuple container.
*/
@SerialVersionUID(1L)
final case class Tuple5[T1, T2, T3, T4, T5](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5) {
val toScala: (T1, T2, T3, T4, T5) = (t1, t2, t3, t4, t5)
}
/**
* Used to create tuples with 6 elements in Java.
*/
object Tuple6 {
def create[T1, T2, T3, T4, T5, T6](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6) = new Tuple6(t1, t2, t3, t4, t5, t6)
}
/**
* Java API Tuple container.
*/
@SerialVersionUID(1L)
final case class Tuple6[T1, T2, T3, T4, T5, T6](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6) {
val toScala: (T1, T2, T3, T4, T5, T6) = (t1, t2, t3, t4, t5, t6)
}
/**
* Used to create tuples with 7 elements in Java.
*/
object Tuple7 {
def create[T1, T2, T3, T4, T5, T6, T7](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7) = new Tuple7(t1, t2, t3, t4, t5, t6, t7)
}
/**
* Java API Tuple container.
*/
@SerialVersionUID(1L)
final case class Tuple7[T1, T2, T3, T4, T5, T6, T7](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7) {
val toScala: (T1, T2, T3, T4, T5, T6, T7) = (t1, t2, t3, t4, t5, t6, t7)
}
/**
* Used to create tuples with 8 elements in Java.
*/
object Tuple8 {
def create[T1, T2, T3, T4, T5, T6, T7, T8](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8) = new Tuple8(t1, t2, t3, t4, t5, t6, t7, t8)
}
/**
* Java API Tuple container.
*/
@SerialVersionUID(1L)
final case class Tuple8[T1, T2, T3, T4, T5, T6, T7, T8](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8) {
val toScala: (T1, T2, T3, T4, T5, T6, T7, T8) = (t1, t2, t3, t4, t5, t6, t7, t8)
}
/**
* Used to create tuples with 9 elements in Java.
*/
object Tuple9 {
def create[T1, T2, T3, T4, T5, T6, T7, T8, T9](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9) = new Tuple9(t1, t2, t3, t4, t5, t6, t7, t8, t9)
}
/**
* Java API Tuple container.
*/
@SerialVersionUID(1L)
final case class Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9) {
val toScala: (T1, T2, T3, T4, T5, T6, T7, T8, T9) = (t1, t2, t3, t4, t5, t6, t7, t8, t9)
}
/**
* Used to create tuples with 10 elements in Java.
*/
object Tuple10 {
def create[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10) = new Tuple10(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10)
}
/**
* Java API Tuple container.
*/
@SerialVersionUID(1L)
final case class Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10) {
val toScala: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10) = (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10)
}
/**
* Used to create tuples with 11 elements in Java.
*/
object Tuple11 {
def create[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11) = new Tuple11(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11)
}
/**
* Java API Tuple container.
*/
@SerialVersionUID(1L)
final case class Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11) {
val toScala: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11) = (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11)
}
/**
* Used to create tuples with 12 elements in Java.
*/
object Tuple12 {
def create[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12) = new Tuple12(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12)
}
/**
* Java API Tuple container.
*/
@SerialVersionUID(1L)
final case class Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12) {
val toScala: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12) = (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12)
}
/**
* Used to create tuples with 13 elements in Java.
*/
object Tuple13 {
def create[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13) = new Tuple13(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13)
}
/**
* Java API Tuple container.
*/
@SerialVersionUID(1L)
final case class Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13) {
val toScala: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13) = (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13)
}
/**
* Used to create tuples with 14 elements in Java.
*/
object Tuple14 {
def create[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14) = new Tuple14(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14)
}
/**
* Java API Tuple container.
*/
@SerialVersionUID(1L)
final case class Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14) {
val toScala: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14) = (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14)
}
/**
* Used to create tuples with 15 elements in Java.
*/
object Tuple15 {
def create[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15) = new Tuple15(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15)
}
/**
* Java API Tuple container.
*/
@SerialVersionUID(1L)
final case class Tuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15) {
val toScala: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15) = (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15)
}
/**
* Used to create tuples with 16 elements in Java.
*/
object Tuple16 {
def create[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16) = new Tuple16(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16)
}
/**
* Java API Tuple container.
*/
@SerialVersionUID(1L)
final case class Tuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16) {
val toScala: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16) = (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16)
}
/**
* Used to create tuples with 17 elements in Java.
*/
object Tuple17 {
def create[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17) = new Tuple17(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17)
}
/**
* Java API Tuple container.
*/
@SerialVersionUID(1L)
final case class Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17) {
val toScala: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17) = (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17)
}
/**
* Used to create tuples with 18 elements in Java.
*/
object Tuple18 {
def create[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17, t18: T18) = new Tuple18(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18)
}
/**
* Java API Tuple container.
*/
@SerialVersionUID(1L)
final case class Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17, t18: T18) {
val toScala: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18) = (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18)
}
/**
* Used to create tuples with 19 elements in Java.
*/
object Tuple19 {
def create[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17, t18: T18, t19: T19) = new Tuple19(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19)
}
/**
* Java API Tuple container.
*/
@SerialVersionUID(1L)
final case class Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17, t18: T18, t19: T19) {
val toScala: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19) = (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19)
}
/**
* Used to create tuples with 20 elements in Java.
*/
object Tuple20 {
def create[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17, t18: T18, t19: T19, t20: T20) = new Tuple20(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20)
}
/**
* Java API Tuple container.
*/
@SerialVersionUID(1L)
final case class Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17, t18: T18, t19: T19, t20: T20) {
val toScala: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20) = (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20)
}
/**
* Used to create tuples with 21 elements in Java.
*/
object Tuple21 {
def create[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17, t18: T18, t19: T19, t20: T20, t21: T21) = new Tuple21(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21)
}
/**
* Java API Tuple container.
*/
@SerialVersionUID(1L)
final case class Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17, t18: T18, t19: T19, t20: T20, t21: T21) {
val toScala: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21) = (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21)
}
/**
* Used to create tuples with 22 elements in Java.
*/
object Tuple22 {
def create[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17, t18: T18, t19: T19, t20: T20, t21: T21, t22: T22) = new Tuple22(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22)
}
/**
* Java API Tuple container.
*/
@SerialVersionUID(1L)
final case class Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22](t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6, t7: T7, t8: T8, t9: T9, t10: T10, t11: T11, t12: T12, t13: T13, t14: T14, t15: T15, t16: T16, t17: T17, t18: T18, t19: T19, t20: T20, t21: T21, t22: T22) {
val toScala: (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22) = (t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22)
}
// ---------------------------------------------------------------------------
// File boundary (dataset artifact): src/main/scala/akka/japi/function/Functions.scala
// from repo rorygraves/akka-scalaclean-test
// ---------------------------------------------------------------------------
// auto-generated by sbt-boilerplate
/**
* Copyright (C) 2015-2018 Lightbend Inc. <https://www.lightbend.com>
*/
package akka.japi.function
/**
 * A Function interface. Used to create 3-arg first-class functions in Java.
 * `Serializable` is needed to be able to grab line number for Java 8 lambdas.
 *
 * NOTE(review): this file is auto-generated by sbt-boilerplate; the typo fixed
 * here ("is Java" -> "in Java") should also be fixed in the generating template.
 *
 * @tparam T1 type of the first argument
 * @tparam T2 type of the second argument
 * @tparam T3 type of the third argument
 * @tparam R  result type
 */
@SerialVersionUID(1L)
trait Function3[-T1, -T2, -T3, +R] extends java.io.Serializable {
  /** Applies this function to the given arguments; may throw any `Exception`. */
  @throws(classOf[Exception])
  def apply(arg1: T1, arg2: T2, arg3: T3): R
}
/**
* A Function interface. Used to create 4-arg first-class-functions is Java.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Function4[-T1, -T2, -T3, -T4, +R] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4): R
}
/**
* A Function interface. Used to create 5-arg first-class-functions is Java.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Function5[-T1, -T2, -T3, -T4, -T5, +R] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5): R
}
/**
* A Function interface. Used to create 6-arg first-class-functions is Java.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Function6[-T1, -T2, -T3, -T4, -T5, -T6, +R] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6): R
}
/**
* A Function interface. Used to create 7-arg first-class-functions is Java.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Function7[-T1, -T2, -T3, -T4, -T5, -T6, -T7, +R] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7): R
}
/**
* A Function interface. Used to create 8-arg first-class-functions is Java.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Function8[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, +R] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8): R
}
/**
* A Function interface. Used to create 9-arg first-class-functions is Java.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Function9[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, +R] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9): R
}
/**
* A Function interface. Used to create 10-arg first-class-functions is Java.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Function10[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, +R] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10): R
}
/**
* A Function interface. Used to create 11-arg first-class-functions is Java.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Function11[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, +R] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11): R
}
/**
* A Function interface. Used to create 12-arg first-class-functions is Java.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Function12[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, +R] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12): R
}
/**
* A Function interface. Used to create 13-arg first-class-functions is Java.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Function13[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, +R] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12, arg13: T13): R
}
/**
* A Function interface. Used to create 14-arg first-class-functions is Java.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Function14[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, +R] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12, arg13: T13, arg14: T14): R
}
/**
* A Function interface. Used to create 15-arg first-class-functions is Java.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Function15[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, +R] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12, arg13: T13, arg14: T14, arg15: T15): R
}
/**
* A Function interface. Used to create 16-arg first-class-functions is Java.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Function16[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, +R] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12, arg13: T13, arg14: T14, arg15: T15, arg16: T16): R
}
/**
* A Function interface. Used to create 17-arg first-class-functions is Java.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Function17[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, +R] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12, arg13: T13, arg14: T14, arg15: T15, arg16: T16, arg17: T17): R
}
/**
* A Function interface. Used to create 18-arg first-class-functions is Java.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Function18[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, +R] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12, arg13: T13, arg14: T14, arg15: T15, arg16: T16, arg17: T17, arg18: T18): R
}
/**
* A Function interface. Used to create 19-arg first-class-functions is Java.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Function19[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, +R] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12, arg13: T13, arg14: T14, arg15: T15, arg16: T16, arg17: T17, arg18: T18, arg19: T19): R
}
/**
* A Function interface. Used to create 20-arg first-class-functions is Java.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Function20[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, +R] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12, arg13: T13, arg14: T14, arg15: T15, arg16: T16, arg17: T17, arg18: T18, arg19: T19, arg20: T20): R
}
/**
* A Function interface. Used to create 21-arg first-class-functions is Java.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Function21[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, -T21, +R] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12, arg13: T13, arg14: T14, arg15: T15, arg16: T16, arg17: T17, arg18: T18, arg19: T19, arg20: T20, arg21: T21): R
}
/**
* A Function interface. Used to create 22-arg first-class-functions is Java.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Function22[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, -T21, -T22, +R] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12, arg13: T13, arg14: T14, arg15: T15, arg16: T16, arg17: T17, arg18: T18, arg19: T19, arg20: T20, arg21: T21, arg22: T22): R
}
/**
 * A Consumer interface. Used to create 2-arg consumers in Java.
 * A Procedure is like a Function, but it doesn't produce a return value.
 * `Serializable` is needed to be able to grab line number for Java 8 lambdas.
 *
 * @tparam T1 type of the first argument
 * @tparam T2 type of the second argument
 */
@SerialVersionUID(1L)
trait Procedure2[-T1, -T2] extends java.io.Serializable {
  /** Performs this side-effecting action on the given arguments; may throw any `Exception`. */
  @throws(classOf[Exception])
  def apply(arg1: T1, arg2: T2): Unit
}
/**
* A Consumer interface. Used to create 3-arg consumers in Java.
* A Procedure is like a Function, but it doesn't produce a return value.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Procedure3[-T1, -T2, -T3] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3): Unit
}
/**
* A Consumer interface. Used to create 4-arg consumers in Java.
* A Procedure is like a Function, but it doesn't produce a return value.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Procedure4[-T1, -T2, -T3, -T4] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4): Unit
}
/**
* A Consumer interface. Used to create 5-arg consumers in Java.
* A Procedure is like a Function, but it doesn't produce a return value.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Procedure5[-T1, -T2, -T3, -T4, -T5] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5): Unit
}
/**
* A Consumer interface. Used to create 6-arg consumers in Java.
* A Procedure is like a Function, but it doesn't produce a return value.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Procedure6[-T1, -T2, -T3, -T4, -T5, -T6] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6): Unit
}
/**
* A Consumer interface. Used to create 7-arg consumers in Java.
* A Procedure is like a Function, but it doesn't produce a return value.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Procedure7[-T1, -T2, -T3, -T4, -T5, -T6, -T7] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7): Unit
}
/**
* A Consumer interface. Used to create 8-arg consumers in Java.
* A Procedure is like a Function, but it doesn't produce a return value.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Procedure8[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8): Unit
}
/**
* A Consumer interface. Used to create 9-arg consumers in Java.
* A Procedure is like a Function, but it doesn't produce a return value.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Procedure9[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9): Unit
}
/**
* A Consumer interface. Used to create 10-arg consumers in Java.
* A Procedure is like a Function, but it doesn't produce a return value.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Procedure10[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10): Unit
}
/**
* A Consumer interface. Used to create 11-arg consumers in Java.
* A Procedure is like a Function, but it doesn't produce a return value.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Procedure11[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11): Unit
}
/**
* A Consumer interface. Used to create 12-arg consumers in Java.
* A Procedure is like a Function, but it doesn't produce a return value.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Procedure12[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12): Unit
}
/**
* A Consumer interface. Used to create 13-arg consumers in Java.
* A Procedure is like a Function, but it doesn't produce a return value.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Procedure13[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12, arg13: T13): Unit
}
/**
* A Consumer interface. Used to create 14-arg consumers in Java.
* A Procedure is like a Function, but it doesn't produce a return value.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Procedure14[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12, arg13: T13, arg14: T14): Unit
}
/**
* A Consumer interface. Used to create 15-arg consumers in Java.
* A Procedure is like a Function, but it doesn't produce a return value.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Procedure15[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12, arg13: T13, arg14: T14, arg15: T15): Unit
}
/**
* A Consumer interface. Used to create 16-arg consumers in Java.
* A Procedure is like a Function, but it doesn't produce a return value.
* `Serializable` is needed to be able to grab line number for Java 8 lambdas.
*/
@SerialVersionUID(1L)
trait Procedure16[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16] extends java.io.Serializable {
@throws(classOf[Exception])
def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12, arg13: T13, arg14: T14, arg15: T15, arg16: T16): Unit
}
/**
 * A Consumer interface. Used to create 17-arg consumers in Java.
 * A Procedure is like a Function, but it doesn't produce a return value.
 * `Serializable` is needed to be able to grab line number for Java 8 lambdas.
 */
@SerialVersionUID(1L)
trait Procedure17[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17] extends java.io.Serializable {
  // Declared to throw Exception so Java lambda bodies may throw checked exceptions.
  @throws(classOf[Exception])
  def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12, arg13: T13, arg14: T14, arg15: T15, arg16: T16, arg17: T17): Unit
}
/**
 * A Consumer interface. Used to create 18-arg consumers in Java.
 * A Procedure is like a Function, but it doesn't produce a return value.
 * `Serializable` is needed to be able to grab line number for Java 8 lambdas.
 */
@SerialVersionUID(1L)
trait Procedure18[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18] extends java.io.Serializable {
  // Declared to throw Exception so Java lambda bodies may throw checked exceptions.
  @throws(classOf[Exception])
  def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12, arg13: T13, arg14: T14, arg15: T15, arg16: T16, arg17: T17, arg18: T18): Unit
}
/**
 * A Consumer interface. Used to create 19-arg consumers in Java.
 * A Procedure is like a Function, but it doesn't produce a return value.
 * `Serializable` is needed to be able to grab line number for Java 8 lambdas.
 */
@SerialVersionUID(1L)
trait Procedure19[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19] extends java.io.Serializable {
  // Declared to throw Exception so Java lambda bodies may throw checked exceptions.
  @throws(classOf[Exception])
  def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12, arg13: T13, arg14: T14, arg15: T15, arg16: T16, arg17: T17, arg18: T18, arg19: T19): Unit
}
/**
 * A Consumer interface. Used to create 20-arg consumers in Java.
 * A Procedure is like a Function, but it doesn't produce a return value.
 * `Serializable` is needed to be able to grab line number for Java 8 lambdas.
 */
@SerialVersionUID(1L)
trait Procedure20[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20] extends java.io.Serializable {
  // Declared to throw Exception so Java lambda bodies may throw checked exceptions.
  @throws(classOf[Exception])
  def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12, arg13: T13, arg14: T14, arg15: T15, arg16: T16, arg17: T17, arg18: T18, arg19: T19, arg20: T20): Unit
}
/**
 * A Consumer interface. Used to create 21-arg consumers in Java.
 * A Procedure is like a Function, but it doesn't produce a return value.
 * `Serializable` is needed to be able to grab line number for Java 8 lambdas.
 */
@SerialVersionUID(1L)
trait Procedure21[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, -T21] extends java.io.Serializable {
  // Declared to throw Exception so Java lambda bodies may throw checked exceptions.
  @throws(classOf[Exception])
  def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12, arg13: T13, arg14: T14, arg15: T15, arg16: T16, arg17: T17, arg18: T18, arg19: T19, arg20: T20, arg21: T21): Unit
}
/**
 * A Consumer interface. Used to create 22-arg consumers in Java.
 * A Procedure is like a Function, but it doesn't produce a return value.
 * `Serializable` is needed to be able to grab line number for Java 8 lambdas.
 */
@SerialVersionUID(1L)
trait Procedure22[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, -T21, -T22] extends java.io.Serializable {
  // Declared to throw Exception so Java lambda bodies may throw checked exceptions.
  @throws(classOf[Exception])
  def apply(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, arg7: T7, arg8: T8, arg9: T9, arg10: T10, arg11: T11, arg12: T12, arg13: T13, arg14: T14, arg15: T15, arg16: T16, arg17: T17, arg18: T18, arg19: T19, arg20: T20, arg21: T21, arg22: T22): Unit
}
|
rorygraves/akka-scalaclean-test
|
build.sbt
|
// project/Dependencies.scala supplies shared dependency constants (e.g. scalaTest)
import Dependencies._

ThisBuild / scalaVersion := "2.12.8"
ThisBuild / version := "0.1.0-SNAPSHOT"
ThisBuild / organization := "com.example"
ThisBuild / organizationName := "example"

lazy val root = (project in file("."))
  .settings(
    name := "Akka ScalaClean Test",
    // SemanticDB output is required for ScalaClean's static analysis
    addCompilerPlugin("org.scalameta" % "semanticdb-scalac" % "4.1.11" cross CrossVersion.full),
    // semanticdb-scalac needs range positions to record precise symbol locations
    scalacOptions += "-Yrangepos",
    libraryDependencies += scalaTest % Test,
    libraryDependencies += "org.scala-lang.modules" % "scala-java8-compat_2.12" % "0.9.0",
    libraryDependencies += "com.typesafe" % "config" % "1.3.4"
  )
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/93.scala
|
package net.tisue.euler
// By using each of the digits from the set {1, 2, 3, 4} exactly once, and
// making use of the four arithmetic operations (+, -, *, /) and parentheses,
// it is possible to form different positive integer targets.
//
// For example,
// 8 = (4 * (1 + 3)) / 2
// 14 = 4 * (3 + 1 / 2)
// 19 = 4 * (2 + 3) - 1
// 36 = 3 * 4 * (2 + 1)
//
// Note that concatenations of the digits, like 12 + 34, are not allowed. Using
// the set, {1, 2, 3, 4}, it is possible to obtain thirty-one different target
// numbers of which 36 is the maximum, and each of the numbers 1 to 28 can be
// obtained before encountering the first non-expressible number. Find the set
// of four distinct digits, a < b < c < d, for which the longest set of
// consecutive positive integers, 1 to n, can be obtained, giving your answer as
// a string: abcd.
// You have to be careful here to allow fractional intermediate results on the
// way to getting an integer in the end, e.g. 1 / 2 + 3 / 6 = 1.
class Problem93 extends Problem(93, "1258"):
  val Zero = BigRational(0)
  // a binary operation that may fail (division by zero yields None)
  type OperatorFunction =
    (BigRational, BigRational) => Option[BigRational]
  // an item of a postfix (RPN) expression: either a digit push or an operator
  enum Item:
    case Digit(n: Int)
    case Operator(c: Char, fn: OperatorFunction)
  import Item.{ Digit, Operator }
  val operators =
    List(
      Operator('+', (a, b) => Some(a + b)),
      Operator('-', (a, b) => Some(a - b)),
      Operator('*', (a, b) => Some(a * b)),
      Operator('/', (a, b) => if b != Zero then Some(a / b)
                              else None))
  // All RPN expressions using each remaining digit exactly once.
  // stackHeight tracks how many values would be on the evaluation stack,
  // so only well-formed expressions (ending with exactly one value) are built.
  def expressions(digits: List[Digit], stackHeight: Int): List[List[Item]] =
    def pushes =
      for d <- digits
          e <- expressions(digits diff List(d), stackHeight + 1)
      yield d :: e
    def pops =
      for o <- operators
          e <- expressions(digits, stackHeight - 1)
      yield o :: e
    if stackHeight < 0 then
      Nil
    else if stackHeight == 1 && digits.isEmpty then
      List(Nil)
    else if stackHeight < 2 then
      pushes   // not enough operands on the stack for an operator yet
    else
      pushes ::: pops
  // standard RPN evaluation; None if any operator application fails
  def eval(items: List[Item], stack: List[BigRational]): Option[BigRational] =
    if items.isEmpty then
      Some(stack.head)
    else items.head match
      case Operator(_, fn) =>
        fn(stack(1), stack(0)) match
          case Some(n) =>
            eval(items.tail, n :: stack.drop(2))
          case None => None
      case Digit(n) =>
        eval(items.tail, BigRational(n) :: stack)
  // all positive integers expressible from the given digits;
  // intermediate results may be fractional, only the final value must be whole
  def targets(ns: List[Int]): Set[Int] =
    expressions(ns.map(Digit(_)), 0)
      .flatMap(e => eval(e, Nil))
      .filter(_.denom == 1)
      .map(_.numer.toInt)
      .filter(_ > 0)
      .toSet
  // smallest positive integer not in the set; result-1 is the streak length
  def smallestMissing(ns: Set[Int]): Int =
    LazyList.from(1).find(!ns.contains(_)).get
  def solve =
    (1 to 9).toList
      .combinations(4).toList
      .maxBy(ns => smallestMissing(targets(ns)))
      .mkString
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/84.scala
|
package net.tisue.euler
// In a game of Monopoly, statistically it can be shown that the three most
// popular squares, in descending order, are JAIL, E3, and GO (squares 10, 24, 0
// = 102400). If, instead of using two 6-sided dice, two 4-sided dice are used,
// find the three winners.
//
// This can be solved by Monte Carlo simulation, but I chose to approach it
// analytically as a Markov chain problem:
// http://www.math.yorku.ca/Who/Faculty/Steprans/Courses/2042/Monopoly/Stewart2.html
// http://www.tkcs-collins.com/truman/monopoly/monopoly.shtml
// http://en.wikipedia.org/wiki/Markov_matrix
//
// The rule about rolling doubles three times doesn't affect the probabilities
// enough to change the final answer, so we just ignore that rule.
//
// Someone in the forum points out that "If you are on CH3 and go back on CC3,
// shouldn't you take another card?" but that doesn't affect the final answer
// either.
class Problem84 extends Problem(84, solution = "101524"):
  // number of sides per die (the problem uses two 4-sided dice)
  val die = 4
  val names = List("GO", "A1", "CC1", "A2", "T1", "R1", "B1", "CH1", "B2", "B3",
                   "JAIL", "C1", "U1", "C2", "C3", "R2", "D1", "CC2", "D2", "D3",
                   "FP", "E1", "CH2", "E2", "E3", "R3", "F1", "F2", "U2", "F3",
                   "G2J", "G1", "G2", "CC3", "G3", "R4", "CH3", "H1", "T2", "H2")
  val squares: Map[String, Int] = // from name to number
    names.zipWithIndex.toMap
  // `cycle` is a project extension: repeat the board names forever
  val namesCycle: LazyList[String] =
    names.to(LazyList).cycle
  // wrap a (possibly negative) square number back onto the board
  def limit(squareNumber: Int): Int =
    (squareNumber + names.size) % names.size
  // Probability distribution over destination squares, given the current
  // square and the dice roll, expressed as per-square BigRationals that
  // already include the 1/(die*die) roll probability factor.
  def nextSquare(square: Int, roll: Int): List[BigRational] =
    // kind is R for railroad or U for utility
    def nextSpecial(square: Int, kind: Char): Int =
      limit(square +
        namesCycle
          .drop(square)
          .indexWhere(_.head == kind))
    val newSquare: Int =
      (square + roll) % names.size
    // all equally likely outcomes after landing (cards are weighted by
    // listing the "stay put" outcome the appropriate number of times)
    // NOTE(review): the chance-card moves below are computed from the
    // pre-roll `square` rather than `newSquare`; per the header comment the
    // approximation still yields the accepted answer — confirm if reusing.
    val nexts: List[Int] =
      names(newSquare) match
        case "G2J" =>
          List(squares("JAIL"))
        case "CC1" | "CC2" | "CC3" =>
          List(squares("GO"), squares("JAIL")) ::: List.fill(14)(newSquare)
        case "CH1" | "CH2" | "CH3" =>
          List(squares("GO"), squares("JAIL"), squares("C1"),
               squares("E3"), squares("H2"), squares("R1"),
               nextSpecial(square, 'R'), nextSpecial(square, 'R'),
               nextSpecial(square, 'U'),
               limit(square - 3)) :::
            List.fill(6)(newSquare)
        case _ =>
          List(newSquare)
    names.indices.toList.map(next =>
      BigRational(
        nexts.count(_ == next),
        nexts.size * die * die))
  // P is the Markov matrix
  val P: List[List[Double]] = {
    val zeroVector = List.fill(names.size)(BigRational(0))
    val rolls =
      for die1 <- 1 to die
          die2 <- 1 to die
      yield die1 + die2
    // row i: sum the destination distributions over all dice outcomes
    def row(i: Int): List[BigRational] =
      rolls.toList.foldLeft(zeroVector){(vec, roll) =>
        vec.lazyZip(nextSquare(i, roll)).map(_ + _)}
    names.indices.toList.map(row(_).map(_.toDouble))
  }
  // plain O(n^3) square-matrix multiplication
  def matrixMul(m1: List[List[Double]], m2: List[List[Double]]) =
    val indices = m1.indices.toList
    indices.map(i =>
      indices.map(j =>
        indices.map(r => m1(i)(r) * m2(r)(j))
          .sum))
  // as k approaches infinity, all rows of P^k approach the stationary probability vector.
  // trial and error says k=150 gives accuracy to at least 3 decimal places
  def solve =
    val stationaryProbabilityVector =
      Iterator.iterate(P)(matrixMul(_, P))
        .drop(150).next.head
    import Ordering.Double.TotalOrdering
    stationaryProbabilityVector
      .zipWithIndex
      .sortBy(-_._1)
      .take(3)
      .map(_._2)
      .map("%02d".format(_))
      .mkString
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/90.scala
|
<reponame>SethTisue/Project-Euler
package net.tisue.euler
class Problem90 extends Problem(90, "1217"):
  // Cube-digit pairs: count unordered pairs of six-digit dice that can show
  // every two-digit square 01..81, with 6 and 9 interchangeable.
  def solve =
    val targets = List("01", "04", "09", "16", "25", "36", "49", "64", "81")
    def displaysAllSquares(faces1: Seq[Int], faces2: Seq[Int]): Boolean =
      val direct =
        for f1 <- faces1
            f2 <- faces2
        yield s"$f1$f2"
      // a 6 can be shown upside-down as a 9 and vice versa
      val withFlips = direct.flatMap { r =>
        List(r, r.replaceAll("6", "9"), r.replaceAll("9", "6"))
      }
      targets.forall { sq =>
        withFlips.contains(sq) || withFlips.contains(sq.reverse.mkString)
      }
    val candidateDice = (0 to 9).combinations(6).toSeq
    val orderedPairs =
      candidateDice.flatMap(d1 => candidateDice.map(d2 => (d1, d2)))
    // each unordered pair appears twice among the ordered pairs
    orderedPairs.count(displaysAllSquares.tupled) / 2
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/110.scala
|
<gh_stars>1-10
package net.tisue.euler
import Primes.*
// Like problem 108, but we have to be smarter.
// We are asked to find the first solution n for which A018892(n) exceeds 4,000,000. Empirically, the
// factorCounts values which reach new highs for A018892(n) look like:
// (1)
// (2)
// (1, 1)
// (2, 1)
// (3, 1)
// (1, 1, 1)
// (2, 1, 1)
// (3, 1, 1)
// (2, 2, 1)
// (1, 1, 1, 1)
// (3, 2, 1)
// (2, 1, 1, 1)
// (3, 1, 1, 1)
// (2, 2, 1, 1)
// And so on. The factor counts are always decreasing, which is another way of saying
// that these are the products of primorials (en.wikipedia.org/wiki/Primorials).
class Problem110 extends Problem(110, "9350130049860600"):
  // thank you On-Line Encyclopedia of Integer Sequences!
  // number of solutions of 1/x + 1/y = 1/n from n's prime-factor multiplicities
  def A018892(n: BigInt) =
    (factorCounts(n).map(_ * 2 + 1).product + 1) / 2
  // integer partitions of n, each partition in non-increasing order
  def partitions(n: Int): List[List[Int]] =
    def recurse(n: Int, max: Int): List[List[Int]] =
      if n == 0 then
        List(Nil)
      else (1 to (n min max)).toList
        .flatMap(n1 => recurse(n - n1, n1 min max)
          .map(n1 :: _))
    recurse(n, n)
  // numbers whose exponent sequence over the first primes is a partition of n;
  // these are the primorial products described in the header comment
  def primorialsWithNFactors(n: Int) =
    def expand(f: Int, k: Int) = BigInt(primes(k)).pow(f)
    partitions(n).map(_.zipWithIndex.map(Function.tupled(expand)).product)
  val threshold = 4000000
  // empirically, 18 is high enough to get the answer. even using a much larger limit like 40
  // takes under 10 seconds, so I think I'll just settle for using an empirically found limit.
  def solve = (1 to 18)
    .flatMap(primorialsWithNFactors)
    .sorted
    .dropWhile(A018892(_) <= threshold)
    .head
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/46.scala
|
<reponame>SethTisue/Project-Euler<gh_stars>1-10
package net.tisue.euler
import Primes.*
// What is the smallest odd composite that cannot be written as the sum of a prime and twice a
// square?
class Problem46 extends Problem(46, "5777"):
  // Goldbach's other conjecture: find the smallest odd composite that
  // cannot be written as prime + 2 * square.
  def square(n: Int) = n * n
  def isSquare(n: Int) =
    val root = math.round(math.sqrt(n)).toInt
    square(root) == n
  // is there a prime p < n with n = p + 2 * k^2 for some integer k?
  def hasSolution(n: Int) =
    primes.takeWhile(_ < n).exists { p =>
      val rest = n - p
      rest % 2 == 0 && isSquare(rest / 2)
    }
  def solve =
    LazyList.from(3, 2)          // odd numbers from 3
      .filterNot(isSievedPrime)  // composites only
      .find(n => !hasSolution(n))
      .get
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/37.scala
|
<reponame>SethTisue/Project-Euler
package net.tisue.euler
import Primes.*
// Find the sum of the only eleven primes that are both truncatable from left to right and right to
// left. (Example: 3797. 379, 37, 3, 797, 97, and 7 are all prime.)
class Problem37 extends Problem(37, "748317"):
  // Sum the eleven primes whose every left- and right-truncation is prime
  // (single-digit primes are excluded, hence dropWhile(_ < 10)).
  def solve =
    def isTruncatable(n: Int) =
      val digits = n.toString
      (1 until digits.length).forall { i =>
        isSievedPrime(digits.substring(i).toInt) &&
          isSievedPrime(digits.substring(0, i).toInt)
      }
    primes.dropWhile(_ < 10)
      .filter(isTruncatable)
      .take(11)
      .sum
|
SethTisue/Project-Euler
|
build.sbt
|
<filename>build.sbt<gh_stars>1-10
scalaVersion := "3.1.1-RC2"
// MUnit is the test framework; every Problem subclass is run as a test
libraryDependencies += "org.scalameta" %% "munit" % "0.7.29" % Test
scalacOptions ++= Seq(
  "-encoding", "us-ascii",
  "-deprecation",
  "-feature",
  "-Xfatal-warnings",
  // opt in to upcoming-Scala source compatibility checks
  "-source:future",
)
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/86.scala
|
<gh_stars>1-10
package net.tisue.euler
// A spider, S, sits in one corner of a cuboid room, measuring 6 by 5 by 3, and
// a fly, F, sits in the opposite corner. By travelling on the surfaces of the
// room the shortest "straight line" distance from S to F is 10 and the path is
// shown on the diagram. However, there are up to three "shortest" path
// candidates for any given cuboid and the shortest route is not always integer.
// By considering all cuboid rooms up to a maximum size of M by M by M, there
// are exactly 2060 cuboids for which the shortest distance is integer when
// M=100, and this is the least value of M for which the number of solutions
// first exceeds two thousand; the number of solutions is 1975 when M=99. Find
// the least value of M such that the number of solutions first exceeds one
// million.
// Rather than do brute force search of cuboids, I decided to generate
// Pythagorean triples using
// http://en.wikipedia.org/wiki/Pythagorean_triple#Parent.2Fchild_relationships
// . This approach is more interesting than brute force and may prove useful
// for later problems.
// Once we have the triples we fit cuboids inside them. Actually, we compute
// how many cuboids will fit, since the problem doesn't require us to generate
// actual cuboids, only count them.
// The problem with generating triples is that they don't arrive in any
// simple-to-characterize order, so it's hard to know how many triples we need
// to generate to get all the solutions for a given M. To force the triples
// arrive in a useful order, we use a priority queue where the next parent we
// allow to spawn is the one with the smallest longer leg.
class Problem86 extends Problem(86, "1818"):
  def solve =
    // a Pythagorean triple with legs a, b and hypotenuse c
    case class Triple(a: Int, b: Int, c: Int):
      def *(k : Int) = Triple(a * k, b * k, c * k)
      // orient so that b is the longer leg
      def canonical = if b > a then this else swap
      def swap = Triple(b, a, c)
      // the three children in the tree of primitive triples
      // (see the wikipedia link in the header comment)
      def children =
        List(Triple( a - 2 * b + 2 * c,
                     2 * a - b + 2 * c,
                     2 * a - 2 * b + 3 * c),
             Triple( a + 2 * b + 2 * c,
                     2 * a + b + 2 * c,
                     2 * a + 2 * b + 3 * c),
             Triple( - a + 2 * b + 2 * c,
                     -2 * a + b + 2 * c,
                     -2 * a + 2 * b + 3 * c))
      // Can't remember the logic behind this formula. I think the idea was
      // something like, b can't be too big or too small or the shortest path
      // changes.
      def cuboidCount: Int =
        (b min (a / 2)) - (1 max (a - b)) + 1
    // primitive triples emitted smallest-longer-leg first, via a min-heap
    // (children always have a larger b than their parent)
    val primitiveTriples = {
      given Ordering[Triple] = Ordering.by[Triple, Int](_.b).reverse
      val heap = collection.mutable.PriorityQueue[Triple]()
      heap += Triple(3, 4, 5)
      LazyList.continually {
        val result = heap.dequeue()
        heap ++= result.children.map(_.canonical)
        result
      }
    }
    // this gives us all the triples (primitive or not) where either leg
    // equals m and the other leg is at most 2m (since to fit cuboids
    // in the triple we will break the other leg into two parts)
    def triples(m: Int) =
      def fits(t: Triple): Boolean =
        t.b <= m * 2
      for t1 <- primitiveTriples.takeWhile(fits)
          t2 <- LazyList.from(1).map(t1 * _).takeWhile(fits)
          if t2.a == m || t2.b == m
      yield if t2.b == m then t2
            else t2.swap
    // number of integer-shortest-path cuboids whose longest side is exactly k
    def cuboidCount(k: Int): Int =
      triples(k).map(_.cuboidCount).sum
    def partialSums(ns: LazyList[Int]): LazyList[Int] =
      ns.scanLeft(0)(_ + _)
    // solutionCounts(m) = number of solutions for all cuboids up to m x m x m
    val solutionCounts =
      partialSums(LazyList.from(1).map(cuboidCount))
    solutionCounts.takeWhile(_ < 1000000).size
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/83.scala
|
<filename>src/test/scala/net/tisue/euler/83.scala<gh_stars>1-10
package net.tisue.euler
// Find the minimal path sum in matrix.txt (from problems 81 and 82), from the top left to the
// bottom right by moving left, right, up, and down.
// http://en.wikipedia.org/wiki/Dijkstra%27s_algorithm
import collection.immutable.Vector
class Problem83 extends Problem(83, "425185"):
  // the grid of costs, one Vector[Int] per row
  val matrix = io.Source.fromResource("81.txt")
    .getLines.map(_.trim.split(",").map(_.toInt).to(Vector))
    .to(Vector)
  // Orthogonal neighbours of `loc` that lie inside the grid.
  // Fix: the column index must be checked against the row's own length,
  // not against the number of rows (the two only coincide for square input).
  def neighbors(loc: (Int, Int)) =
    List((-1, 0), (0, -1), (1, 0), (0, 1))
      .map(offsets => (loc._1 + offsets._1, loc._2 + offsets._2))
      .filter(coords => matrix.isDefinedAt(coords._1) &&
                        matrix(coords._1).isDefinedAt(coords._2))
  // dist(v) = cost of the cheapest path found so far from the top-left to v
  val dist = collection.mutable.HashMap[(Int, Int), Int]()
  dist((0, 0)) = matrix(0)(0)
  // unvisited nodes; textbook Dijkstra with a plain list instead of a
  // priority queue (O(V^2), fast enough for an 80x80 grid)
  var queue = matrix.indices.flatMap(i => matrix(i).indices.map((i, _))).toList
  while queue.nonEmpty do
    // settle the reachable unvisited node with the smallest tentative distance
    val u = queue.filter(dist.isDefinedAt).minBy(dist)
    queue = queue.filter(_ != u)
    for v <- neighbors(u) do
      val alt = dist(u) + matrix(v._1)(v._2)
      if !dist.isDefinedAt(v) || alt < dist(v) then
        dist(v) = alt
  def solve = dist((matrix.indices.last, matrix.head.indices.last))
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/27.scala
|
<reponame>SethTisue/Project-Euler<filename>src/test/scala/net/tisue/euler/27.scala
package net.tisue.euler
import Primes.*
// Considering quadratics of the form:
// n^2 + an + b, where |a| < 1000 and |b| < 1000
// where |n| is the absolute value of n
// Find the product of the coefficients, a and b, for the quadratic
// expression that produces the maximum number of primes for consecutive
// values of n, starting with n = 0.
// takes 11 seconds
class Problem27 extends Problem(27, "-59231"):
  def solve =
    // how many consecutive n = 0, 1, 2, ... make n^2 + a*n + b prime
    def primeCount(a: Int, b: Int): Int =
      Iterator.from(0)
        .map(n => n * (a + n) + b)   // n^2 + a*n + b, via Horner
        .takeWhile(v => v > 0 && isSievedPrime(v))
        .size
    def search(limit: Int) =
      val coefficients =
        for a <- -limit to limit
            b <- -limit to limit
        yield (a, b)
      val (bestA, bestB) =
        coefficients.maxBy { case (a, b) => primeCount(a, b) }
      bestA * bestB
    search(999)
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/108.scala
|
<reponame>SethTisue/Project-Euler<gh_stars>1-10
package net.tisue.euler
import Primes.*
// In the following equation x, y, and n are positive integers:
// 1/x + 1/y = 1/n
// For n = 4 there are exactly three distinct solutions:
// 1/5 + 1/20 = 1/4
// 1/6 + 1/12 = 1/4
// 1/8 + 1/ 8 = 1/4
// What is the least value of n for which the number of distinct solutions exceeds one thousand?
// I wrote some brute force code to count the solutions for small n, then looked up the resulting
// sequence and found it is integer sequence A018892. The A018892 page gives an easy formula based
// on decomposing n into prime factors, so let's just go ahead and use that. Runtime is under 2
// seconds.
// A018892 is closely related to A046079 which is the "number of ways in which n can be the leg
// (other than the hypotenuse) of a primitive or nonprimitive right triangle." So another
// possibility would be to use the pythagorean triple code from problem 86.
class Problem108 extends Problem(108, "180180"):
  // thank you On-Line Encyclopedia of Integer Sequences!
  // number of solutions of 1/x + 1/y = 1/n, from n's prime-factor multiplicities
  def A018892(n: Int) =
    val oddDivisorsOfSquare = factorCounts(n).map(2 * _ + 1).product
    (oddDivisorsOfSquare + 1) / 2
  def solve =
    Iterator.from(2).find(A018892(_) > 1000).get
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/98.scala
|
<reponame>SethTisue/Project-Euler
package net.tisue.euler
// Find all the square anagram word pairs (e.g. CARE 1296 = 36^2, RACE 9216 = 96^2).
// What is the largest square number formed by any member of such a pair?
class Problem98 extends Problem(98, "18769"):
  // the word list, with surrounding quotes stripped
  val words: Iterable[String] =
    io.Source.fromResource("98.txt").mkString.trim.split(",")
      .map(_.drop(1).dropRight(1).mkString)
  // groups of words that are anagrams of one another (only groups of 2+)
  val anagrams: Iterable[List[String]] =
    words.groupBy(_.toSeq.sorted.mkString).values.map(_.toList).filter(_.size > 1)
  val squares = LazyList.from(1).map(n => n * n)
  // all perfect squares with exactly n decimal digits
  def squaresOfLength(n: Int) =
    squares
      .dropWhile(_.toString.size < n)
      .takeWhile(_.toString.size == n)
  // e.g. scramble(1296, "CARE", "RACE") => 9216
  // apply word1's letter->digit assignment (taken from n) to word2
  def scramble(n: Int, word1: String, word2: String): Option[Int] =
    val substitutions = (word1 zip n.toString).toMap
    // "a letter [may not] have the same digital value as another letter"
    if substitutions.size == substitutions.values.toSet.size then
      Some(word2.map(substitutions(_)).mkString.toInt)
    else None
  // slight loss of generality here: assume there are no anagram triples in the input. in the actual
  // input there is one triple but the words are short so we can ignore it.
  val solutions =
    for case List(word1, word2) <- anagrams ++ anagrams.map(_.reverse)
        squares = squaresOfLength(word1.size).toSet
        s <- squares
        scrambled <- scramble(s, word1, word2)
        if s != scrambled && squares.contains(scrambled)
    yield s
  def solve = solutions.max
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/101.scala
|
package net.tisue.euler
// There are a lot of different ways you can solve this.
// Cramer's Rule is one, Pascal's triangle is another.
// Instead of anything fancy I used the method of successive differences which
// is taught to schoolchildren and which is easy to do with pencil and paper.
// The "diagonal" method returns the first numbers in the rows of successive differences.
// From those we can re-extrapolate the sequence.
// generalbaguette's Haskell solution is mostly like mine, but more concisely and elegantly
// expressed. I'm not sure whether I'm more happy that we used a similar solution plan, or more sad
// that his is better.
class Problem101 extends Problem(101, "37076114526"):
  // successive differences: (a1-a0, a2-a1, ...)
  def differences(ns: Seq[BigInt]) =
    ns.tail.lazyZip(ns).map(_ - _)
  // first element of each row of the difference table, deepest row first
  def diagonal(ns: Seq[BigInt]) =
    Iterator.iterate(ns)(differences)
      .takeWhile(_.nonEmpty)
      .map(_.head)
      .toList.reverse
  // rebuild the sequence from its diagonal, extended indefinitely
  // (beyond the original terms it continues the fitted polynomial)
  def extrapolate(ns: Seq[BigInt]) =
    def addDifferences(diffs: LazyList[BigInt], init: BigInt): LazyList[BigInt] =
      init #:: addDifferences(diffs.tail, init + diffs.head)
    // `.cycle` is a project extension producing an endless repetition
    diagonal(ns).foldLeft(LazyList(BigInt(0)).cycle)(addDifferences)
  // first element of copy that disagrees with master (the "first incorrect term")
  def mismatch[T](master: Seq[T], copy: Seq[T]) =
    (master zip copy).find(p => p._1 != p._2).get._2
  def solve =
    val coefficients = List[BigInt](1, -1, 1, -1, 1, -1, 1, -1, 1, -1, 1)
    // evaluate the degree-10 polynomial at n = 1, 2, 3, ... (Horner's method)
    val sequence = LazyList.from(1).map(n => coefficients.reduceLeft(_ * n + _))
    sequence.take(coefficients.size - 1) // subtract one so there's always a mismatch
      .inits
      .map(extrapolate)
      .map(mismatch(sequence, _))
      .sum
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/54.scala
|
<reponame>SethTisue/Project-Euler
package net.tisue.euler
// The input file contains one thousand random poker hands dealt to two players.
// How many hands does player one win?
// (note: glguy's Haskell solution on the Euler forum is more elegant)
class Problem54 extends Problem(54, solution = "376"):
  // rank is 0 (= "2") through 12 (= ace); suit is the literal character
  case class Card(rank: Int, suit: Char)
  type Hand = List[Card]
  // We'll often need to know what groups of same-ranked cards exist.
  // We'll sort the groups in descending order by size, then by rank.
  // So for example for the hand 9-9-8-8-7 (in any order), the result
  // here is Seq((2, 9), (2, 8), (1, 7)).
  def groups(hand: Hand): Seq[(Int, Int)] =
    hand.groupBy(_.rank)
      .view.mapValues(_.size)
      .toList
      .map(_.swap)
      .sortBy(-_._2)  // rank, descending (stable sort keeps this as tiebreak)
      .sortBy(-_._1)  // then size, descending
  // sometimes we only care what size the groups are.
  // for example if hand is 9-9-8-8-7, result here is Seq(2, 2, 1)
  def groupSizes(hand: Hand): Seq[Int] =
    groups(hand).map(_._1)
  // higher-is-better return value, so:
  // 0 for 1 of a kind
  // 1 for 2 of a kind
  // 2 for 2 pairs
  // ...
  def handKind(hand: Hand): Int =
    def isStraightFlush =
      isStraight && isFlush
    def isNOfAKind(n: Int) =
      groupSizes(hand).head >= n
    def isFullHouse =
      groupSizes(hand) == List(3, 2)
    def isFlush =
      hand.map(_.suit).distinct.size == 1
    def isTwoPairs =
      groupSizes(hand) == List(2, 2, 1)
    // NOTE(review): treats ace as high only — no A-2-3-4-5 wheel straight;
    // evidently sufficient for this input file.
    def isStraight =
      hand.map(_.rank)
        .sorted
        .sliding(2)
        .forall{case Seq(r1, r2) : Seq[Int] @unchecked => r2 == r1 + 1}
    // predicates indexed by hand kind; the best (last) satisfied one wins
    val handFunctions =
      IndexedSeq(
        () => isNOfAKind(1),
        () => isNOfAKind(2),
        () => isTwoPairs,
        () => isNOfAKind(3),
        () => isStraight,
        () => isFlush,
        () => isFullHouse,
        () => isNOfAKind(4),
        () => isStraightFlush)
    handFunctions.lastIndexWhere(_.apply)
  def beats(hand1: Hand, hand2: Hand): Boolean =
    // compare by hand kind first, then by the sorted rank groups
    type Score = (Int, Seq[(Int, Int)])
    def score(hand: Hand): Score =
      (handKind(hand), groups(hand))
    // tuples get ordered fieldwise, so we can just:
    import Ordering.Implicits.{given Ordering[?]}
    Ordering[Score].gt(score(hand1), score(hand2))
  def solve =
    val input: List[(Hand, Hand)] =
      def readCard(s: String) =
        Card("23456789TJQKA".indexOf(s(0)), s(1))
      // each line holds ten cards: five for player one, five for player two
      for line <- io.Source.fromResource("54.txt").getLines.toList
          cards = line.split(" ").toList.map(readCard)
      yield cards.splitAt(5)
    input.count{case (hand1, hand2) =>
      beats(hand1, hand2)}
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/29.scala
|
<reponame>SethTisue/Project-Euler<filename>src/test/scala/net/tisue/euler/29.scala<gh_stars>1-10
package net.tisue.euler
import Primes.*
// How many distinct terms are in the sequence generated by a^b for
// 2 <= a <= 100 and 2 <= b <= 100?
class Problem29 extends Problem(29, "9183"):
  // Count distinct values of a^b for 2 <= a, b <= 100 by comparing
  // prime factorizations (a^b's factorization is a's repeated b times).
  def solve =
    // prime factorization in nondecreasing order, e.g. factors(12) = List(2, 2, 3)
    def factors(n: Int): List[Int] =
      if n == 1 then Nil
      else
        val p = primes.find(n % _ == 0).get
        p :: factors(n / p)
    val range = 2 to 100
    val terms =
      range.flatMap { a =>
        val base = factors(a)                      // hoisted: independent of b
        range.map(b => base.flatMap(f => List.fill(b)(f)))
      }
    terms.toSet.size
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/69.scala
|
<reponame>SethTisue/Project-Euler
package net.tisue.euler
import Primes.*
// Euler's Totient function phi(n) is used to determine the number of numbers less than n which are
// relatively prime to n. n=6 produces a maximum n/phi(n) for n <= 10. Find the value of n <=
// 1,000,000 for which n/phi(n) is a maximum.
// I wasn't 100% sure this approach would be guaranteed to produce the right answer, but it does.
// (People on the forum seem confident the algorithm is guaranteed to work.)
class Problem69 extends Problem(69, "510510"):
  // n/phi(n) is maximized by primorials (products of the first k primes),
  // so take the largest primorial that does not exceed the limit.
  def solve =
    val primorials = primes.scanLeft(1)((acc, p) => acc * p)
    primorials.takeWhile(_ <= 1000000).last
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/5.scala
|
<reponame>SethTisue/Project-Euler
package net.tisue.euler
// What is the smallest number that is evenly divisible by all of the numbers from 1 to 20?
class Problem5 extends Problem(5, "232792560"):
  // en.wikipedia.org/wiki/Least_common_multiple#Calculating_the_least_common_multiple
  // dividing before multiplying keeps intermediates small; gcd(a, b) divides a
  def lcm(a: BigInt, b: BigInt): BigInt =
    (a / a.gcd(b)) * b
  def solve = (BigInt(2) to 20).foldLeft(BigInt(1))(lcm)
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/58.scala
|
package net.tisue.euler
import Primes.*
// Starting with 1 and spiralling anticlockwise in the following way, a square spiral with side
// length 7 is formed.
// 37 36 35 34 33 32 31
// 38 17 16 15 14 13 30
// 39 18 5 4 3 12 29
// 40 19 6 1 2 11 28
// 41 20 7 8 9 10 27
// 42 21 22 23 24 25 26
// 43 44 45 46 47 48 49
// It is interesting to note that the odd squares lie along the bottom right diagonal, but what is
// more interesting is that 8 out of the 13 numbers lying along both diagonals are prime. If this
// process is continued, what is the side length of the square spiral for which the ratio of primes
// along both diagonals first falls below 10%?
class Problem58 extends Problem(58, "26241"):
  // One step of growing the spiral: n is the current side length minus 2.
  // The three non-square corners of the next layer are checked for primality
  // (the fourth corner, (n+2)^2, is a perfect square and never prime).
  def next(n: Int, primeCount: Int) =
    (n + 2,
     primeCount + List(n * n + n + 1,
                       n * n + n * 2 + 2,
                       n * n + n * 3 + 3)
       .count(isPrime))
  // a spiral of side n has 2n - 1 numbers on its two diagonals
  def isSolution(n: Int, primeCount: Int) =
    primeCount.toDouble / (2 * n - 1) < 0.10
  def solve =
    LazyList.iterate((1, 0))(next.tupled)
      .tail   // skip the trivial 1x1 spiral (ratio would divide by 1)
      .find(isSolution.tupled)
      .get._1
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/88.scala
|
package net.tisue.euler
import Primes.*
// A natural number, N, that can be written as the sum and product of a given set of at least two
// natural numbers, {a1, a2, ... , ak} is called a product-sum number: N = a1 + a2 + ... + ak = a1
// * a2 * ... * ak.
// For a given set of size, k, we shall call the smallest N with this property a minimal product-sum
// number. The minimal product-sum numbers for sets of size, k = 2, 3, 4, 5, and 6 are as follows.
// k=2: 4 = 2 2 = 2 + 2
// k=3: 6 = 1 2 3 = 1 + 2 + 3
// k=4: 8 = 1 1 2 4 = 1 + 1 + 2 + 4
// k=5: 8 = 1 1 2 2 2 = 1 + 1 + 2 + 2 + 2
// k=6: 12 = 1 1 1 1 2 6 = 1 + 1 + 1 + 1 + 2 + 6
// Hence for 2 <= k <= 6, the sum of all the minimal product-sum numbers is 4+6+8+12 = 30; note that 8
// is only counted once in the sum. And as the complete set of minimal product-sum numbers for
// 2 <= k <=12 is {4, 6, 8, 12, 15, 16}, the sum is 61.
// What is the sum of all the minimal product-sum numbers for 2 <= k <= 12000?
// This runs in 8.2 seconds. It's not as elegant as I'd like, but I'm just so happy
// to have something that runs so fast. We use a rolling cache, "stream", that remembers
// already-computed values, but we also drop no-longer needed entries, using
// assignment (stream = stream.dropWhile(...)).
class Problem88 extends Problem(88, "7587457"):
  // divisors(n) = divisors of n from 2 to n inclusive, memoized lazily by index
  val divisors = LazyList.from(0).map(n => (2 to n).filter(n % _ == 0).toList)
  // all multiplicative factorizations of n into factors >= 2,
  // in non-increasing factor order (ceiling bounds the next factor)
  def factorizations(n: Int) =
    def helper(n: Int, ceiling: Int): List[List[Int]] =
      if n == 1 then List(Nil)
      else
        for d <- divisors(n).takeWhile(_ <= ceiling)
            f <- helper(n / d, d)
        yield d :: f
    helper(n, n)
  // a factorization of N with product == N can be padded with 1s to make the
  // sum match; the padded set size is k = (#factors) + N - (sum of factors)
  def getK(factors: List[Int]) = factors.size + factors.product - factors.sum
  // rolling cache (see header comment): composites paired with their
  // factorizations; already-consumed entries are dropped as k grows
  var stream = LazyList.from(2).filter(!isPrime(_)).map(n => (n, factorizations(n)))
  def solve(limit: Int) =
    (for k <- (2 to limit).toList
         // side effect: advance the cache — minimal product-sum numbers never
         // shrink below k, so smaller entries are no longer needed
         _ = (stream = stream.dropWhile(_._1 < k))
         n = stream.find(_._2.exists(fs => getK(fs) == k)).get._1
     yield n).distinct.sum
  def solve = solve(12000)
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/85.scala
|
<gh_stars>1-10
package net.tisue.euler
// By counting carefully it can be seen that a rectangular grid
// measuring 3 by 2 contains eighteen rectangles: Although there
// exists no rectangular grid that contains exactly two million
// rectangles, find the area of the grid with the nearest solution.
// Analytically, 1200 is an upper bound on the solution since
// a 2x1200 grid has over 2 million rectangles.
class Problem85 extends Problem(85, "2772"):
  // tri(n) = number of ways to pick a span of n cells along one axis
  def tri(n: Int) = (n * n + n) / 2
  // a w-by-h grid contains tri(w) * tri(h) axis-aligned sub-rectangles
  def rectangles(w: Int, h: Int) = tri(w) * tri(h)
  // w <= h avoids examining each grid shape twice; 1200 is the analytic
  // upper bound noted in the header comment
  val candidates =
    (2 to 1200).flatMap(w => (w to 1200).map(h => (w, h)))
  def closeness(w: Int, h: Int) =
    math.abs(rectangles(w, h) - 2000000)
  def solve =
    val (w, h) = candidates.minBy { case (a, b) => closeness(a, b) }
    w * h
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/33.scala
|
package net.tisue.euler
// The fraction 49/98 is a curious fraction, as an inexperienced
// mathematician in attempting to simplify it may incorrectly believe
// that 49/98 = 4/8, which is correct, is obtained by cancelling the
// 9s. We shall consider fractions like, 30/50 = 3/5, to be trivial
// examples. There are exactly four non-trivial examples of this type
// of fraction, less than one in value, and containing two digits in
// the numerator and denominator. If the product of these four
// fractions is given in its lowest common terms, find the value of
// the denominator.
// This code is excessively general; it would have been easier, I
// think, to iterate over possible individual digits rather than
// iterating over possible two-digit numbers.
class Problem33 extends Problem(33, "100"):
  def solve =
    val fractions =
      // a < b keeps the fraction below one; both are two-digit numbers
      for a <- 10 to 98
          b <- (a + 1) to 99
          uniqueDigits = s"$a$b".toSet
          // exactly 3 distinct digits across a and b => exactly one shared digit
          if uniqueDigits.size == 3
          // the Option from find flows into the for via Option-to-Iterable
          sharedDigit <- uniqueDigits.find(d => a.toString.contains(d) && b.toString.contains(d))
          // cancelling a zero is the "trivial" case the problem excludes
          if sharedDigit != '0'
          newA = a.toString.filter(_ != sharedDigit).mkString.toInt
          newB = b.toString.filter(_ != sharedDigit).mkString.toInt
          // cross-multiplication: a/b == newA/newB without building rationals
          if a * newB == b * newA
      yield (a, b)
    val (a, b) = (fractions.map(_._1).product, fractions.map(_._2).product)
    // gcd is a project extension on Int; reduces the product to lowest terms
    b / a.gcd(b)
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/51.scala
|
<gh_stars>1-10
package net.tisue.euler
import Primes.*
// By replacing the 1st digit of *57, it turns out that six of the possible values: 157, 257, 457,
// 557, 757, and 857, are all prime.
// By replacing the 3rd and 4th digits of 56**3 with the same digit, this 5-digit number is the
// first example having seven primes, yielding the family: 56003, 56113, 56333, 56443, 56663, 56773,
// and 56993. Consequently 56003, being the first member of this family, is the smallest prime with
// this property.
// Find the smallest prime which, by replacing part of the number (not necessarily adjacent digits)
// with the same digit, is part of an eight prime value family.
class Problem51 extends Problem(51, "121313"):
  def solve =
    // the set of all primes with exactly n digits, for fast membership tests
    def nDigitPrimes(n: Int) =
      val lowerLimit = List.fill(n - 1)(10).product
      primesBelow(lowerLimit * 10).dropWhile(_ < lowerLimit).toSet
    // all length-n strings over the digits plus '*'; '*' marks the positions
    // that will be replaced by one common digit
    def templates(n: Int): List[String] =
      if n == 0 then
        List("")
      else
        templates(n - 1).flatMap(template => "0123456789*".map(_.toString + template))
    val solutions =
      for numDigits <- LazyList.from(1)
          primes = nDigitPrimes(numDigits)
          template <- templates(numDigits)
          // a leading '0' can't start a prime; at least one '*' must be present.
          // A leading '*' replaced by 0 produces a shorter number, which simply
          // fails the n-digit prime-set lookup below and is filtered out.
          if template(0) != '0' && template.contains('*')
      yield (0 to 9).map(d => template.replaceAll("\\*", d.toString).toInt).filter(primes.contains(_))
    // first template yielding an eight-prime family; since digits were tried
    // in increasing order, .head is the family's smallest member
    solutions.find(_.size == 8).get.head
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/105.scala
|
package net.tisue.euler
// Identify the special sum sets in sets.txt and find the sum of their sums.
class Problem105 extends Problem(105, "73702"):
  // position k of a SumSet holds the sums of all (k+1)-element subsets seen so far
  type SumSet = List[List[Int]]
  // extend the subset-sum table with one more element x (elements are fed in
  // ascending order — see the .sorted in solve)
  def augment(ss: SumSet, x: Int) =
    if ss.isEmpty then
      List(List(x))
    else
      // new singleton x; each size-k group gains the old size-(k-1) sums plus x;
      // a new largest group holds the total of all elements (ss.head are the
      // singletons, so ss.head.sum is the previous total)
      (ss.head :+ x) +: ss.sliding(2).collect{case List(l1, l2) => l2 ++ l1.map(_ + x)}.toList :+ List(ss.head.sum + x)
  // special: within each size all subset sums are distinct, and any larger
  // subset out-sums any smaller one.
  // NOTE(review): xs1.last < xs2.head treats each group's last/head as its
  // max/min — this relies on the construction order in augment; confirm
  // before reusing this check elsewhere.
  def isSpecial(ss: SumSet): Boolean =
    ss.size < 2 || ss.sliding(2).forall{
      case List(xs1, xs2) =>
        xs1.size == xs1.distinct.size &&
        xs2.size == xs2.distinct.size &&
        xs1.last < xs2.head
      case _ => throw IllegalStateException()
    }
  def solve =
    io.Source.fromResource("105.txt").getLines
      .map(_.split(",").map(_.toInt).sorted)
      .map(_.foldLeft(Nil: SumSet)(augment))
      .filter(isSpecial)
      // head is the singleton group, so its sum is the whole set's total
      .map(_.head.sum)
      .sum
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/91.scala
|
<reponame>SethTisue/Project-Euler
package net.tisue.euler
// The points P (x1, y1) and Q (x2, y2) are plotted at integer co-ordinates and are joined to the
// origin, O(0, 0), to form triangle OPQ. Given that 0 <= x1, y1, x2, y2 <= 50, how many right
// triangles can be formed?
class Problem91 extends Problem(91, "14234"):
  def square(d: Double) = d * d
  // Euclidean distance between two points
  def distance(x1: Double, y1: Double, x2: Double, y2: Double) =
    math.sqrt(square(x2 - x1) + square(y2 - y1))
  // floating-point equality with a tight absolute tolerance
  def near(d1: Double, d2: Double) =
    math.abs(d1 - d2) < 0.00000000001
  case class Triangle(x1: Int, y1: Int, x2: Int, y2: Int)
  // right-triangle test: Pythagorean theorem on sides sorted longest-first;
  // the second clause rejects degenerate (collinear) point triples
  def isSolution(t: Triangle) =
    val List(side0, side1, side2) = {
      import Ordering.Double.TotalOrdering
      List(distance(t.x1, t.y1, t.x2, t.y2),
        distance(t.x1, t.y1, 0, 0),
        distance(t.x2, t.y2, 0, 0))
        .sorted.reverse
    }: @unchecked
    near(square(side0), square(side1) + square(side2)) &&
    !near(side0, side1 + side2)
  // enumerate each unordered pair {P, Q} exactly once; the guard also drops P == Q
  def candidates =
    for x1 <- 0 to 50
        y1 <- 0 to 50
        x2 <- x1 to 50
        y2 <- 0 to 50
        if x1 < x2 || y1 < y2
    yield Triangle(x1, y1, x2, y2)
  def solve = candidates.count(isSolution)
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/41.scala
|
<gh_stars>1-10
package net.tisue.euler
import Primes.*
// Problem 41: an n-digit number is pandigital if it uses each digit 1..n
// exactly once (2143 is a 4-digit pandigital prime). Find the largest
// pandigital prime. 8- and 9-digit pandigitals need not be checked: their
// digit sums (36 and 45) make them all divisible by 3.
class Problem41 extends Problem(41, "7652413"):
  def solve =
    val pandigitalPrimes =
      for
        digits <- (1 to 7).permutations
        candidate = digits.mkString.toInt
        if isPrime(candidate)
      yield candidate
    pandigitalPrimes.max
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/204.scala
|
package net.tisue.euler
import Primes.*
// Problem 204: count generalized Hamming numbers (all prime factors below
// 100) that do not exceed 10^9.
class Problem204 extends Problem(204, "2944730"):
  // the admissible prime factors
  val ps = primes.takeWhile(_ < 100)
  // counts numbers of the form n * p * (further factors >= p) within the
  // bound; restarting each recursion at p keeps factor sequences
  // non-decreasing, so every number is generated exactly once
  def count(n: Long, p: Int): Int =
    if n * p > 1000000000L then 0
    else ps.dropWhile(_ < p).foldLeft(1)((total, q) => total + count(n * p, q))
  def solve = count(1, 1)
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/70.scala
|
<filename>src/test/scala/net/tisue/euler/70.scala
package net.tisue.euler
import Primes.*
// Find the value of n, 1 < n < 10^7, for which the totient phi(n) is a permutation of n and the ratio
// n/phi(n) produces a minimum.
// We're looking for a number that's a product of exactly two fairly large primes.
// That's the idea behind the optimizations in isSolution.
class Problem70 extends Problem(70, "8319823"):
  // use Stream so we don't compute more factors than we need
  // prime factors of n with multiplicity, smallest first
  def factors(n: Int): LazyList[Int] =
    if isSievedPrime(n) then
      LazyList(n)
    else
      val f = primes.find(n % _ == 0).get
      f #:: factors(n / f)
  // pass factors in so we avoid recomputing any we already have
  // phi(n) = n * prod((p - 1) / p) over the distinct prime factors p
  def totient(n: Int, factors: Seq[Int]) =
    factors.toSet.foldLeft(n.toLong)((t, f) => t * (f - 1) / f).toInt
  def solve(limit: Int) =
    val factorMin = math.sqrt(limit) / 10 // "fairly large"
    def isSolution(n: Int) =
      val fs = factors(n)
      // exactly two factors (take(3) avoids forcing the full lazy list),
      // both large, and phi(n) must be a digit permutation of n
      fs.head > factorMin && fs.take(3).size == 2 &&
      n.digits.sorted == totient(n, fs).digits.sorted
    // we redundantly recompute the factors here but it doesn't matter
    // since there aren't that many solutions to test
    def ratio(n: Int) = n.toDouble / totient(n, factors(n))
    import Ordering.Double.TotalOrdering
    (2 until limit).filter(isSolution).minBy(ratio)
  def solve = solve(10000000) // takes about 17 seconds
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/10.scala
|
<reponame>SethTisue/Project-Euler<gh_stars>1-10
package net.tisue.euler
import Primes.*
// Problem 10: sum of all primes below two million. Walking the lazy primes
// stream (primes.takeWhile(_ < 2000000)) is much too slow, so we sieve;
// the running total is accumulated as BigInt because it overflows Int.
class Problem10 extends Problem(10, "142913828922"):
  def solve = primesBelow(2000000).foldLeft(BigInt(0))(_ + _)
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/19.scala
|
<gh_stars>1-10
package net.tisue.euler
// How many Sundays fell on the first of the month during the
// twentieth century (1 Jan 1901 to 31 Dec 2000)?
class Problem19 extends Problem(19, "171"):
  // The Java APIs for this are imperative so it's a bit awkward
  import java.util.Calendar
  // a fresh Calendar positioned at 1 Jan 1901
  def centuryBegin =
    val c = Calendar.getInstance
    c.setTime(java.text.SimpleDateFormat("MMM d yyyy")
      .parse("Jan 1 1901"))
    c
  // returns a copy advanced by one month; cloning keeps the input unmutated
  def nextMonth(cal: Calendar) =
    val newCal = cal.clone.asInstanceOf[Calendar]
    newCal.add(java.util.Calendar.MONTH, 1)
    newCal
  def solve =
    // 1200 = 100 years x 12 first-of-the-month dates
    LazyList.iterate(centuryBegin)(nextMonth)
      .take(1200)
      .count(_.get(Calendar.DAY_OF_WEEK) == Calendar.SUNDAY)
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/87.scala
|
<reponame>SethTisue/Project-Euler<gh_stars>1-10
package net.tisue.euler
import Primes.*
// Problem 87: count the numbers below fifty million expressible as
// (prime)^2 + (prime)^3 + (prime)^4. (Exactly four such numbers lie below
// fifty.)
class Problem87 extends Problem(87, "1097343"):
  def solve(limit: Int) =
    val squares = primes.map(p => p * p).takeWhile(_ < limit)
    val cubes   = primes.map(p => p * p * p).takeWhile(_ < limit)
    val fourths = primes.map(p => p * p * p * p).takeWhile(_ < limit)
    val sums =
      for
        a <- fourths
        b <- cubes
        // bound the innermost loop so a + b + c stays within the limit
        c <- squares.takeWhile(_ <= limit - a - b)
      yield a + b + c
    // distinct totals only: different prime triples can hit the same sum
    sums.toSet.size
  def solve = solve(50000000)
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/72.scala
|
package net.tisue.euler
import Primes.*
// Problem 72: count the reduced proper fractions with denominator up to
// 1,000,000 (for denominators up to 8 there are 21). Each denominator d
// contributes phi(d) fractions, so this is a totient sum — see
// http://en.wikipedia.org/wiki/Euler%27s_totient_function
// (that page also gives a Moebius-function formula for totient sums, but the
// direct computation below is easier to follow and plenty fast).
// http://en.wikipedia.org/wiki/Farey_sequence may be useful if later
// problems continue this theme.
class Problem72 extends Problem(72, "303963552391"):
  // prime factors of n with multiplicity, peeling off the smallest each time
  def factors(n: Int): List[Int] =
    if isSievedPrime(n) then n :: Nil
    else
      val smallest = primes.find(n % _ == 0).get
      smallest :: factors(n / smallest)
  // phi(n) = n * prod((p - 1) / p) over distinct prime factors p; the
  // accumulator is Long because the final sum overflows Int
  def totient(n: Int) =
    factors(n).distinct
      .foldLeft(n.toLong)((acc, p) => acc * (p - 1) / p)
  def solve = (2 to 1000000).map(totient).sum
|
SethTisue/Project-Euler
|
src/main/scala/net/tisue/euler/BigRational.scala
|
<gh_stars>1-10
package net.tisue.euler
/// from Programming in Scala book, but converted to use BigInt
import annotation.alpha
/** Immutable arbitrary-precision rational number, kept in lowest terms
  * (adapted from the Programming in Scala book, converted to BigInt). */
class BigRational(n: BigInt, d: BigInt):
  require(d != 0)
  // reduce to lowest terms once, at construction time
  private val g = gcd(n.abs, d.abs)
  // decimal digits preserved by toDouble
  private val Precision = 17
  val numer = n / g
  val denom = d / g
  // auxiliary constructor for whole numbers
  def this(n: BigInt) = this(n, 1)
  @alpha("plus")
  def +(that: BigRational): BigRational =
    BigRational(
      numer * that.denom + that.numer * denom,
      denom * that.denom
    )
  @alpha("plus")
  def +(i: BigInt): BigRational =
    BigRational(numer + i * denom, denom)
  @alpha("minus")
  def -(that: BigRational): BigRational =
    BigRational(
      numer * that.denom - that.numer * denom,
      denom * that.denom
    )
  @alpha("minus")
  def -(i: BigInt): BigRational =
    BigRational(numer - i * denom, denom)
  @alpha("times")
  def *(that: BigRational): BigRational =
    BigRational(numer * that.numer, denom * that.denom)
  @alpha("times")
  def *(i: BigInt): BigRational =
    BigRational(numer * i, denom)
  @alpha("divide")
  def /(that: BigRational): BigRational =
    BigRational(numer * that.denom, denom * that.numer)
  @alpha("divide")
  def /(i: BigInt): BigRational =
    BigRational(numer, denom * i)
  def reciprocal: BigRational =
    BigRational(denom, numer)
  override def toString: String =
    s"$numer/$denom"
  // truncating (round-down) conversion keeping Precision decimal digits
  def toDouble: Double =
    def div(d1: BigDecimal, d2: BigDecimal) = // drop down to java.math.BigDecimal
      BigDecimal(d1.bigDecimal.divide(d2.bigDecimal, Precision, java.math.RoundingMode.DOWN))
    div(BigDecimal(numer), BigDecimal(denom))
      .setScale(Precision).doubleValue
  private def gcd(a: BigInt, b: BigInt): BigInt =
    if b == BigInt(0) then a else gcd(b, a % b)
  override def hashCode: Int =
    (numer, denom).hashCode
  // equal when the reduced forms match; any two zeros compare equal
  override def equals(other: Any): Boolean =
    other.asInstanceOf[Matchable] match // sigh: https://github.com/lampepfl/dotty/issues/10855
      case that: BigRational =>
        this.numer == that.numer && this.denom == that.denom ||
        this.numer == 0 && that.numer == 0
      case _ => false
object BigRational:
  // extractor so instances can be pattern matched as BigRational(n, d)
  def unapply(b: BigRational): Some[(BigInt,BigInt)] =
    Some((b.numer, b.denom))
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/123.scala
|
<gh_stars>1-10
package net.tisue.euler
import Primes.*
// Problem 123: least n such that ((p_n - 1)^n + (p_n + 1)^n) mod p_n^2
// first exceeds 10^10.
class Problem123 extends Problem(123, "21035"):
  // the remainder for the n-th prime (primes(n - 1): 1-based problem index)
  def r(n: Int) =
    val p = BigInt(primes(n - 1))
    ((p - 1).pow(n) + (p + 1).pow(n)) % (p * p)
  def solve =
    // only odd indices are probed, hence the n * 2 - 1 mapping
    def tooBig(n: Int) = r(n * 2 - 1) > 10000000000L // 10^10
    // bracket the crossover by doubling, then binary-search inside the bracket
    val lowerBound =
      LazyList.iterate(1)(_ * 2).takeWhile(!tooBig(_)).last
    binarySearch(lowerBound, lowerBound * 2)(tooBig) * 2 + 1
|
SethTisue/Project-Euler
|
src/test/scala/net/tisue/euler/121.scala
|
package net.tisue.euler
// Problem 121: a 15-turn disc game. The answer is the floor of the
// reciprocal of the probability of drawing more blue than red discs.
class Problem121 extends Problem(121, "2269"):
  // probability of finishing with a blue majority, given the counts of
  // blue and red discs drawn so far
  def winChance(blue: Int, red: Int): BigRational =
    val turnsTaken = blue + red
    if turnsTaken == 15 then
      BigRational(if blue > red then 1 else 0)
    else
      // on the next turn the bag holds 1 blue disc and turnsTaken + 1 red ones
      winChance(blue + 1, red) * BigRational(1, turnsTaken + 2) +
        winChance(blue, red + 1) * BigRational(turnsTaken + 1, turnsTaken + 2)
  def solve = winChance(0, 0) match
    case BigRational(n, d) => d / n
|
tamani-coding/scala-akka-http-server-example
|
src/main/scala/Main.scala
|
// akka specific
import akka.actor.typed.ActorSystem
import akka.actor.typed.scaladsl.Behaviors
// akka http specific
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.http.scaladsl.server.Directives.*
// spray specific (JSON marshalling)
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport.*
import spray.json.DefaultJsonProtocol.*
// cors
import ch.megard.akka.http.cors.scaladsl.CorsDirectives.*
// Payload exchanged with the REST endpoints; (de)serialized to JSON by the
// spray-json format declared in this file's @main method.
final case class User (id: Long, name: String, email: String)
// Entry point: boots an in-memory user REST API on 127.0.0.1:8080 with CORS.
// NOTE(review): "userSerice" looks like a typo for "userService", but renaming
// the @main method changes the generated entry-point name — confirm any
// launch scripts before fixing.
@main def userSerice: Unit =
  // actor system backing the HTTP server; no user-level behavior needed
  implicit val actorSystem = ActorSystem(Behaviors.empty, "akka-http")
  // spray-json marshaller so User can be read from / written to request bodies
  implicit val userMarshaller: spray.json.RootJsonFormat[User] = jsonFormat3(User.apply)
  // GET /hello -> plain-text liveness message
  // GET /user/<id> -> canned user for id 1, 404 for anything else
  val getUser = get {
    concat(
      path("hello") {
        complete(HttpEntity(ContentTypes.`text/plain(UTF-8)`, "Hello world from scala akka http server!"))
      },
      path("user" / LongNumber) {
        userid => {
          println("get user by id")
          userid match {
            case 1 => complete(User(userid, "Testuser", "<EMAIL>"))
            case _ => complete(StatusCodes.NotFound)
          }
        }
      }
    )
  }
  // POST /user -> echoes a canned user built from the submitted id
  val createUser = post {
    path("user") {
      entity(as[User]) {
        user => {
          println("save user")
          complete(User(user.id, "Testuser", "<EMAIL>"))
        }
      }
    }
  }
  // PUT /user -> same canned echo as create
  val updateUser = put {
    path("user") {
      entity (as[User]) {
        user => {
          println("update user")
          complete(User(user.id, "Testuser", "<EMAIL>"))
        }
      }
    }
  }
  // DELETE /user/<id> -> canned echo of the deleted id
  val deleteUser = delete {
    path ("user" / LongNumber) {
      userid => {
        println(s"user ${userid}")
        complete(User(userid, "Testuser", "<EMAIL>"))
      }
    }
  }
  // combined route tree wrapped in permissive CORS
  val route = cors() {
    concat(getUser, createUser, updateUser, deleteUser)
  }
  // bind and keep serving; the binding future is intentionally not awaited
  val bindFuture = Http().newServerAt("127.0.0.1", 8080).bind(route)
|
woggioni/sbt-delombok
|
DelombokJavadoc/src/main/scala/com/thoughtworks/sbt/DelombokJavadoc.scala
|
package com.thoughtworks.sbt
import com.thoughtworks.sbt.Delombok.autoImport._
import sbt.Keys._
import sbt._
/**
 * Wires delombok output into doc generation: when the `doc` task runs, every
 * source file that delombok translated is swapped for its translated copy.
 *
 * @author 杨博 (<NAME>)
 */
object DelombokJavadoc extends AutoPlugin {
  // activate automatically wherever the Delombok plugin is active
  override def trigger = allRequirements
  override def requires: Plugins = Delombok
  // for the Compile and Test doc tasks, substitute delomboked sources
  override def projectSettings: Seq[Def.Setting[_]] =
    Seq(Compile, Test).flatMap(
      inConfig(_)(
        inTask(doc)(
          Seq(
            sources := {
              val delombokMap = delombok.value
              sources.value.map { sourceFile: File =>
                // fall back to the original when delombok did not touch it
                delombokMap.getOrElse(sourceFile, sourceFile)
              }
            }
          )
        )
      )
    )
}
|
woggioni/sbt-delombok
|
build.sbt
|
// Shared organization for every subproject.
// NOTE(review): `organization in ThisBuild` is pre-sbt-1.x syntax for
// `ThisBuild / organization`; kept as-is for the sbt version this build uses.
organization in ThisBuild := "com.thoughtworks.sbt"
// DelombokJavadoc layers on top of the core Delombok plugin.
val Delombok = project
val DelombokJavadoc = project.dependsOn(Delombok)
|
woggioni/sbt-delombok
|
Delombok/src/main/scala/com/thoughtworks/sbt/Delombok.scala
|
<reponame>woggioni/sbt-delombok
package com.thoughtworks.sbt
import sbt.Keys._
import sbt._
import sbt.plugins.JvmPlugin
/**
 * Runs Project Lombok's delombok over a build's Java sources and exposes the
 * original-to-translated file mapping through the `delombok` task.
 *
 * @author 杨博 (<NAME>)
 */
object Delombok extends AutoPlugin {
  import autoImport._
  // enable for every project that has the JVM plugin
  override def trigger = allRequirements
  override def requires = JvmPlugin
  override def projectSettings: Seq[Def.Setting[_]] =
    Seq(Compile, Test).flatMap(
      inConfig(_)(
        inTask(delombok)(
          Seq(
            Defaults.TaskZero / delombokDirectory := target.value / "delombok",
            Defaults.TaskZero / delombok := {
              // classpath delombok needs to resolve symbols in the sources
              val classPathArgument = s"--classpath=${Path.makeString(dependencyClasspath.value.map(_.data))}"
              val targetArgument = s"--target=${delombokDirectory.value.getPath}"
              val sourceDirectoryArguments = sourceDirectories.value.map(_.getPath)
              // loose .java files that live outside every source directory
              val sourceFileArguments = for {
                sourceFile <- sources.value
                if sourceFile.ext == "java" &&
                  sourceDirectories.value.forall(sourceFile.relativeTo(_).isEmpty)
              } yield sourceFile.getPath
              // invoke delombok via lombok's launcher on the configured runner
              runner.value
                .run(
                  "lombok.launch.Main",
                  dependencyClasspath.value.map(_.data),
                  Seq("delombok", "--nocopy", "--onlyChanged", classPathArgument, targetArgument) ++
                    sourceDirectoryArguments ++ sourceFileArguments,
                  streams.value.log
                )
                .get
              // map each original .java file to its delomboked counterpart,
              // keeping only pairs whose output file actually exists
              sources.value.view.collect {
                case sourceFile if sourceFile.ext == "java" =>
                  val relativeJavaFile = sourceDirectories.value
                    .collectFirst(Function.unlift(IO.relativize(_, sourceFile)))
                    .getOrElse(sourceFile.getName)
                  sourceFile -> delombokDirectory.value / relativeJavaFile
              }.filter(_._2.exists()).toMap
            }
          )
        )
      )
    )
  object autoImport {
    val delombokDirectory = settingKey[File]("Directory to save delomboked files to")
    val delombok = taskKey[Map[File, File]]("Run delombok and return translated file pairs")
  }
}
|
woggioni/sbt-delombok
|
DelombokJavadoc/build.sbt
|
<reponame>woggioni/sbt-delombok<filename>DelombokJavadoc/build.sbt
// Mark this subproject as an sbt plugin so it is packaged and loaded as one.
sbtPlugin := true
|
mscharley/drupal-deps
|
build.sbt
|
// Minimal build definition for the drupal-deps command-line tool.
name := "drupal-deps"
scalaVersion := "2.11.8"
|
mscharley/drupal-deps
|
src/main/scala/Main.scala
|
import java.nio.file._
import scala.collection.JavaConverters._
object Main {
  // Scans a Drupal tree for module .info files and prints a Graphviz digraph
  // of module dependencies, clustered by owning project.
  def main(args: Array[String]): Unit = {
    // scan root: first CLI argument, else the current directory
    val start =
      if (args.length > 0) Paths.get(args(0))
      else Paths.get("./")
    // slug -> parsed module, for every *.info file under the root
    val modules = Files
      .walk(start)
      .iterator()
      .asScala
      .filter { f =>
        f.getFileName.toString.endsWith(".info")
      }
      .map {
        DrupalModule.fromFile
      }
      .foldLeft(Map[String, DrupalModule[String]]()) { (m, dm) =>
        m + (dm.slug -> dm)
      }
    // dependencies referenced but absent on disk, modeled as stub modules
    val uninstalledDependencies = modules
      .flatMap {
        _._2.dependencies
      }
      .toSet
      .filter { m =>
        !modules.isDefinedAt(m)
      }
      .foldLeft(Map[String, DrupalModule[String]]()) { (m, dm) =>
        m + (dm -> DrupalModule(dm, dm, dm, Vector()))
      }
    // modules with their dependency slugs resolved to module objects
    val graph =
      modules.map {
        _._2.mapDependencies(dep => (modules orElse uninstalledDependencies)(dep))
      }.toSet
    // project name -> (slug -> module), driving the per-project clusters
    val projects =
      modules.foldLeft(Map[String, Map[String, DrupalModule[String]]]().withDefault(_ => Map())) {
        case (m: Map[String, Map[String, DrupalModule[String]]], (name, dm)) =>
          m + (dm.project -> (m(dm.project) + (name -> dm)))
      }
    println("digraph {")
    // one DOT subgraph cluster per project, nodes linked to drupal.org
    projects foreach {
      case ((project, mods)) =>
        println(s" subgraph cluster_${project} {")
        println(s""" label="${project}"""")
        println(""" graph [style="solid,filled",fillcolor="#DADADA"]""")
        val deps = mods flatMap {
          case ((m, dm)) =>
            // escape double quotes for the DOT format
            val slug = m.replace("\"", "\\\"")
            val name = dm.name.replace("\"", "\\\"")
            val project = dm.project.replace("\"", "\\\"")
            val deps = dm.dependencies map { _.replace("\"", "\\\"") }
            println(s""" "${slug}" [label="${name}",URL="https://drupal.org/project/${project}"]""")
            deps map { d =>
              // same-project edges connect modules; cross-project edges
              // connect whole clusters
              val targetProject =
                modules.get(d).map(_.project).getOrElse("uninstalled")
              if (project == targetProject) s""" ${slug} -> "${d}""""
              else s""" "cluster_${project}" -> "cluster_${targetProject}""""
            }
        }
        println(" }")
        // deduplicate edges before emitting them outside the cluster body
        deps.toSet foreach { println }
    }
    // a final cluster collecting the not-installed dependencies
    println(" subgraph cluster_uninstalled {")
    println(""" label="Uninstalled"""")
    println(""" graph [style="solid,filled",fillcolor="#DADADA"]""")
    uninstalledDependencies foreach {
      case ((u, dm)) =>
        val slug = u.replace("\"", "\\\"")
        println(s""" "${slug}" [URL="https://drupal.org/project/${slug}"]""")
    }
    println(" }")
    println("}")
  }
}
|
mscharley/drupal-deps
|
src/main/scala/DrupalModule.scala
|
<reponame>mscharley/drupal-deps<gh_stars>0
import java.nio.charset.Charset
import java.nio.file.{Files, Path}
import scala.collection.JavaConverters._
object DrupalModule {
  private val utf8 = Charset forName "UTF-8"
  // parse a .info file; the slug is the filename minus its ".info" suffix
  def fromFile(file: Path): DrupalModule[String] = {
    val lines = Files.readAllLines(file, utf8).asScala
    val filename = file.getFileName.toString
    val slug = filename.substring(0, filename.length - 5)
    fromLines(slug, lines)
  }
  // parse from an in-memory string (for callers without a file on disk)
  def fromString(slug: String, contents: String): DrupalModule[String] =
    fromLines(slug, contents.lines.toSeq)
  def fromLines(slug: String, lines: Seq[String]): DrupalModule[String] = {
    val config = parseConfig(lines)
    // fall back to the slug when no display name is declared
    val name = config.getOrElse("name", Vector(slug))(0)
    // heuristics: any regions* key marks a theme; e_/equiem slugs are in-house
    val project =
      if (config.keys.map { _ startsWith "regions" }.foldLeft(false) { _ || _ })
        "theme"
      else if (slug.startsWith("e_") || slug.contains("equiem")) "equiem"
      else config.getOrElse("project", Vector(slug))(0)
    // entries look like "dependencies[] = foo (>= 1.x)" — keep only the slug
    val dependencies = config.getOrElse("dependencies[]", Vector()) map { dep =>
      dep.split(" ", 2)(0)
    }
    DrupalModule(slug, name, project, dependencies)
  }
  // .info format: "key = value" lines, ';' comments, repeated keys accumulate
  private def parseConfig(lines: Seq[String]): Map[String, IndexedSeq[String]] =
    (filterComments andThen parseLines)(lines)
  private val filterComments: (Seq[String]) => Seq[String] =
    _.filter { l =>
      !l.startsWith(";") && l.contains("=")
    }
  private val parseLines: (Seq[String]) => Map[String, IndexedSeq[String]] =
    _.map { l =>
      l.split("=", 2).map { x =>
        x.trim()
      }
    }.foldLeft(Map[String, Vector[String]]().withDefault(_ => Vector())) { (m, l) =>
      // strip surrounding double quotes from quoted values
      val v =
        if (l(1).length > 2 && l(1)(0) == '\"')
          l(1).substring(1, l(1).length - 1)
        else l(1)
      m + (l(0) -> (m(l(0)) :+ v))
    }
}
// A parsed Drupal module: machine-name slug, display name, owning project,
// and dependency references of type T (raw slugs, or resolved modules).
case class DrupalModule[T](slug: String, name: String, project: String, dependencies: IndexedSeq[T]) {
  // rebuild with dependencies transformed (e.g. slug -> DrupalModule)
  def mapDependencies[U](f: T => U): DrupalModule[U] =
    copy(dependencies = dependencies.map(f))
}
|
vaibkv/websockets-exercise-someone-askedme-in-an-interview
|
src/main/scala/Server/Utils.scala
|
package Server
import java.io.{BufferedReader, InputStreamReader, PrintStream}
import java.net.Socket
object Utils {
  /** Line-oriented reader over the socket's input stream. */
  def getInputStream(socket: Socket): BufferedReader =
    new BufferedReader(new InputStreamReader(socket.getInputStream))

  /** PrintStream wrapper over the socket's output stream. */
  def getOutputStream(socket: Socket): PrintStream =
    new PrintStream(socket.getOutputStream)
}
|
vaibkv/websockets-exercise-someone-askedme-in-an-interview
|
src/main/scala/Server/UserWorker.scala
|
<filename>src/main/scala/Server/UserWorker.scala
package Server
import java.net.Socket
import scala.concurrent.Future
import Utils._
import InitServer._
import scala.concurrent.ExecutionContext.Implicits.global
// Handles one connected chat client. Construction immediately spawns a Future
// that reads colon-prefixed commands from the socket until ":quit".
case class UserWorker(socket: Socket) {
  // session state, populated by the :login command
  var userName = ""
  var isLoggedIn = false
  // TODO: maintain the list of groups this user has joined
  Future {
    handleConnection(socket)
  }
  // command loop: one line per command, dispatched on the first token
  private def handleConnection(socket: Socket): Unit = {
    var msg = ""
    val iStream = getInputStream(socket)
    val oStream = getOutputStream(socket)
    while({msg = iStream.readLine(); msg} != ":quit") {
      val tokens = msg.split("\\s+")
      val cmd = tokens.head
      cmd match {
        case ":login" => handleLogin(tokens)
        case ":msg" => handleDirectMessages(tokens, msg)
        case ":broadcast" => handleBroadcast(tokens, msg)
        case wrongCommand => oStream.println(s"Hey! that's not even a command: $wrongCommand")
      }
    }
  }
  // ":broadcast <text>" — send to every registered user except the sender
  private def handleBroadcast(tokens: Array[String], msg: String): Unit = {
    if (tokens != null && tokens.length >= 1) {
      // drop the command token; the remainder is the message body
      val actualMsg = msg.split("\\s+", 2).last
      userList.filter(kvPair => !kvPair._1.equalsIgnoreCase(this.userName)).foreach(kv => {
        getOutputStream(kv._2).println(s"Message from $userName: $actualMsg")
      })
    }
  }
  // ":login <username>" — record the name and register the socket centrally
  private def handleLogin(tokens: Array[String]): Unit = {
    if (tokens != null && tokens.length >= 2) {
      val username = tokens(1)
      this.userName = username
      this.isLoggedIn = true
      //update central list
      InitServer.userList.update(username, socket)
    } else {
      throw new Exception("you don't know how to login!")
    }
  }
  // ":msg <user> <text>" — direct message; silently dropped if target unknown
  private def handleDirectMessages(tokens: Array[String], msg: String): Unit = {
    if(tokens != null && tokens.length >= 2) {
      val actualMsg = msg.split("\\s+", 3).last
      val sendTo = tokens(1)
      val optTargetSocket = InitServer.userList.get(sendTo)
      if(optTargetSocket.isDefined) {
        val targetOutputStream = getOutputStream(optTargetSocket.get)
        targetOutputStream.println(s"$actualMsg : msg from ${this.userName}")
      }
    }
  }
}
//https://serverip:port/login?params=user&pad=
//ws starts out as http -- upgrade
//wss starts out as https -- upgrade
|
vaibkv/websockets-exercise-someone-askedme-in-an-interview
|
build.sbt
|
<filename>build.sbt
// Build definition for the websocket/chat interview exercise.
name := "segment-exercise"
version := "0.1"
scalaVersion := "2.13.6"
|
vaibkv/websockets-exercise-someone-askedme-in-an-interview
|
src/main/scala/Server/InitServer.scala
|
package Server
import java.io.InputStreamReader
import java.net.{ServerSocket, Socket}
import Utils._
import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConverters.mapAsScalaConcurrentMapConverter
import scala.collection.mutable.ListBuffer
// Chat server entry point: listens on a fixed port and hands each accepted
// connection to a UserWorker (which runs on its own Future).
object InitServer extends App {
  val port = 6000
  // shared registry of logged-in users -> their sockets; a Scala wrapper over
  // ConcurrentHashMap, read and updated concurrently by UserWorker futures
  val userList = new ConcurrentHashMap[String, Socket]().asScala //new mutable.HashSet[Int] with mutable.SynchronizedSet[Int]new ListBuffer[UserWorker]()
  val serverSocket = new ServerSocket(port)
  println("Started to listen")
  // accept loop: one UserWorker per client
  while(true) {
    val clientSocket = serverSocket.accept() //blocking call, server is listening for messages
    val userWorker = UserWorker(clientSocket)
  }
  // NOTE(review): unreachable — the accept loop above never terminates
  serverSocket.close()
}
|
tomis9/cookbook
|
scratchpad/scala/fileWriter.scala
|
import java.io._
object fileWriter {
  /** Writes a short fixed string to hello.txt in the working directory. */
  def main(args: Array[String]): Unit = {
    val pw = new PrintWriter(new File("hello.txt"))
    val to_write = "some data to write"
    // close in finally so the handle is released even if the write fails
    // (the original leaked the writer on exception)
    try pw.write(to_write)
    finally pw.close()
  }
}
|
tomis9/cookbook
|
scratchpad/scala/db.scala
|
import java.sql.{Connection,DriverManager}
/**
 * Connects to a local MySQL instance over JDBC and prints the host/user
 * columns of the mysql.user table. Fixes vs. the original: `extends App`
 * removed (App.main is final, so defining our own main alongside it does not
 * compile); locals cannot use the `= _` default initializer, so the
 * connection starts as null; the connection is now closed in a finally block
 * so it is released on every path.
 */
object ScalaJdbcConnectSelect {
  def main(args: Array[String]): Unit = {
    val url = "jdbc:mysql://localhost:8889/mysql"
    val driver = "com.mysql.jdbc.Driver"
    val username = "dyrkat"
    // TODO: load credentials from configuration/environment, not source code
    val password = "<PASSWORD>!"
    var connection: Connection = null
    try {
      Class.forName(driver) // register the MySQL JDBC driver
      connection = DriverManager.getConnection(url, username, password)
      val statement = connection.createStatement
      val rs = statement.executeQuery("SELECT host, user FROM user")
      while (rs.next) {
        val host = rs.getString("host")
        val user = rs.getString("user")
        println("host = %s, user = %s".format(host, user))
      }
    } catch {
      case e: Exception => e.printStackTrace()
    } finally {
      // release the connection even when connecting or querying fails
      if (connection != null) connection.close()
    }
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.