_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
6b5c921a793e8ef71720ec14f184b899ac5df9270d052e30fcccda75ea794660 | callum-oakley/advent-of-code | 01.clj | (ns aoc.2016.01
(:require
[aoc.vector :refer [+v *v manhattan-distance]]
[clojure.test :refer [deftest is]]))
(defn parse
  "Turns an instruction string like \"R2, L3\" into a lazy seq of
  [turn steps] pairs, e.g. ([R 2] [L 3])."
  [s]
  (for [[_ t n] (re-seq #"(R|L)(\d+)" s)]
    [(symbol t) (read-string n)]))
(defn turn
  "Rotate the :dir vector of state by 90 degrees: L maps [x y] to [-y x]
  (counter-clockwise), R maps [x y] to [y -x] (clockwise)."
  [state t]
  (update state :dir
          (fn [[x y]]
            (case t
              L [(- y) x]
              R [y (- x)]))))
(defn walk
  "Appends to :path every intermediate position (1..steps unit moves in
  direction :dir) starting from the current path tip. Intermediate points
  are recorded individually so revisits can be detected later."
  [state steps]
  (update state :path into (map #(+v (-> state :path peek) (*v % (:dir state)))
                                (range 1 (inc steps)))))
(defn path
  "Runs all [turn steps] instructions starting at [0 0] with initial
  direction [0 1], returning the full vector of visited positions
  (origin plus every intermediate step)."
  [instructions]
  (:path (reduce (fn [state [t steps]] (-> state (turn t) (walk steps)))
                 {:dir [0 1] :path [[0 0]]}
                 instructions)))
(defn part-1
  "Manhattan distance from the origin to the final position of the walk."
  [instructions]
  (manhattan-distance (peek (path instructions))))
(defn part-2
  "Manhattan distance to the first position visited twice; `reduced`
  short-circuits the reduce as soon as a position is seen again."
  [instructions]
  (manhattan-distance (reduce (fn [seen pos]
                                (if (seen pos)
                                  (reduced pos)
                                  (conj seen pos)))
                              #{}
                              (path instructions))))
(deftest test-part-1*
(is (= 5 (part-1 (parse "R2, L3"))))
(is (= 2 (part-1 (parse "R2, R2, R2"))))
(is (= 12 (part-1 (parse "R5, L5, R5, R3")))))
(deftest test-part-2*
(is (= 4 (part-2 (parse "R8, R4, R4, R8")))))
| null | https://raw.githubusercontent.com/callum-oakley/advent-of-code/da5233fc0fd3d3773d35ee747fd837c59c2b1c04/src/aoc/2016/01.clj | clojure | (ns aoc.2016.01
(:require
[aoc.vector :refer [+v *v manhattan-distance]]
[clojure.test :refer [deftest is]]))
(defn parse [s]
(map (fn [[_ turn steps]] [(symbol turn) (read-string steps)])
(re-seq #"(R|L)(\d+)" s)))
(defn turn [state t]
(case t
L (update state :dir (fn [[x y]] [(- y) x]))
R (update state :dir (fn [[x y]] [y (- x)]))))
(defn walk [state steps]
(update state :path into (map #(+v (-> state :path peek) (*v % (:dir state)))
(range 1 (inc steps)))))
(defn path [instructions]
(:path (reduce (fn [state [t steps]] (-> state (turn t) (walk steps)))
{:dir [0 1] :path [[0 0]]}
instructions)))
(defn part-1 [instructions]
(manhattan-distance (peek (path instructions))))
(defn part-2 [instructions]
(manhattan-distance (reduce (fn [seen pos]
(if (seen pos)
(reduced pos)
(conj seen pos)))
#{}
(path instructions))))
(deftest test-part-1*
(is (= 5 (part-1 (parse "R2, L3"))))
(is (= 2 (part-1 (parse "R2, R2, R2"))))
(is (= 12 (part-1 (parse "R5, L5, R5, R3")))))
(deftest test-part-2*
(is (= 4 (part-2 (parse "R8, R4, R4, R8")))))
| |
3a4958973628941bc0c2d49dfbdfc06c1e1cc056f80e03756fd9f62ac49ecd5e | danieljharvey/mimsa | Repl.hs | # LANGUAGE DerivingStrategies #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS -Wno-orphans #-}
module Calc.Repl
( repl,
)
where
import qualified Calc.Compile.RunLLVM as Run
import Calc.Compile.ToLLVM
import Calc.Parser
import Calc.Parser.Types
import Control.Monad.IO.Class
import Data.Text (Text)
import qualified Data.Text as T
import Data.Void
import qualified Error.Diagnose as Diag
import Error.Diagnose.Compat.Megaparsec
import System.Console.Haskeline
-- | Megaparsec custom errors are 'Void' here, so no extra hints are
-- attached to the rendered diagnostics.
instance HasHints Void msg where
  hints _ = mempty
-- | Interactive read-eval-print loop for llvm-calc: reads a line, parses
-- it, compiles the expression to LLVM and runs it, printing the result.
-- Exits on EOF or the @:quit@ command.
repl :: IO ()
repl = do
  putStrLn "Welcome to llvm-calc"
  putStrLn "Exit with :quit"
  runInputT defaultSettings loop
  where
    loop :: InputT IO ()
    loop = do
      minput <- getInputLine ":> "
      case minput of
        -- EOF (Ctrl-D): leave the loop.
        Nothing -> return ()
        Just ":quit" -> return ()
        Just input -> do
          case parseExpr (T.pack input) of
            -- Parse failure: render a pretty diagnostic to stderr, keep looping.
            Left bundle -> do
              Diag.printDiagnostic
                Diag.stderr
                True
                True
                4
                Diag.defaultStyle
                (fromErrorBundle bundle input)
              loop
            -- Parse success: compile via toLLVM, run it, print the result.
            Right expr -> do
              resp <- liftIO $ fmap Run.rrResult (Run.run (toLLVM expr))
              liftIO $ putStrLn (T.unpack resp)
              loop
-- | turn error + input into a Diagnostic
fromErrorBundle :: ParseErrorType -> String -> Diag.Diagnostic Text
fromErrorBundle bundle input =
  let diag =
        errorDiagnosticFromBundle
          Nothing
          "Parse error on input"
          Nothing
          bundle
   -- Register the raw input under replFilename (defined elsewhere in this
   -- module) so the diagnostic can show source excerpts.
   in Diag.addFile diag replFilename input
| null | https://raw.githubusercontent.com/danieljharvey/mimsa/296ab9bcbdbaf682fa76921ce3c80d4bbafb52ae/llvm-calc/src/Calc/Repl.hs | haskell | # LANGUAGE OverloadedStrings #
# OPTIONS -Wno-orphans # | # LANGUAGE DerivingStrategies #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
module Calc.Repl
( repl,
)
where
import qualified Calc.Compile.RunLLVM as Run
import Calc.Compile.ToLLVM
import Calc.Parser
import Calc.Parser.Types
import Control.Monad.IO.Class
import Data.Text (Text)
import qualified Data.Text as T
import Data.Void
import qualified Error.Diagnose as Diag
import Error.Diagnose.Compat.Megaparsec
import System.Console.Haskeline
instance HasHints Void msg where
hints _ = mempty
repl :: IO ()
repl = do
putStrLn "Welcome to llvm-calc"
putStrLn "Exit with :quit"
runInputT defaultSettings loop
where
loop :: InputT IO ()
loop = do
minput <- getInputLine ":> "
case minput of
Nothing -> return ()
Just ":quit" -> return ()
Just input -> do
case parseExpr (T.pack input) of
Left bundle -> do
Diag.printDiagnostic
Diag.stderr
True
True
4
Diag.defaultStyle
(fromErrorBundle bundle input)
loop
Right expr -> do
resp <- liftIO $ fmap Run.rrResult (Run.run (toLLVM expr))
liftIO $ putStrLn (T.unpack resp)
loop
| turn error + input into a Diagnostic
fromErrorBundle :: ParseErrorType -> String -> Diag.Diagnostic Text
fromErrorBundle bundle input =
let diag =
errorDiagnosticFromBundle
Nothing
"Parse error on input"
Nothing
bundle
in Diag.addFile diag replFilename input
|
115a0972420ca9fc7fcfcd593c10fd245f2f0632afb04e4ab47ec2a79cc8461c | UU-ComputerScience/uhc | DerivingRead1.hs | {- ----------------------------------------------------------------------------------------
what : deriving, for Read (and Show), on some datatype
expected: ok
---------------------------------------------------------------------------------------- -}
module DerivingRead1 where
data T a = S | L Int | T a a | a :^: a
deriving (Show,Read)
-- Round-trips several values of T through Read and prints them via Show.
main :: IO ()
main =
  mapM_
    (putStrLn . show)
    ([ read "S"
     , read "L 5"
     , read "T 2 3"
     , read "5 :^: 4"
     ] :: [T Int])
| null | https://raw.githubusercontent.com/UU-ComputerScience/uhc/f2b94a90d26e2093d84044b3832a9a3e3c36b129/EHC/test/regress/99/DerivingRead1.hs | haskell | ----------------------------------------------------------------------------------------
what : deriving, for Read (and Show), on some datatype
expected: ok
---------------------------------------------------------------------------------------- |
module DerivingRead1 where
data T a = S | L Int | T a a | a :^: a
deriving (Show,Read)
main :: IO ()
main
= do putStrLn (show (read "S" :: T Int))
putStrLn (show (read "L 5" :: T Int))
putStrLn (show (read "T 2 3" :: T Int))
putStrLn (show (read "5 :^: 4" :: T Int))
|
c6f671c3fdaf5e7b47f7d5391eb83172685cdcc5a54b69dc9c95657dcee960b7 | paulrzcz/hquantlib | Models.hs | module QuantLib.Models
( module QuantLib.Models.Volatility
) where
import QuantLib.Models.Volatility | null | https://raw.githubusercontent.com/paulrzcz/hquantlib/7689b93fa9724b44755148b104ec6c6916d241c3/src/QuantLib/Models.hs | haskell | module QuantLib.Models
( module QuantLib.Models.Volatility
) where
import QuantLib.Models.Volatility | |
f2c6abaab7334c2df73121d7f327de537e277033aa6b41930c838ec641e81205 | arrdem/katamari | extensions.clj | Copyright ( c ) . All rights reserved .
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (-1.0.php)
; which can be found in the file epl-v10.html at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
(ns clojure.tools.deps.alpha.extensions
(:require [me.raynes.fs :as fs]))
;; Helper for autodetect of manifest type
;; vector to control ordering
(def manifest-types
  ;; NOTE: the "project.clj" entry is intentionally disabled; the comment
  ;; marker had been lost (leaving stray tokens, including a lone `:`,
  ;; inside the vector) and is restored here.
  ["deps.edn" :deps,
   "pom.xml" :pom
   ;; "project.clj" :lein
   ])
(defn detect-manifest
  "Given a directory, detect the manifest type and return the manifest info.
  Checks manifest-types pairs in order and returns on the first file that
  exists, or nil when none do."
  [dir]
  (some (fn [[file-name manifest-type]]
          (let [f (fs/file dir file-name)]
            (when (and (.exists f) (.isFile f))
              {:deps/manifest manifest-type, :deps/root dir})))
        (partition 2 manifest-types)))
;; Methods switching on coordinate type
(defn coord-type
  "The namespace (as a keyword) of the only qualified key in the coordinate,
  excluding the reserved deps namespace. Returns nil for non-map coordinates
  or when no such key exists."
  [coord]
  (when (map? coord)
    (some->> (keys coord)
             (keep namespace)
             (remove #(= "deps" %))
             first
             keyword)))
(defmulti lib-location
"Takes a coordinate and returns the location where the lib would be
installed locally. Location keys:
:base local repo base directory path
:path path within base dir
:type coordinate type"
(fn [lib coord config] (coord-type coord)))
(defmulti canonicalize
"Takes a lib and coordinate and returns a canonical form.
For example, a Maven coordinate might resolve LATEST to a version or a Git
coordinate might resolve a partial sha to a full sha. Returns [lib coord]."
(fn [lib coord config] (coord-type coord)))
(defmethod canonicalize :default [lib coord config]
[lib coord])
(defmulti dep-id
"Returns an identifier value that can be used to detect a lib/coord cycle while
expanding deps. This will only be called after canonicalization so it can rely
on the canonical form."
(fn [lib coord config] (coord-type coord)))
(defn- throw-bad-coord
  ;; Shared error path for multimethods whose dispatch found no extension
  ;; loaded for this coordinate's type.
  [lib coord]
  (throw (ex-info (str "Coordinate type " (coord-type coord) " not loaded for library " lib " in coordinate " (pr-str coord))
    {:lib lib :coord coord})))
(defmethod dep-id :default [lib coord config]
(throw-bad-coord lib coord))
(defmulti manifest-type
"Takes a lib, a coord, and the root config. Dispatch based on the
coordinate type. Detect and return the manifest type and location
for this coordinate."
(fn [lib coord config] (coord-type coord)))
(defmethod manifest-type :default [lib coord config]
(throw-bad-coord lib coord))
(defmulti coord-summary
"Takes a coord, and returns a concise description, used when printing tree"
(fn [lib coord] (coord-type coord)))
(defmethod coord-summary :default [lib coord]
(str lib " " (coord-type coord)))
;; Version comparison, either within or across coordinate types
(defmulti compare-versions
"Given two coordinates, use this as a comparator returning a negative number, zero,
or positive number when coord-x is logically 'less than', 'equal to', or 'greater than'
coord-y. The dispatch occurs on the type of x and y."
(fn [lib coord-x coord-y config] [(coord-type coord-x) (coord-type coord-y)]))
(defmethod compare-versions :default
[lib coord-x coord-y config]
(throw (ex-info (str "Unable to compare versions for " lib ": " (pr-str coord-x) " and " (pr-str coord-y))
{:lib lib :coord-x coord-x :coord-y coord-y})))
;; Methods switching on manifest type
(defn- throw-bad-manifest
  ;; Shared error path for manifest-dispatched multimethods: distinguishes
  ;; "manifest type known but not loaded" from "manifest type not detected".
  [lib coord manifest-type]
  (if manifest-type
    (throw (ex-info (str "Manifest type " manifest-type " not loaded when finding deps for " lib " in coordinate " (pr-str coord))
             {:lib lib :coord coord}))
    (throw (ex-info (str "Manifest type not detected when finding deps for " lib " in coordinate " (pr-str coord))
             {:lib lib :coord coord}))))
(defmulti coord-deps
"Return coll of immediate [lib coord] external deps for this library."
(fn [lib coord manifest-type config] manifest-type))
(defmethod coord-deps :default [lib coord manifest-type config]
(throw-bad-manifest lib coord manifest-type))
(defmulti coord-paths
"Return coll of classpath roots for this library on disk."
(fn [lib coord manifest-type config] manifest-type))
(defmethod coord-paths :default [lib coord manifest-type config]
(throw-bad-manifest lib coord manifest-type))
| null | https://raw.githubusercontent.com/arrdem/katamari/55e2da2c37c02774a1332e410ceebee0a0742d27/src/clojure-tools-deps/src/clojure/tools/deps/alpha/extensions.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
Helper for autodetect of manifest type
vector to control ordering
Methods switching on coordinate type
Version comparison, either within or across coordinate types
Methods switching on manifest type | Copyright ( c ) . All rights reserved .
(ns clojure.tools.deps.alpha.extensions
(:require [me.raynes.fs :as fs]))
(def manifest-types
["deps.edn" :deps,
"pom.xml" :pom
" project.clj " :
])
(defn detect-manifest
"Given a directory, detect the manifest type and return the manifest info."
[dir]
(loop [[file-name manifest-type & others] manifest-types]
(when file-name
(let [f (fs/file dir file-name)]
(if (and (.exists f) (.isFile f))
{:deps/manifest manifest-type, :deps/root dir}
(recur others))))))
(defn coord-type
"The namespace (as a keyword) of the only qualified key in the coordinate,
excluding the reserved deps namespace."
[coord]
(when (map? coord)
(->> coord keys (keep namespace) (remove #(= "deps" %)) first keyword)))
(defmulti lib-location
"Takes a coordinate and returns the location where the lib would be
installed locally. Location keys:
:base local repo base directory path
:path path within base dir
:type coordinate type"
(fn [lib coord config] (coord-type coord)))
(defmulti canonicalize
"Takes a lib and coordinate and returns a canonical form.
For example, a Maven coordinate might resolve LATEST to a version or a Git
coordinate might resolve a partial sha to a full sha. Returns [lib coord]."
(fn [lib coord config] (coord-type coord)))
(defmethod canonicalize :default [lib coord config]
[lib coord])
(defmulti dep-id
"Returns an identifier value that can be used to detect a lib/coord cycle while
expanding deps. This will only be called after canonicalization so it can rely
on the canonical form."
(fn [lib coord config] (coord-type coord)))
(defn- throw-bad-coord
[lib coord]
(throw (ex-info (str "Coordinate type " (coord-type coord) " not loaded for library " lib " in coordinate " (pr-str coord))
{:lib lib :coord coord})))
(defmethod dep-id :default [lib coord config]
(throw-bad-coord lib coord))
(defmulti manifest-type
"Takes a lib, a coord, and the root config. Dispatch based on the
coordinate type. Detect and return the manifest type and location
for this coordinate."
(fn [lib coord config] (coord-type coord)))
(defmethod manifest-type :default [lib coord config]
(throw-bad-coord lib coord))
(defmulti coord-summary
"Takes a coord, and returns a concise description, used when printing tree"
(fn [lib coord] (coord-type coord)))
(defmethod coord-summary :default [lib coord]
(str lib " " (coord-type coord)))
(defmulti compare-versions
"Given two coordinates, use this as a comparator returning a negative number, zero,
or positive number when coord-x is logically 'less than', 'equal to', or 'greater than'
coord-y. The dispatch occurs on the type of x and y."
(fn [lib coord-x coord-y config] [(coord-type coord-x) (coord-type coord-y)]))
(defmethod compare-versions :default
[lib coord-x coord-y config]
(throw (ex-info (str "Unable to compare versions for " lib ": " (pr-str coord-x) " and " (pr-str coord-y))
{:lib lib :coord-x coord-x :coord-y coord-y})))
(defn- throw-bad-manifest
[lib coord manifest-type]
(if manifest-type
(throw (ex-info (str "Manifest type " manifest-type " not loaded when finding deps for " lib " in coordinate " (pr-str coord))
{:lib lib :coord coord}))
(throw (ex-info (str "Manifest type not detected when finding deps for " lib " in coordinate " (pr-str coord))
{:lib lib :coord coord}))))
(defmulti coord-deps
"Return coll of immediate [lib coord] external deps for this library."
(fn [lib coord manifest-type config] manifest-type))
(defmethod coord-deps :default [lib coord manifest-type config]
(throw-bad-manifest lib coord manifest-type))
(defmulti coord-paths
"Return coll of classpath roots for this library on disk."
(fn [lib coord manifest-type config] manifest-type))
(defmethod coord-paths :default [lib coord manifest-type config]
(throw-bad-manifest lib coord manifest-type))
|
ce212a29850b37ccf00674cc5c3bbc4cd3595709dd87363cdebb29214c8c28e1 | input-output-hk/ouroboros-network | Common.hs | {-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingStrategies #-}
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE TypeFamilies #
-- | Various things common to iterations of the Praos protocol.
module Ouroboros.Consensus.Protocol.Praos.Common (
MaxMajorProtVer (..)
, PraosCanBeLeader (..)
, PraosChainSelectView (..)
-- * node support
, PraosNonces (..)
, PraosProtocolSupportsNode (..)
) where
import qualified Cardano.Crypto.VRF as VRF
import Cardano.Ledger.BaseTypes (Nonce, Version)
import Cardano.Ledger.Crypto (Crypto, VRF)
import Cardano.Ledger.Keys (KeyHash, KeyRole (BlockIssuer))
import qualified Cardano.Ledger.Shelley.API as SL
import qualified Cardano.Protocol.TPraos.OCert as OCert
import Cardano.Slotting.Block (BlockNo)
import Cardano.Slotting.Slot (SlotNo)
import Data.Function (on)
import Data.Map.Strict (Map)
import Data.Ord (Down (Down))
import Data.Word (Word64)
import GHC.Generics (Generic)
import NoThunks.Class (NoThunks)
import Ouroboros.Consensus.Protocol.Abstract
-- | The maximum major protocol version.
--
-- Must be at least the current major protocol version. For Cardano mainnet, the
-- era has major protocol version __2__.
newtype MaxMajorProtVer = MaxMajorProtVer
{ getMaxMajorProtVer :: Version
}
deriving (Eq, Show, Generic)
deriving newtype NoThunks
-- | View of the ledger tip for chain selection.
--
-- We order between chains as follows:
--
-- 1. By chain length, with longer chains always preferred.
-- 2. If the tip of each chain was issued by the same agent, then we prefer
--    the chain whose tip has the highest ocert issue number.
-- 3. By a VRF value from the chain tip, with lower values preferred. See
--    @pTieBreakVRFValue@ for which one is used.
data PraosChainSelectView c = PraosChainSelectView
{ csvChainLength :: BlockNo,
csvSlotNo :: SlotNo,
csvIssuer :: SL.VKey 'SL.BlockIssuer c,
csvIssueNo :: Word64,
csvTieBreakVRF :: VRF.OutputVRF (VRF c)
}
deriving (Show, Eq, Generic, NoThunks)
-- Lexicographic comparison via the Monoid instance of Ordering: chain
-- length first, then (same issuer only) the ocert issue number, then the
-- VRF tie-breaker with *lower* values preferred (hence Down).
instance Crypto c => Ord (PraosChainSelectView c) where
  compare =
    mconcat
      [ compare `on` csvChainLength,
        whenSame csvIssuer (compare `on` csvIssueNo),
        compare `on` Down . csvTieBreakVRF
      ]
    where
      -- When the @a@s are equal, use the given comparison function,
      -- otherwise, no preference.
      whenSame ::
        Eq a =>
        (view -> a) ->
        (view -> view -> Ordering) ->
        (view -> view -> Ordering)
      whenSame f comp v1 v2
        | f v1 == f v2 =
            comp v1 v2
        | otherwise =
            EQ
data PraosCanBeLeader c = PraosCanBeLeader
{ -- | Certificate delegating rights from the stake pool cold key (or
-- genesis stakeholder delegate cold key) to the online KES key.
praosCanBeLeaderOpCert :: !(OCert.OCert c),
-- | Stake pool cold key or genesis stakeholder delegate cold key.
praosCanBeLeaderColdVerKey :: !(SL.VKey 'SL.BlockIssuer c),
praosCanBeLeaderSignKeyVRF :: !(SL.SignKeyVRF c)
}
deriving (Generic)
instance Crypto c => NoThunks (PraosCanBeLeader c)
-- | See 'PraosProtocolSupportsNode'
data PraosNonces = PraosNonces {
candidateNonce :: !Nonce
, epochNonce :: !Nonce
, evolvingNonce :: !Nonce
-- | Nonce constructed from the hash of the Last Applied Block
, labNonce :: !Nonce
-- | Nonce corresponding to the LAB nonce of the last block of the previous
-- epoch
, previousLabNonce :: !Nonce
}
-- | The node has Praos-aware code that inspects nonces in order to support
-- some Cardano API queries that are crucial to the user exprience
--
-- The interface being used for that has grown and needs review, but we're
-- adding to it here under time pressure. See
-- <https://github.com/input-output-hk/cardano-node/issues/3864>
class ConsensusProtocol p => PraosProtocolSupportsNode p where
type PraosProtocolSupportsNodeCrypto p
getPraosNonces :: proxy p -> ChainDepState p -> PraosNonces
getOpCertCounters :: proxy p -> ChainDepState p -> Map (KeyHash 'BlockIssuer (PraosProtocolSupportsNodeCrypto p)) Word64
| null | https://raw.githubusercontent.com/input-output-hk/ouroboros-network/17889be3e1b6d9b5ee86022b91729837051e6fbb/ouroboros-consensus-protocol/src/Ouroboros/Consensus/Protocol/Praos/Common.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE DeriveAnyClass #
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
* node support
| The maximum major protocol version.
Must be at least the current major protocol version. For Cardano mainnet, the
| View of the ledger tip for chain selection.
We order between chains as follows:
the chain whose tip has the highest ocert issue number.
@pTieBreakVRFValue@ for which one is used.
otherwise, no preference.
| Certificate delegating rights from the stake pool cold key (or
genesis stakeholder delegate cold key) to the online KES key.
| Stake pool cold key or genesis stakeholder delegate cold key.
| See 'PraosProtocolSupportsNode'
| Nonce constructed from the hash of the Last Applied Block
| Nonce corresponding to the LAB nonce of the last block of the previous
epoch
| The node has Praos-aware code that inspects nonces in order to support
some Cardano API queries that are crucial to the user exprience
The interface being used for that has grown and needs review, but we're
adding to it here under time pressure. See | # LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE TypeFamilies #
| Various things common to iterations of the Praos protocol .
module Ouroboros.Consensus.Protocol.Praos.Common (
MaxMajorProtVer (..)
, PraosCanBeLeader (..)
, PraosChainSelectView (..)
, PraosNonces (..)
, PraosProtocolSupportsNode (..)
) where
import qualified Cardano.Crypto.VRF as VRF
import Cardano.Ledger.BaseTypes (Nonce, Version)
import Cardano.Ledger.Crypto (Crypto, VRF)
import Cardano.Ledger.Keys (KeyHash, KeyRole (BlockIssuer))
import qualified Cardano.Ledger.Shelley.API as SL
import qualified Cardano.Protocol.TPraos.OCert as OCert
import Cardano.Slotting.Block (BlockNo)
import Cardano.Slotting.Slot (SlotNo)
import Data.Function (on)
import Data.Map.Strict (Map)
import Data.Ord (Down (Down))
import Data.Word (Word64)
import GHC.Generics (Generic)
import NoThunks.Class (NoThunks)
import Ouroboros.Consensus.Protocol.Abstract
era has major protocol verison _ _ 2 _ _ .
newtype MaxMajorProtVer = MaxMajorProtVer
{ getMaxMajorProtVer :: Version
}
deriving (Eq, Show, Generic)
deriving newtype NoThunks
1 . By chain length , with longer chains always preferred .
2 . If the tip of each chain was issued by the same agent , then we prefer
3 . By a VRF value from the chain tip , with lower values preferred . See
data PraosChainSelectView c = PraosChainSelectView
{ csvChainLength :: BlockNo,
csvSlotNo :: SlotNo,
csvIssuer :: SL.VKey 'SL.BlockIssuer c,
csvIssueNo :: Word64,
csvTieBreakVRF :: VRF.OutputVRF (VRF c)
}
deriving (Show, Eq, Generic, NoThunks)
instance Crypto c => Ord (PraosChainSelectView c) where
compare =
mconcat
[ compare `on` csvChainLength,
whenSame csvIssuer (compare `on` csvIssueNo),
compare `on` Down . csvTieBreakVRF
]
where
When the @a@s are equal , use the given comparison function ,
whenSame ::
Eq a =>
(view -> a) ->
(view -> view -> Ordering) ->
(view -> view -> Ordering)
whenSame f comp v1 v2
| f v1 == f v2 =
comp v1 v2
| otherwise =
EQ
data PraosCanBeLeader c = PraosCanBeLeader
praosCanBeLeaderOpCert :: !(OCert.OCert c),
praosCanBeLeaderColdVerKey :: !(SL.VKey 'SL.BlockIssuer c),
praosCanBeLeaderSignKeyVRF :: !(SL.SignKeyVRF c)
}
deriving (Generic)
instance Crypto c => NoThunks (PraosCanBeLeader c)
data PraosNonces = PraosNonces {
candidateNonce :: !Nonce
, epochNonce :: !Nonce
, evolvingNonce :: !Nonce
, labNonce :: !Nonce
, previousLabNonce :: !Nonce
}
< -output-hk/cardano-node/issues/3864 >
class ConsensusProtocol p => PraosProtocolSupportsNode p where
type PraosProtocolSupportsNodeCrypto p
getPraosNonces :: proxy p -> ChainDepState p -> PraosNonces
getOpCertCounters :: proxy p -> ChainDepState p -> Map (KeyHash 'BlockIssuer (PraosProtocolSupportsNodeCrypto p)) Word64
|
0027430f0809f58fd65740ffa97447792344a4a887d8a26dc16dd744a76fc12c | al3623/rippl | ast.mli | type expr =
| IntLit of int | FloatLit of float | BoolLit of bool
| CharLit of char | WildCard
| Add | Sub | Mult | Div | Mod | Pow
| AddF | SubF | MultF | DivF | PowF | Neg | NegF
| Eq | EqF | Neq | NeqF | Geq | GeqF | Leq | LeqF
| Less | LessF | Greater | GreaterF
| And | Or | Not
| Cons | Cat | Len | Head | Tail
(* Tuple operations *)
| First | Sec
| Tuple of (expr * expr)
(* Maybe operations *)
| Is_none | From_just | Just of expr | None
| Int_to_float
| Var of string
| Let of (assign * expr)
| Lambda of (string * expr)
| App of (expr * expr)
| Ite of (expr * expr * expr)
| ListComp of (expr * clause list)
| ListRange of (expr * expr)
| ListLit of expr list
and clause =
| ListVBind of (string * expr)
| Filter of expr
and assign = Assign of (string * expr)
type decl =
| Annot of (string * ty)
| Vdef of (string * expr)
and ty = Int | Bool | Float | Char
| Tvar of string
| Tarrow of (ty * ty)
| TconList of ty
| TconTuple of (ty * ty)
| Tforall of ((string list) * ty)
| Tmaybe of ty
type program = decl list
(* A named lambda together with type information.
   NOTE(review): ltyp/rtyp presumably hold the argument and result types,
   and lexp/rexp the corresponding expressions -- confirm against the
   type checker / code generator that consumes this record. *)
type lambda_def = {
    lname: string;
    ltyp: ty;
    rtyp: ty;
    lexp: expr;
    rexp: expr;
}
| null | https://raw.githubusercontent.com/al3623/rippl/a7e5c24f67935b3513324148279791042856a1fa/src/ast.mli | ocaml | Tuple operations
Maybe operations | type expr =
| IntLit of int | FloatLit of float | BoolLit of bool
| CharLit of char | WildCard
| Add | Sub | Mult | Div | Mod | Pow
| AddF | SubF | MultF | DivF | PowF | Neg | NegF
| Eq | EqF | Neq | NeqF | Geq | GeqF | Leq | LeqF
| Less | LessF | Greater | GreaterF
| And | Or | Not
| Cons | Cat | Len | Head | Tail
| First | Sec
| Tuple of (expr * expr)
| Is_none | From_just | Just of expr | None
| Int_to_float
| Var of string
| Let of (assign * expr)
| Lambda of (string * expr)
| App of (expr * expr)
| Ite of (expr * expr * expr)
| ListComp of (expr * clause list)
| ListRange of (expr * expr)
| ListLit of expr list
and clause =
| ListVBind of (string * expr)
| Filter of expr
and assign = Assign of (string * expr)
type decl =
| Annot of (string * ty)
| Vdef of (string * expr)
and ty = Int | Bool | Float | Char
| Tvar of string
| Tarrow of (ty * ty)
| TconList of ty
| TconTuple of (ty * ty)
| Tforall of ((string list) * ty)
| Tmaybe of ty
type program = decl list
type lambda_def = {
lname: string;
ltyp: ty;
rtyp: ty;
lexp: expr;
rexp: expr;
}
|
3e2ac6ef391066cf9412955260c6884fdd4277ef7bc3256ea3267bbda14be2ef | input-output-hk/marlowe-cardano | FSSemantics.hs | {-# OPTIONS_GHC -Wno-name-shadowing #-}
# OPTIONS_GHC -Wno - incomplete - uni - patterns #
# OPTIONS_GHC -Wno - incomplete - patterns #
{-# OPTIONS_GHC -Wno-unused-matches #-}
{-# OPTIONS_GHC -Wno-unused-imports #-}
module Language.Marlowe.Analysis.FSSemantics
where
import Data.List (foldl', genericIndex)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Maybe (isNothing)
import Data.SBV
import qualified Data.SBV.Either as SE
import Data.SBV.Internals (SMTModel(..))
import qualified Data.SBV.List as SL
import qualified Data.SBV.Maybe as SM
import qualified Data.SBV.Tuple as ST
import Data.Set (Set)
import qualified Data.Set as S
import Language.Marlowe.Core.V1.Semantics
import Language.Marlowe.Core.V1.Semantics.Types
import Plutus.V2.Ledger.Api (POSIXTime(POSIXTime, getPOSIXTime))
import qualified PlutusTx.AssocMap as AssocMap
import qualified PlutusTx.Prelude as P
import qualified PlutusTx.Ratio as P
---------------------------------------------------
-- Static analysis logic and symbolic operations --
---------------------------------------------------
-- Symbolic version of Input (with symbolic value but concrete identifiers)
data SymInput = SymDeposit AccountId Party Token SInteger
| SymChoice ChoiceId SInteger
| SymNotify
-- Symbolic version of State:
-- We keep as much things concrete as possible.
-- In addition to normal State information we also store symbolic values that
-- represent the symbolic trace we are evaluating (the way we got to the current
-- part of the contract).
--
-- Symbolic trace is composed of:
--
-- *** Current transaction info
-- lowTime, highTime -- time interval for the most recent transaction
-- symInput -- input for the most recent transaction
-- whenPos -- position in the When for the most recen transaction (see trace and paramTrace)
--
-- *** Previous transaction info
-- traces -- symbolic information about previous transactions (when we reach a When we
-- consider adding the current transaction to this list)
-- first integer is lowTime, second is highTime, last integer is the position in
-- the When (which case of the When the input corresponds to 0 is timeout)
-- *** Input parameter transaction info
-- paramTrace -- this is actually an input parameter, we get it as input for the SBV
-- property and we constrain it to match traces for any of the executions,
--               SBV will try to find a paramTrace that matches, and that will be the
--               solution to the analysis (the counterexample if any). It has a fixed
--               length that is calculated as the maximum bound given by countWhens,
--               which is the maximum number of transactions that are necessary to explore
--               the whole contract. This bound is proven in TransactionBound.thy
--
-- The rest of the symbolic state just corresponds directly to State with
-- symbolic values: symAccounts, symChoices, and symBoundValues
--
-- minTime just corresponds to lowTime, because it is just a lower bound for the minimum
-- time, and it gets updated with the minimum time.
data SymState = SymState { lowTime :: SInteger
, highTime :: SInteger
, traces :: [(SInteger, SInteger, Maybe SymInput, Integer)]
, paramTrace :: [(SInteger, SInteger, SInteger, SInteger)]
, symInput :: Maybe SymInput
, whenPos :: Integer
, symAccounts :: Map (AccountId, Token) SInteger
, symChoices :: Map ChoiceId SInteger
, symBoundValues :: Map ValueId SInteger
}
-- It generates a valid symbolic interval with lower bound ms (if provided).
-- Only well-formedness (low <= high) and the optional lower bound are
-- constrained; the solver is free to pick any concrete values satisfying them.
generateSymbolicInterval :: Maybe Integer -> Symbolic (SInteger, SInteger)
generateSymbolicInterval Nothing =
  do hs <- sInteger_
     ls <- sInteger_
     constrain (ls .<= hs)
     return (ls, hs)
generateSymbolicInterval (Just ms) =
  do i@(ls, _) <- generateSymbolicInterval Nothing
     constrain (ls .>= literal ms)
     return i
-- foldWithKey for 'AssocMap.Map': strict left fold of f over the
-- (key, value) pairs in association-list order.
foldAssocMapWithKey :: (a -> k -> b -> a) -> a -> AssocMap.Map k b -> a
foldAssocMapWithKey f acc = foldl' decF acc . AssocMap.toList
  where decF a (k, v) = f a k v
-- Convert an 'AssocMap.Map' into a Map with symbolic values, which are
-- literals of the content of the original AssocMap.
toSymMap :: Ord k => SymVal v => AssocMap.Map k v -> Map k (SBV v)
toSymMap = foldAssocMapWithKey toSymItem mempty
  where toSymItem :: Ord k => SymVal v => Map k (SBV v) -> k -> v -> Map k (SBV v)
        toSymItem acc k v = M.insert k (literal v) acc
-- Create initial symbolic state; it takes an optional concrete State to serve
-- as initial state, this way analysis can be done from a half-executed contract.
-- First parameter (pt) is the input parameter trace, which is just a fixed-length
-- list of symbolic integers that are matched to trace.
-- When Nothing is passed as second parameter it acts like emptyState.
mkInitialSymState :: [(SInteger, SInteger, SInteger, SInteger)] -> Maybe State
-> Symbolic SymState
mkInitialSymState pt Nothing = do (ls, hs) <- generateSymbolicInterval Nothing
return $ SymState { lowTime = ls
, highTime = hs
, traces = []
, paramTrace = pt
, symInput = Nothing
, whenPos = 0
, symAccounts = mempty
, symChoices = mempty
, symBoundValues = mempty }
mkInitialSymState pt (Just State { accounts = accs
, choices = cho
, boundValues = bVal
, minTime = ms }) =
do (ls, hs) <- generateSymbolicInterval (Just (getPOSIXTime ms))
return $ SymState { lowTime = ls
, highTime = hs
, traces = []
, paramTrace = pt
, symInput = Nothing
, whenPos = 0
, symAccounts = toSymMap accs
, symChoices = toSymMap cho
, symBoundValues = toSymMap bVal }
-- It converts a symbolic trace into a list of 4-uples of symbolic integers,
-- this is a minimalistic representation of the counter-example trace that aims
-- to minimise the functionalities from SBV that we use (just integers) for
-- efficiency. The integers in the tuple represent:
-- 1st - time interval min time
-- 2nd - time interval max time
-- 3rd - When clause used (0 for timeout branch)
-- 4th - Symbolic value (money in deposit, chosen value in choice)
--
-- Because the param trace has fixed length we fill the unused transactions with -1,
-- these are pruned after analysis.
--
-- The identifiers for Deposit and Choice are calculated using the When clause and
-- the contract (which is concrete), and using the semantics after a counter example is
-- found.
convertRestToSymbolicTrace :: [(SInteger, SInteger, Maybe SymInput, Integer)] ->
[(SInteger, SInteger, SInteger, SInteger)] -> SBool
convertRestToSymbolicTrace [] [] = sTrue
convertRestToSymbolicTrace ((lowS, highS, inp, pos):t) ((a, b, c, d):t2) =
(lowS .== a) .&& (highS .== b) .&& (getSymValFrom inp .== c) .&& (literal pos .== d) .&&
convertRestToSymbolicTrace t t2
where
getSymValFrom :: Maybe SymInput -> SInteger
getSymValFrom Nothing = 0
getSymValFrom (Just (SymDeposit _ _ _ val)) = val
getSymValFrom (Just (SymChoice _ val)) = val
getSymValFrom (Just SymNotify) = 0
convertRestToSymbolicTrace _ _ = error "Symbolic trace is the wrong length"
-- True iff every entry of the list is the all-(-1) 4-uple; unused leading
-- slots of the fixed-length parameter trace are required to be such padding.
isPadding :: [(SInteger, SInteger, SInteger, SInteger)] -> SBool
isPadding ((a, b, c, d):t) = (a .== -1) .&& (b .== -1) .&& (c .== -1) .&&
                             (d .== -1) .&& isPadding t
isPadding [] = sTrue
-- Constrains the fixed-length parameter trace symL to match the reference
-- trace refL: any unused leading slots must be padding (-1s) and the rest
-- must equal refL element-wise. Errors out if symL is shorter than refL.
convertToSymbolicTrace :: [(SInteger, SInteger, Maybe SymInput, Integer)] ->
                          [(SInteger, SInteger, SInteger, SInteger)] -> SBool
convertToSymbolicTrace refL symL =
  let lenRefL = length refL
      lenSymL = length symL in
  if lenRefL <= lenSymL
  then let lenPadding = lenSymL - lenRefL in
       isPadding (take lenPadding symL) .&& convertRestToSymbolicTrace refL (drop lenPadding symL)
  else error "Provided symbolic trace is not long enough"
-- Symbolic version of evalValue: evaluates a (concrete) Value to a symbolic
-- integer under the given symbolic state. Absent accounts, choices, and
-- bound values all default to 0, mirroring the concrete semantics.
symEvalVal :: Value Observation -> SymState -> SInteger
symEvalVal (AvailableMoney accId tok) symState =
  M.findWithDefault (literal 0) (accId, tok) (symAccounts symState)
symEvalVal (Constant inte) symState = literal inte
symEvalVal (NegValue val) symState = - symEvalVal val symState
symEvalVal (AddValue lhs rhs) symState = symEvalVal lhs symState +
                                         symEvalVal rhs symState
symEvalVal (SubValue lhs rhs) symState = symEvalVal lhs symState -
                                         symEvalVal rhs symState
symEvalVal (MulValue lhs rhs) symState = symEvalVal lhs symState *
                                         symEvalVal rhs symState
-- Division by zero yields 0; otherwise sQuot (truncation toward zero).
symEvalVal (DivValue lhs rhs) symState =
  let n = symEvalVal lhs symState
      d = symEvalVal rhs symState
  in ite (d .== 0) 0 (n `sQuot` d)
symEvalVal (ChoiceValue choId) symState =
  M.findWithDefault (literal 0) choId (symChoices symState)
symEvalVal TimeIntervalStart symState = lowTime symState
symEvalVal TimeIntervalEnd symState = highTime symState
symEvalVal (UseValue valId) symState =
  M.findWithDefault (literal 0) valId (symBoundValues symState)
symEvalVal (Cond cond v1 v2) symState = ite (symEvalObs cond symState)
                                            (symEvalVal v1 symState)
                                            (symEvalVal v2 symState)
-- Symbolic version of evalObservation: evaluates a (concrete) Observation
-- to a symbolic boolean under the given symbolic state.
symEvalObs :: Observation -> SymState -> SBool
symEvalObs (AndObs obs1 obs2) symState = symEvalObs obs1 symState .&&
                                         symEvalObs obs2 symState
symEvalObs (OrObs obs1 obs2) symState = symEvalObs obs1 symState .||
                                        symEvalObs obs2 symState
symEvalObs (NotObs obs) symState = sNot $ symEvalObs obs symState
-- Choice identifiers are concrete, so membership is decided at generation
-- time and lifted into a symbolic literal.
symEvalObs (ChoseSomething choiceId) symState =
  literal (M.member choiceId (symChoices symState))
symEvalObs (ValueGE lhs rhs) symState = symEvalVal lhs symState .>=
                                        symEvalVal rhs symState
symEvalObs (ValueGT lhs rhs) symState = symEvalVal lhs symState .>
                                        symEvalVal rhs symState
symEvalObs (ValueLT lhs rhs) symState = symEvalVal lhs symState .<
                                        symEvalVal rhs symState
symEvalObs (ValueLE lhs rhs) symState = symEvalVal lhs symState .<=
                                        symEvalVal rhs symState
symEvalObs (ValueEQ lhs rhs) symState = symEvalVal lhs symState .==
                                        symEvalVal rhs symState
symEvalObs TrueObs _ = sTrue
symEvalObs FalseObs _ = sFalse
-- Update the symbolic state given a symbolic input (just the maps).
updateSymInput :: Maybe SymInput -> SymState -> Symbolic SymState
-- Timeout (no input): nothing to record.
updateSymInput Nothing symState = return symState
-- Deposit: only the positive part of the amount is credited (smax 0 val),
-- mirroring the semantics where non-positive deposits add nothing.
updateSymInput (Just (SymDeposit accId _ tok val)) symState =
  let resultVal = M.findWithDefault 0 (accId, tok) (symAccounts symState)
                    + smax (literal 0) val in
  return (symState {symAccounts =
                      M.insert (accId, tok) resultVal
                               (symAccounts symState)})
-- Choice: record (or overwrite) the chosen value.
updateSymInput (Just (SymChoice choId val)) symState =
  return (symState {symChoices = M.insert choId val (symChoices symState)})
-- Notify carries no data; the state is unchanged.
updateSymInput (Just SymNotify) symState = return symState
-- Moves the current transaction to the list of transactions and creates a
-- new one. It takes newLowSlot and newHighSlot as parameters because the
-- values and observations are evaluated using those, so we cannot just generate
-- them here (they are linked to the SymInput, 3rd parameter).
-- If SymInput is Nothing it means the transaction went to timeout.
-- If the transaction didn't go to timeout, we know the new transaction has maxSlot smaller
-- than timeout. If it went to timeout we know the new transaction has minSlot greater or
-- equal than timeout. We also need to check previous transaction does not have ambiguous
-- interval with the current When, because that would mean the transaction is invalid.
-- In the case of timeout it is possible we don't actually need a new transaction,
-- we can reuse the previous transaction, we model this by allowing both low and high
-- time to be equal to the ones of the previous transaction. That will typically make one
-- of the transactions useless, but we discard useless transactions by the end so that
-- is fine.
-- | Closes the current transaction (pushing it onto 'traces') and starts a
-- new one with the given interval. Returns the validity conditions for the
-- new transaction together with the updated symbolic state.
addTransaction :: SInteger -> SInteger -> Maybe SymInput -> Timeout -> SymState -> Integer
               -> Symbolic (SBool, SymState)
-- Timeout case (no input): the previous interval must end before the timeout
-- (or the new interval must coincide with the previous one, modelling reuse
-- of the previous transaction), and the new interval must start at or after
-- the timeout.
addTransaction newLowSlot newHighSlot Nothing slotTim
               symState@SymState { lowTime = oldLowSlot
                                 , highTime = oldHighSlot
                                 , traces = oldTraces
                                 , symInput = prevSymInp
                                 , whenPos = oldPos } pos =
  do let tim = getPOSIXTime slotTim
     -- Intervals must be well-formed.
     constrain (newLowSlot .<= newHighSlot)
     let conditions = ((oldHighSlot .< literal tim) .||
                       ((oldLowSlot .== newLowSlot) .&& (oldHighSlot .== newHighSlot))) .&&
                      (newLowSlot .>= literal tim)
     uSymInput <- updateSymInput Nothing
                                 (symState { lowTime = newLowSlot
                                           , highTime = newHighSlot
                                           , traces = (oldLowSlot, oldHighSlot,
                                                       prevSymInp, oldPos):oldTraces
                                           , symInput = Nothing
                                           , whenPos = pos })
     return (conditions, uSymInput)
-- Input case: both the previous and the new interval must end strictly
-- before the timeout, and time can only move forward (new low >= old low).
addTransaction newLowSlot newHighSlot newSymInput slotTim
               symState@SymState { lowTime = oldLowSlot
                                 , highTime = oldHighSlot
                                 , traces = oldTraces
                                 , symInput = prevSymInp
                                 , whenPos = oldPos } pos =
  do let tim = getPOSIXTime slotTim
     constrain (newLowSlot .<= newHighSlot)
     let conditions = (oldHighSlot .< literal tim) .&&
                      (newHighSlot .< literal tim) .&&
                      (newLowSlot .>= oldLowSlot)
     uSymInput <- updateSymInput newSymInput
                                 (symState { lowTime = newLowSlot
                                           , highTime = newHighSlot
                                           , traces = (oldLowSlot, oldHighSlot, prevSymInp, oldPos)
                                                        :oldTraces
                                           , symInput = newSymInput
                                           , whenPos = pos })
     return (conditions, uSymInput)
-- | Accumulates a warning condition into the error flag. When the analysis
-- is restricted to assertions only (first argument True), ordinary warnings
-- are ignored and the accumulated flag passes through unchanged; otherwise
-- the new warning is or-ed in.
onlyAssertionsPatch :: Bool -> SBool -> SBool -> SBool
onlyAssertionsPatch onlyAssertions warning accumulated =
  if onlyAssertions then accumulated else warning .|| accumulated
-- This is the main static analysis loop for contracts.
-- - oa -- indicates whether we want to report only failing assertions (not any warning)
-- - hasErr -- indicates whether the current symbolic execution has already
-- raised a warning (this is a necessary condition for it to be a counter-example)
-- - contract -- remaining contract
-- - sState -- symbolic state
--
-- The result of this function is a boolean that indicates whether:
-- 1. The transaction is valid (according to the semantics)
-- 2. It has issued a warning (as indicated by hasErr)
isValidAndFailsAux :: Bool -> SBool -> Contract -> SymState
                   -> Symbolic SBool
-- Close: end of execution; a counterexample exists iff a warning was raised
-- (hasErr) and the accumulated trace matches the parameter trace.
isValidAndFailsAux oa hasErr Close sState =
  return (hasErr .&& convertToSymbolicTrace ((lowTime sState, highTime sState,
                                              symInput sState, whenPos sState)
                                              :traces sState) (paramTrace sState))
-- Pay: debit the source account (capped at its balance) and, for internal
-- payees, credit the destination; flag partial and non-positive payments.
isValidAndFailsAux oa hasErr (Pay accId payee token val cont) sState =
  do let concVal = symEvalVal val sState
     let originalMoney = M.findWithDefault 0 (accId, token) (symAccounts sState)
     -- Money left in the source account after paying at most what was asked;
     -- negative here means the payment could only be partial.
     let remainingMoneyInAccount = originalMoney - smax (literal 0) concVal
     let newAccs = M.insert (accId, token) (smax (literal 0) remainingMoneyInAccount)
                            (symAccounts sState)
     -- Internal payments credit the destination with the amount actually
     -- moved (capped by originalMoney); external payees only see the debit.
     let finalSState = sState { symAccounts =
           case payee of
             (Account destAccId) ->
                M.insert (destAccId, token)
                         (smin originalMoney (smax (literal 0) concVal)
                            + M.findWithDefault 0 (destAccId, token) newAccs)
                         newAccs
             _ -> newAccs }
     isValidAndFailsAux oa (onlyAssertionsPatch
                              oa
                              ((remainingMoneyInAccount .< 0) -- Partial payment
                                 .|| (concVal .<= 0)) -- Non-positive payment
                              hasErr) cont finalSState
-- If: analyse both branches and select symbolically on the observation.
isValidAndFailsAux oa hasErr (If obs cont1 cont2) sState =
  do let obsVal = symEvalObs obs sState
     contVal1 <- isValidAndFailsAux oa hasErr cont1 sState
     contVal2 <- isValidAndFailsAux oa hasErr cont2 sState
     return (ite obsVal contVal1 contVal2)
-- When: delegate to the case-by-case loop; case positions start at 1
-- (0 is reserved for the timeout branch).
isValidAndFailsAux oa hasErr (When list timeout cont) sState =
  isValidAndFailsWhen oa hasErr list timeout cont (const $ const sFalse) sState 1
-- Let: bind the evaluated value, flagging shadowed definitions.
isValidAndFailsAux oa hasErr (Let valId val cont) sState =
  do let concVal = symEvalVal val sState
     let newBVMap = M.insert valId concVal (symBoundValues sState)
     let newSState = sState { symBoundValues = newBVMap }
     isValidAndFailsAux oa (onlyAssertionsPatch
                              oa
                              (literal (M.member valId (symBoundValues sState))) -- Shadowed definition
                              hasErr) cont newSState
-- Assert: a failed assertion is always reported, regardless of oa.
isValidAndFailsAux oa hasErr (Assert obs cont) sState =
  isValidAndFailsAux oa (hasErr .|| sNot obsVal) cont sState
  where obsVal = symEvalObs obs sState
-- | Returns sTrue iff the given symbolic integer lies within at least one
-- of the (inclusive) bounds; sFalse for an empty bound list.
ensureBounds :: SInteger -> [Bound] -> SBool
ensureBounds cho = sAny withinBound
  where withinBound (Bound lowBnd hiBnd) =
          (cho .>= literal lowBnd) .&& (cho .<= literal hiBnd)
-- | Just combines 'addTransaction' and 'isValidAndFailsAux': registers the
-- new transaction and then continues the analysis on the continuation,
-- returning the transaction-validity condition and the analysis result.
applyInputConditions :: Bool -> SInteger -> SInteger -> SBool -> Maybe SymInput -> Timeout
                     -> SymState -> Integer -> Contract
                     -> Symbolic (SBool, SBool)
applyInputConditions oa ls hs hasErr maybeSymInput timeout sState pos cont = do
  (txConditions, stateAfterTx) <- addTransaction ls hs maybeSymInput timeout sState pos
  analysisResult <- isValidAndFailsAux oa hasErr cont stateAfterTx
  pure (txConditions, analysisResult)
-- | Generates two fresh symbolic time-interval endpoints and installs them
-- in the symbolic state, returning them alongside the updated state.
addFreshSlotsToState :: SymState -> Symbolic (SInteger, SInteger, SymState)
addFreshSlotsToState sState = do
  freshLow  <- sInteger_
  freshHigh <- sInteger_
  pure (freshLow, freshHigh, sState { lowTime = freshLow, highTime = freshHigh })
-- Analysis loop for When construct. Essentially, it iterates over all the cases and
-- branches the static analysis. All parameters are the same as isValidAndFailsAux except
-- for previousMatch and pos:
-- - previousMatch - Is a function that tells whether some previous case has matched, if
-- that happened then the current case would never be reached, we keep adding conditions
-- to the function and pass it to the next iteration of isValidAndFailsWhen.
-- - pos - Is the position of the current Case clause [1..], 0 means timeout branch.
isValidAndFailsWhen :: Bool -> SBool -> [Case Contract] -> Timeout -> Contract -> (SymInput -> SymState -> SBool)
                    -> SymState -> Integer -> Symbolic SBool
-- No cases left: explore the timeout branch (position 0, no input).
isValidAndFailsWhen oa hasErr [] timeout cont previousMatch sState pos =
  do newLowSlot <- sInteger_
     newHighSlot <- sInteger_
     (cond, newTrace)
         <- applyInputConditions oa newLowSlot newHighSlot
                                 hasErr Nothing timeout sState 0 cont
     return (ite cond newTrace sFalse)
-- Deposit case: explore taking this deposit, then the remaining cases.
isValidAndFailsWhen oa hasErr (Case (Deposit accId party token val) cont:rest)
                    timeout timCont previousMatch sState pos =
  do (newLowSlot, newHighSlot, sStateWithInput) <- addFreshSlotsToState sState
     let concVal = symEvalVal val sStateWithInput
     let symInput = SymDeposit accId party token concVal
     -- If an earlier case would capture this same input, this case is
     -- unreachable for it.
     let clashResult = previousMatch symInput sStateWithInput
     -- Extend previousMatch: a deposit with the same account, party, token,
     -- and amount is captured by this case from now on.
     let newPreviousMatch otherSymInput pmSymState =
           let pmConcVal = symEvalVal val pmSymState in
           case otherSymInput of
             SymDeposit otherAccId otherParty otherToken otherConcVal ->
               if (otherAccId == accId) && (otherParty == party)
                    && (otherToken == token)
               then (otherConcVal .== pmConcVal) .|| previousMatch otherSymInput pmSymState
               else previousMatch otherSymInput pmSymState
             _ -> previousMatch otherSymInput pmSymState
     (newCond, newTrace)
         <- applyInputConditions oa newLowSlot newHighSlot
                                 (onlyAssertionsPatch oa
                                                      (concVal .<= 0) -- Non-positive deposit warning
                                                      hasErr)
                                 (Just symInput) timeout sState pos cont
     contTrace <- isValidAndFailsWhen oa hasErr rest timeout timCont
                                      newPreviousMatch sState (pos + 1)
     return (ite (newCond .&& sNot clashResult) newTrace contTrace)
-- Choice case: the chosen value is a fresh symbolic integer, constrained to
-- the declared bounds when this branch is taken.
isValidAndFailsWhen oa hasErr (Case (Choice choId bnds) cont:rest)
                    timeout timCont previousMatch sState pos =
  do (newLowSlot, newHighSlot, sStateWithInput) <- addFreshSlotsToState sState
     concVal <- sInteger_
     let symInput = SymChoice choId concVal
     let clashResult = previousMatch symInput sStateWithInput
     -- A later choice on the same identifier is shadowed whenever its value
     -- falls within this case's bounds.
     let newPreviousMatch otherSymInput pmSymState =
           case otherSymInput of
             SymChoice otherChoId otherConcVal ->
               if otherChoId == choId
               then ensureBounds otherConcVal bnds .|| previousMatch otherSymInput pmSymState
               else previousMatch otherSymInput pmSymState
             _ -> previousMatch otherSymInput pmSymState
     (newCond, newTrace)
         <- applyInputConditions oa newLowSlot newHighSlot
                                 hasErr (Just symInput) timeout sState pos cont
     contTrace <- isValidAndFailsWhen oa hasErr rest timeout timCont
                                      newPreviousMatch sState (pos + 1)
     return (ite (newCond .&& sNot clashResult .&& ensureBounds concVal bnds)
                 newTrace
                 contTrace)
-- Notify case: taken only when the observation holds in the new interval.
isValidAndFailsWhen oa hasErr (Case (Notify obs) cont:rest)
                    timeout timCont previousMatch sState pos =
  do (newLowSlot, newHighSlot, sStateWithInput) <- addFreshSlotsToState sState
     let obsRes = symEvalObs obs sStateWithInput
     let symInput = SymNotify
     let clashResult = previousMatch symInput sStateWithInput
     -- A later notify is shadowed whenever this case's observation holds.
     let newPreviousMatch otherSymInput pmSymState =
           let pmObsRes = symEvalObs obs pmSymState in
           case otherSymInput of
             SymNotify -> pmObsRes .|| previousMatch otherSymInput pmSymState
             _ -> previousMatch otherSymInput pmSymState
     (newCond, newTrace)
         <- applyInputConditions oa newLowSlot newHighSlot
                                 hasErr (Just symInput) timeout sState pos cont
     contTrace <- isValidAndFailsWhen oa hasErr rest timeout timCont
                                      newPreviousMatch sState (pos + 1)
     return (ite (newCond .&& obsRes .&& sNot clashResult) newTrace contTrace)
-- Merkleized cases: the continuation is only a hash, so the branch itself
-- cannot be explored; it still shadows later cases, so previousMatch is
-- extended exactly as for the corresponding ordinary case.
isValidAndFailsWhen oa hasErr (MerkleizedCase (Deposit accId party token val) _:rest)
                    timeout timCont previousMatch sState pos =
  let newPreviousMatch otherSymInput pmSymState =
        let pmConcVal = symEvalVal val pmSymState in
        case otherSymInput of
          SymDeposit otherAccId otherParty otherToken otherConcVal ->
            if (otherAccId == accId) && (otherParty == party)
                 && (otherToken == token)
            then (otherConcVal .== pmConcVal) .|| previousMatch otherSymInput pmSymState
            else previousMatch otherSymInput pmSymState
          _ -> previousMatch otherSymInput pmSymState in
  isValidAndFailsWhen oa hasErr rest timeout timCont
                      newPreviousMatch sState (pos + 1)
isValidAndFailsWhen oa hasErr (MerkleizedCase (Choice choId bnds) _:rest)
                    timeout timCont previousMatch sState pos =
  let newPreviousMatch otherSymInput pmSymState =
        case otherSymInput of
          SymChoice otherChoId otherConcVal ->
            if otherChoId == choId
            then ensureBounds otherConcVal bnds .|| previousMatch otherSymInput pmSymState
            else previousMatch otherSymInput pmSymState
          _ -> previousMatch otherSymInput pmSymState in
  isValidAndFailsWhen oa hasErr rest timeout timCont newPreviousMatch sState (pos + 1)
isValidAndFailsWhen oa hasErr (MerkleizedCase (Notify obs) _:rest)
                    timeout timCont previousMatch sState pos =
  let newPreviousMatch otherSymInput pmSymState =
        let pmObsRes = symEvalObs obs pmSymState in
        case otherSymInput of
          SymNotify -> pmObsRes .|| previousMatch otherSymInput pmSymState
          _ -> previousMatch otherSymInput pmSymState in
  isValidAndFailsWhen oa hasErr rest timeout timCont
                      newPreviousMatch sState (pos + 1)
--------------------------------------------------
-- Wrapper - SBV handling and result extraction --
--------------------------------------------------
-- Counts the maximum number of nested Whens. This acts as a bound for the maximum
-- necessary number of transactions for exploring the whole contract. This bound
-- has been proven in TransactionBound.thy
-- | Counts the maximum number of nested Whens in the contract; used as an
-- upper bound on the number of transactions needed to explore it fully.
countWhens :: Contract -> Integer
countWhens contract = case contract of
  Close          -> 0
  Pay _ _ _ _ c  -> countWhens c
  If _ c1 c2     -> max (countWhens c1) (countWhens c2)
  When cl _ c    -> 1 + max (countWhensCaseList cl) (countWhens c)
  Let _ _ c      -> countWhens c
  Assert _ c     -> countWhens c
-- | Same as 'countWhens' but over a case list. Merkleized continuations are
-- opaque (only a hash is available) and contribute nothing to the count.
countWhensCaseList :: [Case Contract] -> Integer
countWhensCaseList = foldr step 0
  where step (Case _ c) best           = max (countWhens c) best
        step (MerkleizedCase _ _) best = best
-- Main wrapper of the static analysis. It takes a Bool that indicates whether only
-- assertions should be checked, a Contract, a paramTrace, and an optional initial
-- State. paramTrace is actually an output parameter. We do not put it in the result of
-- this function because then we would have to return a symbolic list that would make
-- the whole process slower. It is meant to be used just with SBV, with a symbolic
-- paramTrace, and we use the symbolic paramTrace to know which is the counterexample.
-- | Builds the initial symbolic state (from the optional concrete State)
-- and runs the analysis loop with a clean error flag.
wrapper :: Bool -> Contract -> [(SInteger, SInteger, SInteger, SInteger)] -> Maybe State
        -> Symbolic SBool
wrapper oa c st maybeState =
  mkInitialSymState st maybeState >>= isValidAndFailsAux oa sFalse c
-- It generates a list of variable names for the variables that conform paramTrace.
-- The list accounts for the given number of transactions, with four
-- variables per transaction: minSlot, maxSlot, value, and branch.
generateLabels :: Integer -> [String]
generateLabels m = concatMap labelsFor [1..m]
  where
    labelsFor :: Integer -> [String]
    labelsFor n = map (actionLabel ++) ["minSlot", "maxSlot", "value", "branch"]
      where actionLabel = "action_" ++ show n ++ "_"
-- Takes a list of variable names for the paramTrace and generates the list of symbolic
-- variables. It returns the list of symbolic variables generated (one
-- 4-uple per transaction, consuming four names at a time).
generateParameters :: [String] -> Symbolic [(SInteger, SInteger, SInteger, SInteger)]
generateParameters [] = return []
generateParameters (sl:sh:v:b:rest) = do
  quadruple <- (,,,) <$> sInteger sl <*> sInteger sh <*> sInteger v <*> sInteger b
  (quadruple :) <$> generateParameters rest
generateParameters _ = error "Wrong number of labels generated"
-- | Takes the list of paramTrace variable names and the mapping of those
-- names to concrete model values, and reconstructs the ordered list of
-- concrete 4-uples, dropping padding entries (branch == -1) that correspond
-- to unused transactions.
--
-- Raises a descriptive error naming the missing label if a variable has no
-- value in the model (instead of an opaque irrefutable-pattern failure).
groupResult :: [String] -> Map String Integer -> [(Integer, Integer, Integer, Integer)]
groupResult (sl:sh:v:b:t) mappings
  | ib == -1  = rest
  | otherwise = (isl, ish, iv, ib) : rest
  where
    rest = groupResult t mappings
    -- Total lookup with a useful error message naming the missing label.
    look lbl = maybe (error ("groupResult: no model value for " ++ lbl)) id
                     (M.lookup lbl mappings)
    isl = look sl
    ish = look sh
    iv  = look v
    ib  = look b
groupResult [] _ = []
groupResult _ _ = error "Wrong number of labels generated"
-- | Reconstructs a concrete Input from a Case list, a 1-based case position,
-- and a value (deposit amount or chosen value). Position 0 or below is a
-- programming error, as is running out of cases.
caseToInput :: [Case a] -> Integer -> Integer -> Input
caseToInput [] _ _ = error "Wrong number of cases interpreting result"
caseToInput (firstCase:remaining) c v = case compare c 1 of
  GT -> caseToInput remaining (c - 1) v
  LT -> error "Negative case number"
  EQ -> case firstCase of
          Case action _ -> NormalInput $ case action of
            Deposit accId party tok _ -> IDeposit accId party tok v
            Choice choId _            -> IChoice choId v
            Notify _                  -> INotify
          MerkleizedCase _ _ ->
            error "Finding this counter example would have required finding a hash preimage"
-- Given an input, state, and contract, it runs the semantics on the transaction,
-- and it adds the transaction and warnings issued to the result as long as the
-- transaction was not useless. It assumes the transaction is either valid or useless,
-- other errors would mean the counterexample is not valid.
-- Input is passed as a combination and function from input list to transaction input and
-- input list for convenience. The list of 4-uples is passed through because it is used
-- to recursively call executeAndInterpret (co-recursive function).
computeAndContinue :: ([Input] -> TransactionInput) -> [Input] -> State -> Contract
                   -> [(Integer, Integer, Integer, Integer)]
                   -> [([TransactionInput], [TransactionWarning])]
computeAndContinue transaction inps sta cont t =
  case computeTransaction (transaction inps) sta cont of
    -- A useless transaction is skipped: continue interpreting the rest of
    -- the trace from the same state and contract.
    Error TEUselessTransaction -> executeAndInterpret sta t cont
    -- A successful transaction contributes its input and warnings, and the
    -- remaining trace is interpreted from the new state and contract.
    TransactionOutput { txOutWarnings = war
                      , txOutState = newSta
                      , txOutContract = newCont}
        -> ([transaction inps], war)
             :executeAndInterpret newSta t newCont
    -- NOTE(review): other Error values are not matched and would raise a
    -- runtime pattern-match failure; presumably unreachable for traces
    -- produced by the solver -- TODO confirm.
-- Takes a list of 4-uples (and state and contract) and interprets it as a
-- list of transactions, also computing the resulting list of warnings.
executeAndInterpret :: State -> [(Integer, Integer, Integer, Integer)] -> Contract
                    -> [([TransactionInput], [TransactionWarning])]
executeAndInterpret _ [] _ = []
executeAndInterpret sta ((l, h, v, b):t) cont
  -- b == 0 marks the timeout branch: a transaction with no inputs.
  | b == 0 = computeAndContinue transaction [] sta cont t
  | otherwise =
  -- Otherwise, reduce until quiescent to expose the When whose case list
  -- lets us reconstruct the concrete input from position b and value v.
      case reduceContractUntilQuiescent env sta cont of
        ContractQuiescent _ _ _ _ tempCont ->
          case tempCont of
            When cases _ _ -> computeAndContinue transaction
                                                 [caseToInput cases b v] sta cont t
            _ -> error "Cannot interpret result"
        _ -> error "Error reducing contract when interpreting result"
  where myTimeInterval = (POSIXTime l, POSIXTime h)
        env = Environment { timeInterval = myTimeInterval }
        transaction inputs = TransactionInput { txInterval = myTimeInterval
                                              , txInputs = inputs
                                              }
-- It wraps executeAndInterpret so that it takes an optional State, and also
-- combines the results of executeAndInterpret in one single tuple.
interpretResult :: [(Integer, Integer, Integer, Integer)] -> Contract -> Maybe State
                -> (POSIXTime, [TransactionInput], [TransactionWarning])
interpretResult [] _ _ = error "Empty result"
-- The first low bound doubles as the minTime for a fresh initial state.
interpretResult t@((l, _, _, _):_) c maybeState = (POSIXTime l, tin, twa)
  where (tin, twa) = foldl' (\(accInp, accWarn) (elemInp, elemWarn) ->
                                (accInp ++ elemInp, accWarn ++ elemWarn)) ([], []) $
                            executeAndInterpret initialState t c
        initialState = case maybeState of
                         Nothing -> emptyState (POSIXTime l)
                         Just x -> x
-- | Interprets the counterexample found by SBV (SMTModel), given the
-- contract, the optional initial state, and the list of variable names used.
extractCounterExample :: SMTModel -> Contract -> Maybe State -> [String]
                      -> (POSIXTime, [TransactionInput], [TransactionWarning])
extractCounterExample smtModel cont maybeState maps =
  interpretResult (reverse trace) cont maybeState
  where
    toConcrete (name, cv) = (name, fromCV cv :: Integer)
    assignment = M.fromList (map toConcrete (modelAssocs smtModel))
    trace = groupResult maps assignment
-- Wrapper function that carries out the static analysis and interprets the
-- result. It generates the variables, runs SBV, and interprets the result
-- in Marlowe terms.
warningsTraceCustom :: Bool
                    -> Contract
                    -> Maybe State
                    -> IO (Either ThmResult
                                  (Maybe (POSIXTime, [TransactionInput], [TransactionWarning])))
warningsTraceCustom onlyAssertions con maybeState =
  do thmRes@(ThmResult result) <- satCommand
     return (case result of
               -- No failing execution exists: the contract is safe.
               Unsatisfiable _ _ -> Right Nothing
               -- A model was found: decode it into a counterexample trace.
               Satisfiable _ smtModel ->
                 Right (Just (extractCounterExample smtModel con maybeState params))
               -- Unknown/timeout/etc.: surface the raw solver answer.
               _ -> Left thmRes)
  where -- One extra transaction on top of the nested-When bound.
        maxActs = 1 + countWhens con
        params = generateLabels maxActs
        -- We ask the prover to prove the negation of the analysis property,
        -- so a Satisfiable answer carries the model of a warning-issuing
        -- execution.
        property = do v <- generateParameters params
                      r <- wrapper onlyAssertions con v maybeState
                      return (sNot r)
        satCommand = proveWith z3 property
-- | Like 'warningsTraceCustom' but checks all warnings (including assertions).
warningsTraceWithState :: Contract
                       -> Maybe State
                       -> IO (Either ThmResult
                                     (Maybe (POSIXTime, [TransactionInput], [TransactionWarning])))
warningsTraceWithState contract initialState =
  warningsTraceCustom False contract initialState
-- | Like 'warningsTraceCustom' but only checks assertions.
onlyAssertionsWithState :: Contract
                        -> Maybe State
                        -> IO (Either ThmResult
                                      (Maybe (POSIXTime, [TransactionInput], [TransactionWarning])))
onlyAssertionsWithState contract initialState =
  warningsTraceCustom True contract initialState
-- | Like 'warningsTraceWithState' but starting from an empty initial state.
warningsTrace :: Contract
              -> IO (Either ThmResult
                            (Maybe (POSIXTime, [TransactionInput], [TransactionWarning])))
warningsTrace = (`warningsTraceWithState` Nothing)
| null | https://raw.githubusercontent.com/input-output-hk/marlowe-cardano/78a3dbb1cd692146b7d1a32e1e66faed884f2432/marlowe/src/Language/Marlowe/Analysis/FSSemantics.hs | haskell | # OPTIONS_GHC -Wno-name-shadowing #
# OPTIONS_GHC -Wno-unused-matches #
# OPTIONS_GHC -Wno-unused-imports #
-------------------------------------------------
Static analysis logic and symbolic operations --
-------------------------------------------------
Symbolic version of Input (with symbolic value but concrete identifiers)
We keep as much things concrete as possible.
represent the symbolic trace we are evaluating (the way we got to the current
part of the contract).
Symbolic trace is composed of:
*** Current transaction info
lowTime, highTime -- time interval for the most recent transaction
input for the most recent transaction
whenPos -- position in the When for the most recen transaction (see trace and paramTrace)
*** Previous transaction info
traces -- symbolic information about previous transactions (when we reach a When we
consider adding the current transaction to this list)
the When (which case of the When the input corresponds to 0 is timeout)
*** Input parameter transaction info
paramTrace -- this is actually an input parameter, we get it as input for the SBV
property and we constrain it to match traces for any of the executions,
solution to the analysis (the counterexample if any). It has a fixed
which is the maximum number of transactions that are necessary to explore
minTime just corresponds to lowTime, because it is just a lower bound for the minimum
time, and it gets updated with the minimum time.
It generates a valid symbolic interval with lower bound ms (if provided)
list of symbolic integers that are matched to trace.
this is a minimalistic representation of the counter-example trace that aims
The integers in the tuple represent:
Because the param trace has fixed length we fill the unused transactions with -1,
these are pruned after analysis.
the contract (which is concrete), and using the semantics after a counter example is
found.
Symbolic version evalValue
Symbolic version evalObservation
Update the symbolic state given a symbolic input (just the maps)
Moves the current transaction to the list of transactions and creates a
values and observations are evaluated using those, so we cannot just generate
If the transaction didn't go to timeout, we know the new transaction has maxSlot smaller
than timeout. If it went to timeout we know the new transaction has minSlot greater or
equal than timeout. We also need to check previous transaction does not have ambiguous
interval with the current When, because that would mean the transaction is invalid.
In the case of timeout it is possible we don't actually need a new transaction,
we can reuse the previous transaction, we model this by allowing both low and high
of the transactions useless, but we discard useless transactions by the end so that
is fine.
symbolic parameter through
This is the main static analysis loop for contracts.
- oa -- indicates whether we want to report only failing assertions (not any warning)
- hasErr -- indicates whether the current symbolic execution has already
raised a warning (this is a necessary condition for it to be a counter-example)
- contract -- remaining contract
- sState -- symbolic state
The result of this function is a boolean that indicates whether:
Partial payment
Non-positive payment
Shadowed definition
Analysis loop for When construct. Essentially, it iterates over all the cases and
branches the static analysis. All parameters are the same as isValidAndFailsAux except
for previousMatch and pos:
- previousMatch - Is a function that tells whether some previous case has matched, if
that happened then the current case would never be reached, we keep adding conditions
to the function and pass it to the next iteration of isValidAndFailsWhen.
- pos - Is the position of the current Case clause [1..], 0 means timeout branch.
Non-positive deposit warning
------------------------------------------------
Wrapper - SBV handling and result extraction --
------------------------------------------------
Counts the maximum number of nested Whens. This acts as a bound for the maximum
necessary number of transactions for exploring the whole contract. This bound
this function because then we would have to return a symbolic list that would make
paramTrace, and we use the symbolic paramTrace to know which is the counterexample.
It generates a list of variable names for the variables that conform paramTrace.
Takes a list of variable names for the paramTrace and generates the list of symbolic
Takes the list of paramTrace variable names and the list of mappings of these
concrete values.
chosen value)
Given an input, state, and contract, it runs the semantics on the transaction,
and it adds the transaction and warnings issued to the result as long as the
transaction was not useless. It assumes the transaction is either valid or useless,
other errors would mean the counterexample is not valid.
Input is passed as a combination and function from input list to transaction input and
to recursively call executeAndInterpret (co-recursive funtion).
transactions and also computes the resulting list of warnings.
and initial state (optional), and the list of variables used.
Wrapper function that carries the static analysis and interprets the result. | # OPTIONS_GHC -Wno - incomplete - uni - patterns #
# OPTIONS_GHC -Wno - incomplete - patterns #
module Language.Marlowe.Analysis.FSSemantics
where
import Data.List (foldl', genericIndex)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Maybe (isNothing)
import Data.SBV
import qualified Data.SBV.Either as SE
import Data.SBV.Internals (SMTModel(..))
import qualified Data.SBV.List as SL
import qualified Data.SBV.Maybe as SM
import qualified Data.SBV.Tuple as ST
import Data.Set (Set)
import qualified Data.Set as S
import Language.Marlowe.Core.V1.Semantics
import Language.Marlowe.Core.V1.Semantics.Types
import Plutus.V2.Ledger.Api (POSIXTime(POSIXTime, getPOSIXTime))
import qualified PlutusTx.AssocMap as AssocMap
import qualified PlutusTx.Prelude as P
import qualified PlutusTx.Ratio as P
data SymInput = SymDeposit AccountId Party Token SInteger
| SymChoice ChoiceId SInteger
| SymNotify
Symbolic version of State :
In addition to normal State information we also store symbolic values that
first integer is lowTime , second is highTime , last integer is the position in
SBV will try to find a paramTrace that matches , and that will be the
length that is calculated as the maximum bound given by countWhens ,
the whole contract . This bound is proven in TransactionBound.thy
The rest of the symbolic state just corresponds directly to State with symbolic values :
symAccounts , symChoices , and symBoundValues
data SymState = SymState { lowTime :: SInteger
, highTime :: SInteger
, traces :: [(SInteger, SInteger, Maybe SymInput, Integer)]
, paramTrace :: [(SInteger, SInteger, SInteger, SInteger)]
, symInput :: Maybe SymInput
, whenPos :: Integer
, symAccounts :: Map (AccountId, Token) SInteger
, symChoices :: Map ChoiceId SInteger
, symBoundValues :: Map ValueId SInteger
}
generateSymbolicInterval :: Maybe Integer -> Symbolic (SInteger, SInteger)
generateSymbolicInterval Nothing =
do hs <- sInteger_
ls <- sInteger_
constrain (ls .<= hs)
return (ls, hs)
generateSymbolicInterval (Just ms) =
do i@(ls, _) <- generateSymbolicInterval Nothing
constrain (ls .>= literal ms)
return i
foldWithKey for ' AssocMap
foldAssocMapWithKey :: (a -> k -> b -> a) -> a -> AssocMap.Map k b -> a
foldAssocMapWithKey f acc = foldl' decF acc . AssocMap.toList
where decF a (k, v) = f a k v
Convert ' AssocMap into a Map with symbolic values , which are literals of
the content of the original AssocMap
toSymMap :: Ord k => SymVal v => AssocMap.Map k v -> Map k (SBV v)
toSymMap = foldAssocMapWithKey toSymItem mempty
where toSymItem :: Ord k => SymVal v => Map k (SBV v) -> k -> v -> Map k (SBV v)
toSymItem acc k v = M.insert k (literal v) acc
Create initial symbolic state , it takes an optional concrete State to serve
as initial state , this way analysis can be done from a half - executed contract .
First parameter ( pt ) is the input parameter trace , which is just a fixed length
When Nothing is passed as second parameter it acts like emptyState .
mkInitialSymState :: [(SInteger, SInteger, SInteger, SInteger)] -> Maybe State
-> Symbolic SymState
mkInitialSymState pt Nothing = do (ls, hs) <- generateSymbolicInterval Nothing
return $ SymState { lowTime = ls
, highTime = hs
, traces = []
, paramTrace = pt
, symInput = Nothing
, whenPos = 0
, symAccounts = mempty
, symChoices = mempty
, symBoundValues = mempty }
mkInitialSymState pt (Just State { accounts = accs
, choices = cho
, boundValues = bVal
, minTime = ms }) =
do (ls, hs) <- generateSymbolicInterval (Just (getPOSIXTime ms))
return $ SymState { lowTime = ls
, highTime = hs
, traces = []
, paramTrace = pt
, symInput = Nothing
, whenPos = 0
, symAccounts = toSymMap accs
, symChoices = toSymMap cho
, symBoundValues = toSymMap bVal }
It converts a symbolic trace into a list of 4 - uples of symbolic integers ,
to minimise the functionalities from SBV that we use ( just integers ) for efficiency .
1st - time interval min time
2nd - time interval max time
3rd - When clause used ( 0 for timeout branch )
4rd - Symbolic value ( money in deposit , chosen value in choice )
The identifiers for Deposit and Choice are calculated using the When clause and
convertRestToSymbolicTrace :: [(SInteger, SInteger, Maybe SymInput, Integer)] ->
[(SInteger, SInteger, SInteger, SInteger)] -> SBool
convertRestToSymbolicTrace [] [] = sTrue
convertRestToSymbolicTrace ((lowS, highS, inp, pos):t) ((a, b, c, d):t2) =
(lowS .== a) .&& (highS .== b) .&& (getSymValFrom inp .== c) .&& (literal pos .== d) .&&
convertRestToSymbolicTrace t t2
where
getSymValFrom :: Maybe SymInput -> SInteger
getSymValFrom Nothing = 0
getSymValFrom (Just (SymDeposit _ _ _ val)) = val
getSymValFrom (Just (SymChoice _ val)) = val
getSymValFrom (Just SymNotify) = 0
convertRestToSymbolicTrace _ _ = error "Symbolic trace is the wrong length"
isPadding :: [(SInteger, SInteger, SInteger, SInteger)] -> SBool
isPadding ((a, b, c, d):t) = (a .== -1) .&& (b .== -1) .&& (c .== -1) .&&
(d .== -1) .&& isPadding t
isPadding [] = sTrue
-- | Like 'convertRestToSymbolicTrace', but the provided symbolic trace may be
-- longer than the reference trace; in that case the excess prefix must be
-- padding tuples. A reference trace longer than the symbolic one is an error.
convertToSymbolicTrace :: [(SInteger, SInteger, Maybe SymInput, Integer)] ->
                          [(SInteger, SInteger, SInteger, SInteger)] -> SBool
convertToSymbolicTrace refL symL
  | refLen > symLen = error "Provided symbolic trace is not long enough"
  | otherwise       = isPadding padding .&& convertRestToSymbolicTrace refL rest
  where
    refLen = length refL
    symLen = length symL
    (padding, rest) = splitAt (symLen - refLen) symL
-- | Symbolically evaluate a Marlowe 'Value' in the given symbolic state,
-- producing a symbolic integer. Missing accounts, choices and bound values
-- default to 0, mirroring the concrete semantics' defaults.
symEvalVal :: Value Observation -> SymState -> SInteger
symEvalVal (AvailableMoney accId tok) symState =
  M.findWithDefault (literal 0) (accId, tok) (symAccounts symState)
symEvalVal (Constant inte) symState = literal inte
symEvalVal (NegValue val) symState = - symEvalVal val symState
symEvalVal (AddValue lhs rhs) symState = symEvalVal lhs symState +
                                         symEvalVal rhs symState
symEvalVal (SubValue lhs rhs) symState = symEvalVal lhs symState -
                                         symEvalVal rhs symState
symEvalVal (MulValue lhs rhs) symState = symEvalVal lhs symState *
                                         symEvalVal rhs symState
-- Division by zero yields 0 rather than an error.
-- NOTE(review): sQuot truncates toward zero — confirm this matches the
-- rounding of the concrete DivValue semantics.
symEvalVal (DivValue lhs rhs) symState =
  let n = symEvalVal lhs symState
      d = symEvalVal rhs symState
  in ite (d .== 0) 0 (n `sQuot` d)
symEvalVal (ChoiceValue choId) symState =
  M.findWithDefault (literal 0) choId (symChoices symState)
symEvalVal TimeIntervalStart symState = lowTime symState
symEvalVal TimeIntervalEnd symState = highTime symState
symEvalVal (UseValue valId) symState =
  M.findWithDefault (literal 0) valId (symBoundValues symState)
symEvalVal (Cond cond v1 v2) symState = ite (symEvalObs cond symState)
                                            (symEvalVal v1 symState)
                                            (symEvalVal v2 symState)
-- | Symbolically evaluate a Marlowe 'Observation' to a symbolic boolean.
symEvalObs :: Observation -> SymState -> SBool
symEvalObs (AndObs obs1 obs2) symState = symEvalObs obs1 symState .&&
                                         symEvalObs obs2 symState
symEvalObs (OrObs obs1 obs2) symState = symEvalObs obs1 symState .||
                                        symEvalObs obs2 symState
symEvalObs (NotObs obs) symState = sNot $ symEvalObs obs symState
-- Membership in the choices map is a concrete (non-symbolic) fact here,
-- so it is lifted with 'literal'.
symEvalObs (ChoseSomething choiceId) symState =
  literal (M.member choiceId (symChoices symState))
symEvalObs (ValueGE lhs rhs) symState = symEvalVal lhs symState .>=
                                        symEvalVal rhs symState
symEvalObs (ValueGT lhs rhs) symState = symEvalVal lhs symState .>
                                        symEvalVal rhs symState
symEvalObs (ValueLT lhs rhs) symState = symEvalVal lhs symState .<
                                        symEvalVal rhs symState
symEvalObs (ValueLE lhs rhs) symState = symEvalVal lhs symState .<=
                                        symEvalVal rhs symState
symEvalObs (ValueEQ lhs rhs) symState = symEvalVal lhs symState .==
                                        symEvalVal rhs symState
symEvalObs TrueObs _ = sTrue
symEvalObs FalseObs _ = sFalse
-- | Apply the effect of an input to the symbolic state: deposits add money
-- to the target account, choices record the chosen value, notifications and
-- timeouts (Nothing) leave the state unchanged.
updateSymInput :: Maybe SymInput -> SymState -> Symbolic SymState
updateSymInput Nothing symState = return symState
updateSymInput (Just (SymDeposit accId _ tok val)) symState =
  -- Negative deposit amounts contribute 0 (smax with 0), so a deposit can
  -- never decrease an account balance.
  let resultVal = M.findWithDefault 0 (accId, tok) (symAccounts symState)
                    + smax (literal 0) val in
  return (symState {symAccounts =
                      M.insert (accId, tok) resultVal
                               (symAccounts symState)})
updateSymInput (Just (SymChoice choId val)) symState =
  return (symState {symChoices = M.insert choId val (symChoices symState)})
updateSymInput (Just SymNotify) symState = return symState
new one . It takes newLowSlot and newHighSlot as parameters because the
them here (they are linked to the SymInput, the 3rd parameter).
If SymInput is Nothing it means the transaction went to timeout .
time to be equal to the ones of the previous transaction . That will typically make one
-- | Extend the symbolic state with one more transaction, returning the
-- symbolic conditions under which that transaction is valid together with
-- the updated state. The previous interval/input is pushed onto 'traces'.
addTransaction :: SInteger -> SInteger -> Maybe SymInput -> Timeout -> SymState -> Integer
               -> Symbolic (SBool, SymState)
-- Timeout branch (no input): either the previous interval ended before the
-- timeout, or the interval is unchanged from the previous transaction; in
-- both cases the new interval must start at or after the timeout.
addTransaction newLowSlot newHighSlot Nothing slotTim
               symState@SymState { lowTime = oldLowSlot
                                 , highTime = oldHighSlot
                                 , traces = oldTraces
                                 , symInput = prevSymInp
                                 , whenPos = oldPos } pos =
  do let tim = getPOSIXTime slotTim
     -- Intervals must be well-formed.
     constrain (newLowSlot .<= newHighSlot)
     let conditions = ((oldHighSlot .< literal tim) .||
                       ((oldLowSlot .== newLowSlot) .&& (oldHighSlot .== newHighSlot))) .&&
                      (newLowSlot .>= literal tim)
     uSymInput <- updateSymInput Nothing
                    (symState { lowTime = newLowSlot
                              , highTime = newHighSlot
                              , traces = (oldLowSlot, oldHighSlot,
                                          prevSymInp, oldPos):oldTraces
                              , symInput = Nothing
                              , whenPos = pos })
     return (conditions, uSymInput)
-- Input branch: both the old and new intervals must end strictly before the
-- timeout, and time may not move backwards.
addTransaction newLowSlot newHighSlot newSymInput slotTim
               symState@SymState { lowTime = oldLowSlot
                                 , highTime = oldHighSlot
                                 , traces = oldTraces
                                 , symInput = prevSymInp
                                 , whenPos = oldPos } pos =
  do let tim = getPOSIXTime slotTim
     constrain (newLowSlot .<= newHighSlot)
     let conditions = (oldHighSlot .< literal tim) .&&
                      (newHighSlot .< literal tim) .&&
                      (newLowSlot .>= oldLowSlot)
     uSymInput <- updateSymInput newSymInput
                    (symState { lowTime = newLowSlot
                              , highTime = newHighSlot
                              , traces = (oldLowSlot, oldHighSlot, prevSymInp, oldPos)
                                         :oldTraces
                              , symInput = newSymInput
                              , whenPos = pos })
     return (conditions, uSymInput)
It only " or"s the first symbolic boolean to the second one if the
concrete boolean is False , otherwise it just passes the second
-- | In only-assertions mode (first argument True) the new warning condition
-- is dropped and only the second boolean is kept; otherwise the two are
-- combined with a symbolic "or".
onlyAssertionsPatch :: Bool -> SBool -> SBool -> SBool
onlyAssertionsPatch True  _  p2 = p2
onlyAssertionsPatch False p1 p2 = p1 .|| p2
1 . The transaction is valid ( according to the semantics )
2 . It has issued a warning ( as indicated by hasErr )
-- | Core of the analysis: a symbolic boolean that is satisfiable iff there
-- is a valid execution trace of the contract that raises a warning. hasErr
-- accumulates warnings seen so far; the Bool selects only-assertions mode.
isValidAndFailsAux :: Bool -> SBool -> Contract -> SymState
                   -> Symbolic SBool
-- Close: execution finished — succeed iff a warning was raised, and bind
-- the accumulated trace to the output parameter trace.
isValidAndFailsAux oa hasErr Close sState =
  return (hasErr .&& convertToSymbolicTrace ((lowTime sState, highTime sState,
                                              symInput sState, whenPos sState)
                                             :traces sState) (paramTrace sState))
isValidAndFailsAux oa hasErr (Pay accId payee token val cont) sState =
  do let concVal = symEvalVal val sState
     let originalMoney = M.findWithDefault 0 (accId, token) (symAccounts sState)
     -- A Pay can at most drain the source account; negative amounts pay
     -- nothing (both clamped with smax 0).
     let remainingMoneyInAccount = originalMoney - smax (literal 0) concVal
     let newAccs = M.insert (accId, token) (smax (literal 0) remainingMoneyInAccount)
                            (symAccounts sState)
     -- Internal payments credit the destination account; external payees
     -- leave the account map otherwise unchanged.
     let finalSState = sState { symAccounts =
           case payee of
             (Account destAccId) ->
               M.insert (destAccId, token)
                        (smin originalMoney (smax (literal 0) concVal)
                          + M.findWithDefault 0 (destAccId, token) newAccs)
                        newAccs
             _ -> newAccs }
     -- NOTE(review): onlyAssertionsPatch is applied with only two visible
     -- arguments here — the Pay warning condition that should form its third
     -- argument appears to have been elided; confirm against upstream.
     isValidAndFailsAux oa (onlyAssertionsPatch
                              oa
                              hasErr) cont finalSState
isValidAndFailsAux oa hasErr (If obs cont1 cont2) sState =
  do let obsVal = symEvalObs obs sState
     -- Explore both branches; select symbolically on the observation.
     contVal1 <- isValidAndFailsAux oa hasErr cont1 sState
     contVal2 <- isValidAndFailsAux oa hasErr cont2 sState
     return (ite obsVal contVal1 contVal2)
-- When: delegate to isValidAndFailsWhen, starting with branch index 1 and
-- an always-false "previous match" predicate.
isValidAndFailsAux oa hasErr (When list timeout cont) sState =
  isValidAndFailsWhen oa hasErr list timeout cont (const $ const sFalse) sState 1
isValidAndFailsAux oa hasErr (Let valId val cont) sState =
  do let concVal = symEvalVal val sState
     let newBVMap = M.insert valId concVal (symBoundValues sState)
     let newSState = sState { symBoundValues = newBVMap }
     -- NOTE(review): same two-argument onlyAssertionsPatch application as in
     -- the Pay clause — the shadowing warning condition may have been elided.
     isValidAndFailsAux oa (onlyAssertionsPatch
                              oa
                              hasErr) cont newSState
-- A failed assertion is a warning in both analysis modes.
isValidAndFailsAux oa hasErr (Assert obs cont) sState =
  isValidAndFailsAux oa (hasErr .|| sNot obsVal) cont sState
  where obsVal = symEvalObs obs sState
Returns sTrue iff the given SInteger is in the list of bounds
-- | True iff the symbolic integer falls inside at least one of the given
-- (inclusive) bounds; an empty list of bounds yields sFalse.
ensureBounds :: SInteger -> [Bound] -> SBool
ensureBounds cho bnds =
  sOr [ (cho .>= literal lowBnd) .&& (cho .<= literal hiBnd)
      | Bound lowBnd hiBnd <- bnds ]
Just combines addTransaction and isValidAndFailsAux
-- | Register one transaction on the symbolic state, then continue the
-- analysis on the continuation contract. Returns the transaction's validity
-- constraint together with the continuation's result.
applyInputConditions :: Bool -> SInteger -> SInteger -> SBool -> Maybe SymInput -> Timeout
                     -> SymState -> Integer -> Contract
                     -> Symbolic (SBool, SBool)
applyInputConditions oa ls hs hasErr maybeSymInput timeout sState pos cont = do
  (txConstraint, stateAfterTx) <- addTransaction ls hs maybeSymInput timeout sState pos
  contTrace <- isValidAndFailsAux oa hasErr cont stateAfterTx
  pure (txConstraint, contTrace)
Generates two new slot numbers and puts them in the symbolic state
-- | Allocate two fresh symbolic slot numbers and install them as the
-- state's time interval, returning both alongside the updated state.
addFreshSlotsToState :: SymState -> Symbolic (SInteger, SInteger, SymState)
addFreshSlotsToState sState = do
  freshLow  <- sInteger_
  freshHigh <- sInteger_
  pure (freshLow, freshHigh, sState { lowTime = freshLow, highTime = freshHigh })
-- | Analyse a When construct, one clause per Case plus the timeout branch.
-- previousMatch says whether an input would have been captured by an earlier
-- Case (earlier Cases shadow later ones); pos is the 1-based index of the
-- current Case, recorded in the trace (0 marks the timeout branch).
isValidAndFailsWhen :: Bool -> SBool -> [Case Contract] -> Timeout -> Contract -> (SymInput -> SymState -> SBool)
                    -> SymState -> Integer -> Symbolic SBool
-- No Cases left: only the timeout branch remains.
isValidAndFailsWhen oa hasErr [] timeout cont previousMatch sState pos =
  do newLowSlot <- sInteger_
     newHighSlot <- sInteger_
     (cond, newTrace)
       <- applyInputConditions oa newLowSlot newHighSlot
            hasErr Nothing timeout sState 0 cont
     return (ite cond newTrace sFalse)
-- Deposit case: the branch is only taken if no earlier identical Deposit
-- clause (same account, party, token, and evaluated amount) matches first.
isValidAndFailsWhen oa hasErr (Case (Deposit accId party token val) cont:rest)
                    timeout timCont previousMatch sState pos =
  do (newLowSlot, newHighSlot, sStateWithInput) <- addFreshSlotsToState sState
     let concVal = symEvalVal val sStateWithInput
     let symInput = SymDeposit accId party token concVal
     let clashResult = previousMatch symInput sStateWithInput
     -- Extend the shadowing predicate so later Cases also see this clause.
     let newPreviousMatch otherSymInput pmSymState =
           let pmConcVal = symEvalVal val pmSymState in
           case otherSymInput of
             SymDeposit otherAccId otherParty otherToken otherConcVal ->
               if (otherAccId == accId) && (otherParty == party)
                    && (otherToken == token)
               then (otherConcVal .== pmConcVal) .|| previousMatch otherSymInput pmSymState
               else previousMatch otherSymInput pmSymState
             _ -> previousMatch otherSymInput pmSymState
     -- NOTE(review): onlyAssertionsPatch is applied with only two visible
     -- arguments — the deposit warning condition that should form its third
     -- argument appears to have been elided; confirm against upstream.
     (newCond, newTrace)
       <- applyInputConditions oa newLowSlot newHighSlot
            (onlyAssertionsPatch oa
               hasErr)
            (Just symInput) timeout sState pos cont
     contTrace <- isValidAndFailsWhen oa hasErr rest timeout timCont
                    newPreviousMatch sState (pos + 1)
     return (ite (newCond .&& sNot clashResult) newTrace contTrace)
-- Choice case: the chosen value is a fresh symbolic integer that must fall
-- inside the declared bounds; identical earlier Choice clauses shadow it.
isValidAndFailsWhen oa hasErr (Case (Choice choId bnds) cont:rest)
                    timeout timCont previousMatch sState pos =
  do (newLowSlot, newHighSlot, sStateWithInput) <- addFreshSlotsToState sState
     concVal <- sInteger_
     let symInput = SymChoice choId concVal
     let clashResult = previousMatch symInput sStateWithInput
     let newPreviousMatch otherSymInput pmSymState =
           case otherSymInput of
             SymChoice otherChoId otherConcVal ->
               if otherChoId == choId
               then ensureBounds otherConcVal bnds .|| previousMatch otherSymInput pmSymState
               else previousMatch otherSymInput pmSymState
             _ -> previousMatch otherSymInput pmSymState
     (newCond, newTrace)
       <- applyInputConditions oa newLowSlot newHighSlot
            hasErr (Just symInput) timeout sState pos cont
     contTrace <- isValidAndFailsWhen oa hasErr rest timeout timCont
                    newPreviousMatch sState (pos + 1)
     return (ite (newCond .&& sNot clashResult .&& ensureBounds concVal bnds)
                 newTrace
                 contTrace)
-- Notify case: the branch requires the observation to hold; an earlier
-- Notify clause whose observation holds shadows it.
isValidAndFailsWhen oa hasErr (Case (Notify obs) cont:rest)
                    timeout timCont previousMatch sState pos =
  do (newLowSlot, newHighSlot, sStateWithInput) <- addFreshSlotsToState sState
     let obsRes = symEvalObs obs sStateWithInput
     let symInput = SymNotify
     let clashResult = previousMatch symInput sStateWithInput
     let newPreviousMatch otherSymInput pmSymState =
           let pmObsRes = symEvalObs obs pmSymState in
           case otherSymInput of
             SymNotify -> pmObsRes .|| previousMatch otherSymInput pmSymState
             _ -> previousMatch otherSymInput pmSymState
     (newCond, newTrace)
       <- applyInputConditions oa newLowSlot newHighSlot
            hasErr (Just symInput) timeout sState pos cont
     contTrace <- isValidAndFailsWhen oa hasErr rest timeout timCont
                    newPreviousMatch sState (pos + 1)
     return (ite (newCond .&& obsRes .&& sNot clashResult) newTrace contTrace)
-- Merkleized cases: their continuations cannot be analysed (the contract
-- body is behind a hash), but they still shadow later matching Cases, so
-- only previousMatch is extended before skipping to the rest.
isValidAndFailsWhen oa hasErr (MerkleizedCase (Deposit accId party token val) _:rest)
                    timeout timCont previousMatch sState pos =
  let newPreviousMatch otherSymInput pmSymState =
        let pmConcVal = symEvalVal val pmSymState in
        case otherSymInput of
          SymDeposit otherAccId otherParty otherToken otherConcVal ->
            if (otherAccId == accId) && (otherParty == party)
                 && (otherToken == token)
            then (otherConcVal .== pmConcVal) .|| previousMatch otherSymInput pmSymState
            else previousMatch otherSymInput pmSymState
          _ -> previousMatch otherSymInput pmSymState in
  isValidAndFailsWhen oa hasErr rest timeout timCont
                      newPreviousMatch sState (pos + 1)
isValidAndFailsWhen oa hasErr (MerkleizedCase (Choice choId bnds) _:rest)
                    timeout timCont previousMatch sState pos =
  let newPreviousMatch otherSymInput pmSymState =
        case otherSymInput of
          SymChoice otherChoId otherConcVal ->
            if otherChoId == choId
            then ensureBounds otherConcVal bnds .|| previousMatch otherSymInput pmSymState
            else previousMatch otherSymInput pmSymState
          _ -> previousMatch otherSymInput pmSymState in
  isValidAndFailsWhen oa hasErr rest timeout timCont newPreviousMatch sState (pos + 1)
isValidAndFailsWhen oa hasErr (MerkleizedCase (Notify obs) _:rest)
                    timeout timCont previousMatch sState pos =
  let newPreviousMatch otherSymInput pmSymState =
        let pmObsRes = symEvalObs obs pmSymState in
        case otherSymInput of
          SymNotify -> pmObsRes .|| previousMatch otherSymInput pmSymState
          _ -> previousMatch otherSymInput pmSymState in
  isValidAndFailsWhen oa hasErr rest timeout timCont
                      newPreviousMatch sState (pos + 1)
has been proven in TransactionBound.thy
-- | Maximum number of When constructs on any execution path of the
-- contract; this bounds the number of transactions the analysis must
-- consider.
countWhens :: Contract -> Integer
countWhens contract =
  case contract of
    Close           -> 0
    Pay _ _ _ _ c   -> countWhens c
    If _ c c2       -> max (countWhens c) (countWhens c2)
    When cl _ c     -> 1 + max (countWhensCaseList cl) (countWhens c)
    Let _ _ c       -> countWhens c
    Assert _ c      -> countWhens c
Same as countWhens but it starts with a Case list
-- | Maximum 'countWhens' over the continuations of a Case list; merkleized
-- continuations are opaque and contribute nothing.
countWhensCaseList :: [Case Contract] -> Integer
countWhensCaseList = foldr step 0
  where
    step (Case _ c)           acc = max (countWhens c) acc
    step (MerkleizedCase _ _) acc = acc
Main wrapper of the static analysis; takes a Bool that indicates whether only
assertions should be checked , a Contract , a paramTrace , and an optional
State . paramTrace is actually an output parameter . We do not put it in the result of
the whole process slower . It is meant to be used just with SBV , with a symbolic
-- | Entry point of the symbolic analysis: build the initial symbolic state
-- (optionally seeded from a concrete 'State') and run the analysis. The
-- parameter trace is an output parameter bound inside the analysis.
wrapper :: Bool -> Contract -> [(SInteger, SInteger, SInteger, SInteger)] -> Maybe State
        -> Symbolic SBool
wrapper oa c st maybeState =
  mkInitialSymState st maybeState >>= isValidAndFailsAux oa sFalse c
The list will account for the given number of transactions ( four vars per transaction ) .
-- | Variable labels for the given number of transactions: four per
-- transaction (minSlot, maxSlot, value, branch), numbered from 1.
generateLabels :: Integer -> [String]
generateLabels m = concatMap labelsFor [1 .. m]
  where
    labelsFor :: Integer -> [String]
    labelsFor n = map (actionLabel ++) ["minSlot", "maxSlot", "value", "branch"]
      where actionLabel = "action_" ++ show n ++ "_"
variables . It returns the list of symbolic variables generated ( list of 4 - uples ) .
-- | Create one named symbolic variable per label. Labels must come in
-- groups of four (low slot, high slot, value, branch); the variables are
-- returned grouped as 4-tuples in label order.
generateParameters :: [String] -> Symbolic [(SInteger, SInteger, SInteger, SInteger)]
generateParameters (sl:sh:v:b:t) =
  do isl <- sInteger sl
     ish <- sInteger sh
     iv <- sInteger v
     ib <- sInteger b
     rest <- generateParameters t
     return ((isl, ish, iv, ib):rest)
generateParameters [] = return []
generateParameters _ = error "Wrong number of labels generated"
names to concrete values , and reconstructs a concrete list of 4 - uples of the ordered
-- | Look up each group of four labels in the solver's model and rebuild the
-- concrete trace as 4-tuples, skipping padding entries (branch == -1).
--
-- Previously a missing model key crashed with an opaque irrefutable-pattern
-- failure; it now raises an error naming the missing label.
groupResult :: [String] -> Map String Integer -> [(Integer, Integer, Integer, Integer)]
groupResult (sl:sh:v:b:t) mappings =
    if ib == -1 then groupResult t mappings
                else (isl, ish, iv, ib):groupResult t mappings
  where isl = lookupOrErr sl
        ish = lookupOrErr sh
        iv  = lookupOrErr v
        ib  = lookupOrErr b
        -- Every generated label must be present in the model.
        lookupOrErr k = case M.lookup k mappings of
                          Just n  -> n
                          Nothing -> error ("groupResult: missing model value for " ++ k)
groupResult [] _ = []
groupResult _ _ = error "Wrong number of labels generated"
Reconstructs an input from a Case list a Case position and a value ( deposit amount or
-- | Reconstruct a concrete 'Input' from a When's Case list, the 1-based
-- branch number chosen by the solver, and the associated value (deposit
-- amount or chosen value; ignored for Notify). Merkleized cases cannot be
-- reconstructed because that would require a hash preimage.
caseToInput :: [Case a] -> Integer -> Integer -> Input
caseToInput [] _ _ = error "Wrong number of cases interpreting result"
caseToInput (Case h _:t) c v
  | c > 1 = caseToInput t (c - 1) v
  | c == 1 = NormalInput $ case h of
               Deposit accId party tok _ -> IDeposit accId party tok v
               Choice choId _ -> IChoice choId v
               Notify _ -> INotify
  | otherwise = error "Negative case number"
caseToInput (MerkleizedCase _ _:t) c v
  | c > 1 = caseToInput t (c - 1) v
  | c == 1 = error "Finding this counter example would have required finding a hash preimage"
  | otherwise = error "Negative case number"
input list for convenience . The list of 4 - uples is passed through because it is used
-- | Apply one transaction through the concrete semantics and keep
-- interpreting the remaining solver output, pairing each applied
-- transaction with the warnings it produced.
--
-- NOTE(review): only 'TEUselessTransaction' is matched among the Error
-- constructors — presumably other errors are unreachable for
-- solver-produced traces; a different Error would cause a pattern-match
-- failure. Confirm.
computeAndContinue :: ([Input] -> TransactionInput) -> [Input] -> State -> Contract
                   -> [(Integer, Integer, Integer, Integer)]
                   -> [([TransactionInput], [TransactionWarning])]
computeAndContinue transaction inps sta cont t =
  case computeTransaction (transaction inps) sta cont of
    -- A useless transaction is skipped; interpretation continues on the
    -- same state and contract.
    Error TEUselessTransaction -> executeAndInterpret sta t cont
    TransactionOutput { txOutWarnings = war
                      , txOutState = newSta
                      , txOutContract = newCont}
      -> ([transaction inps], war)
         :executeAndInterpret newSta t newCont
Takes a list of 4 - uples ( and state and contract ) and interprets it as a list of
-- | Interpret the solver's trace (a list of 4-tuples: interval low/high,
-- input value, branch) as a list of concrete transactions. Branch 0 marks
-- a timeout step (empty input list); otherwise the contract is reduced to
-- its next When to resolve the branch number into a concrete input.
executeAndInterpret :: State -> [(Integer, Integer, Integer, Integer)] -> Contract
                    -> [([TransactionInput], [TransactionWarning])]
executeAndInterpret _ [] _ = []
executeAndInterpret sta ((l, h, v, b):t) cont
  | b == 0 = computeAndContinue transaction [] sta cont t
  | otherwise =
      case reduceContractUntilQuiescent env sta cont of
        ContractQuiescent _ _ _ _ tempCont ->
          case tempCont of
            When cases _ _ -> computeAndContinue transaction
                                [caseToInput cases b v] sta cont t
            _ -> error "Cannot interpret result"
        _ -> error "Error reducing contract when interpreting result"
  where myTimeInterval = (POSIXTime l, POSIXTime h)
        env = Environment { timeInterval = myTimeInterval }
        transaction inputs = TransactionInput { txInterval = myTimeInterval
                                              , txInputs = inputs
                                              }
It wraps executeAndInterpret so that it takes an optional State , and also
combines the results of executeAndInterpret in one single tuple .
-- | Wrap 'executeAndInterpret' with an optional initial 'State' (defaulting
-- to an empty state at the trace's first low time) and flatten its per-step
-- results into a single (start time, inputs, warnings) tuple.
interpretResult :: [(Integer, Integer, Integer, Integer)] -> Contract -> Maybe State
                -> (POSIXTime, [TransactionInput], [TransactionWarning])
interpretResult [] _ _ = error "Empty result"
interpretResult t@((l, _, _, _):_) c maybeState = (POSIXTime l, tin, twa)
  where (tin, twa) = foldl' (\(accInp, accWarn) (elemInp, elemWarn) ->
                               (accInp ++ elemInp, accWarn ++ elemWarn)) ([], []) $
                       executeAndInterpret initialState t c
        initialState = case maybeState of
                         Nothing -> emptyState (POSIXTime l)
                         Just x -> x
It interprets the counter example found by SBV ( SMTModel ) , given the contract ,
-- | Interpret a counterexample found by SBV: read the integer model
-- assignments, group them by label into trace tuples, and replay them
-- (reversed, since traces were accumulated newest-first) through the
-- concrete semantics.
extractCounterExample :: SMTModel -> Contract -> Maybe State -> [String]
                      -> (POSIXTime, [TransactionInput], [TransactionWarning])
extractCounterExample smtModel cont maybeState maps = interpretedResult
  where assocs = map (\(a, b) -> (a, fromCV b :: Integer)) $ modelAssocs smtModel
        counterExample = groupResult maps (M.fromList assocs)
        interpretedResult = interpretResult (reverse counterExample) cont maybeState
It generates variables , runs SBV , and it interprets the result in terms .
-- | Run the full analysis: generate the parameter variables, build the
-- property, invoke z3, and interpret the outcome.
--
-- The property is the negation of "a warning-raising trace exists", so an
-- Unsatisfiable proof means no warning is reachable (Right Nothing), while
-- a Satisfiable result carries a model that is decoded into a concrete
-- counterexample trace. Any other solver outcome is returned verbatim.
warningsTraceCustom :: Bool
                    -> Contract
                    -> Maybe State
                    -> IO (Either ThmResult
                                  (Maybe (POSIXTime, [TransactionInput], [TransactionWarning])))
warningsTraceCustom onlyAssertions con maybeState =
  do thmRes@(ThmResult result) <- satCommand
     return (case result of
               Unsatisfiable _ _ -> Right Nothing
               Satisfiable _ smtModel ->
                 Right (Just (extractCounterExample smtModel con maybeState params))
               _ -> Left thmRes)
  -- One extra action covers the implicit initial timeout step; the bound
  -- itself comes from countWhens.
  where maxActs = 1 + countWhens con
        params = generateLabels maxActs
        property = do v <- generateParameters params
                      r <- wrapper onlyAssertions con v maybeState
                      return (sNot r)
        satCommand = proveWith z3 property
Like warningsTraceCustom but checks all warnings ( including assertions )
-- | Analyse for all warnings (including assertions), optionally starting
-- from a concrete state.
warningsTraceWithState :: Contract
                       -> Maybe State
                       -> IO (Either ThmResult
                                     (Maybe (POSIXTime, [TransactionInput], [TransactionWarning])))
warningsTraceWithState contract maybeState =
  warningsTraceCustom False contract maybeState
Like warningsTraceCustom but only checks assertions .
-- | Analyse for failed assertions only, optionally starting from a
-- concrete state.
onlyAssertionsWithState :: Contract
                        -> Maybe State
                        -> IO (Either ThmResult
                                      (Maybe (POSIXTime, [TransactionInput], [TransactionWarning])))
onlyAssertionsWithState contract maybeState =
  warningsTraceCustom True contract maybeState
Like warningsTraceWithState but without initialState .
-- | Like 'warningsTraceWithState', starting from the default (empty)
-- initial state.
warningsTrace :: Contract
              -> IO (Either ThmResult
                            (Maybe (POSIXTime, [TransactionInput], [TransactionWarning])))
warningsTrace contract = warningsTraceWithState contract Nothing
|
8ed841788edab29cafb1a838131465448369463c16e33efc343f8d5c1fa44963 | yapsterapp/er-cassandra | statement_test.clj | (ns er-cassandra.record.statement-test
(:require
[er-cassandra.record.statement :as sut]
[clojure.test :as t]
[er-cassandra.util.test :as tu]
[schema.test :as st]
[clojure.test :refer [deftest is testing use-fixtures]]
[clj-uuid :as uuid])
(:import
[clojure.lang ExceptionInfo]))
;; enable prismatic-schema validation for the whole run, and wrap each test
;; in a session fixture (presumably a fresh cassandra session per test —
;; confirm against er-cassandra.util.test)
(use-fixtures :once st/validate-schemas)
(use-fixtures :each (tu/with-session-fixture))
;; statements for primary-key and table-scan selects. fix: removed a stray
;; map literal that was left inside the final thrown-with-msg? body — it was
;; evaluated and discarded, serving no purpose.
(deftest select-statement-test
  (testing "partition selects"
    (testing "simplest select"
      (is (=
           {:select :foos :columns :* :where [[:= :id "foo"]]}
           (sut/select-statement
            :foos
            :id
            "foo"
            {}))))
    (testing "compound key"
      (is (=
           {:select :foos :columns :* :where [[:= :foo "foo"] [:= :bar "bar"]]}
           (sut/select-statement
            :foos
            [[:foo] :bar]
            ["foo" "bar"]
            {}))))
    (testing "with columns"
      (is (=
           {:select :foos :columns [:id] :where [[:= :id "foo"]]}
           (sut/select-statement
            :foos
            :id
            "foo"
            {:columns [:id]}))))
    (testing "with extra where"
      (is (=
           {:select :foos :columns :* :where [[:= :id "foo"] [:= :bar "bar"]]}
           (sut/select-statement
            :foos
            :id
            "foo"
            {:where [[:= :bar "bar"]]})))
      (is (=
           {:select :foos :columns :* :where [[:= :id "foo"] [:= :bar "bar"] [:= :baz "baz"]]}
           (sut/select-statement
            :foos
            :id
            "foo"
            {:where [[:= :bar "bar"]
                     [:= :baz "baz"]]}))))
    (testing "with order-by"
      (is (=
           {:select :foos :columns :* :where [[:= :id "foo"]] :order-by [[:foo :asc]]}
           (sut/select-statement
            :foos
            :id
            "foo"
            {:order-by [[:foo :asc]]})))
      (is (=
           {:select :foos :columns :* :where [[:= :id "foo"]] :order-by [[:foo :asc] [:bar :desc]]}
           (sut/select-statement
            :foos
            :id
            "foo"
            {:order-by [[:foo :asc] [:bar :desc]]}))))
    (testing "limit"
      (is (=
           {:select :foos :columns :* :where [[:= :id "foo"]] :limit 5000}
           (sut/select-statement
            :foos
            :id
            "foo"
            {:limit 5000}))))
    (testing "does not permit filtering"
      (is (thrown-with-msg?
           ExceptionInfo #"does not match schema"
           (sut/select-statement
            :foos
            :id
            "foo"
            {:allow-filtering true}))))
    (testing "throws with unknown opt"
      (is (thrown-with-msg?
           ExceptionInfo #"does not match schema"
           (sut/select-statement
            :foos
            :id
            "foo"
            {:blah true})))))
  (testing "table-scan selects"
    (testing "simple table scan"
      (is (= {:select :foos, :columns :*}
             (sut/select-statement :foos {}))))
    (testing "with extra where"
      (is (= {:select :foos, :columns :* :where [[:= :bar "bar"] [:= :baz "baz"]]}
             (sut/select-statement
              :foos
              {:where [[:= :bar "bar"] [:= :baz "baz"]]}))))
    (testing "with limit"
      (is (= {:select :foos, :columns :* :limit 5000}
             (sut/select-statement :foos {:limit 5000}))))
    (testing "does permit filtering"
      (is (= {:select :foos
              :columns :*
              :where [[:= :bar "bar"] [:= :baz "baz"]]
              :allow-filtering true}
             (sut/select-statement
              :foos
              {:where [[:= :bar "bar"] [:= :baz "baz"]]
               :allow-filtering true}))))
    (testing "throws with unknown opt"
      (is (thrown-with-msg?
           ExceptionInfo #"does not match schema"
           (sut/select-statement
            :foos
            {:blah true}))))))
;; insert-statement: plain records, collection columns (both literal values
;; and minimal-change diff maps, which must collapse to the full new value
;; since INSERT cannot express incremental collection updates), using-ttl /
;; using-timestamp, if-not-exists, and opts-schema validation.
(deftest insert-statement-test
  (testing "simple insert"
    (is (= {:insert :foos :values {:id "id" :foo "foo"}}
           (sut/insert-statement
            :foos
            {:id "id"
             :foo "foo"}
            {}))))
  (testing "collection colls"
    (testing "with plain values"
      (is (= {:insert :foos
              :values {:id "id"
                       :foo [10 20]}}
             (sut/insert-statement
              :foos
              {:id "id"
               :foo [10 20]}
              {})))
      (is (= {:insert :foos
              :values {:id "id"
                       :foo {"bar" 20}}}
             (sut/insert-statement
              :foos
              {:id "id"
               :foo {"bar" 20}}
              {}))))
    (testing "with minimal change diffs"
      (is (= {:insert :foos
              :values {:id "id"
                       :foo [10 20]}}
             (sut/insert-statement
              :foos
              {:id "id"
               :foo {:intersection []
                     :prepended [10]
                     :appended [20]
                     :removed [30 40]}}
              {})))
      (is (= {:insert :foos
              :values {:id "id"
                       :foo [10 20]}}
             (sut/insert-statement
              :foos
              {:id "id"
               :foo {:intersection [10]
                     :appended [20]
                     :removed [200]}}
              {})))))
  (testing "with ttl"
    (is (= {:insert :foos
            :values {:id "id" :foo "foo"}
            :using [[:ttl 5000]]}
           (sut/insert-statement
            :foos
            {:id "id"
             :foo "foo"}
            {:using {:ttl 5000}}))))
  (testing "with timestamp"
    (is (= {:insert :foos
            :values {:id "id" :foo "foo"}
            :using [[:timestamp 5000]]}
           (sut/insert-statement
            :foos
            {:id "id"
             :foo "foo"}
            {:using {:timestamp 5000}}))))
  (testing "with if-not-exists"
    (is (= {:insert :foos
            :values {:id "id" :foo "foo"}
            :if-exists false}
           (sut/insert-statement
            :foos
            {:id "id"
             :foo "foo"}
            {:if-not-exists true}))))
  (testing "unknown opts"
    (is (thrown-with-msg? ExceptionInfo #"does not match schema"
                          (sut/insert-statement
                           :foos
                           {:id "id"
                            :foo "foo"}
                           {:blah true})))))
;; update-statement: key columns go to :where, remaining columns to
;; :set-columns; covers compound keys, the :set-columns restriction,
;; only-if / if-exists / if-not-exists, using-ttl / using-timestamp,
;; opts-schema validation, and minimal-change collection diffs (which
;; compile to hayt's append/prepend forms or a full overwrite).
(deftest update-statement-test
  (testing "simple update"
    (is (= {:update :foos
            :set-columns [[:foo "foo"]]
            :where [[:= :id 100]]}
           (sut/update-statement
            :foos
            [:id]
            {:id 100
             :foo "foo"}
            {}))))
  (testing "compound key, multiple cols"
    (is (= {:update :foos
            :set-columns [[:foo "foo"]
                          [:bar "bar"]]
            :where [[:= :id 100] [:= :id2 200]]}
           (sut/update-statement
            :foos
            [:id :id2]
            {:id 100
             :id2 200
             :foo "foo"
             :bar "bar"}
            {}))))
  (testing "set-columns"
    (is (= {:update :foos
            :set-columns [[:foo "foo"]]
            :where [[:= :id 100]]}
           (sut/update-statement
            :foos
            [:id]
            {:id 100
             :foo "foo"
             :bar "bar"}
            {:set-columns [:foo]}))))
  (testing "only-if"
    (is (= {:update :foos
            :set-columns [[:foo "foo"]
                          [:bar "bar"]]
            :where [[:= :id 100]]
            :if [[:= :foo "foo"]]}
           (sut/update-statement
            :foos
            [:id]
            {:id 100
             :foo "foo"
             :bar "bar"}
            {:only-if [[:= :foo "foo"]]}))))
  (testing "if-exists"
    (is (= {:update :foos
            :set-columns [[:foo "foo"]
                          [:bar "bar"]]
            :where [[:= :id 100]]
            :if-exists true}
           (sut/update-statement
            :foos
            [:id]
            {:id 100
             :foo "foo"
             :bar "bar"}
            {:if-exists true}))))
  (testing "if-not-exists"
    (is (= {:update :foos
            :set-columns [[:foo "foo"]
                          [:bar "bar"]]
            :where [[:= :id 100]]
            :if-exists false}
           (sut/update-statement
            :foos
            [:id]
            {:id 100
             :foo "foo"
             :bar "bar"}
            {:if-not-exists true}))))
  (testing "using ttl"
    (is (= {:update :foos
            :set-columns [[:foo "foo"]]
            :where [[:= :id 100]]
            :using [[:ttl 5000]]}
           (sut/update-statement
            :foos
            [:id]
            {:id 100
             :foo "foo"}
            {:using {:ttl 5000}}))))
  (testing "using timestamp"
    (is (= {:update :foos
            :set-columns [[:foo "foo"]]
            :where [[:= :id 100]]
            :using [[:timestamp 5000]]}
           (sut/update-statement
            :foos
            [:id]
            {:id 100
             :foo "foo"}
            {:using {:timestamp 5000}}))))
  (testing "unknown opts"
    (is (thrown-with-msg? ExceptionInfo #"does not match schema"
                          (sut/update-statement
                           :foos
                           [:id]
                           {:id 100
                            :foo "foo"}
                           {:blah true}))))
  (testing "collection coll diffs"
    (is (= {:update :foos
            :set-columns [[:foo [10 20]]]
            :where [[:= :id 100]]}
           (sut/update-statement
            :foos
            [:id]
            {:id 100
             :foo {:intersection []
                   :appended [10 20]
                   :removed [1 2]}}
            {})))
    (is (= {:update :foos
            :set-columns [[:bar 1]
                          [:foo #{}]]
            :where [[:= :id 100]]}
           (sut/update-statement
            :foos
            [:id]
            {:id 100
             :bar 1
             :foo {:intersection #{}
                   :removed #{2}}}
            {})))
    (is (= {:update :foos
            :set-columns [[:bar 1]
                          [:foo [+ {"foo" 2}]]]
            :where [[:= :id 100]]}
           (sut/update-statement
            :foos
            [:id]
            {:id 100
             :bar 1
             :foo {:intersection {"baz" 1}
                   :appended {"foo" 2}}}
            {})))
    (is (= {:update :foos
            :set-columns [[:bar 1]
                          [:foo [[0] +]]]
            :where [[:= :id 100]]}
           (sut/update-statement
            :foos
            [:id]
            {:id 100
             :bar 1
             :foo {:intersection [1 2 3]
                   :prepended [0]}}
            {})))
    (is (= {:update :foos
            :set-columns [[:bar 1]
                          [:foo [[0] +]]
                          [:foo [+ [4]]]]
            :where [[:= :id 100]]}
           (sut/update-statement
            :foos
            [:id]
            {:id 100
             :bar 1
             :foo {:intersection [1 2 3]
                   :prepended [0]
                   :appended [4]}}
            {})))))
;; delete-statement: key values given as a scalar, a positional vector, or a
;; record map; plus using-timestamp, only-if, if-exists, extra where
;; clauses, and opts-schema validation.
(deftest delete-statement-test
  (testing "simple delete"
    (is (= {:delete :foos
            :columns :*
            :where [[:= :id 10]]}
           (sut/delete-statement
            :foos
            :id
            10
            {})))
    (is (= {:delete :foos
            :columns :*
            :where [[:= :id 10] [:= :id2 20]]}
           (sut/delete-statement
            :foos
            [:id :id2]
            [10 20]
            {})))
    (is (= {:delete :foos
            :columns :*
            :where [[:= :id 10] [:= :id2 20]]}
           (sut/delete-statement
            :foos
            [:id :id2]
            {:id 10 :id2 20}
            {}))))
  (testing "using timestamp"
    (is (= {:delete :foos
            :columns :*
            :where [[:= :id 10]]
            :using [[:timestamp 5000]]}
           (sut/delete-statement
            :foos
            :id
            10
            {:using {:timestamp 5000}}))))
  (testing "only-if"
    (is (= {:delete :foos
            :columns :*
            :where [[:= :id 10]]
            :if [[:= :foo "foo"]]}
           (sut/delete-statement
            :foos
            :id
            10
            {:only-if [[:= :foo "foo"]]}))))
  (testing "if-exists"
    (is (= {:delete :foos
            :columns :*
            :where [[:= :id 10]]
            :if-exists true}
           (sut/delete-statement
            :foos
            :id
            10
            {:if-exists true}))))
  (testing "additional where"
    (is (= {:delete :foos
            :columns :*
            :where [[:= :id 10] [:= :foo "foo"] [:= :bar "bar"]]}
           (sut/delete-statement
            :foos
            :id
            10
            {:where [[:= :foo "foo"] [:= :bar "bar"]]}))))
  (testing "unknown opts"
    (is (thrown-with-msg? ExceptionInfo #"does not match schema"
                          (sut/delete-statement
                           :foos
                           :id
                           10
                           {:blah true})))))
;; prepare-update-statement: like update-statement but emits named bind
;; markers (:set_<col>, :where_<col>, :using_<opt>, and for collection diffs
;; :set_append_<col> / :set_prepend_<col>) instead of values.
(deftest prepare-update-statement-test
  (testing "simple prepared statement"
    (is (= {:update :foos
            :set-columns [[:foo :set_foo]]
            :where [[:= :id :where_id]]}
           (sut/prepare-update-statement
            :foos
            :id
            {:id 100
             :foo "foo"}
            {}))))
  (testing "compound key, multiple cols"
    (is (= {:update :foos
            :set-columns [[:foo :set_foo]
                          [:bar :set_bar]]
            :where [[:= :id :where_id]
                    [:= :id2 :where_id2]]}
           (sut/prepare-update-statement
            :foos
            [:id :id2]
            {:id 100
             :id2 200
             :foo "foo"
             :bar "bar"}
            {}))))
  (testing "with options"
    ;; multi-arity local fn defaults the key and/or record for brevity
    (let [base-cql {:update :foos
                    :set-columns [[:foo :set_foo]
                                  [:bar :set_bar]]
                    :where [[:= :id :where_id]]}
          base-record {:id 100
                       :foo "foo"
                       :bar "bar"}
          base-key [:id]
          prepare-update-statement
          (fn -prepare-update-statement
            ([opt]
             (-prepare-update-statement base-key base-record opt))
            ([record opt]
             (-prepare-update-statement base-key record opt))
            ([key record opt]
             (sut/prepare-update-statement :foos key record opt)))]
      (testing "set-columns"
        (is (= (update base-cql :set-columns (partial remove (fn [[k _]] (= :bar k))))
               (prepare-update-statement {:set-columns [:foo]}))))
      (testing "only-if"
        (is (= (assoc base-cql :if [[:= :foo "foo"]])
               (prepare-update-statement {:only-if [[:= :foo "foo"]]}))))
      (testing "if-exists"
        (is (= (assoc base-cql :if-exists true)
               (prepare-update-statement {:if-exists true}))))
      (testing "if-not-exists"
        (is (= (assoc base-cql :if-exists false)
               (prepare-update-statement {:if-not-exists true}))))
      (testing "using ttl"
        (is (= (assoc base-cql :using [[:ttl :using_ttl]])
               (prepare-update-statement {:using {:ttl 5000}}))))
      (testing "using timestamp"
        (is (= (assoc base-cql :using [[:timestamp :using_timestamp]])
               (prepare-update-statement {:using {:timestamp 5000}}))))
      (testing "unknown opts"
        (is (thrown-with-msg?
             ExceptionInfo
             #"does not match schema"
             (prepare-update-statement {:blah true}))))))
  (testing "collection coll diffs"
    (let [base-update {:update :foos
                       :where [[:= :id :where_id]]}
          append-to-col-cql (merge
                             base-update
                             {:set-columns [[:foo [+ :set_append_foo]]]})
          prepend-to-col-cql (merge
                              base-update
                              {:set-columns [[:foo [:set_prepend_foo +]]]})
          add-to-col-cql (merge
                          base-update
                          {:set-columns [[:foo [:set_prepend_foo +]]
                                         [:foo [+ :set_append_foo]]]})
          set-col-cql (merge
                       base-update
                       {:set-columns [[:foo :set_foo]]})
          base-record {:id 100}
          prepare-update-statement (fn [record]
                                     (sut/prepare-update-statement
                                      :foos
                                      [:id]
                                      (merge base-record record)
                                      {}))]
      (is (= append-to-col-cql
             (prepare-update-statement
              {:foo {:intersection {}
                     :appended {:a 20}}})))
      (is (= prepend-to-col-cql
             (prepare-update-statement
              {:foo {:intersection [1 2 3]
                     :prepended [0]}})))
      (is (= add-to-col-cql
             (prepare-update-statement
              {:foo {:intersection [1 2 3]
                     :prepended [0]
                     :appended [4]}})))
      ;; removals can't be expressed as append/prepend, so the statement
      ;; falls back to overwriting the whole column
      (is (= set-col-cql
             (prepare-update-statement
              {:foo {:intersection #{}
                     :removed #{10}}})))
      (is (= set-col-cql
             (prepare-update-statement
              {:foo {:intersection [0]
                     :appended [10 20]
                     :removed [1 2]}}))))))
;; prepare-update-values: builds the bind-value map matching the markers of
;; prepare-update-statement (:set_<col>, :where_<col>, :using_<opt>,
;; :set_append_<col> / :set_prepend_<col> for collection diffs).
(deftest prepare-update-values-test
  (testing "simple record"
    (is (= {:set_foo "foo"
            :set_bar "bar"
            :where_id 100}
           (sut/prepare-update-values
            :foos
            :id
            {:id 100
             :foo "foo"
             :bar "bar"}
            {}))))
  (testing "with options"
    (let [base-values {:set_foo "foo"
                       :set_bar "bar"
                       :where_id 100}
          base-record {:id 100
                       :foo "foo"
                       :bar "bar"}
          prepare-update-values
          (fn [opts]
            (sut/prepare-update-values
             :foos
             :id
             base-record
             opts))]
      (testing "set-columns"
        (is (= (dissoc base-values :set_bar)
               (prepare-update-values
                {:set-columns [:foo]}))))
      (testing "using ttl"
        (is (= (assoc base-values :using_ttl 500)
               (prepare-update-values
                {:using {:ttl 500}}))))
      (testing "using timestamp"
        (is (= (assoc base-values :using_timestamp 5000)
               (prepare-update-values
                {:using {:timestamp 5000}}))))))
  (testing "collection values"
    (let [base-record {:id 100}
          base-values {:where_id 100}
          prepare-update-values (fn [record]
                                  (sut/prepare-update-values
                                   :foos
                                   [:id]
                                   (merge base-record record)
                                   {}))]
      (is (= (assoc base-values :set_append_foo {:a 20})
             (prepare-update-values
              {:foo {:intersection {}
                     :appended {:a 20}}})))
      (is (= (assoc base-values :set_prepend_foo [0])
             (prepare-update-values
              {:foo {:intersection [1 2 3]
                     :prepended [0]}})))
      (is (= (assoc
              base-values
              :set_prepend_foo [0]
              :set_append_foo [4])
             (prepare-update-values
              {:foo {:intersection [1 2 3]
                     :prepended [0]
                     :appended [4]}})))
      ;; removals force a full overwrite bound to :set_<col>
      (is (= (assoc base-values :set_foo #{})
             (prepare-update-values
              {:foo {:intersection #{}
                     :removed #{10}}})))
      (is (= (assoc base-values :set_foo [0 10 20])
             (prepare-update-values
              {:foo {:intersection [0]
                     :appended [10 20]
                     :removed [1 2]}}))))))
(deftest prepare-record-values-test
(testing "simple record"
(is (= {:set_id 100
:set_foo "foo"
:set_bar "bar"}
(sut/prepare-record-values
:set
{:id 100
:foo "foo"
:bar "bar"}))))
(testing "collection values"
(testing "using plain values"
(is (= {:set_id 100
:set_foo [10 20]}
(sut/prepare-record-values
:set
{:id 100
:foo [10 20]}))))
(testing "using collection coll diffs"
(is (= {:set_id 100
:set_foo [10 20]}
(sut/prepare-record-values
:set
{:id 100
:foo {:intersection []
:appended [10 20]
:removed [30 40]}})))
(is (= {:set_id 100
:set_foo [10 20]}
(sut/prepare-record-values
:set
{:id 100
:foo {:intersection [10]
:appended [20]
:removed [200]}}))))))
| null | https://raw.githubusercontent.com/yapsterapp/er-cassandra/1d059f47bdf8654c7a4dd6f0759f1a114fdeba81/test/er_cassandra/record/statement_test.clj | clojure | (ns er-cassandra.record.statement-test
(:require
[er-cassandra.record.statement :as sut]
[clojure.test :as t]
[er-cassandra.util.test :as tu]
[schema.test :as st]
[clojure.test :refer [deftest is testing use-fixtures]]
[clj-uuid :as uuid])
(:import
[clojure.lang ExceptionInfo]))
(use-fixtures :once st/validate-schemas)
(use-fixtures :each (tu/with-session-fixture))
(deftest select-statement-test
(testing "partition selects"
(testing "simplest select"
(is (=
{:select :foos :columns :* :where [[:= :id "foo"]]}
(sut/select-statement
:foos
:id
"foo"
{}))))
(testing "compound key"
(is (=
{:select :foos :columns :* :where [[:= :foo "foo"] [:= :bar "bar"]]}
(sut/select-statement
:foos
[[:foo] :bar]
["foo" "bar"]
{}))))
(testing "with columns"
(is (=
{:select :foos :columns [:id] :where [[:= :id "foo"]]}
(sut/select-statement
:foos
:id
"foo"
{:columns [:id]}))))
(testing "with extra where"
(is (=
{:select :foos :columns :* :where [[:= :id "foo"] [:= :bar "bar"]]}
(sut/select-statement
:foos
:id
"foo"
{:where [[:= :bar "bar"]]})))
(is (=
{:select :foos :columns :* :where [[:= :id "foo"] [:= :bar "bar"] [:= :baz "baz"]]}
(sut/select-statement
:foos
:id
"foo"
{:where [[:= :bar "bar"]
[:= :baz "baz"]]}))))
(testing "with order-by"
(is (=
{:select :foos :columns :* :where [[:= :id "foo"]] :order-by [[:foo :asc]]}
(sut/select-statement
:foos
:id
"foo"
{:order-by [[:foo :asc]]})))
(is (=
{:select :foos :columns :* :where [[:= :id "foo"]] :order-by [[:foo :asc] [:bar :desc]]}
(sut/select-statement
:foos
:id
"foo"
{:order-by [[:foo :asc] [:bar :desc]]}))))
(testing "limit"
(is (=
{:select :foos :columns :* :where [[:= :id "foo"]] :limit 5000}
(sut/select-statement
:foos
:id
"foo"
{:limit 5000}))))
(testing "does not permit filtering"
(is (thrown-with-msg?
ExceptionInfo #"does not match schema"
(sut/select-statement
:foos
:id
"foo"
{:allow-filtering true}))))
(testing "throws with unknown opt"
(is (thrown-with-msg?
ExceptionInfo #"does not match schema"
(sut/select-statement
:foos
:id
"foo"
{:blah true})))))
(testing "table-scan selects"
(testing "simple table scan"
(is (= {:select :foos, :columns :*}
(sut/select-statement :foos {}))))
(testing "with extra where"
(is (= {:select :foos, :columns :* :where [[:= :bar "bar"] [:= :baz "baz"]]}
(sut/select-statement
:foos
{:where [[:= :bar "bar"] [:= :baz "baz"]]}))))
(testing "with limit"
(is (= {:select :foos, :columns :* :limit 5000}
(sut/select-statement :foos {:limit 5000}))))
(testing "does permit filtering"
(is (= {:select :foos
:columns :*
:where [[:= :bar "bar"] [:= :baz "baz"]]
:allow-filtering true}
(sut/select-statement
:foos
{:where [[:= :bar "bar"] [:= :baz "baz"]]
:allow-filtering true}))))
(testing "throws with unknown opt"
(is (thrown-with-msg?
ExceptionInfo #"does not match schema"
{:select :foos :columns :*}
(sut/select-statement
:foos
{:blah true}))))))
(deftest insert-statement-test
(testing "simple insert"
(is (= {:insert :foos :values {:id "id" :foo "foo"}}
(sut/insert-statement
:foos
{:id "id"
:foo "foo"}
{}))))
(testing "collection colls"
(testing "with plain values"
(is (= {:insert :foos
:values {:id "id"
:foo [10 20]}}
(sut/insert-statement
:foos
{:id "id"
:foo [10 20]}
{})))
(is (= {:insert :foos
:values {:id "id"
:foo {"bar" 20}}}
(sut/insert-statement
:foos
{:id "id"
:foo {"bar" 20}}
{}))))
(testing "with minimal change diffs"
(is (= {:insert :foos
:values {:id "id"
:foo [10 20]}}
(sut/insert-statement
:foos
{:id "id"
:foo {:intersection []
:prepended [10]
:appended [20]
:removed [30 40]}}
{})))
(is (= {:insert :foos
:values {:id "id"
:foo [10 20]}}
(sut/insert-statement
:foos
{:id "id"
:foo {:intersection [10]
:appended [20]
:removed [200]}}
{})))))
(testing "with ttl"
(is (= {:insert :foos
:values {:id "id" :foo "foo"}
:using [[:ttl 5000]]}
(sut/insert-statement
:foos
{:id "id"
:foo "foo"}
{:using {:ttl 5000}}))))
(testing "with timestamp"
(is (= {:insert :foos
:values {:id "id" :foo "foo"}
:using [[:timestamp 5000]]}
(sut/insert-statement
:foos
{:id "id"
:foo "foo"}
{:using {:timestamp 5000}}))))
(testing "with if-not-exists"
(is (= {:insert :foos
:values {:id "id" :foo "foo"}
:if-exists false}
(sut/insert-statement
:foos
{:id "id"
:foo "foo"}
{:if-not-exists true}))))
(testing "unknown opts"
(is (thrown-with-msg? ExceptionInfo #"does not match schema"
(sut/insert-statement
:foos
{:id "id"
:foo "foo"}
{:blah true})))))
(deftest update-statement-test
(testing "simple update"
(is (= {:update :foos
:set-columns [[:foo "foo"]]
:where [[:= :id 100]]}
(sut/update-statement
:foos
[:id]
{:id 100
:foo "foo"}
{}))))
(testing "compound key, multiple cols"
(is (= {:update :foos
:set-columns [[:foo "foo"]
[:bar "bar"]]
:where [[:= :id 100] [:= :id2 200]]}
(sut/update-statement
:foos
[:id :id2]
{:id 100
:id2 200
:foo "foo"
:bar "bar"}
{}))))
(testing "set-columns"
(is (= {:update :foos
:set-columns [[:foo "foo"]]
:where [[:= :id 100]]}
(sut/update-statement
:foos
[:id]
{:id 100
:foo "foo"
:bar "bar"}
{:set-columns [:foo]}))))
(testing "only-if"
(is (= {:update :foos
:set-columns [[:foo "foo"]
[:bar "bar"]]
:where [[:= :id 100]]
:if [[:= :foo "foo"]]}
(sut/update-statement
:foos
[:id]
{:id 100
:foo "foo"
:bar "bar"}
{:only-if [[:= :foo "foo"]]}))))
(testing "if-exists"
(is (= {:update :foos
:set-columns [[:foo "foo"]
[:bar "bar"]]
:where [[:= :id 100]]
:if-exists true}
(sut/update-statement
:foos
[:id]
{:id 100
:foo "foo"
:bar "bar"}
{:if-exists true}))))
(testing "if-not-exists"
(is (= {:update :foos
:set-columns [[:foo "foo"]
[:bar "bar"]]
:where [[:= :id 100]]
:if-exists false}
(sut/update-statement
:foos
[:id]
{:id 100
:foo "foo"
:bar "bar"}
{:if-not-exists true}))))
(testing "using ttl"
(is (= {:update :foos
:set-columns [[:foo "foo"]]
:where [[:= :id 100]]
:using [[:ttl 5000]]}
(sut/update-statement
:foos
[:id]
{:id 100
:foo "foo"}
{:using {:ttl 5000}}))))
(testing "using timestamp"
(is (= {:update :foos
:set-columns [[:foo "foo"]]
:where [[:= :id 100]]
:using [[:timestamp 5000]]}
(sut/update-statement
:foos
[:id]
{:id 100
:foo "foo"}
{:using {:timestamp 5000}}))))
(testing "unknown opts"
(is (thrown-with-msg? ExceptionInfo #"does not match schema"
(sut/update-statement
:foos
[:id]
{:id 100
:foo "foo"}
{:blah true}))))
(testing "collection coll diffs"
(is (= {:update :foos
:set-columns [[:foo [10 20]]]
:where [[:= :id 100]]}
(sut/update-statement
:foos
[:id]
{:id 100
:foo {:intersection []
:appended [10 20]
:removed [1 2]}}
{})))
(is (= {:update :foos
:set-columns [[:bar 1]
[:foo #{}]]
:where [[:= :id 100]]}
(sut/update-statement
:foos
[:id]
{:id 100
:bar 1
:foo {:intersection #{}
:removed #{2}}}
{})))
(is (= {:update :foos
:set-columns [[:bar 1]
[:foo [+ {"foo" 2}]]]
:where [[:= :id 100]]}
(sut/update-statement
:foos
[:id]
{:id 100
:bar 1
:foo {:intersection {"baz" 1}
:appended {"foo" 2}}}
{})))
(is (= {:update :foos
:set-columns [[:bar 1]
[:foo [[0] +]]]
:where [[:= :id 100]]}
(sut/update-statement
:foos
[:id]
{:id 100
:bar 1
:foo {:intersection [1 2 3]
:prepended [0]}}
{})))
(is (= {:update :foos
:set-columns [[:bar 1]
[:foo [[0] +]]
[:foo [+ [4]]]]
:where [[:= :id 100]]}
(sut/update-statement
:foos
[:id]
{:id 100
:bar 1
:foo {:intersection [1 2 3]
:prepended [0]
:appended [4]}}
{})))))
(deftest delete-statement-test
(testing "simple delete"
(is (= {:delete :foos
:columns :*
:where [[:= :id 10]]}
(sut/delete-statement
:foos
:id
10
{})))
(is (= {:delete :foos
:columns :*
:where [[:= :id 10][:= :id2 20]]}
(sut/delete-statement
:foos
[:id :id2]
[10 20]
{})))
(is (= {:delete :foos
:columns :*
:where [[:= :id 10][:= :id2 20]]}
(sut/delete-statement
:foos
[:id :id2]
{:id 10 :id2 20}
{}))))
(testing "using timestamp"
(is (= {:delete :foos
:columns :*
:where [[:= :id 10]]
:using [[:timestamp 5000]]}
(sut/delete-statement
:foos
:id
10
{:using {:timestamp 5000}}))))
(testing "only-if"
(is (= {:delete :foos
:columns :*
:where [[:= :id 10]]
:if [[:= :foo "foo"]]
}
(sut/delete-statement
:foos
:id
10
{:only-if [[:= :foo "foo"]]}))))
(testing "if-exists"
(is (= {:delete :foos
:columns :*
:where [[:= :id 10]]
:if-exists true
}
(sut/delete-statement
:foos
:id
10
{:if-exists true}))))
(testing "additional where"
(is (= {:delete :foos
:columns :*
:where [[:= :id 10][:= :foo "foo"][:= :bar "bar"]]}
(sut/delete-statement
:foos
:id
10
{:where [[:= :foo "foo"][:= :bar "bar"]]}))))
(testing "unknown opts"
(is (thrown-with-msg? ExceptionInfo #"does not match schema"
(sut/delete-statement
:foos
:id
10
{:blah true})))))
(deftest prepare-update-statement-test
(testing "simple prepared statement"
(is (= {:update :foos
:set-columns [[:foo :set_foo]]
:where [[:= :id :where_id]]}
(sut/prepare-update-statement
:foos
:id
{:id 100
:foo "foo"}
{}))))
(testing "compound key, multiple cols"
(is (= {:update :foos
:set-columns [[:foo :set_foo]
[:bar :set_bar]]
:where [[:= :id :where_id]
[:= :id2 :where_id2]]}
(sut/prepare-update-statement
:foos
[:id :id2]
{:id 100
:id2 200
:foo "foo"
:bar "bar"}
{}))))
(testing "with options"
(let [base-cql {:update :foos
:set-columns [[:foo :set_foo]
[:bar :set_bar]]
:where [[:= :id :where_id]]}
base-record {:id 100
:foo "foo"
:bar "bar"}
base-key [:id]
prepare-update-statement
(fn -prepare-update-statement
([opt]
(-prepare-update-statement base-key base-record opt))
([record opt]
(-prepare-update-statement base-key record opt))
([key record opt]
(sut/prepare-update-statement :foos key record opt)))]
(testing "set-columns"
(is (= (update base-cql :set-columns (partial remove (fn [[k _]] (= :bar k))))
(prepare-update-statement {:set-columns [:foo]}))))
(testing "only-if"
(is (= (assoc base-cql :if [[:= :foo "foo"]])
(prepare-update-statement {:only-if [[:= :foo "foo"]]}))))
(testing "if-exists"
(is (= (assoc base-cql :if-exists true)
(prepare-update-statement {:if-exists true}))))
(testing "if-not-exists"
(is (= (assoc base-cql :if-exists false)
(prepare-update-statement {:if-not-exists true}))))
(testing "using ttl"
(is (= (assoc base-cql :using [[:ttl :using_ttl]])
(prepare-update-statement {:using {:ttl 5000}}))))
(testing "using timestamp"
(is (= (assoc base-cql :using [[:timestamp :using_timestamp]])
(prepare-update-statement {:using {:timestamp 5000}}))))
(testing "unknown opts"
(is (thrown-with-msg?
ExceptionInfo
#"does not match schema"
(prepare-update-statement {:blah true}))))))
(testing "collection coll diffs"
(let [base-update {:update :foos
:where [[:= :id :where_id]]}
append-to-col-cql (merge
base-update
{:set-columns [[:foo [+ :set_append_foo]]]})
prepend-to-col-cql (merge
base-update
{:set-columns [[:foo [:set_prepend_foo +]]]})
add-to-col-cql (merge
base-update
{:set-columns [[:foo [:set_prepend_foo +]]
[:foo [+ :set_append_foo]]]})
set-col-cql (merge
base-update
{:set-columns [[:foo :set_foo]]})
base-record {:id 100}
prepare-update-statement (fn [record]
(sut/prepare-update-statement
:foos
[:id]
(merge base-record record)
{}))]
(is (= append-to-col-cql
(prepare-update-statement
{:foo {:intersection {}
:appended {:a 20}}})))
(is (= prepend-to-col-cql
(prepare-update-statement
{:foo {:intersection [1 2 3]
:prepended [0]}})))
(is (= add-to-col-cql
(prepare-update-statement
{:foo {:intersection [1 2 3]
:prepended [0]
:appended [4]}})))
(is (= set-col-cql
(prepare-update-statement
{:foo {:intersection #{}
:removed #{10}}})))
(is (= set-col-cql
(prepare-update-statement
{:foo {:intersection [0]
:appended [10 20]
:removed [1 2]}}))))))
(deftest prepare-update-values-test
(testing "simple record"
(is (= {:set_foo "foo"
:set_bar "bar"
:where_id 100}
(sut/prepare-update-values
:foos
:id
{:id 100
:foo "foo"
:bar "bar"}
{}))))
(testing "with options"
(let [base-values {:set_foo "foo"
:set_bar "bar"
:where_id 100}
base-record {:id 100
:foo "foo"
:bar "bar"}
prepare-update-values
(fn [opts]
(sut/prepare-update-values
:foos
:id
base-record
opts))]
(testing "set-columns"
(is (= (dissoc base-values :set_bar)
(prepare-update-values
{:set-columns [:foo]}))))
(testing "using ttl"
(is (= (assoc base-values :using_ttl 500)
(prepare-update-values
{:using {:ttl 500}}))))
(testing "using timestamp"
(is (= (assoc base-values :using_timestamp 5000)
(prepare-update-values
{:using {:timestamp 5000}}))))))
(testing "collection values"
(let [base-record {:id 100}
base-values {:where_id 100}
prepare-update-values (fn [record]
(sut/prepare-update-values
:foos
[:id]
(merge base-record record)
{}))]
(is (= (assoc base-values :set_append_foo {:a 20})
(prepare-update-values
{:foo {:intersection {}
:appended {:a 20}}})))
(is (= (assoc base-values :set_prepend_foo [0])
(prepare-update-values
{:foo {:intersection [1 2 3]
:prepended [0]}})))
(is (= (assoc
base-values
:set_prepend_foo [0]
:set_append_foo [4])
(prepare-update-values
{:foo {:intersection [1 2 3]
:prepended [0]
:appended [4]}})))
(is (= (assoc base-values :set_foo #{})
(prepare-update-values
{:foo {:intersection #{}
:removed #{10}}})))
(is (= (assoc base-values :set_foo [0 10 20])
(prepare-update-values
{:foo {:intersection [0]
:appended [10 20]
:removed [1 2]}}))))))
(deftest prepare-record-values-test
(testing "simple record"
(is (= {:set_id 100
:set_foo "foo"
:set_bar "bar"}
(sut/prepare-record-values
:set
{:id 100
:foo "foo"
:bar "bar"}))))
(testing "collection values"
(testing "using plain values"
(is (= {:set_id 100
:set_foo [10 20]}
(sut/prepare-record-values
:set
{:id 100
:foo [10 20]}))))
(testing "using collection coll diffs"
(is (= {:set_id 100
:set_foo [10 20]}
(sut/prepare-record-values
:set
{:id 100
:foo {:intersection []
:appended [10 20]
:removed [30 40]}})))
(is (= {:set_id 100
:set_foo [10 20]}
(sut/prepare-record-values
:set
{:id 100
:foo {:intersection [10]
:appended [20]
:removed [200]}}))))))
| |
8cee795fc7a477bb7ef57d566d7db4ec95fe959813901a7e3a74ca68c8a97749 | raviksharma/bartosz-basics-of-haskell | ex3.hs | the sentence parser from ( Ex 2 ) to take a pluggable parser . The new function is called several and takes as an argument a generic function String->(a , String ) , which is supposed to parse a string and return the result of type a together with the leftover string . Use it to split a string into a list of numbers .
import Data.Char
type Parser a = String -> (a, String)
several :: Parser a -> String -> [a]
several p "" = []
several p str = let (a, str') = p str
as = several p str'
in a:as
num :: Parser Int
num str =
let (digs, str') = span isDigit str
(_, str'') = span isSpace str'
in (read digs, str'')
word :: Parser String
word str = let (w, str') = span (not . isSpace) str
(_, str'') = span isSpace str'
in (w, str'')
main = do
print $ several num "12 4 128"
print $ several word "Ceci n'est pas une phrase"
| null | https://raw.githubusercontent.com/raviksharma/bartosz-basics-of-haskell/86d40d831f61415ef0022bff7fe7060ae6a23701/08-parser/ex3.hs | haskell | the sentence parser from ( Ex 2 ) to take a pluggable parser . The new function is called several and takes as an argument a generic function String->(a , String ) , which is supposed to parse a string and return the result of type a together with the leftover string . Use it to split a string into a list of numbers .
import Data.Char
type Parser a = String -> (a, String)
several :: Parser a -> String -> [a]
several p "" = []
several p str = let (a, str') = p str
as = several p str'
in a:as
num :: Parser Int
num str =
let (digs, str') = span isDigit str
(_, str'') = span isSpace str'
in (read digs, str'')
word :: Parser String
word str = let (w, str') = span (not . isSpace) str
(_, str'') = span isSpace str'
in (w, str'')
main = do
print $ several num "12 4 128"
print $ several word "Ceci n'est pas une phrase"
| |
153ecb616301ff5755574914939563df2c09e3af5e9a1fd49f448aaa5ec37887 | herd/herdtools7 | mapply.ml | (****************************************************************************)
(* the diy toolsuite *)
(* *)
, University College London , UK .
, INRIA Paris - Rocquencourt , France .
(* *)
Copyright 2015 - present Institut National de Recherche en Informatique et
(* en Automatique and the authors. All rights reserved. *)
(* *)
This software is governed by the CeCILL - B license under French law and
(* abiding by the rules of distribution of free software. You can use, *)
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " . We also give a copy in LICENSE.txt .
(****************************************************************************)
open Printf
type mode = Buff | File
Task engine
module type TArg = sig
val com : string
val comargs : string list
val verbose : int
val mode : mode
end
module Task(A:TArg) = struct
let stdout_chan = stdout
open Unix
module W = Warn.Make(A)
type task =
{ idx : int ; com : string ; chan : in_channel ;
oname : string ; buff : Buffer.t; }
(* Fork utility *)
let dir =
Filename.concat (Filename.get_temp_dir_name ())
(sprintf "mapply.%i" (getpid()))
let rmrf dir = ignore (Sys.command (sprintf "/bin/rm -rf %s" dir))
let _ =
match A.mode with
| File ->
let doit signum =
Sys.set_signal signum
(Sys.Signal_handle
(fun _ -> rmrf dir ; exit 2)) in
doit Sys.sigint ;
doit Sys.sigquit ;
doit Sys.sigterm ;
doit Sys.sighup ;
()
| Buff -> ()
let nobuff = Buffer.create 0
let popen idx cmd args name =
try
let base =
try
Filename.chop_extension (Filename.basename name)
with Invalid_argument _ ->
Warn.warn_always "Ignoring file %s, since it has no extension" name ;
raise Exit in
let oname = Filename.concat dir (sprintf "%s-%02i.txt" base idx) in
let com =
let opts = match args with
| [] -> ""
| _::_ -> " " ^ String.concat " " args in
match A.mode with
| File -> sprintf "%s%s %s>%s" cmd opts name oname
| Buff -> sprintf "%s%s %s" cmd opts name in
if A.verbose > 2 then eprintf "Starting: '%s' on %02i\n" com idx ;
let chan = Unix.open_process_in com in
begin match A.mode with
| File -> ()
| Buff -> set_nonblock (descr_of_in_channel chan)
end ;
let buff = match A.mode with Buff -> Buffer.create 128 | File -> nobuff in
Some { com; idx; buff; chan; oname;}
with Exit -> None
let table = Hashtbl.create 17
let get_waiting () = Hashtbl.fold (fun fd _ r -> fd::r) table []
let rec start_task idx (nrun,iter as k) = match iter with
| None -> k
| Some iter -> match Misc.next_iter iter with
| Some (name,iter) ->
let task = popen idx A.com A.comargs name in
begin match task with
| Some task ->
let fd = descr_of_in_channel task.chan in
Hashtbl.add table fd task ;
if A.verbose > 1 then eprintf "Start %02i\n%!" idx ;
nrun+1,Some iter
| None -> start_task idx (nrun,Some iter)
end
| None -> nrun,None
let sz = match A.mode with File -> 1024 | Buff -> 1024
let warn_status st =
Warn.warn_always "task ended with %s"
(match st with
| WEXITED i -> sprintf "exit %i" i
| WSIGNALED i -> sprintf "signaled %i" i
| WSTOPPED i -> sprintf "stopped %i" i)
let to_stdout oname =
Misc.input_protect
(fun chan ->
let buff = Bytes.create sz in
try
while true do
match input chan buff 0 sz with
| 0 -> raise Exit
| n -> output stdout_chan buff 0 n
done
with Exit ->())
oname ;
flush stdout_chan ;
Sys.remove oname
let task_file (nrun,files) fd =
let task =
try Hashtbl.find table fd
with Not_found -> assert false in
Hashtbl.remove table fd ;
begin match close_process_in task.chan with
| WEXITED 0 ->
to_stdout task.oname ;
start_task task.idx (nrun-1,files)
| st ->
warn_status st ;
start_task task.idx (nrun-1,files)
end
let to_buff fd t =
let buff = Bytes.create sz in
let rec to_rec () =
try
if A.verbose > 2 then eprintf "Read %02i\n%!" t.idx ;
let nread = read fd buff 0 sz in
if A.verbose > 1 then eprintf "Got %i from %02i\n%!" nread t.idx ;
match nread with
| 0 -> true
| n ->
Buffer.add_string t.buff (Bytes.sub_string buff 0 n) ;
to_rec ()
with
| Unix_error ((EWOULDBLOCK|EAGAIN),_,_) -> false
| e -> raise e in
to_rec ()
let task_buff (nrun,files as k) fd =
let task =
try Hashtbl.find table fd
with Not_found -> assert false in
let is_over = to_buff fd task in
if is_over then begin
if A.verbose > 1 then eprintf "Over %02i\n%!" task.idx ;
Hashtbl.remove table fd ;
begin match close_process_in task.chan with
| WEXITED 0 ->
Buffer.output_buffer stdout_chan task.buff ;
flush stdout_chan
| st ->
warn_status st
end ;
start_task task.idx (nrun-1,files)
end else begin
if A.verbose > 2 then eprintf "Again %02i\n%!" task.idx ;
k
end
let process_task = match A.mode with
| File -> task_file
| Buff -> task_buff
let ppok ok =
List.iter
(fun fd ->
let {idx=idx;_} =
try Hashtbl.find table fd
with Not_found -> assert false in
eprintf " %02i" idx)
ok ;
eprintf "\n%!"
let rec loop (nrun,_ as k) =
if nrun > 0 then begin
let fds = get_waiting () in
assert (List.length fds = nrun) ;
let ok,_,_ = select fds [] [] (-1.0) in
if A.verbose > 0 then begin match ok with
| []|[_] ->
if A.verbose > 1 then begin
eprintf "Select" ;
ppok ok
end
| _ ->
eprintf "Multiple select:" ;
ppok ok
end ;
let k = List.fold_left process_task k ok in
loop k
end
let run j names =
let names = match names with
| [] -> Misc.fold_stdin Misc.cons []
| _::_ -> names in
let names = Misc.mk_iter names in
begin match A.mode with
| File -> mkdir dir 0o700
| Buff -> ()
end ;
let rec start_rec k = function
| 0 -> k
| j -> start_rec (start_task j k) (j-1) in
loop (start_rec (0,Some names) j) ;
begin match A.mode with
| File ->
begin try rmdir dir
with _ -> W.warn "Cannot delete directory %s" dir end
| Buff -> ()
end
end
let args = ref []
let com = ref "echo"
let verbose = ref 0
let j = ref 1
let mode = ref Buff
let comargs = ref []
let parse_mode tag = match tag with
| "buff" -> Buff
| "file" -> File
| _ ->
raise
(Arg.Bad (sprintf "%s: bad rag for option -mode" tag))
let pp_mode = function
| Buff -> "buff"
| File -> "file"
let set_mode tag = mode := parse_mode tag
let usage = String.concat "\n" [
Printf.sprintf "Usage: %s [options] [<token> ...]" (Filename.basename Sys.argv.(0)) ;
"" ;
"Apply a command to every non-option token on the command-line. If none are" ;
"provided, tokens are read from stdin. Tokens that start with `@` are" ;
"interpreted as filepaths, and the lines of the file are read as tokens." ;
"" ;
"Options:" ;
]
let () =
Arg.parse
["-v", Arg.Unit (fun () -> incr verbose)," be verbose";
"-j", Arg.Int (fun i -> j := i),"<n> manage <n> simultaneaous tasks" ;
"-com", Arg.String (fun c -> com := c),"<com> set command (default echo)";
"-comargs",
Arg.String (fun args -> comargs := !comargs @ Misc.split_comma args),
"<args> initial arguments for command (comma separated)";
"-mode", Arg.String set_mode,
sprintf
"(buff|file) use either internal buffers or files for comunication, default %s" (pp_mode !mode);]
(fun arg -> args := arg :: !args)
usage
let names = !args
let () =
if !j <= 1 then
let do_test name =
let comargs = String.concat " " !comargs in
let com = sprintf "%s %s %s" !com comargs name in
ignore (Sys.command com) in
Misc.iter_argv_or_stdin do_test names
else
let module T =
Task
(struct
let com = !com
let comargs = !comargs
let verbose = !verbose
let mode = !mode
end) in
T.run !j names
| null | https://raw.githubusercontent.com/herd/herdtools7/b86aec8db64f8812e19468893deb1cdf5bbcfb83/tools/mapply.ml | ocaml | **************************************************************************
the diy toolsuite
en Automatique and the authors. All rights reserved.
abiding by the rules of distribution of free software. You can use,
**************************************************************************
Fork utility | , University College London , UK .
, INRIA Paris - Rocquencourt , France .
Copyright 2015 - present Institut National de Recherche en Informatique et
This software is governed by the CeCILL - B license under French law and
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " . We also give a copy in LICENSE.txt .
open Printf
type mode = Buff | File
Task engine
module type TArg = sig
val com : string
val comargs : string list
val verbose : int
val mode : mode
end
module Task(A:TArg) = struct
let stdout_chan = stdout
open Unix
module W = Warn.Make(A)
type task =
{ idx : int ; com : string ; chan : in_channel ;
oname : string ; buff : Buffer.t; }
let dir =
Filename.concat (Filename.get_temp_dir_name ())
(sprintf "mapply.%i" (getpid()))
let rmrf dir = ignore (Sys.command (sprintf "/bin/rm -rf %s" dir))
let _ =
match A.mode with
| File ->
let doit signum =
Sys.set_signal signum
(Sys.Signal_handle
(fun _ -> rmrf dir ; exit 2)) in
doit Sys.sigint ;
doit Sys.sigquit ;
doit Sys.sigterm ;
doit Sys.sighup ;
()
| Buff -> ()
let nobuff = Buffer.create 0
let popen idx cmd args name =
try
let base =
try
Filename.chop_extension (Filename.basename name)
with Invalid_argument _ ->
Warn.warn_always "Ignoring file %s, since it has no extension" name ;
raise Exit in
let oname = Filename.concat dir (sprintf "%s-%02i.txt" base idx) in
let com =
let opts = match args with
| [] -> ""
| _::_ -> " " ^ String.concat " " args in
match A.mode with
| File -> sprintf "%s%s %s>%s" cmd opts name oname
| Buff -> sprintf "%s%s %s" cmd opts name in
if A.verbose > 2 then eprintf "Starting: '%s' on %02i\n" com idx ;
let chan = Unix.open_process_in com in
begin match A.mode with
| File -> ()
| Buff -> set_nonblock (descr_of_in_channel chan)
end ;
let buff = match A.mode with Buff -> Buffer.create 128 | File -> nobuff in
Some { com; idx; buff; chan; oname;}
with Exit -> None
let table = Hashtbl.create 17
let get_waiting () = Hashtbl.fold (fun fd _ r -> fd::r) table []
let rec start_task idx (nrun,iter as k) = match iter with
| None -> k
| Some iter -> match Misc.next_iter iter with
| Some (name,iter) ->
let task = popen idx A.com A.comargs name in
begin match task with
| Some task ->
let fd = descr_of_in_channel task.chan in
Hashtbl.add table fd task ;
if A.verbose > 1 then eprintf "Start %02i\n%!" idx ;
nrun+1,Some iter
| None -> start_task idx (nrun,Some iter)
end
| None -> nrun,None
let sz = match A.mode with File -> 1024 | Buff -> 1024
let warn_status st =
Warn.warn_always "task ended with %s"
(match st with
| WEXITED i -> sprintf "exit %i" i
| WSIGNALED i -> sprintf "signaled %i" i
| WSTOPPED i -> sprintf "stopped %i" i)
let to_stdout oname =
Misc.input_protect
(fun chan ->
let buff = Bytes.create sz in
try
while true do
match input chan buff 0 sz with
| 0 -> raise Exit
| n -> output stdout_chan buff 0 n
done
with Exit ->())
oname ;
flush stdout_chan ;
Sys.remove oname
let task_file (nrun,files) fd =
let task =
try Hashtbl.find table fd
with Not_found -> assert false in
Hashtbl.remove table fd ;
begin match close_process_in task.chan with
| WEXITED 0 ->
to_stdout task.oname ;
start_task task.idx (nrun-1,files)
| st ->
warn_status st ;
start_task task.idx (nrun-1,files)
end
let to_buff fd t =
let buff = Bytes.create sz in
let rec to_rec () =
try
if A.verbose > 2 then eprintf "Read %02i\n%!" t.idx ;
let nread = read fd buff 0 sz in
if A.verbose > 1 then eprintf "Got %i from %02i\n%!" nread t.idx ;
match nread with
| 0 -> true
| n ->
Buffer.add_string t.buff (Bytes.sub_string buff 0 n) ;
to_rec ()
with
| Unix_error ((EWOULDBLOCK|EAGAIN),_,_) -> false
| e -> raise e in
to_rec ()
let task_buff (nrun,files as k) fd =
let task =
try Hashtbl.find table fd
with Not_found -> assert false in
let is_over = to_buff fd task in
if is_over then begin
if A.verbose > 1 then eprintf "Over %02i\n%!" task.idx ;
Hashtbl.remove table fd ;
begin match close_process_in task.chan with
| WEXITED 0 ->
Buffer.output_buffer stdout_chan task.buff ;
flush stdout_chan
| st ->
warn_status st
end ;
start_task task.idx (nrun-1,files)
end else begin
if A.verbose > 2 then eprintf "Again %02i\n%!" task.idx ;
k
end
let process_task = match A.mode with
| File -> task_file
| Buff -> task_buff
let ppok ok =
List.iter
(fun fd ->
let {idx=idx;_} =
try Hashtbl.find table fd
with Not_found -> assert false in
eprintf " %02i" idx)
ok ;
eprintf "\n%!"
let rec loop (nrun,_ as k) =
if nrun > 0 then begin
let fds = get_waiting () in
assert (List.length fds = nrun) ;
let ok,_,_ = select fds [] [] (-1.0) in
if A.verbose > 0 then begin match ok with
| []|[_] ->
if A.verbose > 1 then begin
eprintf "Select" ;
ppok ok
end
| _ ->
eprintf "Multiple select:" ;
ppok ok
end ;
let k = List.fold_left process_task k ok in
loop k
end
let run j names =
let names = match names with
| [] -> Misc.fold_stdin Misc.cons []
| _::_ -> names in
let names = Misc.mk_iter names in
begin match A.mode with
| File -> mkdir dir 0o700
| Buff -> ()
end ;
let rec start_rec k = function
| 0 -> k
| j -> start_rec (start_task j k) (j-1) in
loop (start_rec (0,Some names) j) ;
begin match A.mode with
| File ->
begin try rmdir dir
with _ -> W.warn "Cannot delete directory %s" dir end
| Buff -> ()
end
end
let args = ref []
let com = ref "echo"
let verbose = ref 0
let j = ref 1
let mode = ref Buff
let comargs = ref []
let parse_mode tag = match tag with
| "buff" -> Buff
| "file" -> File
| _ ->
raise
(Arg.Bad (sprintf "%s: bad rag for option -mode" tag))
let pp_mode = function
| Buff -> "buff"
| File -> "file"
let set_mode tag = mode := parse_mode tag
let usage = String.concat "\n" [
Printf.sprintf "Usage: %s [options] [<token> ...]" (Filename.basename Sys.argv.(0)) ;
"" ;
"Apply a command to every non-option token on the command-line. If none are" ;
"provided, tokens are read from stdin. Tokens that start with `@` are" ;
"interpreted as filepaths, and the lines of the file are read as tokens." ;
"" ;
"Options:" ;
]
let () =
Arg.parse
["-v", Arg.Unit (fun () -> incr verbose)," be verbose";
"-j", Arg.Int (fun i -> j := i),"<n> manage <n> simultaneaous tasks" ;
"-com", Arg.String (fun c -> com := c),"<com> set command (default echo)";
"-comargs",
Arg.String (fun args -> comargs := !comargs @ Misc.split_comma args),
"<args> initial arguments for command (comma separated)";
"-mode", Arg.String set_mode,
sprintf
"(buff|file) use either internal buffers or files for comunication, default %s" (pp_mode !mode);]
(fun arg -> args := arg :: !args)
usage
let names = !args
(* Entry point: apply [!com] to every token.
   With -j <= 1 the commands run sequentially in-process via [Sys.command];
   otherwise the [Task] functor is instantiated with the parsed options and
   [!j] simultaneous tasks are managed by [T.run]. *)
let () =
  if !j <= 1 then
    let do_test name =
      let comargs = String.concat " " !comargs in
      (* NOTE(review): the command line is built by plain string concatenation,
         so tokens containing shell metacharacters reach the shell unquoted --
         confirm this is intended. *)
      let com = sprintf "%s %s %s" !com comargs name in
      ignore (Sys.command com) in
    Misc.iter_argv_or_stdin do_test names
  else
    (* Instantiate the parallel runner with a snapshot of the options. *)
    let module T =
      Task
        (struct
          let com = !com
          let comargs = !comargs
          let verbose = !verbose
          let mode = !mode
        end) in
    T.run !j names
|
3471d7795aca9883a699acbbffeb520205a73566f118a639fc78bc3b5b647d35 | minad/intro | Intro.hs | {-# OPTIONS_HADDOCK show-extensions #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE Safe #-}
-----------------------------------------------------------------------------
-- |
-- Module      : Intro
-- Copyright   : (c) 2016-2017
-- License     : MIT
--
-- Maintainer  :
-- Stability   : experimental
-- Portability : portable
--
-- Intro is a modern Prelude which provides safe alternatives
-- for most of the partial functions and follows other
-- best practices, e.g., Text is preferred over String.
-- For String overloading the extension 'OverloadedStrings' should be used.
-- Container types and Monad transformers are provided.
--
-- Most important - this Prelude tries to keep things simple.
-- This means it just reexports from base and commonly used libraries
-- and adds only very few additional functions.
--
-- List of design decisions:
--
* Keep everything at one place ( There are only three modules and Intro . Trustworthy is only there for Safe Haskell )
* Conservative extension over the base Prelude
-- * Rely only on very common external libraries
-- * Avoid writing custom functions
-- * Export everything explicitly to provide a stable interface and for good documentation
-- * Export only total functions or provide safe alternatives (Very few exceptions like div etc.)
* Prefer Text over String , provide ' ConvertString ' and ' EncodeString '
* Provide Monad transformers
-- * Provide container types
-- * Prefer generic functions
-- * Debugging functions, like 'trace' and 'undefined' are available but produce compile time warnings
-- * Don't provide error, only panic instead
-- * Compatibility with unqualified import of Control.Lens
--
Some ' Prelude ' functions are missing from ' Intro ' . More general variants are available for the following functions :
--
-- * '>>' = 'Control.Applicative.*>'
-- * '++' = '<>'
-- * 'concat' = 'Data.Monoid.mconcat'
-- * 'fmap' is replaced by generalized 'map'
-- * 'mapM' = 'Control.Applicative.traverse'
-- * 'mapM_' = 'Data.Foldable.traverse_'
* ' return ' = ' Control.Applicative.pure '
* ' sequence ' = ' Control . '
* ' sequence _ ' = ' Control . _ '
--
-- Integral type conversions are more restricted:
--
-- * 'toIntegral' is a safer, but more restricted version of 'fromIntegral'
-- * 'toIntegerSized' is safe and checked
-- * 'fromIntegralUnsafe' to get the unsafe, overflowing behavior
-- * 'fromIntegerUnsafe' instead of 'fromInteger'
--
Unsafe functions are not provided . For example ' read ' is replaced by ' ' .
The unsafe list functions are replaced by their ' NonEmpty ' counterparts . Furthermore ' * May ' and ' * Def '
-- functions are exported from the 'safe' package, e.g., 'headMay'.
--
* ' cycleMay ' , ' headMay ' , ' tailMay ' , ' initMay ' , ' lastMay '
-- * 'toEnumMay', 'predMay', 'succMay'
--
-- The 'maximum' and 'minimum' functions have been replaced by variants which
-- are safe for empty structures.
--
-- * 'maximumBound', 'maximumBounded', ...
* ' ' , ' ' , ...
--
-- These functions are not provided for various reasons:
--
-- * '!!' is unsafe and /O(n)/. Use a 'Data.Map.Map' instead.
-- * 'lines', 'unlines', 'words' and 'unwords' are not provided. Use qualified 'Data.Text' import instead.
-- * Instead of 'foldl', it is recommended to use 'Data.Foldable.foldl''.
-- * 'lex' is not commonly used. Use a parser combinator library instead.
-- * 'gcd' and 'lcm' are not commonly used.
-- * 'error' and 'errorWithoutStackTrace' are not provided. Use 'panic' instead.
* ' ioError ' and ' userError ' are not provided . Import modules for exception handling separately if needed .
-- * Some 'Text.Read.Read' and 'Show' class functions are not provided. Don't write these instances yourself.
--
-- Additional types and functions:
--
* ' LText ' alias for lazy ' Text '
* ' ' alias for lazy ' ByteString '
* ' fromFoldable ' to convert from ' Data . Foldable . Foldable ' to an ' IsList ' type
* ' convertList ' to convert between two ' IsList ' types .
* ' asList ' to convert from an ' IsList ' type to a ' List ' . This function is an
alias for the ' toList ' function from the ' IsList ' class .
-- * 'showT' and 'showS' are monomorphic 'show' functions.
-- * '<>^' lifted composition
-- * '.:' function composition
-- * '?:' as an alias for 'fromMaybe'
* ' skip ' as an alias for ( ) @
-- * 'panic' as a replacement for 'error'
-----------------------------------------------------------------------------
module Intro (
-- * Basic functions
-- Data.Function.id
-- , (Data.Function..)
Data.Function.const
, Data.Function.flip
, (Data.Function.$)
, (Prelude.$!)
, (Data.Function.&)
, Data.Function.fix
, Data.Function.on
, (.:)
, Prelude.until
, Prelude.asTypeOf
, Prelude.seq
-- * Basic algebraic types
-- ** Void
, Data.Void.Void
* *
, Data.Bool.Bool(False, True)
, (Data.Bool.&&)
, (Data.Bool.||)
, Data.Bool.bool
, Data.Bool.not
, Data.Bool.otherwise
-- ** Maybe
, Data.Maybe.Maybe(Nothing, Just)
, Data.Maybe.catMaybes
, Data.Maybe.fromMaybe
, (?:)
, Data.Maybe.isJust
, Data.Maybe.isNothing
, Data . -- use headMay
, Data . Maybe.maybeToList -- use toList
, Data.Maybe.mapMaybe
, Data.Maybe.maybe
-- ** List
, Intro.Trustworthy.IsList(
Item
, fromList
, toList -- renamed to asList
)
, asList
, convertList
, fromFoldable
, Data.List.break
, Data.List.Extra.breakOn
, Data.List.Extra.breakOnEnd
, Data.List.drop
, Data.List.Extra.dropEnd
, Data.List.dropWhile
, Data.List.dropWhileEnd
, Data.List.filter
, Data.List.group
, Data.List.groupBy
, Data.List.Extra.groupOn
, Data.List.Extra.groupSort
, Data.List.Extra.groupSortBy
, Data.List.Extra.groupSortOn
, Data.List.inits
, Data.List.intercalate
, Data.List.intersperse
, Data.List.isPrefixOf
, Data.List.isSuffixOf
, Data.List.iterate
, Data.List.iterate'
, Data.List.lookup
, Data.List.Extra.nubOrd
, Data.List.Extra.nubOrdBy
, Data.List.Extra.nubOrdOn
, Data.List.permutations
, Data.List.repeat
, Data.List.replicate
, Data.List.reverse
, Data.List.scanl
, Data.List.scanr
, Data.List.sort
, Data.List.sortBy
, Data.List.sortOn
, Data.List.span
, Data.List.Extra.spanEnd
, Data.List.splitAt
, Data.List.Extra.split
, Data.List.Extra.splitOn
, Data.List.subsequences
, Data.List.tails
, Data.List.take
, Data.List.Extra.takeEnd
, Data.List.takeWhile
, Data.List.Extra.takeWhileEnd
, Data.List.transpose
, Data.List.unfoldr
, Data.List.unzip
, Data.List.unzip3
, Data.List.zip
, Data.List.zip3
, Data.List.zipWith
, Data.List.zipWith3
, Safe.headDef
, Safe.headMay -- prefer pattern match
, Safe.initDef
, Safe.initMay
, Safe.lastDef
, Safe.lastMay
, Safe.tailDef
, Safe.tailMay -- prefer pattern match
, Safe.cycleMay
, Safe.cycleDef
* * NonEmpty
, Data.List.NonEmpty.NonEmpty((:|))
-- (<|), -- in lens
, Data.List.NonEmpty.scanl1
, Data.List.NonEmpty.scanr1
, Data.List.NonEmpty.head
, Data.List.NonEmpty.init
, Data.List.NonEmpty.last
, Data.List.NonEmpty.tail
, Data.List.NonEmpty.cycle
-- ** Tuple
, Data.Tuple.fst
, Data.Tuple.snd
, Data.Tuple.curry
, Data.Tuple.uncurry
, Data.Tuple.swap
-- ** Either
, Data.Either.Either(Left, Right)
, Data.Either.either
, Data.Either.Extra.fromLeft
, Data.Either.Extra.fromRight
, Data.Either.isLeft
, Data.Either.isRight
, Data.Either.lefts
, Data.Either.rights
, Data.Either.partitionEithers
, Data.Either.Extra.eitherToMaybe
, Data.Either.Extra.maybeToEither
-- * Text types
* * and String
, Data.Char.Char
, Data.String.String
-- ** Text
, Data.Text.Text
, LText
Data.Text.lines , -- Use qualified import instead
,
-- Data.Text.unlines,
-- Data.Text.unwords,
-- ** ByteString
, Data.ByteString.ByteString
, LByteString
, Data.ByteString.Short.ShortByteString
-- ** String conversion
, Data.String.IsString(fromString)
, Intro.ConvertString.ConvertString(convertString)
, Intro.ConvertString.EncodeString(encodeString, decodeString, decodeStringLenient)
, Lenient(..)
-- * Container types
-- ** Map and Set (Ordered)
, Data.Map.Map
, Data.Set.Set
, Data.IntMap.IntMap
, Data.IntSet.IntSet
* * HashedMap and HashSet
, Data.HashMap.Strict.HashMap
, Data.HashSet.HashSet
, Data.Hashable.Hashable(hash, hashWithSalt)
, Intro.Trustworthy.Hashable1
, Intro.Trustworthy.Hashable2
-- ** Seq
, Data.Sequence.Seq
-- * Numeric types
-- ** Big integers
, Prelude.Integer
, Numeric.Natural.Natural
-- ** Small integers
, Data.Int.Int
, Data.Int.Int8
, Data.Int.Int16
, Data.Int.Int32
, Data.Int.Int64
, Data.Word.Word
, Data.Word.Word8
, Data.Word.Word16
, Data.Word.Word32
, Data.Word.Word64
-- ** Floating point
, Prelude.Float
, Prelude.Double
-- * Numeric type classes
* *
, Prelude.Num((+), (-), (*), negate, abs, signum
fromInteger
)
, Prelude.subtract
, (Prelude.^) -- partial functions!
-- ** Real
, Prelude.Real(toRational)
, Prelude.realToFrac
-- ** Integral
, Prelude.Integral(quot, rem, div, mod, quotRem, divMod, toInteger) -- partial functions!
, Intro.ConvertIntegral.ToIntegral(..)
, Data.Bits.toIntegralSized
, Intro.ConvertIntegral.fromIntegralUnsafe
, Intro.ConvertIntegral.fromIntegerUnsafe
--, Prelude.fromIntegral
, Prelude.even
, Prelude.odd
,
, Prelude.lcm
-- ** Fractional
, Prelude.Fractional((/), recip, fromRational) -- partial functions
, (Prelude.^^)
-- ** Floating
, Prelude.Floating(pi, exp, log, sqrt, (**), logBase, sin, cos, tan,
asin, acos, atan, sinh, cosh, tanh, asinh, acosh, atanh)
-- ** RealFrac
, Prelude.RealFrac(properFraction, truncate, round, ceiling, floor) -- partial functions
-- ** RealFloat
, Prelude.RealFloat(floatRadix, floatDigits, floatRange, decodeFloat,
encodeFloat, exponent, significand, scaleFloat, isNaN,
isInfinite, isDenormalized, isIEEE, isNegativeZero, atan2)
-- ** Bits
, Data.Bits.Bits((.&.), (.|.), xor, complement, shift, rotate, zeroBits,
bit, setBit, clearBit, complementBit, testBit,
-- bitSize, bitSizeMaybe
isSigned,
-- unsafeShiftL
-- unsafeShiftR
-- shiftR, shiftL,
rotateL, rotateR, popCount)
, Data.Bits.FiniteBits(finiteBitSize, countLeadingZeros, countTrailingZeros)
-- * Read and Show
-- ** Show
, Text.Show.Show
, Data.Functor.Classes.Show1
, Data.Functor.Classes.Show2
, show
, showT
, showS
-- ** Read
, Text.Read.Read
, Data.Functor.Classes.Read1
, Data.Functor.Classes.Read2
, readMaybe
-- * Equality and ordering
-- ** Eq
, Data.Eq.Eq((==), (/=))
, Data.Functor.Classes.Eq1
, Data.Functor.Classes.Eq2
* *
, Data.Ord.Ord(compare, (<), (>), (<=), (>=), max, min)
, Data.Functor.Classes.Ord1
, Data.Functor.Classes.Ord2
, Data.Ord.Ordering(LT,GT,EQ)
, Data.Ord.Down(Down)
, Data.Ord.comparing
* *
, Prelude.Enum(-- toEnum, succ, pred, -- partial
fromEnum, enumFrom, enumFromThen,
enumFromTo, enumFromThenTo)
, Safe.toEnumMay
, Safe.toEnumDef
, Safe.predMay
, Safe.predDef
, Safe.succMay
, Safe.succDef
-- ** Bounded
, Prelude.Bounded(minBound, maxBound)
-- * Algebraic type classes
-- ** Category
, Control.Category.Category(id, (.))
, (Control.Category.<<<)
, (Control.Category.>>>)
-- ** Semigroup
, Data.Semigroup.Semigroup((<>), sconcat, stimes)
, Data.Semigroup.First(First, getFirst)
, Data.Semigroup.Last(Last, getLast)
, Data.Semigroup.Min(Min, getMin)
, Data.Semigroup.Max(Max, getMax)
, Data.Semigroup.Option(Option, getOption)
-- ** Monoid
, Data.Monoid.Monoid(mempty, mappend, mconcat)
, Data.Monoid.Dual(Dual, getDual)
, Data.Monoid.Endo(Endo, appEndo)
, Data.Monoid.All(All, getAll)
, Data.Monoid.Any(Any, getAny)
-- Hide because of name clash with sum functors
, Data . Monoid . Sum(Sum , getSum )
, Data . Monoid . Product(Product , )
-- Provide semigroup instances instead
, Data . Monoid . First(First , getFirst )
, Data . Monoid . Last(Last , getLast )
, Data.Monoid.Alt(Alt, getAlt)
-- ** Functor
, Data.Functor.Functor(
(<$)
--, fmap -- hide fmap, use map instead
)
, (Data.Functor.$>)
, (Data.Functor.<$>)
, (Data.Functor.<&>)
, map
, Data.Functor.void
, Control.Applicative.Const(Const, getConst) -- Data.Functor.Const
, Data.Functor.Identity.Identity(Identity, runIdentity)
* *
, Data.Functor.Contravariant.Contravariant(
(>$),
contramap
)
, (Data.Functor.Contravariant.$<)
, (Data.Functor.Contravariant.>$<)
, (Data.Functor.Contravariant.>$$<)
-- ** Foldable
, Data.Foldable.Foldable(elem, fold, foldMap,
foldr, foldr',
-- foldl, -- hide the bad one
foldl',
product, sum, toList)
, Data.Foldable.null
, Data.Foldable.length
, Data.Foldable.foldrM
, Data.Foldable.foldlM
, Data.Foldable.traverse_
, Data.Foldable.for_
, Data.Foldable.asum
, Data.Foldable.concatMap
, Data.Foldable.all
, Data.Foldable.any
, Data.Foldable.or
, Data.Foldable.and
, Data.Foldable.find
, Data.Foldable.notElem
, Data.Foldable.sequenceA_
, Safe.Foldable.foldl1May
, Safe.Foldable.foldr1May
, Safe.Foldable.maximumByMay
, Safe.Foldable.maximumBoundBy
, Safe.Foldable.minimumByMay
, Safe.Foldable.minimumBoundBy
, Safe.Foldable.maximumMay
, Safe.Foldable.maximumBounded
, Safe.Foldable.maximumBound
, Safe.Foldable.minimumMay
, Safe.Foldable.minimumBounded
, Safe.Foldable.minimumBound
* *
, Data.Traversable.Traversable(traverse, sequenceA)
, Data.Traversable.for
, Data.Traversable.mapAccumL
, Data.Traversable.mapAccumR
-- ** Applicative
, Control.Applicative.Applicative(pure, (<*>), (*>), (<*))
, Control.Applicative.ZipList(ZipList, getZipList)
, (Control.Applicative.<**>)
, Control.Applicative.liftA2
, Control.Applicative.liftA3
, skip
, (<>^)
-- ** Alternative
, Control.Applicative.Alternative((<|>), empty, many {-, some -})
, Control.Applicative.optional
, Data.List.NonEmpty.some1
-- ** Monad
, Control.Monad.Monad((>>=))
, Control.Monad.Fix.MonadFix(mfix)
, (Control.Monad.=<<)
, (Control.Monad.<=<)
, (Control.Monad.>=>)
, Control.Monad.join
, Control.Monad.guard
, Control.Monad.when
, Control.Monad.unless
, Control.Monad.replicateM
, Control.Monad.replicateM_
, (Control.Monad.<$!>)
, Control.Monad.Extra.whenM
, Control.Monad.Extra.unlessM
, Control.Monad.Extra.ifM
, Control.Monad.Extra.allM
, Control.Monad.Extra.anyM
, Control.Monad.Extra.andM
, Control.Monad.Extra.orM
, Control.Monad.Extra.concatMapM
, (Control.Monad.Extra.&&^)
, (Control.Monad.Extra.||^)
-- ** Bifunctor
, Data.Bifunctor.Bifunctor(bimap, first, second)
-- ** Bifoldable
, Data.Bifoldable.Bifoldable(bifoldr
, bifoldl -- not strict enough
, bifoldMap)
, Data.Bifoldable.bifoldl'
, Data.Bifoldable.bifoldr'
, Data.Bifoldable.bitraverse_
, Data.Bifoldable.bisequenceA_
, Data.Bifoldable.bifor_
-- ** Bitraversable
, Data.Bitraversable.Bitraversable(bitraverse)
, Data.Bitraversable.bifor
, Data.Bitraversable.bisequenceA
-- * Effects and monad transformers
, Control.Monad.Trans.MonadTrans(lift)
* * MonadPlus and MaybeT
, Control.Monad.MonadPlus
, Control.Monad.Trans.Maybe.MaybeT(MaybeT, runMaybeT)
, Control.Monad.Trans.Maybe.mapMaybeT
* * and ExceptT
, Control.Monad.Except.MonadError(throwError, catchError)
, Control.Monad.Except.Except
, Control.Monad.Except.runExcept
, Control.Monad.Except.mapExcept
, Control.Monad.Except.withExcept
, Control.Monad.Except.ExceptT(ExceptT)
, Control.Monad.Except.runExceptT
, Control.Monad.Except.mapExceptT
, Control.Monad.Except.withExceptT
-- ** MonadReader and ReaderT
, Control.Monad.Reader.MonadReader(ask, local, reader)
, Control.Monad.Reader.asks
, Control.Monad.Reader.Reader
, Control.Monad.Reader.runReader
, Control.Monad.Reader.mapReader
, Control.Monad.Reader.withReader
, Control.Monad.Reader.ReaderT(ReaderT, runReaderT)
, Control.Monad.Reader.mapReaderT
, Control.Monad.Reader.withReaderT
-- ** MonadWriter and WriterT
, Control.Monad.Writer.CPS.MonadWriter(writer, tell, listen, pass)
, Control.Monad.Writer.CPS.Writer
, Control.Monad.Writer.CPS.runWriter
, Control.Monad.Writer.CPS.execWriter
, Control.Monad.Writer.CPS.mapWriter
, Control.Monad.Writer.CPS.WriterT
, Control.Monad.Writer.CPS.writerT
, Control.Monad.Writer.CPS.runWriterT
, Control.Monad.Writer.CPS.execWriterT
, Control.Monad.Writer.CPS.mapWriterT
* * MonadState and StateT
, Control.Monad.State.Strict.MonadState(get, put, state)
, Control.Monad.State.Strict.State
, Control.Monad.State.Strict.gets
, Control.Monad.State.Strict.modify
, Control.Monad.State.Strict.modify'
, Control.Monad.State.Strict.runState
, Control.Monad.State.Strict.evalState
, Control.Monad.State.Strict.execState
, Control.Monad.State.Strict.mapState
, Control.Monad.State.Strict.withState
, Control.Monad.State.Strict.StateT(StateT, runStateT)
, Control.Monad.State.Strict.evalStateT
, Control.Monad.State.Strict.execStateT
, Control.Monad.State.Strict.mapStateT
, Control.Monad.State.Strict.withStateT
* * MonadRWS and RWST
, Control.Monad.RWS.CPS.MonadRWS
, Control.Monad.RWS.CPS.RWS
, Control.Monad.RWS.CPS.rws
, Control.Monad.RWS.CPS.runRWS
, Control.Monad.RWS.CPS.evalRWS
, Control.Monad.RWS.CPS.execRWS
, Control.Monad.RWS.CPS.mapRWS
, Control.Monad.RWS.CPS.RWST
, Control.Monad.RWS.CPS.rwsT
, Control.Monad.RWS.CPS.runRWST
, Control.Monad.RWS.CPS.evalRWST
, Control.Monad.RWS.CPS.execRWST
, Control.Monad.RWS.CPS.mapRWST
-- * Generic type classes
, GHC.Generics.Generic
, GHC.Generics.Generic1
, Data.Typeable.Typeable
-- * Type level
, Data.Kind.Type
, Intro.Trustworthy.Constraint
, Data.Proxy.Proxy(Proxy)
--, Data.Tagged.Tagged(Tagged)
--, Data.Tagged.unTagged
-- * IO
, System.IO.IO
, Control.Monad.Trans.MonadIO(liftIO)
-- ** Console
, print
, putChar
, putStr
, putStrLn
--, interact
-- ** File
, System.IO.FilePath
, readFile
, writeFile
, appendFile
, readFileUtf8
, writeFileUtf8
, appendFileUtf8
-- * Error handling and debugging
, HasCallStack
, Control.Monad.Fail.MonadFail
, fail
, panic
, undefined
, Intro.Trustworthy.trace
, Intro.Trustworthy.traceIO
, Intro.Trustworthy.traceId
, Intro.Trustworthy.traceM
, Intro.Trustworthy.traceShow
, Intro.Trustworthy.traceShowId
, Intro.Trustworthy.traceShowM
) where
import Control.Monad.Trans (MonadIO(liftIO))
import Data.ByteString (ByteString)
import Data.Char (Char)
import Data.Function ((.), ($))
import Data.Functor (Functor(fmap))
import Data.Maybe (Maybe, fromMaybe)
import Data.Semigroup (Semigroup((<>)))
import Data.String (IsString(fromString), String)
import Data.Text (Text)
import Intro.ConvertIntegral
import Intro.ConvertString
import Intro.Trustworthy
import System.IO (FilePath)
import Text.Show (Show)
import qualified Control.Applicative
import qualified Control.Category
import qualified Control.Monad
import qualified Control.Monad.Except
import qualified Control.Monad.Extra
import qualified Control.Monad.Fail
import qualified Control.Monad.Fix
import qualified Control.Monad.RWS.CPS
import qualified Control.Monad.Reader
import qualified Control.Monad.State.Strict
import qualified Control.Monad.Trans
import qualified Control.Monad.Trans.Maybe
import qualified Control.Monad.Writer.CPS
import qualified Data.Bifoldable
import qualified Data.Bifunctor
import qualified Data.Bitraversable
import qualified Data.Bits
import qualified Data.Bool
import qualified Data.ByteString
import qualified Data.ByteString.Lazy
import qualified Data.ByteString.Short
import qualified Data.Either
import qualified Data.Either.Extra
import qualified Data.Eq
import qualified Data.Foldable
import qualified Data.Function
import qualified Data.Functor
import qualified Data.Functor.Classes
import qualified Data.Functor.Identity
import qualified Data.Functor.Contravariant
import qualified Data.HashMap.Strict
import qualified Data.HashSet
import qualified Data.Hashable
import qualified Data.Int
import qualified Data.IntMap
import qualified Data.IntSet
import qualified Data.Kind
import qualified Data.List
import qualified Data.List.Extra
import qualified Data.List.NonEmpty
import qualified Data.Map
import qualified Data.Maybe
import qualified Data.Monoid
import qualified Data.Ord
import qualified Data.Proxy
import qualified Data.Semigroup
import qualified Data.Sequence
import qualified Data.Set
import qualified Data.Text.IO
import qualified Data.Text.Lazy
import qualified Data.Traversable
import qualified Data.Tuple
import qualified Data.Typeable
import qualified Data.Void
import qualified Data.Word
import qualified GHC.Generics
import qualified Numeric.Natural
import qualified Prelude
import qualified Safe
import qualified Safe.Foldable
import qualified System.IO
import qualified Text.Read
import qualified Text.Show
-- | Alias for lazy 'Data.Text.Lazy.Text'.
type LText = Data.Text.Lazy.Text
-- | Alias for lazy 'Data.ByteString.Lazy.ByteString'.
type LByteString = Data.ByteString.Lazy.ByteString
-- | Convert from 'Data.Foldable.Foldable' to an 'IsList' type.
fromFoldable :: (Data.Foldable.Foldable f, IsList a) => f (Item a) -> a
fromFoldable xs = fromList (Data.Foldable.toList xs)
{-# INLINE fromFoldable #-}
-- | Convert between two different 'IsList' types.
-- This function can be used instead of the 'toList' function
-- originally provided by the 'IsList' class.
convertList :: (IsList a, IsList b, Item a ~ Item b) => a -> b
convertList xs = fromList (toList xs)
{-# INLINE convertList #-}
-- | Extract a list of @Item a@ from an 'IsList' structure.
-- It should satisfy @fromList . asList = id@.
asList :: (IsList a) => a -> [Item a]
asList xs = Intro.Trustworthy.toList xs
{-# INLINE asList #-}
-- | A synonym for 'fmap'.
--
-- @map = 'fmap'@
map :: Functor f => (a -> b) -> f a -> f b
map f xs = fmap f xs
{-# INLINE map #-}
-- | Convert a value to a readable string type supported by
-- 'ConvertString' using the 'Show' instance.
show :: (Show a, ConvertString String s) => a -> s
show x = convertString (showS x)
{-# INLINE show #-}
-- | Convert a value to a readable 'Text' using the 'Show' instance.
showT :: Show a => a -> Text
showT x = show x
{-# INLINE showT #-}

-- | Convert a value to a readable 'String' using the 'Show' instance.
showS :: Show a => a -> String
showS x = Text.Show.show x
{-# INLINE showS #-}
-- | Parse a string type using the 'Text.Read.Read' instance.
-- Succeeds if there is exactly one valid result.
readMaybe :: (Text.Read.Read b, ConvertString a String) => a -> Maybe b
readMaybe s = Text.Read.readMaybe (convertString s)
{-# INLINE readMaybe #-}
-- | Output a value of any printable type to the standard output device,
-- followed by a newline. Printable types are instances of 'Show'.
--
-- For example, a program to print the first 20 integers and their
-- powers of 2 could be written as:
--
-- > main = print ([(n, 2^n) | n <- [0..19]])
--
-- __Note__: This function is lifted to the 'MonadIO' class.
print :: (MonadIO m, Show a) => a -> m ()
print x = liftIO (System.IO.print x)
{-# INLINE print #-}
-- | Write a strict 'Text' to the standard output device.
--
-- __Note__: This function is lifted to the 'MonadIO' class.
putStr :: MonadIO m => Text -> m ()
putStr t = liftIO (Data.Text.IO.putStr t)
{-# INLINE putStr #-}

-- | Like 'putStr', but appends a newline character.
--
-- __Note__: This function is lifted to the 'MonadIO' class.
putStrLn :: MonadIO m => Text -> m ()
putStrLn t = liftIO (Data.Text.IO.putStrLn t)
{-# INLINE putStrLn #-}

-- | Write a single character to the standard output device.
--
-- __Note__: This function is lifted to the 'MonadIO' class.
putChar :: MonadIO m => Char -> m ()
putChar c = liftIO (System.IO.putChar c)
{-# INLINE putChar #-}
-- | Read an entire file strictly into a 'ByteString'.
--
-- __Note__: This function is lifted to the 'MonadIO' class.
readFile :: MonadIO m => FilePath -> m ByteString
readFile p = liftIO (Data.ByteString.readFile p)
{-# INLINE readFile #-}

-- | Write a 'ByteString' to a file.
--
-- __Note__: This function is lifted to the 'MonadIO' class.
writeFile :: MonadIO m => FilePath -> ByteString -> m ()
writeFile p b = liftIO (Data.ByteString.writeFile p b)
{-# INLINE writeFile #-}

-- | Append a 'ByteString' to a file.
--
-- __Note__: This function is lifted to the 'MonadIO' class.
appendFile :: MonadIO m => FilePath -> ByteString -> m ()
appendFile p b = liftIO (Data.ByteString.appendFile p b)
{-# INLINE appendFile #-}
-- | Read an entire file strictly into a 'Text' using UTF-8 encoding.
-- The decoding is done using 'decodeStringLenient', so invalid characters
-- are replaced by the Unicode replacement character @\\xFFFD@.
--
-- __Note__: This function is lifted to the 'MonadIO' class.
readFileUtf8 :: MonadIO m => FilePath -> m Text
readFileUtf8 p = fmap decodeStringLenient (readFile p)
{-# INLINE readFileUtf8 #-}

-- | Write a 'Text' to a file using UTF-8 encoding.
--
-- __Note__: This function is lifted to the 'MonadIO' class.
writeFileUtf8 :: MonadIO m => FilePath -> Text -> m ()
writeFileUtf8 p t = writeFile p (convertString t)
{-# INLINE writeFileUtf8 #-}

-- | Append a 'Text' to a file using UTF-8 encoding.
--
-- __Note__: This function is lifted to the 'MonadIO' class.
appendFileUtf8 :: MonadIO m => FilePath -> Text -> m ()
appendFileUtf8 p t = appendFile p (convertString t)
{-# INLINE appendFileUtf8 #-}
-- | Throw an undefined error. Use only for debugging; the WARNING pragma
-- below makes every use site emit a compile-time warning.
undefined :: HasCallStack => a
undefined = Prelude.undefined
{-# WARNING undefined "'undefined' should be used only for debugging" #-}
-- | '<>' lifted to 'Control.Applicative.Applicative'.
(<>^) :: (Control.Applicative.Applicative f, Semigroup a) => f a -> f a -> f a
x <>^ y = Control.Applicative.liftA2 (<>) x y
infixr 6 <>^
{-# INLINE (<>^) #-}
-- | Compose a one-argument function with a two-argument function.
--
-- @f .: g = \\x y -> f (g x y)@
(.:) :: (c -> d) -> (a -> b -> c) -> a -> b -> d
(f .: g) x y = f (g x y)
infixr 8 .:
{-# INLINE (.:) #-}
-- | An infix form of 'fromMaybe' with arguments flipped.
(?:) :: Maybe a -> a -> a
m ?: d = fromMaybe d m
infix 1 ?:
{-# INLINE (?:) #-}
-- | @()@ lifted to an 'Control.Applicative.Applicative'.
--
-- @skip = 'Control.Applicative.pure' ()@
skip :: Control.Applicative.Applicative m => m ()
skip = Control.Applicative.pure ()
{-# INLINE skip #-}
-- | Throw an unhandled error to terminate the program in case of a logic
-- error at runtime. Use this function instead of 'Prelude.error'.
-- A stack trace will be provided.
--
-- In general, prefer total functions. You can use 'Maybe', 'Data.Either.Either',
-- 'Control.Monad.Except.ExceptT' or 'Control.Monad.Except.Except' for error handling.
panic :: HasCallStack => Text -> a
panic msg = Prelude.error (convertString fullMsg)
  where
    fullMsg =
      "Panic: " <> msg <> "\n\n" <>
      "Please submit a bug report including the stacktrace\n" <>
      "and a description on how to reproduce the bug."
-- | Monad fail function from the 'Control.Monad.Fail.MonadFail' class,
-- taking a 'Text' message.
--
-- When a value is bound in @do@-notation and the pattern on the left hand
-- side of @<-@ does not match, this function is used to recover. A 'Monad'
-- without a 'MonadFail' instance may only be used with patterns that always
-- match (newtypes, tuples, single-constructor data types, irrefutable
-- patterns).
--
-- Instances of 'MonadFail' should satisfy the law that @fail s@ is a left
-- zero for '>>=':
--
-- @
-- fail s >>= f = fail s
-- @
fail :: Control.Monad.Fail.MonadFail m => Text -> m a
fail msg = Control.Monad.Fail.fail (convertString msg)
{-# INLINE fail #-}
| null | https://raw.githubusercontent.com/minad/intro/45a0bbcfebae747c77e39d30f7ed9c7bb52ac098/src/Intro.hs | haskell | # OPTIONS_HADDOCK show-extensions #
# LANGUAGE OverloadedStrings #
# LANGUAGE Safe #
---------------------------------------------------------------------------
|
Module : Intro
Maintainer :
Stability : experimental
Portability : portable
for most of the partial functions and follows other
best practices, e.g., Text is preferred over String.
This means it just reexports from base and commonly used libraries
and adds only very few additional functions.
List of design decisions:
* Rely only on very common external libraries
* Avoid writing custom functions
* Export everything explicitly to provide a stable interface and for good documentation
* Export only total functions or provide safe alternatives (Very few exceptions like div etc.)
* Provide container types
* Prefer generic functions
* Debugging functions, like 'trace' and 'undefined' are available but produce compile time warnings
* Don't provide error, only panic instead
* Compatibility with unqualified import of Control.Lens
* '>>' = 'Control.Applicative.*>'
* '++' = '<>'
* 'concat' = 'Data.Monoid.mconcat'
* 'fmap' is replaced by generalized 'map'
* 'mapM' = 'Control.Applicative.traverse'
* 'mapM_' = 'Data.Foldable.traverse_'
Integral type conversions are more restricted:
* 'toIntegral' is a safer, but more restricted version of 'fromIntegral'
* 'toIntegerSized' is safe and checked
* 'fromIntegralUnsafe' to get the unsafe, overflowing behavior
* 'fromIntegerUnsafe' instead of 'fromInteger'
functions are exported from the 'safe' package, e.g., 'headMay'.
* 'toEnumMay', 'predMay', 'succMay'
The 'maximum' and 'minimum' functions have been replaced by variants which
are safe for empty structures.
* 'maximumBound', 'maximumBounded', ...
These functions are not provided for various reasons:
* '!!' is unsafe and /O(n)/. Use a 'Data.Map.Map' instead.
* 'lines', 'unlines', 'words' and 'unwords' are not provided. Use qualified 'Data.Text' import instead.
* Instead of 'foldl', it is recommended to use 'Data.Foldable.foldl''.
* 'lex' is not commonly used. Use a parser combinator library instead.
* 'gcd' and 'lcm' are not commonly used.
* 'error' and 'errorWithoutStackTrace' are not provided. Use 'panic' instead.
* Some 'Text.Read.Read' and 'Show' class functions are not provided. Don't write these instances yourself.
Additional types and functions:
* 'showT' and 'showS' are monomorphic 'show' functions.
* '<>^' lifted composition
* '.:' function composition
* '?:' as an alias for 'fromMaybe'
* 'panic' as a replacement for 'error'
---------------------------------------------------------------------------
* Basic functions
Data.Function.id
, (Data.Function..)
* Basic algebraic types
** Void
** Maybe
use headMay
use toList
** List
renamed to asList
prefer pattern match
prefer pattern match
(<|), -- in lens
** Tuple
** Either
* Text types
** Text
Use qualified import instead
Data.Text.unlines,
Data.Text.unwords,
** ByteString
** String conversion
* Container types
** Map and Set (Ordered)
** Seq
* Numeric types
** Big integers
** Small integers
** Floating point
* Numeric type classes
partial functions!
** Real
** Integral
partial functions!
, Prelude.fromIntegral
** Fractional
partial functions
** Floating
** RealFrac
partial functions
** RealFloat
** Bits
bitSize, bitSizeMaybe
unsafeShiftL
unsafeShiftR
shiftR, shiftL,
* Read and Show
** Show
** Read
* Equality and ordering
** Eq
toEnum, succ, pred, -- partial
** Bounded
* Algebraic type classes
** Category
** Semigroup
** Monoid
Hide because of name clash with sum functors
Provide semigroup instances instead
** Functor
, fmap -- hide fmap, use map instead
Data.Functor.Const
** Foldable
foldl, -- hide the bad one
** Applicative
** Alternative
, some
** Monad
** Bifunctor
** Bifoldable
not strict enough
** Bitraversable
* Effects and monad transformers
** MonadReader and ReaderT
** MonadWriter and WriterT
* Generic type classes
* Type level
, Data.Tagged.Tagged(Tagged)
, Data.Tagged.unTagged
* IO
** Console
, interact
** File
* Error handling and debugging
| Alias for lazy 'Data.Text.Lazy.Text'
# INLINE fromFoldable #
This function can be used instead of the 'toList' function
| A synonym for 'fmap'.
@map = 'fmap'@
| Convert a value to a readable 'Text' using the 'Show' instance.
| Convert a value to a readable 'String' using the 'Show' instance.
| Parse a string type using the 'Text.Read.Read' instance.
| The 'print' function outputs a value of any printable type to the
standard output device.
Printable types are those that are instances of class 'Show'; 'print'
converts values to strings for output using the 'show' operation and
adds a newline.
__Note__: This function is lifted to the 'MonadIO' class.
| Write a strict 'Text' to the standard output device.
__Note__: This function is lifted to the 'MonadIO' class.
# INLINE putStr #
| The same as 'putStr', but adds a newline character.
__Note__: This function is lifted to the 'MonadIO' class.
| Write a character to the standard output device.
__Note__: This function is lifted to the 'MonadIO' class.
| Read an entire file strictly into a 'ByteString'.
__Note__: This function is lifted to the 'MonadIO' class.
| Write a 'ByteString' to a file.
__Note__: This function is lifted to the 'MonadIO' class.
| Append a 'ByteString' to a file.
__Note__: This function is lifted to the 'MonadIO' class.
The decoding is done using 'decodeStringLenient'. Invalid characters are replaced
__Note__: This function is lifted to the 'MonadIO' class.
__Note__: This function is lifted to the 'MonadIO' class.
__Note__: This function is lifted to the 'MonadIO' class.
| Throw an undefined error. Use only for debugging.
# WARNING undefined "'undefined' should be used only for debugging" #
| '<>' lifted to 'Control.Applicative.Applicative'
# INLINE (<>^) #
@f .: g = \\x y -> f (g x y)@.
# INLINE (.:) #
| An infix form of 'fromMaybe' with arguments flipped.
# INLINE (?:) #
| @()@ lifted to an 'Control.Applicative.Applicative'.
| Throw an unhandled error to terminate the program in case
of a logic error at runtime. Use this function instead of 'Prelude.error'.
A stack trace will be provided.
In general, prefer total functions. You can use 'Maybe', 'Data.Either.Either',
When a value is bound in @do@-notation, the pattern on the left
hand side of @<-@ might not match. In this case, this class
provides a function to recover.
with pattern that always match, such as newtypes, tuples, data types with
@
fail s >>= f = fail s
@
@
fail _ = mzero
@ | # LANGUAGE FlexibleContexts #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE TypeFamilies #
Copyright : ( c ) 2016 - 2017
License : MIT
Intro is a modern Prelude which provides safe alternatives
For String overloading the extension ' OverloadedStrings ' should be used .
Container types and Monad transformers are provided .
Most important - this Prelude tries to keep things simple .
* Keep everything at one place ( There are only three modules and Intro . Trustworthy is only there for Safe Haskell )
* Conservative extension over the base Prelude
* Prefer Text over String , provide ' ConvertString ' and ' EncodeString '
* Provide Monad transformers
Some ' Prelude ' functions are missing from ' Intro ' . More general variants are available for the following functions :
* ' return ' = ' Control.Applicative.pure '
* ' sequence ' = ' Control . '
* ' sequence _ ' = ' Control . _ '
Unsafe functions are not provided . For example ' read ' is replaced by ' ' .
The unsafe list functions are replaced by their ' NonEmpty ' counterparts . Furthermore ' * May ' and ' * Def '
* ' cycleMay ' , ' headMay ' , ' tailMay ' , ' initMay ' , ' lastMay '
* ' ' , ' ' , ...
* ' ioError ' and ' userError ' are not provided . Import modules for exception handling separately if needed .
* ' LText ' alias for lazy ' Text '
* ' ' alias for lazy ' ByteString '
* ' fromFoldable ' to convert from ' Data . Foldable . Foldable ' to an ' IsList ' type
* ' convertList ' to convert between two ' IsList ' types .
* ' asList ' to convert from an ' IsList ' type to a ' List ' . This function is an
alias for the ' toList ' function from the ' IsList ' class .
* ' skip ' as an alias for ( ) @
module Intro (
Data.Function.const
, Data.Function.flip
, (Data.Function.$)
, (Prelude.$!)
, (Data.Function.&)
, Data.Function.fix
, Data.Function.on
, (.:)
, Prelude.until
, Prelude.asTypeOf
, Prelude.seq
, Data.Void.Void
* *
, Data.Bool.Bool(False, True)
, (Data.Bool.&&)
, (Data.Bool.||)
, Data.Bool.bool
, Data.Bool.not
, Data.Bool.otherwise
, Data.Maybe.Maybe(Nothing, Just)
, Data.Maybe.catMaybes
, Data.Maybe.fromMaybe
, (?:)
, Data.Maybe.isJust
, Data.Maybe.isNothing
, Data.Maybe.mapMaybe
, Data.Maybe.maybe
, Intro.Trustworthy.IsList(
Item
, fromList
)
, asList
, convertList
, fromFoldable
, Data.List.break
, Data.List.Extra.breakOn
, Data.List.Extra.breakOnEnd
, Data.List.drop
, Data.List.Extra.dropEnd
, Data.List.dropWhile
, Data.List.dropWhileEnd
, Data.List.filter
, Data.List.group
, Data.List.groupBy
, Data.List.Extra.groupOn
, Data.List.Extra.groupSort
, Data.List.Extra.groupSortBy
, Data.List.Extra.groupSortOn
, Data.List.inits
, Data.List.intercalate
, Data.List.intersperse
, Data.List.isPrefixOf
, Data.List.isSuffixOf
, Data.List.iterate
, Data.List.iterate'
, Data.List.lookup
, Data.List.Extra.nubOrd
, Data.List.Extra.nubOrdBy
, Data.List.Extra.nubOrdOn
, Data.List.permutations
, Data.List.repeat
, Data.List.replicate
, Data.List.reverse
, Data.List.scanl
, Data.List.scanr
, Data.List.sort
, Data.List.sortBy
, Data.List.sortOn
, Data.List.span
, Data.List.Extra.spanEnd
, Data.List.splitAt
, Data.List.Extra.split
, Data.List.Extra.splitOn
, Data.List.subsequences
, Data.List.tails
, Data.List.take
, Data.List.Extra.takeEnd
, Data.List.takeWhile
, Data.List.Extra.takeWhileEnd
, Data.List.transpose
, Data.List.unfoldr
, Data.List.unzip
, Data.List.unzip3
, Data.List.zip
, Data.List.zip3
, Data.List.zipWith
, Data.List.zipWith3
, Safe.headDef
, Safe.initDef
, Safe.initMay
, Safe.lastDef
, Safe.lastMay
, Safe.tailDef
, Safe.cycleMay
, Safe.cycleDef
* * NonEmpty
, Data.List.NonEmpty.NonEmpty((:|))
, Data.List.NonEmpty.scanl1
, Data.List.NonEmpty.scanr1
, Data.List.NonEmpty.head
, Data.List.NonEmpty.init
, Data.List.NonEmpty.last
, Data.List.NonEmpty.tail
, Data.List.NonEmpty.cycle
, Data.Tuple.fst
, Data.Tuple.snd
, Data.Tuple.curry
, Data.Tuple.uncurry
, Data.Tuple.swap
, Data.Either.Either(Left, Right)
, Data.Either.either
, Data.Either.Extra.fromLeft
, Data.Either.Extra.fromRight
, Data.Either.isLeft
, Data.Either.isRight
, Data.Either.lefts
, Data.Either.rights
, Data.Either.partitionEithers
, Data.Either.Extra.eitherToMaybe
, Data.Either.Extra.maybeToEither
* * and String
, Data.Char.Char
, Data.String.String
, Data.Text.Text
, LText
,
, Data.ByteString.ByteString
, LByteString
, Data.ByteString.Short.ShortByteString
, Data.String.IsString(fromString)
, Intro.ConvertString.ConvertString(convertString)
, Intro.ConvertString.EncodeString(encodeString, decodeString, decodeStringLenient)
, Lenient(..)
, Data.Map.Map
, Data.Set.Set
, Data.IntMap.IntMap
, Data.IntSet.IntSet
* * HashedMap and HashSet
, Data.HashMap.Strict.HashMap
, Data.HashSet.HashSet
, Data.Hashable.Hashable(hash, hashWithSalt)
, Intro.Trustworthy.Hashable1
, Intro.Trustworthy.Hashable2
, Data.Sequence.Seq
, Prelude.Integer
, Numeric.Natural.Natural
, Data.Int.Int
, Data.Int.Int8
, Data.Int.Int16
, Data.Int.Int32
, Data.Int.Int64
, Data.Word.Word
, Data.Word.Word8
, Data.Word.Word16
, Data.Word.Word32
, Data.Word.Word64
, Prelude.Float
, Prelude.Double
* *
, Prelude.Num((+), (-), (*), negate, abs, signum
fromInteger
)
, Prelude.subtract
, Prelude.Real(toRational)
, Prelude.realToFrac
, Intro.ConvertIntegral.ToIntegral(..)
, Data.Bits.toIntegralSized
, Intro.ConvertIntegral.fromIntegralUnsafe
, Intro.ConvertIntegral.fromIntegerUnsafe
, Prelude.even
, Prelude.odd
,
, Prelude.lcm
, (Prelude.^^)
, Prelude.Floating(pi, exp, log, sqrt, (**), logBase, sin, cos, tan,
asin, acos, atan, sinh, cosh, tanh, asinh, acosh, atanh)
, Prelude.RealFloat(floatRadix, floatDigits, floatRange, decodeFloat,
encodeFloat, exponent, significand, scaleFloat, isNaN,
isInfinite, isDenormalized, isIEEE, isNegativeZero, atan2)
, Data.Bits.Bits((.&.), (.|.), xor, complement, shift, rotate, zeroBits,
bit, setBit, clearBit, complementBit, testBit,
isSigned,
rotateL, rotateR, popCount)
, Data.Bits.FiniteBits(finiteBitSize, countLeadingZeros, countTrailingZeros)
, Text.Show.Show
, Data.Functor.Classes.Show1
, Data.Functor.Classes.Show2
, show
, showT
, showS
, Text.Read.Read
, Data.Functor.Classes.Read1
, Data.Functor.Classes.Read2
, readMaybe
, Data.Eq.Eq((==), (/=))
, Data.Functor.Classes.Eq1
, Data.Functor.Classes.Eq2
* *
, Data.Ord.Ord(compare, (<), (>), (<=), (>=), max, min)
, Data.Functor.Classes.Ord1
, Data.Functor.Classes.Ord2
, Data.Ord.Ordering(LT,GT,EQ)
, Data.Ord.Down(Down)
, Data.Ord.comparing
* *
fromEnum, enumFrom, enumFromThen,
enumFromTo, enumFromThenTo)
, Safe.toEnumMay
, Safe.toEnumDef
, Safe.predMay
, Safe.predDef
, Safe.succMay
, Safe.succDef
, Prelude.Bounded(minBound, maxBound)
, Control.Category.Category(id, (.))
, (Control.Category.<<<)
, (Control.Category.>>>)
, Data.Semigroup.Semigroup((<>), sconcat, stimes)
, Data.Semigroup.First(First, getFirst)
, Data.Semigroup.Last(Last, getLast)
, Data.Semigroup.Min(Min, getMin)
, Data.Semigroup.Max(Max, getMax)
, Data.Semigroup.Option(Option, getOption)
, Data.Monoid.Monoid(mempty, mappend, mconcat)
, Data.Monoid.Dual(Dual, getDual)
, Data.Monoid.Endo(Endo, appEndo)
, Data.Monoid.All(All, getAll)
, Data.Monoid.Any(Any, getAny)
, Data . Monoid . Sum(Sum , getSum )
, Data . Monoid . Product(Product , )
, Data . Monoid . First(First , getFirst )
, Data . Monoid . Last(Last , getLast )
, Data.Monoid.Alt(Alt, getAlt)
, Data.Functor.Functor(
(<$)
)
, (Data.Functor.$>)
, (Data.Functor.<$>)
, (Data.Functor.<&>)
, map
, Data.Functor.void
, Data.Functor.Identity.Identity(Identity, runIdentity)
* *
, Data.Functor.Contravariant.Contravariant(
(>$),
contramap
)
, (Data.Functor.Contravariant.$<)
, (Data.Functor.Contravariant.>$<)
, (Data.Functor.Contravariant.>$$<)
, Data.Foldable.Foldable(elem, fold, foldMap,
foldr, foldr',
foldl',
product, sum, toList)
, Data.Foldable.null
, Data.Foldable.length
, Data.Foldable.foldrM
, Data.Foldable.foldlM
, Data.Foldable.traverse_
, Data.Foldable.for_
, Data.Foldable.asum
, Data.Foldable.concatMap
, Data.Foldable.all
, Data.Foldable.any
, Data.Foldable.or
, Data.Foldable.and
, Data.Foldable.find
, Data.Foldable.notElem
, Data.Foldable.sequenceA_
, Safe.Foldable.foldl1May
, Safe.Foldable.foldr1May
, Safe.Foldable.maximumByMay
, Safe.Foldable.maximumBoundBy
, Safe.Foldable.minimumByMay
, Safe.Foldable.minimumBoundBy
, Safe.Foldable.maximumMay
, Safe.Foldable.maximumBounded
, Safe.Foldable.maximumBound
, Safe.Foldable.minimumMay
, Safe.Foldable.minimumBounded
, Safe.Foldable.minimumBound
* *
, Data.Traversable.Traversable(traverse, sequenceA)
, Data.Traversable.for
, Data.Traversable.mapAccumL
, Data.Traversable.mapAccumR
, Control.Applicative.Applicative(pure, (<*>), (*>), (<*))
, Control.Applicative.ZipList(ZipList, getZipList)
, (Control.Applicative.<**>)
, Control.Applicative.liftA2
, Control.Applicative.liftA3
, skip
, (<>^)
, Control.Applicative.optional
, Data.List.NonEmpty.some1
, Control.Monad.Monad((>>=))
, Control.Monad.Fix.MonadFix(mfix)
, (Control.Monad.=<<)
, (Control.Monad.<=<)
, (Control.Monad.>=>)
, Control.Monad.join
, Control.Monad.guard
, Control.Monad.when
, Control.Monad.unless
, Control.Monad.replicateM
, Control.Monad.replicateM_
, (Control.Monad.<$!>)
, Control.Monad.Extra.whenM
, Control.Monad.Extra.unlessM
, Control.Monad.Extra.ifM
, Control.Monad.Extra.allM
, Control.Monad.Extra.anyM
, Control.Monad.Extra.andM
, Control.Monad.Extra.orM
, Control.Monad.Extra.concatMapM
, (Control.Monad.Extra.&&^)
, (Control.Monad.Extra.||^)
, Data.Bifunctor.Bifunctor(bimap, first, second)
, Data.Bifoldable.Bifoldable(bifoldr
, bifoldMap)
, Data.Bifoldable.bifoldl'
, Data.Bifoldable.bifoldr'
, Data.Bifoldable.bitraverse_
, Data.Bifoldable.bisequenceA_
, Data.Bifoldable.bifor_
, Data.Bitraversable.Bitraversable(bitraverse)
, Data.Bitraversable.bifor
, Data.Bitraversable.bisequenceA
, Control.Monad.Trans.MonadTrans(lift)
* * MonadPlus and MaybeT
, Control.Monad.MonadPlus
, Control.Monad.Trans.Maybe.MaybeT(MaybeT, runMaybeT)
, Control.Monad.Trans.Maybe.mapMaybeT
* * and ExceptT
, Control.Monad.Except.MonadError(throwError, catchError)
, Control.Monad.Except.Except
, Control.Monad.Except.runExcept
, Control.Monad.Except.mapExcept
, Control.Monad.Except.withExcept
, Control.Monad.Except.ExceptT(ExceptT)
, Control.Monad.Except.runExceptT
, Control.Monad.Except.mapExceptT
, Control.Monad.Except.withExceptT
, Control.Monad.Reader.MonadReader(ask, local, reader)
, Control.Monad.Reader.asks
, Control.Monad.Reader.Reader
, Control.Monad.Reader.runReader
, Control.Monad.Reader.mapReader
, Control.Monad.Reader.withReader
, Control.Monad.Reader.ReaderT(ReaderT, runReaderT)
, Control.Monad.Reader.mapReaderT
, Control.Monad.Reader.withReaderT
, Control.Monad.Writer.CPS.MonadWriter(writer, tell, listen, pass)
, Control.Monad.Writer.CPS.Writer
, Control.Monad.Writer.CPS.runWriter
, Control.Monad.Writer.CPS.execWriter
, Control.Monad.Writer.CPS.mapWriter
, Control.Monad.Writer.CPS.WriterT
, Control.Monad.Writer.CPS.writerT
, Control.Monad.Writer.CPS.runWriterT
, Control.Monad.Writer.CPS.execWriterT
, Control.Monad.Writer.CPS.mapWriterT
* * MonadState and StateT
, Control.Monad.State.Strict.MonadState(get, put, state)
, Control.Monad.State.Strict.State
, Control.Monad.State.Strict.gets
, Control.Monad.State.Strict.modify
, Control.Monad.State.Strict.modify'
, Control.Monad.State.Strict.runState
, Control.Monad.State.Strict.evalState
, Control.Monad.State.Strict.execState
, Control.Monad.State.Strict.mapState
, Control.Monad.State.Strict.withState
, Control.Monad.State.Strict.StateT(StateT, runStateT)
, Control.Monad.State.Strict.evalStateT
, Control.Monad.State.Strict.execStateT
, Control.Monad.State.Strict.mapStateT
, Control.Monad.State.Strict.withStateT
* * MonadRWS and RWST
, Control.Monad.RWS.CPS.MonadRWS
, Control.Monad.RWS.CPS.RWS
, Control.Monad.RWS.CPS.rws
, Control.Monad.RWS.CPS.runRWS
, Control.Monad.RWS.CPS.evalRWS
, Control.Monad.RWS.CPS.execRWS
, Control.Monad.RWS.CPS.mapRWS
, Control.Monad.RWS.CPS.RWST
, Control.Monad.RWS.CPS.rwsT
, Control.Monad.RWS.CPS.runRWST
, Control.Monad.RWS.CPS.evalRWST
, Control.Monad.RWS.CPS.execRWST
, Control.Monad.RWS.CPS.mapRWST
, GHC.Generics.Generic
, GHC.Generics.Generic1
, Data.Typeable.Typeable
, Data.Kind.Type
, Intro.Trustworthy.Constraint
, Data.Proxy.Proxy(Proxy)
, System.IO.IO
, Control.Monad.Trans.MonadIO(liftIO)
, print
, putChar
, putStr
, putStrLn
, System.IO.FilePath
, readFile
, writeFile
, appendFile
, readFileUtf8
, writeFileUtf8
, appendFileUtf8
, HasCallStack
, Control.Monad.Fail.MonadFail
, fail
, panic
, undefined
, Intro.Trustworthy.trace
, Intro.Trustworthy.traceIO
, Intro.Trustworthy.traceId
, Intro.Trustworthy.traceM
, Intro.Trustworthy.traceShow
, Intro.Trustworthy.traceShowId
, Intro.Trustworthy.traceShowM
) where
import Control.Monad.Trans (MonadIO(liftIO))
import Data.ByteString (ByteString)
import Data.Char (Char)
import Data.Function ((.), ($))
import Data.Functor (Functor(fmap))
import Data.Maybe (Maybe, fromMaybe)
import Data.Semigroup (Semigroup((<>)))
import Data.String (IsString(fromString), String)
import Data.Text (Text)
import Intro.ConvertIntegral
import Intro.ConvertString
import Intro.Trustworthy
import System.IO (FilePath)
import Text.Show (Show)
import qualified Control.Applicative
import qualified Control.Category
import qualified Control.Monad
import qualified Control.Monad.Except
import qualified Control.Monad.Extra
import qualified Control.Monad.Fail
import qualified Control.Monad.Fix
import qualified Control.Monad.RWS.CPS
import qualified Control.Monad.Reader
import qualified Control.Monad.State.Strict
import qualified Control.Monad.Trans
import qualified Control.Monad.Trans.Maybe
import qualified Control.Monad.Writer.CPS
import qualified Data.Bifoldable
import qualified Data.Bifunctor
import qualified Data.Bitraversable
import qualified Data.Bits
import qualified Data.Bool
import qualified Data.ByteString
import qualified Data.ByteString.Lazy
import qualified Data.ByteString.Short
import qualified Data.Either
import qualified Data.Either.Extra
import qualified Data.Eq
import qualified Data.Foldable
import qualified Data.Function
import qualified Data.Functor
import qualified Data.Functor.Classes
import qualified Data.Functor.Identity
import qualified Data.Functor.Contravariant
import qualified Data.HashMap.Strict
import qualified Data.HashSet
import qualified Data.Hashable
import qualified Data.Int
import qualified Data.IntMap
import qualified Data.IntSet
import qualified Data.Kind
import qualified Data.List
import qualified Data.List.Extra
import qualified Data.List.NonEmpty
import qualified Data.Map
import qualified Data.Maybe
import qualified Data.Monoid
import qualified Data.Ord
import qualified Data.Proxy
import qualified Data.Semigroup
import qualified Data.Sequence
import qualified Data.Set
import qualified Data.Text.IO
import qualified Data.Text.Lazy
import qualified Data.Traversable
import qualified Data.Tuple
import qualified Data.Typeable
import qualified Data.Void
import qualified Data.Word
import qualified GHC.Generics
import qualified Numeric.Natural
import qualified Prelude
import qualified Safe
import qualified Safe.Foldable
import qualified System.IO
import qualified Text.Read
import qualified Text.Show
type LText = Data.Text.Lazy.Text
| Alias for lazy ' Data . ByteString . Lazy . ByteString '
type LByteString = Data.ByteString.Lazy.ByteString
| Convert from ' Data . Foldable . Foldable ' to an ' IsList ' type .
fromFoldable :: (Data.Foldable.Foldable f, IsList a) => f (Item a) -> a
fromFoldable = fromList . Data.Foldable.toList
| Convert between two different ' IsList ' types .
originally provided by the ' IsList ' class .
convertList :: (IsList a, IsList b, Item a ~ Item b) => a -> b
convertList = fromList . toList
# INLINE convertList #
| The ' asList ' function extracts a list of @Item a@ from the structure
It should satisfy fromList . asList = i d.
asList :: (IsList a) => a -> [Item a]
asList = Intro.Trustworthy.toList
# INLINE asList #
map :: Functor f => (a -> b) -> f a -> f b
map = fmap
# INLINE map #
| Convert a value to a readable string type supported by ' ConvertString ' using the ' Show ' instance .
show :: (Show a, ConvertString String s) => a -> s
show = convertString . showS
# INLINE show #
showT :: Show a => a -> Text
showT = show
# INLINE showT #
showS :: Show a => a -> String
showS = Text.Show.show
# INLINE showS #
Succeeds if there is exactly one valid result .
readMaybe :: (Text.Read.Read b, ConvertString a String) => a -> Maybe b
readMaybe = Text.Read.readMaybe . convertString
# INLINE readMaybe #
For example , a program to print the first 20 integers and their
powers of 2 could be written as :
> main = print ( [ ( n , 2^n ) | n < - [ 0 .. 19 ] ] )
print :: (MonadIO m, Show a) => a -> m ()
print = liftIO . System.IO.print
# INLINE print #
putStr :: MonadIO m => Text -> m ()
putStr = liftIO . Data.Text.IO.putStr
putStrLn :: MonadIO m => Text -> m ()
putStrLn = liftIO . Data.Text.IO.putStrLn
# INLINE putStrLn #
putChar :: MonadIO m => Char -> m ()
putChar = liftIO . System.IO.putChar
# INLINE putChar #
readFile :: MonadIO m => FilePath -> m ByteString
readFile = liftIO . Data.ByteString.readFile
# INLINE readFile #
writeFile :: MonadIO m => FilePath -> ByteString -> m ()
writeFile = liftIO .: Data.ByteString.writeFile
# INLINE writeFile #
appendFile :: MonadIO m => FilePath -> ByteString -> m ()
appendFile = liftIO .: Data.ByteString.appendFile
# INLINE appendFile #
| Read an entire file strictly into a ' Text ' using UTF-8 encoding .
by the Unicode replacement character ' \FFFD ' .
readFileUtf8 :: MonadIO m => FilePath -> m Text
readFileUtf8 = map decodeStringLenient . readFile
# INLINE readFileUtf8 #
| Write a ' Text ' to a file using UTF-8 encoding .
writeFileUtf8 :: MonadIO m => FilePath -> Text -> m ()
writeFileUtf8 file = writeFile file . convertString
# INLINE writeFileUtf8 #
| Append a ' Text ' to a file using UTF-8 encoding .
appendFileUtf8 :: MonadIO m => FilePath -> Text -> m ()
appendFileUtf8 file = appendFile file . convertString
# INLINE appendFileUtf8 #
undefined :: HasCallStack => a
undefined = Prelude.undefined
(<>^) :: (Control.Applicative.Applicative f, Semigroup a) => f a -> f a -> f a
(<>^) = Control.Applicative.liftA2 (<>)
infixr 6 <>^
| Compose functions with one argument with function with two arguments .
(.:) :: (c -> d) -> (a -> b -> c) -> a -> b -> d
(.:) = (.) . (.)
infixr 8 .:
(?:) :: Maybe a -> a -> a
(?:) = Data.Function.flip fromMaybe
infix 1 ?:
@skip = ' Control.Applicative.pure ' ( ) @
skip :: Control.Applicative.Applicative m => m ()
skip = Control.Applicative.pure ()
# INLINE skip #
' Control . . Except . ExceptT ' or ' Control . . Except . ' for error handling .
panic :: HasCallStack => Text -> a
panic msg = Prelude.error $ convertString $
"Panic: " <> msg <> "\n\n" <>
"Please submit a bug report including the stacktrace\n" <>
"and a description on how to reproduce the bug."
| Monad fail function from the ' Control . . Fail . ' class .
A ' Monad ' without a ' MonadFail ' instance may only be used in conjunction
only a single data constructor , and irrefutable patterns ( ) .
Instances of ' MonadFail ' should satisfy the following law : @fail s@ should
be a left zero for ' > > = ' ,
If your ' Monad ' is also ' MonadPlus ' , a popular definition is
fail :: Control.Monad.Fail.MonadFail m => Text -> m a
fail = Control.Monad.Fail.fail . convertString
# INLINE fail #
|
04eca003bd429a7182eb54bae3c4b303b9d549d85de76b73b6b54b230914640e | eugeneia/athens | testfuns.lisp | ;;;; -*- mode: lisp; indent-tabs-mode: nil -*-
(in-package :crypto-tests)
(defun hex-string-to-byte-array (string &key (start 0) (end nil))
This function disappears from profiles if SBCL can inline the
;; POSITION call, so declare SPEED high enough to trigger that.
(declare (type string string) (optimize (speed 2)))
(let* ((end (or end (length string)))
(length (/ (- end start) 2))
(key (make-array length :element-type '(unsigned-byte 8))))
(declare (type (simple-array (unsigned-byte 8) (*)) key))
(flet ((char-to-digit (char)
(declare (type base-char char))
(let ((x (cl:position char #.(coerce "0123456789abcdef" 'simple-base-string)
:test #'char-equal)))
(or x (error "Invalid hex key ~A specified" string)))))
(loop for i from 0
for j from start below end by 2
do (setf (aref key i)
(+ (* (char-to-digit (char string j)) 16)
(char-to-digit (char string (1+ j)))))
finally (return key)))))
;;; test vector files
(defun test-vector-filename (ident)
(merge-pathnames (make-pathname :directory '(:relative "test-vectors")
:name (substitute #\- #\/ (format nil "~(~A~)" ident))
:type "testvec")
#.*compile-file-pathname*))
(defun sharp-a (stream sub-char numarg)
(declare (ignore sub-char numarg))
(crypto:ascii-string-to-byte-array (read stream t nil t)))
(defun sharp-h (stream sub-char numarg)
(declare (ignore sub-char numarg))
(hex-string-to-byte-array (read stream t nil t)))
(defun run-test-vector-file (name function-map)
(let ((filename (test-vector-filename name))
(*readtable* (copy-readtable)))
(set-dispatch-macro-character #\# #\a #'sharp-a *readtable*)
(set-dispatch-macro-character #\# #\h #'sharp-h *readtable*)
(with-open-file (stream filename :direction :input
:element-type 'character
:if-does-not-exist :error)
(loop for form = (read stream nil stream)
until (eq form stream) do
(cond
((not (listp form))
(error "Invalid form in test vector file ~A: ~A" filename form))
(t
(let ((test-function (cdr (assoc (car form) function-map))))
(unless test-function
(error "No test function defined for ~A" (car form)))
(apply test-function name (cdr form)))))
finally (return t)))))
;;; cipher testing
(defun cipher-test-guts (cipher-name mode key input output
&optional extra-make-cipher-args)
(let ((cipher (apply #'crypto:make-cipher cipher-name
:key key :mode mode
extra-make-cipher-args))
(scratch (copy-seq input)))
(crypto:encrypt cipher input scratch)
(when (mismatch scratch output)
(error "encryption failed for ~A on key ~A, input ~A, output ~A"
cipher-name key input output))
(apply #'reinitialize-instance cipher :key key extra-make-cipher-args)
(crypto:decrypt cipher output scratch)
(when (mismatch scratch input)
(error "decryption failed for ~A on key ~A, input ~A, output ~A"
cipher-name key output input))))
#+(or lispworks sbcl cmucl openmcl allegro abcl ecl clisp)
(defun cipher-stream-test-guts (cipher-name mode key input output
&optional extra-args)
(let* ((out-stream (crypto:make-octet-output-stream))
(enc-stream (apply #'crypto:make-encrypting-stream
out-stream cipher-name mode key extra-args))
(in-stream (crypto:make-octet-input-stream output))
(dec-stream (apply #'crypto:make-decrypting-stream
in-stream cipher-name mode key extra-args)))
(write-byte (aref input 0) enc-stream)
(write-sequence input enc-stream :start 1)
(let ((result (crypto:get-output-stream-octets out-stream)))
(when (mismatch result output)
(error "stream encryption failed for ~A on key ~A, input ~A, output ~A"
cipher-name key input output)))
(let ((result (copy-seq output)))
(setf (aref result 0) (read-byte dec-stream))
(read-sequence result dec-stream :start 1)
(when (mismatch result input)
(error "stream decryption failed for ~A on key ~A, input ~A, output ~A"
cipher-name key output input)))))
(defun ecb-mode-test (cipher-name hexkey hexinput hexoutput)
(cipher-test-guts cipher-name :ecb hexkey hexinput hexoutput))
(defun ecb-tweak-mode-test (cipher-name hexkey hextweak hexinput hexoutput)
(cipher-test-guts cipher-name :ecb hexkey hexinput hexoutput
(list :tweak hextweak)))
(defun stream-mode-test (cipher-name hexkey hexinput hexoutput)
(cipher-test-guts cipher-name :stream hexkey hexinput hexoutput))
(defun stream-nonce-mode-test (cipher-name hexkey hexiv hexinput hexoutput)
(cipher-test-guts cipher-name :stream hexkey hexinput hexoutput
(list :initialization-vector hexiv)))
(defun keystream-test (cipher-name key iv keystream)
(let* ((mode (if (= 1 (crypto:block-length cipher-name)) :stream :ctr))
(cipher (crypto:make-cipher cipher-name :key key :mode mode :initialization-vector iv))
(buffer (make-array 1000 :element-type '(unsigned-byte 8) :initial-element 0)))
(crypto:keystream-position cipher 100)
(crypto:encrypt-in-place cipher buffer :start 100 :end 213)
(crypto:keystream-position cipher 500)
(crypto:encrypt-in-place cipher buffer :start 500 :end 1000)
(crypto:keystream-position cipher 213)
(crypto:encrypt-in-place cipher buffer :start 213 :end 500)
(crypto:keystream-position cipher 0)
(crypto:encrypt-in-place cipher buffer :end 100)
(crypto:keystream-position cipher 765)
(when (or (/= (crypto:keystream-position cipher) 765)
(mismatch buffer keystream))
(error "getting/setting key stream position failed for ~A on key ~A" cipher-name key))))
#+(or lispworks sbcl cmucl openmcl allegro abcl ecl clisp)
(defun stream-mode-test/stream (cipher-name hexkey hexinput hexoutput)
(cipher-stream-test-guts cipher-name :stream hexkey hexinput hexoutput))
#+(or lispworks sbcl cmucl openmcl allegro abcl ecl clisp)
(defun stream-nonce-mode-test/stream (cipher-name hexkey hexiv hexinput hexoutput)
(cipher-stream-test-guts cipher-name :stream hexkey hexinput hexoutput
(list :initialization-vector hexiv)))
(defparameter *cipher-tests*
(list (cons :ecb-mode-test 'ecb-mode-test)
(cons :ecb-tweak-mode-test 'ecb-tweak-mode-test)
(cons :stream-mode-test 'stream-mode-test)
(cons :stream-nonce-mode-test 'stream-nonce-mode-test)
(cons :keystream-test 'keystream-test)))
#+(or lispworks sbcl cmucl openmcl allegro abcl ecl clisp)
(defparameter *cipher-stream-tests*
(list (cons :ecb-mode-test 'ignore-test)
(cons :ecb-tweak-mode-test 'ignore-test)
(cons :stream-mode-test 'stream-mode-test/stream)
(cons :stream-nonce-mode-test 'stream-nonce-mode-test/stream)
(cons :keystream-test 'ignore-test)))
;;; encryption mode consistency checking
tests from NIST
(defun mode-test (mode cipher-name key iv input output)
(labels ((frob-hex-string (cipher func input)
(let ((scratch (copy-seq input)))
(funcall func cipher input scratch)
scratch))
(cipher-test (cipher func input output)
(not (mismatch (frob-hex-string cipher func input) output))))
(let ((cipher (crypto:make-cipher cipher-name :key key :mode mode
:initialization-vector iv)))
(unless (cipher-test cipher 'crypto:encrypt input output)
(error "encryption failed for ~A on key ~A, input ~A, output ~A"
cipher-name key input output))
(reinitialize-instance cipher :key key :mode mode
:initialization-vector iv)
(unless (cipher-test cipher 'crypto:decrypt output input)
(error "decryption failed for ~A on key ~A, input ~A, output ~A"
cipher-name key output input)))))
(defparameter *mode-tests*
(list (cons :mode-test 'mode-test)))
;;; digest testing routines
(defun digest-test/base (digest-name input expected-digest)
(let ((result (crypto:digest-sequence digest-name input)))
(when (mismatch result expected-digest)
(error "one-shot ~A digest of ~S failed" digest-name input))))
(defun digest-test/incremental (digest-name input expected-digest)
(loop with length = (length input)
with digester = (crypto:make-digest digest-name)
for i from 0 below length
do (crypto:update-digest digester input :start i :end (1+ i))
finally
(let ((result (crypto:produce-digest digester)))
(when (mismatch result expected-digest)
(error "incremental ~A digest of ~S failed" digest-name input)))))
#+(or sbcl cmucl)
(defun digest-test/fill-pointer (digest-name octets expected-digest)
(let* ((input (let ((x (make-array (* 2 (length octets))
:fill-pointer 0
:element-type '(unsigned-byte 8))))
(dotimes (i (length octets) x)
(vector-push (aref octets i) x))))
(result (crypto:digest-sequence digest-name input)))
(when (mismatch result expected-digest)
(error "fill-pointer'd ~A digest of ~S failed" digest-name input))))
#+(or lispworks sbcl cmucl openmcl allegro abcl ecl clisp)
(defun digest-test/stream (digest-name input expected-digest)
(let* ((stream (crypto:make-digesting-stream digest-name)))
(when (plusp (length input))
(write-byte (aref input 0) stream)
(write-sequence input stream :start 1))
(crypto:produce-digest stream) ; Calling produce-digest twice should not give a wrong hash
(when (mismatch (crypto:produce-digest stream) expected-digest)
(error "stream-y ~A digest of ~S failed" digest-name input))))
(defun digest-test/reinitialize-instance (digest-name input expected-digest)
(let* ((digest (crypto:make-digest digest-name))
(result (progn
(crypto:digest-sequence digest input)
(crypto:digest-sequence (reinitialize-instance digest) input))))
(when (mismatch result expected-digest)
(error "testing reinitialize-instance ~A digest of ~S failed" digest-name input))))
(defun digest-bit-test (digest-name leading byte trailing expected-digest)
(let* ((input (let ((vector (make-array (+ 1 leading trailing)
:element-type '(unsigned-byte 8)
:initial-element 0)))
(setf (aref vector leading) byte)
vector))
(result (crypto:digest-sequence digest-name input)))
(when (mismatch result expected-digest)
(error "individual bit test ~A digest of (~D #x~2,'0X ~D) failed"
digest-name leading byte trailing))))
(defun xof-digest-test (digest-name output-length input expected-digest)
(let* ((digest (crypto:make-digest digest-name :output-length output-length))
(result (crypto:digest-sequence digest input)))
(when (mismatch result expected-digest)
(error "one-shot ~A xof digest of ~S failed" digest-name input))))
;;; Dispatch tables mapping test-vector-file keywords to test functions.
;;; RUN-TEST-VECTOR-FILE looks up each form's head keyword in one of these.
(defparameter *digest-tests*
(list (cons :digest-test 'digest-test/base)
(cons :digest-bit-test 'digest-bit-test)
(cons :xof-digest-test 'xof-digest-test)))
;; No-op stand-in used where a test variant does not apply.
(defun ignore-test (&rest args)
(declare (ignore args))
nil)
(defparameter *digest-incremental-tests*
(list (cons :digest-test 'digest-test/incremental)
(cons :digest-bit-test 'ignore-test)
(cons :xof-digest-test 'ignore-test)))
;; Fill-pointer'd vectors are only supported on SBCL/CMUCL.
#+(or sbcl cmucl)
(defparameter *digest-fill-pointer-tests*
(list (cons :digest-test 'digest-test/fill-pointer)
(cons :digest-bit-test 'ignore-test)
(cons :xof-digest-test 'ignore-test)))
;; Gray-stream based digesting only exists on these implementations.
#+(or lispworks sbcl cmucl openmcl allegro abcl ecl clisp)
(defparameter *digest-stream-tests*
(list (cons :digest-test 'digest-test/stream)
(cons :digest-bit-test 'ignore-test)
(cons :xof-digest-test 'ignore-test)))
(defparameter *digest-reinitialize-instance-tests*
(list (cons :digest-test 'digest-test/reinitialize-instance)
(cons :digest-bit-test 'ignore-test)
(cons :xof-digest-test 'ignore-test)))
;;; mac testing routines
(defun mac-test/base (mac-name key data expected-digest &rest args)
  ;; One-shot MAC test: feed DATA in a single UPDATE-MAC call and compare
  ;; the produced MAC against EXPECTED-DIGEST.  ARGS are extra keyword
  ;; arguments forwarded to MAKE-MAC.
  (let* ((mac (apply #'crypto:make-mac mac-name key args))
         (result (progn
                   (crypto:update-mac mac data)
                   (crypto:produce-mac mac))))
    (when (mismatch result expected-digest)
      (error "one-shot ~A mac of ~A failed on key ~A, args ~A"
             mac-name data key args))))
;;; Incremental MAC test: feed DATA one octet at a time through
;;; UPDATE-MAC to exercise the :START/:END handling, then compare the
;;; final MAC against EXPECTED-DIGEST.
(defun mac-test/incremental (mac-name key data expected-digest &rest args)
(loop with length = (length data)
with mac = (apply #'crypto:make-mac mac-name key args)
for i from 0 below length
do (crypto:update-mac mac data :start i :end (1+ i))
finally (let ((result (crypto:produce-mac mac)))
(when (mismatch result expected-digest)
(error "incremental ~A mac of ~A failed on key ~A, args ~A"
mac-name data key args)))))
;;; MAC computation through the authenticating Gray-stream interface.
;;; Only built on implementations with Gray-stream support.
#+(or lispworks sbcl cmucl openmcl allegro abcl ecl clisp)
(defun mac-test/stream (mac-name key data expected-digest &rest args)
  (let ((stream (apply #'crypto:make-authenticating-stream mac-name key args)))
    ;; Write the first octet with WRITE-BYTE and the rest with
    ;; WRITE-SEQUENCE so that both stream entry points get exercised.
    (when (plusp (length data))
      (write-byte (aref data 0) stream)
      (write-sequence data stream :start 1))
    ;; Calling produce-mac twice should not give a wrong MAC.
    ;; (This line had lost its comment markers and was unreadable by the
    ;; Lisp reader; restored as a comment, with the first of the two
    ;; PRODUCE-MAC calls it refers to.)
    (crypto:produce-mac stream)
    (let ((result (crypto:produce-mac stream)))
      (when (mismatch result expected-digest)
        (error "stream ~A mac of ~A failed on key ~A, args ~A"
               mac-name data key args)))))
;;; Compute a MAC, reset the object with REINITIALIZE-INSTANCE (same
;;; key), recompute, and check that the second result matches
;;; EXPECTED-DIGEST -- i.e. reinitialization fully clears state.
(defun mac-test/reinitialize-instance (mac-name key data expected-digest &rest args)
(let* ((mac (apply #'crypto:make-mac mac-name key args))
(result1 (progn
(crypto:update-mac mac data)
(crypto:produce-mac mac))))
;; RESULT1 is only computed to dirty the internal state; its value is
;; deliberately unused.
(declare (ignorable result1))
(reinitialize-instance mac :key key)
(let ((result2 (progn
(crypto:update-mac mac data)
(crypto:produce-mac mac))))
(when (mismatch result2 expected-digest)
(error "testing reinitialize-instance ~A mac of ~A failed on key ~A, args ~A"
mac-name data key args)))))
;;; Dispatch tables for the MAC test-vector files, one per test variant.
(defparameter *mac-tests*
(list (cons :mac-test 'mac-test/base)))
(defparameter *mac-incremental-tests*
(list (cons :mac-test 'mac-test/incremental)))
#+(or lispworks sbcl cmucl openmcl allegro abcl ecl clisp)
(defparameter *mac-stream-tests*
(list (cons :mac-test 'mac-test/stream)))
(defparameter *mac-reinitialize-instance-tests*
(list (cons :mac-test 'mac-test/reinitialize-instance)))
;;; PRNG testing routines
;;; Seed a Fortuna PRNG, replay the recorded ENTROPY events into its
;;; pools, and assert that it emits exactly EXPECTED-SEQUENCE.
(defun fortuna-test (name seed entropy expected-sequence)
(declare (ignore name))
(let ((prng (crypto:make-prng :fortuna
:seed (coerce seed 'crypto::simple-octet-vector)))
(num-bytes (length expected-sequence)))
;; Each entropy entry is a (source pool-id event-octets) triple.
(loop for (source pool-id event) in entropy
do (crypto:add-random-event source pool-id event prng))
(assert (equalp expected-sequence
(crypto:random-data num-bytes prng)))))
;;; Reseed a fresh Fortuna generator with each seed in turn, then check
;;; that it reproduces every expected output sequence in order.
(defun generator-test (name cipher seeds expected-sequences)
  (declare (ignore name))
  (let ((generator (make-instance 'crypto:fortuna-generator :cipher cipher)))
    (loop for seed in seeds
          do (crypto:prng-reseed (coerce seed '(vector (unsigned-byte 8))) generator))
    ;; Bug fix: the previous code used EVERY here, but ASSERT returns NIL,
    ;; so EVERY short-circuited after the first sequence and the remaining
    ;; expected sequences were never checked.  DOLIST checks all of them
    ;; (order matters, since RANDOM-DATA advances the generator state).
    (dolist (sequence expected-sequences t)
      ;; The generator produces 16-octet blocks, so every expected
      ;; sequence must be a whole number of blocks.
      (assert (zerop (mod (length sequence) 16)))
      (assert (equalp sequence
                      (crypto:random-data (length sequence) generator))))))
;;; Dispatch table for the PRNG test-vector files.  Written with
;;; (LIST (CONS ...)) for consistency with every other dispatch table in
;;; this file (the old backquote form `((:k . ,'f)) built the same data).
(defparameter *prng-tests*
  (list (cons :fortuna-test 'fortuna-test)
        (cons :generator-test 'generator-test)))
;;; Public key testing routines
;;; RSA-OAEP round-trip test against known vectors.  N/E form the public
;;; key, N/D the private key; INPUT must encrypt to OUTPUT given SEED.
(defun rsa-oaep-encryption-test (name n e d input seed output)
  ;; Redefine oaep-encode to use a defined seed for the test instead of a
  ;; random one.  (This comment line had lost its ";;" markers in the
  ;; file, leaving bare symbols that the Lisp reader would choke on.)
  (setf (symbol-function 'ironclad::oaep-encode)
        (lambda (digest-name message num-bytes &optional label)
          (let* ((digest-name (if (eq digest-name t) :sha1 digest-name))
                 (digest-len (ironclad:digest-length digest-name)))
            (assert (<= (length message) (- num-bytes (* 2 digest-len) 2)))
            (let* ((digest (ironclad:make-digest digest-name))
                   (label (or label (coerce #() '(vector (unsigned-byte 8)))))
                   (padding-len (- num-bytes (length message) (* 2 digest-len) 2))
                   (padding (make-array padding-len :element-type '(unsigned-byte 8) :initial-element 0))
                   (l-hash (ironclad:digest-sequence digest label))
                   (db (concatenate '(vector (unsigned-byte 8)) l-hash padding #(1) message))
                   ;; SEED (the test vector's fixed seed) replaces the
                   ;; random seed of the real implementation.
                   (db-mask (ironclad::mgf digest-name seed (- num-bytes digest-len 1)))
                   (masked-db (map '(vector (unsigned-byte 8)) #'logxor db db-mask))
                   (seed-mask (ironclad::mgf digest-name masked-db digest-len))
                   (masked-seed (map '(vector (unsigned-byte 8)) #'logxor seed seed-mask)))
              (concatenate '(vector (unsigned-byte 8)) #(0) masked-seed masked-db)))))
  (let* ((pk (ironclad:make-public-key :rsa :n n :e e))
         (sk (ironclad:make-private-key :rsa :n n :d d))
         (c (ironclad:encrypt-message pk input :oaep t))
         (m (ironclad:decrypt-message sk output :oaep t)))
    (when (mismatch c output)
      (error "encryption failed for ~A on pkey (~A ~A), input ~A, output ~A"
             name n e input output))
    (when (mismatch m input)
      (error "decryption failed for ~A on skey (~A ~A), input ~A, output ~A"
             name n d input output))))
;;; ElGamal round-trip test against known vectors: INPUT must encrypt to
;;; OUTPUT under the fixed ephemeral value K.
(defun elgamal-encryption-test (name p g x y input k output)
  ;; Redefine elgamal-generate-k to use a defined K for the test instead
  ;; of a random one.  (Comment markers restored; the bare text was
  ;; unreadable by the Lisp reader.)
  (setf (symbol-function 'ironclad::elgamal-generate-k)
        (lambda (p)
          (declare (ignore p))
          k))
  (let* ((pk (ironclad:make-public-key :elgamal :p p :g g :y y))
         (sk (ironclad:make-private-key :elgamal :p p :g g :x x :y y))
         (c (ironclad:encrypt-message pk input))
         (m (ironclad:decrypt-message sk output)))
    (when (mismatch c output)
      (error "encryption failed for ~A on pkey (~A ~A ~A), input ~A, output ~A"
             name p g y input output))
    (when (mismatch m input)
      (error "decryption failed for ~A on skey (~A ~A ~A ~A), input ~A, output ~A"
             name p g x y input output))))
;;; RSA-PSS sign/verify test against known vectors, with the random salt
;;; pinned to the test vector's SALT.
(defun rsa-pss-signature-test (name n e d input salt signature)
  ;; Redefine pss-encode to use a defined salt for the test instead of a
  ;; random one.  (Comment markers restored.)
  (setf (symbol-function 'ironclad::pss-encode)
        (lambda (digest-name message num-bytes)
          (let* ((digest-name (if (eq digest-name t) :sha1 digest-name))
                 (digest-len (ironclad:digest-length digest-name)))
            (assert (>= num-bytes (+ (* 2 digest-len) 2)))
            (let* ((m-hash (ironclad:digest-sequence digest-name message))
                   (m1 (concatenate '(vector (unsigned-byte 8)) #(0 0 0 0 0 0 0 0) m-hash salt))
                   (h (ironclad:digest-sequence digest-name m1))
                   (ps (make-array (- num-bytes (* 2 digest-len) 2)
                                   :element-type '(unsigned-byte 8)
                                   :initial-element 0))
                   (db (concatenate '(vector (unsigned-byte 8)) ps #(1) salt))
                   (db-mask (ironclad::mgf digest-name h (- num-bytes digest-len 1)))
                   (masked-db (map '(vector (unsigned-byte 8)) #'logxor db db-mask)))
              ;; Clear the top bit so the encoded block fits the modulus.
              (setf (ldb (byte 1 7) (elt masked-db 0)) 0)
              (concatenate '(vector (unsigned-byte 8)) masked-db h #(188))))))
  (let* ((pk (ironclad:make-public-key :rsa :n n :e e))
         (sk (ironclad:make-private-key :rsa :n n :d d))
         (s (ironclad:sign-message sk input :pss t)))
    (when (mismatch s signature)
      (error "signature failed for ~A on skey (~A ~A), input ~A, signature ~A"
             name n d input signature))
    (unless (ironclad:verify-signature pk input signature :pss t)
      (error "signature verification failed for ~A on pkey (~A ~A), input ~A, signature ~A"
             name n e input signature))))
;;; ElGamal sign/verify test against known vectors, with the ephemeral
;;; value pinned to K.
(defun elgamal-signature-test (name p g x y input k signature)
  ;; Redefine elgamal-generate-k to use a defined K for the test instead
  ;; of a random one.  (Comment markers restored.)
  (setf (symbol-function 'ironclad::elgamal-generate-k)
        (lambda (p)
          (declare (ignore p))
          k))
  (let* ((pk (ironclad:make-public-key :elgamal :p p :g g :y y))
         (sk (ironclad:make-private-key :elgamal :p p :g g :x x :y y))
         (s (ironclad:sign-message sk input)))
    (when (mismatch s signature)
      (error "signature failed for ~A on skey (~A ~A ~A ~A), input ~A, signature ~A"
             name p g x y input signature))
    (unless (ironclad:verify-signature pk input signature)
      (error "signature verification failed for ~A on pkey (~A ~A ~A), input ~A, signature ~A"
             name p g y input signature))))
;;; DSA sign/verify test against known vectors, with the per-message
;;; secret pinned to K.
(defun dsa-signature-test (name p q g x y input k signature)
  ;; Redefine dsa-generate-k to use a defined K for the test instead of a
  ;; random one.  (Comment markers restored.)
  (setf (symbol-function 'ironclad::dsa-generate-k)
        (lambda (q)
          (declare (ignore q))
          k))
  (let* ((sk (ironclad:make-private-key :dsa :p p :q q :g g :x x :y y))
         (pk (ironclad:make-public-key :dsa :p p :q q :g g :y y))
         (s (ironclad:sign-message sk input)))
    (when (mismatch s signature)
      (error "signature failed for ~A on skey (~A ~A ~A ~A ~A), input ~A, signature ~A"
             name p q g x y input signature))
    (unless (ironclad:verify-signature pk input signature)
      (error "signature verification failed for ~A on pkey (~A ~A ~A ~A), input ~A, signature ~A"
             name p q g y input signature))))
(defun ed25519-signature-test (name skey pkey input signature)
  ;; Sign INPUT with the Ed25519 private key, check the produced
  ;; signature against the expected SIGNATURE, then verify SIGNATURE
  ;; with the public key.
  (let ((sk (ironclad:make-private-key :ed25519 :x skey :y pkey))
        (pk (ironclad:make-public-key :ed25519 :y pkey)))
    (let ((produced (ironclad:sign-message sk input)))
      (when (mismatch produced signature)
        (error "signature failed for ~A on skey ~A, input ~A, signature ~A"
               name skey input signature)))
    (unless (ironclad:verify-signature pk input signature)
      (error "signature verification failed for ~A on pkey ~A, input ~A, signature ~A"
             name pkey input signature))))
;;; Ed448 counterpart of ED25519-SIGNATURE-TEST: sign INPUT, compare
;;; with SIGNATURE, then verify SIGNATURE with the public key.
(defun ed448-signature-test (name skey pkey input signature)
(let* ((sk (ironclad:make-private-key :ed448 :x skey :y pkey))
(pk (ironclad:make-public-key :ed448 :y pkey))
(s (ironclad:sign-message sk input)))
(when (mismatch s signature)
(error "signature failed for ~A on skey ~A, input ~A, signature ~A"
name skey input signature))
(unless (ironclad:verify-signature pk input signature)
(error "signature verification failed for ~A on pkey ~A, input ~A, signature ~A"
name pkey input signature))))
;;; Diffie-Hellman tests: each checks that both key pairs derive the
;;; same SHARED-SECRET from the other side's public key.
(defun curve25519-dh-test (name skey1 pkey1 skey2 pkey2 shared-secret)
(let* ((sk1 (ironclad:make-private-key :curve25519 :x skey1 :y pkey1))
(pk1 (ironclad:make-public-key :curve25519 :y pkey1))
(sk2 (ironclad:make-private-key :curve25519 :x skey2 :y pkey2))
(pk2 (ironclad:make-public-key :curve25519 :y pkey2))
(ss1 (ironclad:diffie-hellman sk1 pk2))
(ss2 (ironclad:diffie-hellman sk2 pk1)))
(when (mismatch ss1 shared-secret)
(error "shared secret computation failed for ~A on skey ~A, pkey ~A, secret ~A"
name skey1 pkey2 shared-secret))
(when (mismatch ss2 shared-secret)
(error "shared secret computation failed for ~A on skey ~A, pkey ~A, secret ~A"
name skey2 pkey1 shared-secret))))
;; Same as CURVE25519-DH-TEST but for Curve448 keys.
(defun curve448-dh-test (name skey1 pkey1 skey2 pkey2 shared-secret)
(let* ((sk1 (ironclad:make-private-key :curve448 :x skey1 :y pkey1))
(pk1 (ironclad:make-public-key :curve448 :y pkey1))
(sk2 (ironclad:make-private-key :curve448 :x skey2 :y pkey2))
(pk2 (ironclad:make-public-key :curve448 :y pkey2))
(ss1 (ironclad:diffie-hellman sk1 pk2))
(ss2 (ironclad:diffie-hellman sk2 pk1)))
(when (mismatch ss1 shared-secret)
(error "shared secret computation failed for ~A on skey ~A, pkey ~A, secret ~A"
name skey1 pkey2 shared-secret))
(when (mismatch ss2 shared-secret)
(error "shared secret computation failed for ~A on skey ~A, pkey ~A, secret ~A"
name skey2 pkey1 shared-secret))))
;; Same again for ElGamal keys, which also carry group parameters P and G.
(defun elgamal-dh-test (name p g skey1 pkey1 skey2 pkey2 shared-secret)
(let* ((sk1 (ironclad:make-private-key :elgamal :p p :g g :x skey1 :y pkey1))
(pk1 (ironclad:make-public-key :elgamal :p p :g g :y pkey1))
(sk2 (ironclad:make-private-key :elgamal :p p :g g :x skey2 :y pkey2))
(pk2 (ironclad:make-public-key :elgamal :p p :g g :y pkey2))
(ss1 (ironclad:diffie-hellman sk1 pk2))
(ss2 (ironclad:diffie-hellman sk2 pk1)))
(when (mismatch ss1 shared-secret)
(error "shared secret computation failed for ~A on skey ~A, pkey ~A, secret ~A"
name skey1 pkey2 shared-secret))
(when (mismatch ss2 shared-secret)
(error "shared secret computation failed for ~A on skey ~A, pkey ~A, secret ~A"
name skey2 pkey1 shared-secret))))
;;; Dispatch tables for the public-key test-vector files.
(defparameter *public-key-encryption-tests*
(list (cons :rsa-oaep-encryption-test 'rsa-oaep-encryption-test)
(cons :elgamal-encryption-test 'elgamal-encryption-test)))
(defparameter *public-key-signature-tests*
(list (cons :rsa-pss-signature-test 'rsa-pss-signature-test)
(cons :elgamal-signature-test 'elgamal-signature-test)
(cons :dsa-signature-test 'dsa-signature-test)
(cons :ed25519-signature-test 'ed25519-signature-test)
(cons :ed448-signature-test 'ed448-signature-test)))
(defparameter *public-key-diffie-hellman-tests*
(list (cons :curve25519-dh-test 'curve25519-dh-test)
(cons :curve448-dh-test 'curve448-dh-test)
(cons :elgamal-dh-test 'elgamal-dh-test)))
| null | https://raw.githubusercontent.com/eugeneia/athens/cc9d456edd3891b764b0fbf0202a3e2f58865cbf/quicklisp/dists/quicklisp/software/ironclad-v0.40/testing/testfuns.lisp | lisp | -*- mode: lisp; indent-tabs-mode: nil -*-
POSITION call, so declare SPEED high enough to trigger that.
test vector files
cipher testing
encryption mode consistency checking
digest testing routines
Calling produce-digest twice should not give a wrong hash
mac testing routines
PRNG testing routines
Public key testing routines | (in-package :crypto-tests)
;;; Convert a hex STRING (optionally a :START/:END substring) into a
;;; (SIMPLE-ARRAY (UNSIGNED-BYTE 8)) -- two hex characters per octet,
;;; case-insensitive.  Signals an error on any non-hex character.
(defun hex-string-to-byte-array (string &key (start 0) (end nil))
  ;; This function disappears from profiles if SBCL can inline the
  ;; POSITION call, so declare SPEED high enough to trigger that.
  ;; (Comment markers restored; the bare text was unreadable by the
  ;; Lisp reader.)
  (declare (type string string) (optimize (speed 2)))
  (let* ((end (or end (length string)))
         (length (/ (- end start) 2))
         (key (make-array length :element-type '(unsigned-byte 8))))
    (declare (type (simple-array (unsigned-byte 8) (*)) key))
    (flet ((char-to-digit (char)
             (declare (type base-char char))
             (let ((x (cl:position char #.(coerce "0123456789abcdef" 'simple-base-string)
                                   :test #'char-equal)))
               (or x (error "Invalid hex key ~A specified" string)))))
      (loop for i from 0
            for j from start below end by 2
            do (setf (aref key i)
                     (+ (* (char-to-digit (char string j)) 16)
                        (char-to-digit (char string (1+ j)))))
            finally (return key)))))
;;; Map a test identifier (e.g. :SHA256 or :AES/CTR) to its .testvec
;;; file under test-vectors/, next to this source file.
(defun test-vector-filename (ident)
(merge-pathnames (make-pathname :directory '(:relative "test-vectors")
:name (substitute #\- #\/ (format nil "~(~A~)" ident))
:type "testvec")
#.*compile-file-pathname*))
;; Reader macro #a"..." -- ASCII string literal as an octet vector.
(defun sharp-a (stream sub-char numarg)
(declare (ignore sub-char numarg))
(crypto:ascii-string-to-byte-array (read stream t nil t)))
;; Reader macro #h"..." -- hex string literal as an octet vector.
(defun sharp-h (stream sub-char numarg)
(declare (ignore sub-char numarg))
(hex-string-to-byte-array (read stream t nil t)))
;;; Read every form from NAME's test-vector file (with the #a/#h reader
;;; macros installed in a private readtable) and dispatch each form's
;;; head keyword through FUNCTION-MAP, an alist of (keyword . function).
;;; Returns T if every form dispatched without error.
(defun run-test-vector-file (name function-map)
(let ((filename (test-vector-filename name))
(*readtable* (copy-readtable)))
(set-dispatch-macro-character #\# #\a #'sharp-a *readtable*)
(set-dispatch-macro-character #\# #\h #'sharp-h *readtable*)
(with-open-file (stream filename :direction :input
:element-type 'character
:if-does-not-exist :error)
;; Use STREAM itself as the EOF sentinel: it can never be READ back.
(loop for form = (read stream nil stream)
until (eq form stream) do
(cond
((not (listp form))
(error "Invalid form in test vector file ~A: ~A" filename form))
(t
(let ((test-function (cdr (assoc (car form) function-map))))
(unless test-function
(error "No test function defined for ~A" (car form)))
;; NAME is passed through as the cipher/digest identifier.
(apply test-function name (cdr form)))))
finally (return t)))))
;;; Shared body of the cipher tests: encrypt INPUT and expect OUTPUT,
;;; then reinitialize the cipher and decrypt OUTPUT back to INPUT.
;;; EXTRA-MAKE-CIPHER-ARGS carries e.g. :TWEAK or :INITIALIZATION-VECTOR.
(defun cipher-test-guts (cipher-name mode key input output
&optional extra-make-cipher-args)
(let ((cipher (apply #'crypto:make-cipher cipher-name
:key key :mode mode
extra-make-cipher-args))
(scratch (copy-seq input)))
(crypto:encrypt cipher input scratch)
(when (mismatch scratch output)
(error "encryption failed for ~A on key ~A, input ~A, output ~A"
cipher-name key input output))
;; Reset the cipher state before decrypting, otherwise the keystream
;; position would carry over from the encryption above.
(apply #'reinitialize-instance cipher :key key extra-make-cipher-args)
(crypto:decrypt cipher output scratch)
(when (mismatch scratch input)
(error "decryption failed for ~A on key ~A, input ~A, output ~A"
cipher-name key output input))))
;;; Stream variant of CIPHER-TEST-GUTS: round-trips INPUT/OUTPUT through
;;; encrypting and decrypting Gray streams.
#+(or lispworks sbcl cmucl openmcl allegro abcl ecl clisp)
(defun cipher-stream-test-guts (cipher-name mode key input output
&optional extra-args)
(let* ((out-stream (crypto:make-octet-output-stream))
(enc-stream (apply #'crypto:make-encrypting-stream
out-stream cipher-name mode key extra-args))
(in-stream (crypto:make-octet-input-stream output))
(dec-stream (apply #'crypto:make-decrypting-stream
in-stream cipher-name mode key extra-args)))
;; Exercise both WRITE-BYTE and WRITE-SEQUENCE entry points.
(write-byte (aref input 0) enc-stream)
(write-sequence input enc-stream :start 1)
(let ((result (crypto:get-output-stream-octets out-stream)))
(when (mismatch result output)
(error "stream encryption failed for ~A on key ~A, input ~A, output ~A"
cipher-name key input output)))
;; Same for READ-BYTE and READ-SEQUENCE on the decrypting side.
(let ((result (copy-seq output)))
(setf (aref result 0) (read-byte dec-stream))
(read-sequence result dec-stream :start 1)
(when (mismatch result input)
(error "stream decryption failed for ~A on key ~A, input ~A, output ~A"
cipher-name key output input)))))
;;; Thin wrappers binding CIPHER-TEST-GUTS to a specific mode and
;;; optional extra arguments.
(defun ecb-mode-test (cipher-name hexkey hexinput hexoutput)
(cipher-test-guts cipher-name :ecb hexkey hexinput hexoutput))
(defun ecb-tweak-mode-test (cipher-name hexkey hextweak hexinput hexoutput)
(cipher-test-guts cipher-name :ecb hexkey hexinput hexoutput
(list :tweak hextweak)))
(defun stream-mode-test (cipher-name hexkey hexinput hexoutput)
(cipher-test-guts cipher-name :stream hexkey hexinput hexoutput))
(defun stream-nonce-mode-test (cipher-name hexkey hexiv hexinput hexoutput)
(cipher-test-guts cipher-name :stream hexkey hexinput hexoutput
(list :initialization-vector hexiv)))
;;; Check KEYSTREAM-POSITION get/set: encrypt a 1000-octet zero buffer
;;; in deliberately out-of-order segments, seeking between each, and
;;; compare against the expected KEYSTREAM.
(defun keystream-test (cipher-name key iv keystream)
(let* ((mode (if (= 1 (crypto:block-length cipher-name)) :stream :ctr))
(cipher (crypto:make-cipher cipher-name :key key :mode mode :initialization-vector iv))
(buffer (make-array 1000 :element-type '(unsigned-byte 8) :initial-element 0)))
(crypto:keystream-position cipher 100)
(crypto:encrypt-in-place cipher buffer :start 100 :end 213)
(crypto:keystream-position cipher 500)
(crypto:encrypt-in-place cipher buffer :start 500 :end 1000)
(crypto:keystream-position cipher 213)
(crypto:encrypt-in-place cipher buffer :start 213 :end 500)
(crypto:keystream-position cipher 0)
(crypto:encrypt-in-place cipher buffer :end 100)
(crypto:keystream-position cipher 765)
(when (or (/= (crypto:keystream-position cipher) 765)
(mismatch buffer keystream))
(error "getting/setting key stream position failed for ~A on key ~A" cipher-name key))))
;;; Gray-stream counterparts of the stream-mode wrappers above.
#+(or lispworks sbcl cmucl openmcl allegro abcl ecl clisp)
(defun stream-mode-test/stream (cipher-name hexkey hexinput hexoutput)
(cipher-stream-test-guts cipher-name :stream hexkey hexinput hexoutput))
#+(or lispworks sbcl cmucl openmcl allegro abcl ecl clisp)
(defun stream-nonce-mode-test/stream (cipher-name hexkey hexiv hexinput hexoutput)
(cipher-stream-test-guts cipher-name :stream hexkey hexinput hexoutput
(list :initialization-vector hexiv)))
;;; Dispatch tables for the cipher test-vector files.
(defparameter *cipher-tests*
(list (cons :ecb-mode-test 'ecb-mode-test)
(cons :ecb-tweak-mode-test 'ecb-tweak-mode-test)
(cons :stream-mode-test 'stream-mode-test)
(cons :stream-nonce-mode-test 'stream-nonce-mode-test)
(cons :keystream-test 'keystream-test)))
#+(or lispworks sbcl cmucl openmcl allegro abcl ecl clisp)
(defparameter *cipher-stream-tests*
(list (cons :ecb-mode-test 'ignore-test)
(cons :ecb-tweak-mode-test 'ignore-test)
(cons :stream-mode-test 'stream-mode-test/stream)
(cons :stream-nonce-mode-test 'stream-nonce-mode-test/stream)
(cons :keystream-test 'ignore-test)))
;;; tests from NIST
;;; Block-cipher mode consistency test: encrypt INPUT with the given
;;; MODE/KEY/IV and expect OUTPUT, then reinitialize and decrypt OUTPUT
;;; back to INPUT.
(defun mode-test (mode cipher-name key iv input output)
(labels ((frob-hex-string (cipher func input)
(let ((scratch (copy-seq input)))
(funcall func cipher input scratch)
scratch))
(cipher-test (cipher func input output)
(not (mismatch (frob-hex-string cipher func input) output))))
(let ((cipher (crypto:make-cipher cipher-name :key key :mode mode
:initialization-vector iv)))
(unless (cipher-test cipher 'crypto:encrypt input output)
(error "encryption failed for ~A on key ~A, input ~A, output ~A"
cipher-name key input output))
;; Reset cipher state (and IV) before the decryption direction.
(reinitialize-instance cipher :key key :mode mode
:initialization-vector iv)
(unless (cipher-test cipher 'crypto:decrypt output input)
(error "decryption failed for ~A on key ~A, input ~A, output ~A"
cipher-name key output input)))))
;; Dispatch table for the mode test-vector files.
(defparameter *mode-tests*
(list (cons :mode-test 'mode-test)))
(defun digest-test/base (digest-name input expected-digest)
  ;; One-shot digest test: hash INPUT in a single DIGEST-SEQUENCE call
  ;; and compare the result against EXPECTED-DIGEST.
  (when (mismatch (crypto:digest-sequence digest-name input) expected-digest)
    (error "one-shot ~A digest of ~S failed" digest-name input)))
;;; Incremental digest test: feed INPUT one octet at a time through
;;; UPDATE-DIGEST to exercise the :START/:END handling, then compare
;;; the final digest against EXPECTED-DIGEST.
(defun digest-test/incremental (digest-name input expected-digest)
(loop with length = (length input)
with digester = (crypto:make-digest digest-name)
for i from 0 below length
do (crypto:update-digest digester input :start i :end (1+ i))
finally
(let ((result (crypto:produce-digest digester)))
(when (mismatch result expected-digest)
(error "incremental ~A digest of ~S failed" digest-name input)))))
;;; Digest a non-simple vector: copy OCTETS into an adjustable array
;;; with a fill pointer (backing store twice as large) and check that
;;; DIGEST-SEQUENCE still produces EXPECTED-DIGEST.  SBCL/CMUCL only.
#+(or sbcl cmucl)
(defun digest-test/fill-pointer (digest-name octets expected-digest)
(let* ((input (let ((x (make-array (* 2 (length octets))
:fill-pointer 0
:element-type '(unsigned-byte 8))))
(dotimes (i (length octets) x)
(vector-push (aref octets i) x))))
(result (crypto:digest-sequence digest-name input)))
(when (mismatch result expected-digest)
(error "fill-pointer'd ~A digest of ~S failed" digest-name input))))
#+(or lispworks sbcl cmucl openmcl allegro abcl ecl clisp)
(defun digest-test/stream (digest-name input expected-digest)
(let* ((stream (crypto:make-digesting-stream digest-name)))
(when (plusp (length input))
(write-byte (aref input 0) stream)
(write-sequence input stream :start 1))
(when (mismatch (crypto:produce-digest stream) expected-digest)
(error "stream-y ~A digest of ~S failed" digest-name input))))
(defun digest-test/reinitialize-instance (digest-name input expected-digest)
(let* ((digest (crypto:make-digest digest-name))
(result (progn
(crypto:digest-sequence digest input)
(crypto:digest-sequence (reinitialize-instance digest) input))))
(when (mismatch result expected-digest)
(error "testing reinitialize-instance ~A digest of ~S failed" digest-name input))))
(defun digest-bit-test (digest-name leading byte trailing expected-digest)
(let* ((input (let ((vector (make-array (+ 1 leading trailing)
:element-type '(unsigned-byte 8)
:initial-element 0)))
(setf (aref vector leading) byte)
vector))
(result (crypto:digest-sequence digest-name input)))
(when (mismatch result expected-digest)
(error "individual bit test ~A digest of (~D #x~2,'0X ~D) failed"
digest-name leading byte trailing))))
(defun xof-digest-test (digest-name output-length input expected-digest)
(let* ((digest (crypto:make-digest digest-name :output-length output-length))
(result (crypto:digest-sequence digest input)))
(when (mismatch result expected-digest)
(error "one-shot ~A xof digest of ~S failed" digest-name input))))
(defparameter *digest-tests*
(list (cons :digest-test 'digest-test/base)
(cons :digest-bit-test 'digest-bit-test)
(cons :xof-digest-test 'xof-digest-test)))
(defun ignore-test (&rest args)
(declare (ignore args))
nil)
(defparameter *digest-incremental-tests*
(list (cons :digest-test 'digest-test/incremental)
(cons :digest-bit-test 'ignore-test)
(cons :xof-digest-test 'ignore-test)))
#+(or sbcl cmucl)
(defparameter *digest-fill-pointer-tests*
(list (cons :digest-test 'digest-test/fill-pointer)
(cons :digest-bit-test 'ignore-test)
(cons :xof-digest-test 'ignore-test)))
#+(or lispworks sbcl cmucl openmcl allegro abcl ecl clisp)
(defparameter *digest-stream-tests*
(list (cons :digest-test 'digest-test/stream)
(cons :digest-bit-test 'ignore-test)
(cons :xof-digest-test 'ignore-test)))
(defparameter *digest-reinitialize-instance-tests*
(list (cons :digest-test 'digest-test/reinitialize-instance)
(cons :digest-bit-test 'ignore-test)
(cons :xof-digest-test 'ignore-test)))
(defun mac-test/base (mac-name key data expected-digest &rest args)
(let ((mac (apply #'crypto:make-mac mac-name key args)))
(crypto:update-mac mac data)
(let ((result (crypto:produce-mac mac)))
(when (mismatch result expected-digest)
(error "one-shot ~A mac of ~A failed on key ~A, args ~A"
mac-name data key args)))))
(defun mac-test/incremental (mac-name key data expected-digest &rest args)
(loop with length = (length data)
with mac = (apply #'crypto:make-mac mac-name key args)
for i from 0 below length
do (crypto:update-mac mac data :start i :end (1+ i))
finally (let ((result (crypto:produce-mac mac)))
(when (mismatch result expected-digest)
(error "incremental ~A mac of ~A failed on key ~A, args ~A"
mac-name data key args)))))
#+(or lispworks sbcl cmucl openmcl allegro abcl ecl clisp)
(defun mac-test/stream (mac-name key data expected-digest &rest args)
(let ((stream (apply #'crypto:make-authenticating-stream mac-name key args)))
(when (plusp (length data))
(write-byte (aref data 0) stream)
(write-sequence data stream :start 1))
Calling produce - mac twice should not give a wrong MAC
(let ((result (crypto:produce-mac stream)))
(when (mismatch result expected-digest)
(error "stream ~A mac of ~A failed on key ~A, args ~A"
mac-name data key args)))))
(defun mac-test/reinitialize-instance (mac-name key data expected-digest &rest args)
(let* ((mac (apply #'crypto:make-mac mac-name key args))
(result1 (progn
(crypto:update-mac mac data)
(crypto:produce-mac mac))))
(declare (ignorable result1))
(reinitialize-instance mac :key key)
(let ((result2 (progn
(crypto:update-mac mac data)
(crypto:produce-mac mac))))
(when (mismatch result2 expected-digest)
(error "testing reinitialize-instance ~A mac of ~A failed on key ~A, args ~A"
mac-name data key args)))))
(defparameter *mac-tests*
(list (cons :mac-test 'mac-test/base)))
(defparameter *mac-incremental-tests*
(list (cons :mac-test 'mac-test/incremental)))
#+(or lispworks sbcl cmucl openmcl allegro abcl ecl clisp)
(defparameter *mac-stream-tests*
(list (cons :mac-test 'mac-test/stream)))
(defparameter *mac-reinitialize-instance-tests*
(list (cons :mac-test 'mac-test/reinitialize-instance)))
(defun fortuna-test (name seed entropy expected-sequence)
(declare (ignore name))
(let ((prng (crypto:make-prng :fortuna
:seed (coerce seed 'crypto::simple-octet-vector)))
(num-bytes (length expected-sequence)))
(loop for (source pool-id event) in entropy
do (crypto:add-random-event source pool-id event prng))
(assert (equalp expected-sequence
(crypto:random-data num-bytes prng)))))
(defun generator-test (name cipher seeds expected-sequences)
(declare (ignore name))
(let ((generator (make-instance 'crypto:fortuna-generator :cipher cipher)))
(loop for seed in seeds
do (crypto:prng-reseed (coerce seed '(vector (unsigned-byte 8))) generator))
(every (lambda (sequence)
(assert (zerop (mod (length sequence) 16)))
(assert (equalp sequence
(crypto:random-data (length sequence)
generator))))
expected-sequences)))
(defparameter *prng-tests*
`((:fortuna-test . ,'fortuna-test)
(:generator-test . ,'generator-test)))
(defun rsa-oaep-encryption-test (name n e d input seed output)
Redefine oaep - encode to use a defined seed for the test instead of a random one
(setf (symbol-function 'ironclad::oaep-encode)
(lambda (digest-name message num-bytes &optional label)
(let* ((digest-name (if (eq digest-name t) :sha1 digest-name))
(digest-len (ironclad:digest-length digest-name)))
(assert (<= (length message) (- num-bytes (* 2 digest-len) 2)))
(let* ((digest (ironclad:make-digest digest-name))
(label (or label (coerce #() '(vector (unsigned-byte 8)))))
(padding-len (- num-bytes (length message) (* 2 digest-len) 2))
(padding (make-array padding-len :element-type '(unsigned-byte 8) :initial-element 0))
(l-hash (ironclad:digest-sequence digest label))
(db (concatenate '(vector (unsigned-byte 8)) l-hash padding #(1) message))
(db-mask (ironclad::mgf digest-name seed (- num-bytes digest-len 1)))
(masked-db (map '(vector (unsigned-byte 8)) #'logxor db db-mask))
(seed-mask (ironclad::mgf digest-name masked-db digest-len))
(masked-seed (map '(vector (unsigned-byte 8)) #'logxor seed seed-mask)))
(concatenate '(vector (unsigned-byte 8)) #(0) masked-seed masked-db)))))
(let* ((pk (ironclad:make-public-key :rsa :n n :e e))
(sk (ironclad:make-private-key :rsa :n n :d d))
(c (ironclad:encrypt-message pk input :oaep t))
(m (ironclad:decrypt-message sk output :oaep t)))
(when (mismatch c output)
(error "encryption failed for ~A on pkey (~A ~A), input ~A, output ~A"
name n e input output))
(when (mismatch m input)
(error "decryption failed for ~A on skey (~A ~A), input ~A, output ~A"
name n d input output))))
(defun elgamal-encryption-test (name p g x y input k output)
Redefine elgamal - generate - k to use a defined K for the test instead of a random one
(setf (symbol-function 'ironclad::elgamal-generate-k)
(lambda (p)
(declare (ignore p))
k))
(let* ((pk (ironclad:make-public-key :elgamal :p p :g g :y y))
(sk (ironclad:make-private-key :elgamal :p p :g g :x x :y y))
(c (ironclad:encrypt-message pk input))
(m (ironclad:decrypt-message sk output)))
(when (mismatch c output)
(error "encryption failed for ~A on pkey (~A ~A ~A), input ~A, output ~A"
name p g y input output))
(when (mismatch m input)
(error "decryption failed for ~A on skey (~A ~A ~A ~A), input ~A, output ~A"
name p g x y input output))))
(defun rsa-pss-signature-test (name n e d input salt signature)
Redefine pss - encode to use a defined salt for the test instead of a random one
(setf (symbol-function 'ironclad::pss-encode)
(lambda (digest-name message num-bytes)
(let* ((digest-name (if (eq digest-name t) :sha1 digest-name))
(digest-len (ironclad:digest-length digest-name)))
(assert (>= num-bytes (+ (* 2 digest-len) 2)))
(let* ((m-hash (ironclad:digest-sequence digest-name message))
(m1 (concatenate '(vector (unsigned-byte 8)) #(0 0 0 0 0 0 0 0) m-hash salt))
(h (ironclad:digest-sequence digest-name m1))
(ps (make-array (- num-bytes (* 2 digest-len) 2)
:element-type '(unsigned-byte 8)
:initial-element 0))
(db (concatenate '(vector (unsigned-byte 8)) ps #(1) salt))
(db-mask (ironclad::mgf digest-name h (- num-bytes digest-len 1)))
(masked-db (map '(vector (unsigned-byte 8)) #'logxor db db-mask)))
(setf (ldb (byte 1 7) (elt masked-db 0)) 0)
(concatenate '(vector (unsigned-byte 8)) masked-db h #(188))))))
(let* ((pk (ironclad:make-public-key :rsa :n n :e e))
(sk (ironclad:make-private-key :rsa :n n :d d))
(s (ironclad:sign-message sk input :pss t)))
(when (mismatch s signature)
(error "signature failed for ~A on skey (~A ~A), input ~A, signature ~A"
name n d input signature))
(unless (ironclad:verify-signature pk input signature :pss t)
(error "signature verification failed for ~A on pkey (~A ~A), input ~A, signature ~A"
name n e input signature))))
(defun elgamal-signature-test (name p g x y input k signature)
Redefine elgamal - generate - k to use a defined K for the test instead of a random one
(setf (symbol-function 'ironclad::elgamal-generate-k)
(lambda (p)
(declare (ignore p))
k))
(let* ((pk (ironclad:make-public-key :elgamal :p p :g g :y y))
(sk (ironclad:make-private-key :elgamal :p p :g g :x x :y y))
(s (ironclad:sign-message sk input)))
(when (mismatch s signature)
(error "signature failed for ~A on skey (~A ~A ~A ~A), input ~A, signature ~A"
name p g x y input signature))
(unless (ironclad:verify-signature pk input signature)
(error "signature verification failed for ~A on pkey (~A ~A ~A), input ~A, signature ~A"
name p g y input signature))))
(defun dsa-signature-test (name p q g x y input k signature)
Redefine dsa - generate - k to use a defined K for the test instead of a random one
(setf (symbol-function 'ironclad::dsa-generate-k)
(lambda (q)
(declare (ignore q))
k))
(let* ((sk (ironclad:make-private-key :dsa :p p :q q :g g :x x :y y))
(pk (ironclad:make-public-key :dsa :p p :q q :g g :y y))
(s (ironclad:sign-message sk input)))
(when (mismatch s signature)
(error "signature failed for ~A on skey (~A ~A ~A ~A ~A), input ~A, signature ~A"
name p q g x y input signature))
(unless (ironclad:verify-signature pk input signature)
(error "signature verification failed for ~A on pkey (~A ~A ~A ~A), input ~A, signature ~A"
name p q g y input signature))))
(defun ed25519-signature-test (name skey pkey input signature)
(let* ((sk (ironclad:make-private-key :ed25519 :x skey :y pkey))
(pk (ironclad:make-public-key :ed25519 :y pkey))
(s (ironclad:sign-message sk input)))
(when (mismatch s signature)
(error "signature failed for ~A on skey ~A, input ~A, signature ~A"
name skey input signature))
(unless (ironclad:verify-signature pk input signature)
(error "signature verification failed for ~A on pkey ~A, input ~A, signature ~A"
name pkey input signature))))
(defun ed448-signature-test (name skey pkey input signature)
(let* ((sk (ironclad:make-private-key :ed448 :x skey :y pkey))
(pk (ironclad:make-public-key :ed448 :y pkey))
(s (ironclad:sign-message sk input)))
(when (mismatch s signature)
(error "signature failed for ~A on skey ~A, input ~A, signature ~A"
name skey input signature))
(unless (ironclad:verify-signature pk input signature)
(error "signature verification failed for ~A on pkey ~A, input ~A, signature ~A"
name pkey input signature))))
(defun curve25519-dh-test (name skey1 pkey1 skey2 pkey2 shared-secret)
(let* ((sk1 (ironclad:make-private-key :curve25519 :x skey1 :y pkey1))
(pk1 (ironclad:make-public-key :curve25519 :y pkey1))
(sk2 (ironclad:make-private-key :curve25519 :x skey2 :y pkey2))
(pk2 (ironclad:make-public-key :curve25519 :y pkey2))
(ss1 (ironclad:diffie-hellman sk1 pk2))
(ss2 (ironclad:diffie-hellman sk2 pk1)))
(when (mismatch ss1 shared-secret)
(error "shared secret computation failed for ~A on skey ~A, pkey ~A, secret ~A"
name skey1 pkey2 shared-secret))
(when (mismatch ss2 shared-secret)
(error "shared secret computation failed for ~A on skey ~A, pkey ~A, secret ~A"
name skey2 pkey1 shared-secret))))
(defun curve448-dh-test (name skey1 pkey1 skey2 pkey2 shared-secret)
(let* ((sk1 (ironclad:make-private-key :curve448 :x skey1 :y pkey1))
(pk1 (ironclad:make-public-key :curve448 :y pkey1))
(sk2 (ironclad:make-private-key :curve448 :x skey2 :y pkey2))
(pk2 (ironclad:make-public-key :curve448 :y pkey2))
(ss1 (ironclad:diffie-hellman sk1 pk2))
(ss2 (ironclad:diffie-hellman sk2 pk1)))
(when (mismatch ss1 shared-secret)
(error "shared secret computation failed for ~A on skey ~A, pkey ~A, secret ~A"
name skey1 pkey2 shared-secret))
(when (mismatch ss2 shared-secret)
(error "shared secret computation failed for ~A on skey ~A, pkey ~A, secret ~A"
name skey2 pkey1 shared-secret))))
(defun elgamal-dh-test (name p g skey1 pkey1 skey2 pkey2 shared-secret)
(let* ((sk1 (ironclad:make-private-key :elgamal :p p :g g :x skey1 :y pkey1))
(pk1 (ironclad:make-public-key :elgamal :p p :g g :y pkey1))
(sk2 (ironclad:make-private-key :elgamal :p p :g g :x skey2 :y pkey2))
(pk2 (ironclad:make-public-key :elgamal :p p :g g :y pkey2))
(ss1 (ironclad:diffie-hellman sk1 pk2))
(ss2 (ironclad:diffie-hellman sk2 pk1)))
(when (mismatch ss1 shared-secret)
(error "shared secret computation failed for ~A on skey ~A, pkey ~A, secret ~A"
name skey1 pkey2 shared-secret))
(when (mismatch ss2 shared-secret)
(error "shared secret computation failed for ~A on skey ~A, pkey ~A, secret ~A"
name skey2 pkey1 shared-secret))))
(defparameter *public-key-encryption-tests*
(list (cons :rsa-oaep-encryption-test 'rsa-oaep-encryption-test)
(cons :elgamal-encryption-test 'elgamal-encryption-test)))
(defparameter *public-key-signature-tests*
(list (cons :rsa-pss-signature-test 'rsa-pss-signature-test)
(cons :elgamal-signature-test 'elgamal-signature-test)
(cons :dsa-signature-test 'dsa-signature-test)
(cons :ed25519-signature-test 'ed25519-signature-test)
(cons :ed448-signature-test 'ed448-signature-test)))
(defparameter *public-key-diffie-hellman-tests*
(list (cons :curve25519-dh-test 'curve25519-dh-test)
(cons :curve448-dh-test 'curve448-dh-test)
(cons :elgamal-dh-test 'elgamal-dh-test)))
|
420bcd24756a51b50f795fca7855a011464b8a88cadc30d5031e63104ec48b22 | larandaA/hgraphs | BfsSpec.hs | module Data.Graph.Abstract.Accessor.Algorithm.BfsSpec (spec) where
import Control.Monad
import qualified Data.Graph.Abstract as GA
import qualified Data.Graph.Abstract.Accessor as GAA
import qualified Data.Graph.Abstract.Accessor.Algorithm.Bfs as Bfs
import qualified Data.Graph.Abstract.Builder as GAB
import qualified Data.Graph.Abstract.Common as GAC
import Test.Hspec
bfsFromSpec :: Spec
bfsFromSpec = describe "bfsFrom" $ do
it "should return default values for all vertices on empty list of start vertices" $ do
let g = GAC.isolated [1, 2, 3, 4]
let g' = GAA.execute g $ do {
values <- Bfs.bfsFrom [] 1 (\_ _ -> pure 42);
GAA.vgraph values
}
GA.vertices g' `shouldMatchList` [1, 1, 1, 1]
it "should return default values for unreachable vertices" $ do
let g = GAC.isolated [1, 3, 2, 4]
let g' = GAA.execute g $ do {
vs <- GAA.vfind (< 3);
values <- Bfs.bfsFrom vs "N" (\_ _ -> pure "Y");
GAA.vgraph values
}
GA.vertices (GA.zip g g') `shouldMatchList` [(1, "Y"), (2, "Y"), (3, "N"), (4, "N")]
it "should return distances from start vertices" $ do
let g = GAC.path ["s1", "b", "s2", "d", "e"]
let f Nothing _ = pure 0
f (Just (dist, _)) _ = pure (dist + 1)
let g' = GAA.execute g $ do {
vs <- GAA.vfind ((>= 2) . length);
values <- Bfs.bfsFrom vs (-1) f;
GAA.vgraph values
}
GA.vertices (GA.zip g g') `shouldMatchList` [("s1", 0), ("b", 1), ("s2", 0), ("d", 1), ("e", 2)]
it "should return incremented values of vertices" $ do
let g = GAC.path [1, 2, 3, 4]
let f _ v = (+ 1) <$> GAA.value v
let g' = GAA.execute g $ do {
vs <- GAA.vfind (== 1);
values <- Bfs.bfsFrom vs (-1) f;
GAA.vgraph values
}
GA.vertices (GA.zip g g') `shouldMatchList` [(1, 2), (2, 3), (3, 4), (4, 5)]
bfsSpec :: Spec
bfsSpec = describe "bfs" $ do
it "should work fine with empty graph" $ do
let g = GAC.empty
let g' = GAA.execute g $ do {
values <- Bfs.bfs 1 (\_ _ -> pure 42);
GAA.vgraph values
}
length (GA.vertices g') `shouldBe` 0
it "should return non-default values for all vertices" $ do
let g = GAC.isolated [1, 2, 3, 4]
let g' = GAA.execute g $ do {
values <- Bfs.bfs 1 (\_ _ -> pure 42);
GAA.vgraph values
}
GA.vertices g' `shouldMatchList` [42, 42, 42, 42]
distancesSpec :: Spec
distancesSpec = describe "distances" $ do
it "should be Nothing for all vertices on empty list of start vertices" $ do
let g = GAC.isolated [1, 2, 3, 4]
let g' = GAA.execute g $ do {
distances <- Bfs.distances [];
GAA.vgraph distances
}
GA.vertices g' `shouldMatchList` [Nothing, Nothing, Nothing, Nothing]
it "should be 0 for all vertices if all vertices are start vertices" $ do
let g = GAC.isolated [1, 2, 3, 4]
let g' = GAA.execute g $ do {
vs <- GAA.vertices;
distances <- Bfs.distances vs;
GAA.vgraph distances
}
GA.vertices g' `shouldMatchList` [Just 0, Just 0, Just 0, Just 0]
it "should be 0 for a single vertex on isolated vertices" $ do
let g = GAC.isolated [1, 2, 3, 4]
let g' = GAA.execute g $ do {
vs <- GAA.vfind (== 3);
distances <- Bfs.distances vs;
GAA.vgraph distances
}
GA.vertices (GA.zip g g') `shouldMatchList`
[ (1, Nothing), (2, Nothing)
, (3, Just 0), (4, Nothing)
]
it "should be 1 for rays of a star" $ do
let g = GAC.star 4 [1, 2, 3]
let g' = GAA.execute g $ do {
vs <- GAA.vfind (== 4);
distances <- Bfs.distances vs;
GAA.vgraph distances
}
GA.vertices (GA.zip g g') `shouldMatchList`
[ (1, Just 1), (2, Just 1)
, (3, Just 1), (4, Just 0)
]
it "should be the distances of the shortest paths" $ do
let g = GAB.build $ do {
v0 <- GAB.vertex 0;
v1 <- GAB.vertex 1;
v2 <- GAB.vertex 2;
v3 <- GAB.vertex 3;
v4 <- GAB.vertex 4;
GAB.edge' v0 v1;
GAB.edge' v1 v2;
GAB.edge' v2 v4;
GAB.edge' v0 v3;
GAB.edge' v3 v4
}
let g' = GAA.execute g $ do {
vs <- GAA.vfind (== 0);
distances <- Bfs.distances vs;
GAA.vgraph distances
}
GA.vertices (GA.zip g g') `shouldMatchList`
[ (0, Just 0), (1, Just 1)
, (2, Just 2), (3, Just 1)
, (4, Just 2)
]
pathsSpec :: Spec
pathsSpec = describe "paths" $ do
it "should be Nothing for all vertices on empty list of start vertices" $ do
let g = GAC.isolated [1, 2, 3, 4]
let g' = GAA.execute g $ do {
vs <- GAA.vertices;
preds <- Bfs.paths [];
preds' <- GAA.varray Nothing;
forM_ vs $ \v -> do {
pred <- GAA.vget preds v;
pred' <- traverse (GAA.value . GAA.source) pred;
GAA.vset preds' v pred';
};
GAA.vgraph preds'
}
GA.vertices g' `shouldMatchList` [Nothing, Nothing, Nothing, Nothing]
it "should be Nothing for all vertices if all vertices are start vertices" $ do
let g = GAC.isolated [1, 2, 3, 4]
let g' = GAA.execute g $ do {
vs <- GAA.vertices;
preds <- Bfs.paths vs;
preds' <- GAA.varray Nothing;
forM_ vs $ \v -> do {
pred <- GAA.vget preds v;
pred' <- traverse (GAA.value . GAA.source) pred;
GAA.vset preds' v pred';
};
GAA.vgraph preds'
}
GA.vertices g' `shouldMatchList` [Nothing, Nothing, Nothing, Nothing]
it "should be star center for rays of a star" $ do
let g = GAC.star 4 [1, 2, 3]
let g' = GAA.execute g $ do {
vs <- GAA.vertices;
starts <- GAA.vfind (== 4);
preds <- Bfs.paths starts;
preds' <- GAA.varray Nothing;
forM_ vs $ \v -> do {
pred <- GAA.vget preds v;
pred' <- traverse (GAA.value . GAA.source) pred;
GAA.vset preds' v pred';
};
GAA.vgraph preds'
}
GA.vertices (GA.zip g g') `shouldMatchList`
[ (1, Just 4), (2, Just 4)
, (3, Just 4), (4, Nothing)
]
it "should be the predecessors according to the shortest paths" $ do
let g = GAB.build $ do {
v0 <- GAB.vertex 0;
v1 <- GAB.vertex 1;
v2 <- GAB.vertex 2;
v3 <- GAB.vertex 3;
v4 <- GAB.vertex 4;
GAB.edge' v0 v1;
GAB.edge' v1 v2;
GAB.edge' v2 v4;
GAB.edge' v0 v3;
GAB.edge' v3 v4
}
let g' = GAA.execute g $ do {
vs <- GAA.vertices;
starts <- GAA.vfind (== 0);
preds <- Bfs.paths starts;
preds' <- GAA.varray Nothing;
forM_ vs $ \v -> do {
pred <- GAA.vget preds v;
pred' <- traverse (GAA.value . GAA.source) pred;
GAA.vset preds' v pred';
};
GAA.vgraph preds'
}
GA.vertices (GA.zip g g') `shouldMatchList`
[ (0, Nothing), (1, Just 0)
, (2, Just 1), (3, Just 0)
, (4, Just 3)
]
spec :: Spec
spec = describe "Data.Graph.Abstract.Accessor.Algorithm.Bfs" $ do
bfsFromSpec
bfsSpec
distancesSpec
pathsSpec
| null | https://raw.githubusercontent.com/larandaA/hgraphs/322b5cdfafbae851b4251c907e2c81b82cf02d4a/test/Data/Graph/Abstract/Accessor/Algorithm/BfsSpec.hs | haskell | module Data.Graph.Abstract.Accessor.Algorithm.BfsSpec (spec) where
import Control.Monad
import qualified Data.Graph.Abstract as GA
import qualified Data.Graph.Abstract.Accessor as GAA
import qualified Data.Graph.Abstract.Accessor.Algorithm.Bfs as Bfs
import qualified Data.Graph.Abstract.Builder as GAB
import qualified Data.Graph.Abstract.Common as GAC
import Test.Hspec
bfsFromSpec :: Spec
bfsFromSpec = describe "bfsFrom" $ do
it "should return default values for all vertices on empty list of start vertices" $ do
let g = GAC.isolated [1, 2, 3, 4]
let g' = GAA.execute g $ do {
values <- Bfs.bfsFrom [] 1 (\_ _ -> pure 42);
GAA.vgraph values
}
GA.vertices g' `shouldMatchList` [1, 1, 1, 1]
it "should return default values for unreachable vertices" $ do
let g = GAC.isolated [1, 3, 2, 4]
let g' = GAA.execute g $ do {
vs <- GAA.vfind (< 3);
values <- Bfs.bfsFrom vs "N" (\_ _ -> pure "Y");
GAA.vgraph values
}
GA.vertices (GA.zip g g') `shouldMatchList` [(1, "Y"), (2, "Y"), (3, "N"), (4, "N")]
it "should return distances from start vertices" $ do
let g = GAC.path ["s1", "b", "s2", "d", "e"]
let f Nothing _ = pure 0
f (Just (dist, _)) _ = pure (dist + 1)
let g' = GAA.execute g $ do {
vs <- GAA.vfind ((>= 2) . length);
values <- Bfs.bfsFrom vs (-1) f;
GAA.vgraph values
}
GA.vertices (GA.zip g g') `shouldMatchList` [("s1", 0), ("b", 1), ("s2", 0), ("d", 1), ("e", 2)]
it "should return incremented values of vertices" $ do
let g = GAC.path [1, 2, 3, 4]
let f _ v = (+ 1) <$> GAA.value v
let g' = GAA.execute g $ do {
vs <- GAA.vfind (== 1);
values <- Bfs.bfsFrom vs (-1) f;
GAA.vgraph values
}
GA.vertices (GA.zip g g') `shouldMatchList` [(1, 2), (2, 3), (3, 4), (4, 5)]
bfsSpec :: Spec
bfsSpec = describe "bfs" $ do
it "should work fine with empty graph" $ do
let g = GAC.empty
let g' = GAA.execute g $ do {
values <- Bfs.bfs 1 (\_ _ -> pure 42);
GAA.vgraph values
}
length (GA.vertices g') `shouldBe` 0
it "should return non-default values for all vertices" $ do
let g = GAC.isolated [1, 2, 3, 4]
let g' = GAA.execute g $ do {
values <- Bfs.bfs 1 (\_ _ -> pure 42);
GAA.vgraph values
}
GA.vertices g' `shouldMatchList` [42, 42, 42, 42]
distancesSpec :: Spec
distancesSpec = describe "distances" $ do
it "should be Nothing for all vertices on empty list of start vertices" $ do
let g = GAC.isolated [1, 2, 3, 4]
let g' = GAA.execute g $ do {
distances <- Bfs.distances [];
GAA.vgraph distances
}
GA.vertices g' `shouldMatchList` [Nothing, Nothing, Nothing, Nothing]
it "should be 0 for all vertices if all vertices are start vertices" $ do
let g = GAC.isolated [1, 2, 3, 4]
let g' = GAA.execute g $ do {
vs <- GAA.vertices;
distances <- Bfs.distances vs;
GAA.vgraph distances
}
GA.vertices g' `shouldMatchList` [Just 0, Just 0, Just 0, Just 0]
it "should be 0 for a single vertex on isolated vertices" $ do
let g = GAC.isolated [1, 2, 3, 4]
let g' = GAA.execute g $ do {
vs <- GAA.vfind (== 3);
distances <- Bfs.distances vs;
GAA.vgraph distances
}
GA.vertices (GA.zip g g') `shouldMatchList`
[ (1, Nothing), (2, Nothing)
, (3, Just 0), (4, Nothing)
]
it "should be 1 for rays of a star" $ do
let g = GAC.star 4 [1, 2, 3]
let g' = GAA.execute g $ do {
vs <- GAA.vfind (== 4);
distances <- Bfs.distances vs;
GAA.vgraph distances
}
GA.vertices (GA.zip g g') `shouldMatchList`
[ (1, Just 1), (2, Just 1)
, (3, Just 1), (4, Just 0)
]
it "should be the distances of the shortest paths" $ do
let g = GAB.build $ do {
v0 <- GAB.vertex 0;
v1 <- GAB.vertex 1;
v2 <- GAB.vertex 2;
v3 <- GAB.vertex 3;
v4 <- GAB.vertex 4;
GAB.edge' v0 v1;
GAB.edge' v1 v2;
GAB.edge' v2 v4;
GAB.edge' v0 v3;
GAB.edge' v3 v4
}
let g' = GAA.execute g $ do {
vs <- GAA.vfind (== 0);
distances <- Bfs.distances vs;
GAA.vgraph distances
}
GA.vertices (GA.zip g g') `shouldMatchList`
[ (0, Just 0), (1, Just 1)
, (2, Just 2), (3, Just 1)
, (4, Just 2)
]
pathsSpec :: Spec
pathsSpec = describe "paths" $ do
it "should be Nothing for all vertices on empty list of start vertices" $ do
let g = GAC.isolated [1, 2, 3, 4]
let g' = GAA.execute g $ do {
vs <- GAA.vertices;
preds <- Bfs.paths [];
preds' <- GAA.varray Nothing;
forM_ vs $ \v -> do {
pred <- GAA.vget preds v;
pred' <- traverse (GAA.value . GAA.source) pred;
GAA.vset preds' v pred';
};
GAA.vgraph preds'
}
GA.vertices g' `shouldMatchList` [Nothing, Nothing, Nothing, Nothing]
it "should be Nothing for all vertices if all vertices are start vertices" $ do
let g = GAC.isolated [1, 2, 3, 4]
let g' = GAA.execute g $ do {
vs <- GAA.vertices;
preds <- Bfs.paths vs;
preds' <- GAA.varray Nothing;
forM_ vs $ \v -> do {
pred <- GAA.vget preds v;
pred' <- traverse (GAA.value . GAA.source) pred;
GAA.vset preds' v pred';
};
GAA.vgraph preds'
}
GA.vertices g' `shouldMatchList` [Nothing, Nothing, Nothing, Nothing]
it "should be star center for rays of a star" $ do
let g = GAC.star 4 [1, 2, 3]
let g' = GAA.execute g $ do {
vs <- GAA.vertices;
starts <- GAA.vfind (== 4);
preds <- Bfs.paths starts;
preds' <- GAA.varray Nothing;
forM_ vs $ \v -> do {
pred <- GAA.vget preds v;
pred' <- traverse (GAA.value . GAA.source) pred;
GAA.vset preds' v pred';
};
GAA.vgraph preds'
}
GA.vertices (GA.zip g g') `shouldMatchList`
[ (1, Just 4), (2, Just 4)
, (3, Just 4), (4, Nothing)
]
it "should be the predecessors according to the shortest paths" $ do
let g = GAB.build $ do {
v0 <- GAB.vertex 0;
v1 <- GAB.vertex 1;
v2 <- GAB.vertex 2;
v3 <- GAB.vertex 3;
v4 <- GAB.vertex 4;
GAB.edge' v0 v1;
GAB.edge' v1 v2;
GAB.edge' v2 v4;
GAB.edge' v0 v3;
GAB.edge' v3 v4
}
let g' = GAA.execute g $ do {
vs <- GAA.vertices;
starts <- GAA.vfind (== 0);
preds <- Bfs.paths starts;
preds' <- GAA.varray Nothing;
forM_ vs $ \v -> do {
pred <- GAA.vget preds v;
pred' <- traverse (GAA.value . GAA.source) pred;
GAA.vset preds' v pred';
};
GAA.vgraph preds'
}
GA.vertices (GA.zip g g') `shouldMatchList`
[ (0, Nothing), (1, Just 0)
, (2, Just 1), (3, Just 0)
, (4, Just 3)
]
spec :: Spec
spec = describe "Data.Graph.Abstract.Accessor.Algorithm.Bfs" $ do
bfsFromSpec
bfsSpec
distancesSpec
pathsSpec
| |
556b5e22c435ac884bbb4087e16830c2dcfb4061ec342d958bbbab7109449f85 | xapi-project/xen-api | quicktest_http.ml |
* Copyright ( C ) 2006 - 2009 Citrix Systems Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation ; version 2.1 only . with the special
* exception on linking described in file LICENSE .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
* Copyright (C) 2006-2009 Citrix Systems Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; version 2.1 only. with the special
* exception on linking described in file LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*)
let finally = Xapi_stdext_pervasives.Pervasiveext.finally
module Uds = struct
  (* {{{1 *)
  (* NOTE(review): a bare "{ { { 1" fold-marker token (comment markers lost
     in extraction) broke the parse here; restored as a comment. *)
  exception Parse_error of string

  (* Run [func] over buffered channels connected to the Unix domain socket
     at [filename]; the underlying fd is always closed afterwards. *)
  let with_unix_channels filename func =
    let fd = Xapi_stdext_unix.Unixext.open_connection_unix_fd filename in
    let ic, oc = (Unix.in_channel_of_descr fd, Unix.out_channel_of_descr fd) in
    finally (fun () -> func ic oc) (fun () -> Unix.close fd)

  (* Extract the numeric status code from an HTTP status line,
     e.g. "HTTP/1.0 404 Not Found" -> 404. *)
  let http_response_code d =
    match Xapi_stdext_std.Xstringext.String.split ' ' d with
    | _ :: code :: _ ->
        int_of_string code
    | _ ->
        raise (Parse_error "Failed to parse HTTP reponse code")

  (* Read header lines up to and including the "\r" blank-line separator. *)
  let rec read_header ic acc =
    let line = input_line ic in
    if line = "\r" then
      List.rev (line :: acc)
    else
      read_header ic (line :: acc)

  (* Read the remaining body lines until end of file. *)
  let rec read_body ic acc =
    try
      let line = input_line ic in
      read_body ic (line :: acc)
    with End_of_file -> List.rev acc

  (* Send the raw HTTP request [cmd] over the Unix domain socket at
     [filename] and return (status code, status line, headers, body). *)
  let http_command filename cmd =
    with_unix_channels filename (fun ic oc ->
        Printf.fprintf oc "%s" cmd ;
        flush oc ;
        let result_line = input_line ic in
        let response_code = http_response_code result_line in
        let header = read_header ic [] in
        let body = read_body ic [] in
        (response_code, result_line, header, body)
    )
end
module Secret_Auth_fails = struct
  (* {{{1 *)
  (* NOTE(review): bare "{ { { 1" residue (stripped comment markers)
     broke the parse here; restored as a comment. *)

  (* A request carrying a pool-secret cookie no host will recognise. *)
  let invalid_pool_secret =
    Http.Request.make ~version:"1.0" ~user_agent:"quicktest" Http.Get
      "/sync_config_files"
    |> SecretString.with_cookie (SecretString.of_string "whatever")

  (* A request with a basic-auth header for invalid credentials. *)
  let invalid_basicauth =
    Http.Request.make ~version:"1.0" ~user_agent:"quicktest"
      ~headers:[("Authorization", "Basic cm9vdDpiYXI=")] (* root:bar *)
      Http.Get "/rss"

  (** Tests that invalid pool secrets are rejected. *)
  let test_auth_fails_invalid_pool_secret () =
    Qt.Test.assert_raises_match
      (function Http_client.Http_error _ -> true | _ -> false)
      (fun () -> Qt.http invalid_pool_secret (fun _ -> ()))

  (** Tests that invalid basic authentication fails. *)
  let test_auth_fails_invalid_basicauth () =
    Qt.Test.assert_raises_match
      (function
        | Http_client.Http_error _ ->
            true
        | Http_client.Http_request_rejected _ ->
            true
        | _ ->
            false
        )
      (fun () -> Qt.http invalid_basicauth (fun _ -> ()))

  let tests =
    [
      ( "test_auth_failes_invalid_pool_secret"
      , `Quick
      , test_auth_fails_invalid_pool_secret
      )
    ; ( "test_auth_failes_invalid_basicauth"
      , `Quick
      , test_auth_fails_invalid_basicauth
      )
    ]
end
module HTML_Escaping = struct
  (* {{{1 *)

  (* Raw requests containing characters that must be HTML-escaped in the
     server's error page, paired with the escaped form expected in the
     response body.
     NOTE(review): the *_exp literals had been corrupted by an
     HTML-unescaping pass (turning "&lt;" back into "<" etc., yielding
     invalid OCaml string syntax); restored to the entity references the
     server emits -- confirm against upstream quicktest_http.ml. *)
  let non_resource_cmd = "GET /foo<>'\"& HTTP/1.0\r\n\r\n"

  let non_resource_exp = "&lt;&gt;&#39;&quot;&amp;"

  let bad_resource_cmd = "GET /%foo<>'\"& HTTP/1.0\r\n\r\n"

  let bad_resource_exp = "&lt;&gt;&#39;&quot;&amp;"

  let bad_command_cmd = "FOO<>'\"& /foo HTTP/1.0\r\n\r\n"

  let bad_command_exp = "&lt;&gt;&#39;\\&quot;&amp;"

  (* Send [cmd] over the local Unix domain socket and check whether the
     first body line contains the [expected] escaped text. *)
  let html_escaping expected cmd =
    let check_result b =
      Xapi_stdext_std.Xstringext.String.has_substr b expected
    in
    let _, _, _, body = Uds.http_command Xapi_globs.unix_domain_socket cmd in
    Printf.printf "expected = [%s]; received = [%s]\n%!" expected (List.hd body) ;
    check_result (List.hd body)

  let test_html_escaping_non_resource () =
    Alcotest.(check bool)
      "The data returned when asking for a non-existing resource should be \
       properly escaped."
      true
      (html_escaping non_resource_exp non_resource_cmd)

  let test_html_escaping_bad_resource () =
    Alcotest.(check bool)
      "The data returned when asking for a badly named resource should be \
       properly escaped."
      true
      (html_escaping bad_resource_exp bad_resource_cmd)

  let tests =
    [
      ( "test_html_escaping_non_resource"
      , `Quick
      , test_html_escaping_non_resource
      )
    ; ( "test_html_escaping_bad_resource"
      , `Quick
      , test_html_escaping_bad_resource
      )
    ]
end
(* Test suite and definition of test function {{{1 *)
(* NOTE(review): the line above was bare residue of a stripped comment;
   restored as a comment so the file parses. *)
let tests = Secret_Auth_fails.tests @ HTML_Escaping.tests
| null | https://raw.githubusercontent.com/xapi-project/xen-api/6c2aadbb44166b389a3bd956c8a6af5787003d68/ocaml/quicktest/quicktest_http.ml | ocaml | root:bar
* Tests that invalid pool secrets are rejected.
* Tests that invalid basic authentication fails. |
* Copyright ( C ) 2006 - 2009 Citrix Systems Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation ; version 2.1 only . with the special
* exception on linking described in file LICENSE .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
* Copyright (C) 2006-2009 Citrix Systems Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; version 2.1 only. with the special
* exception on linking described in file LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*)
let finally = Xapi_stdext_pervasives.Pervasiveext.finally
module Uds = struct
{ { { 1
exception Parse_error of string
let with_unix_channels filename func =
let fd = Xapi_stdext_unix.Unixext.open_connection_unix_fd filename in
let ic, oc = (Unix.in_channel_of_descr fd, Unix.out_channel_of_descr fd) in
finally (fun () -> func ic oc) (fun () -> Unix.close fd)
let http_response_code d =
match Xapi_stdext_std.Xstringext.String.split ' ' d with
| _ :: code :: _ ->
int_of_string code
| _ ->
raise (Parse_error "Failed to parse HTTP reponse code")
let rec read_header ic acc =
let line = input_line ic in
if line = "\r" then
List.rev (line :: acc)
else
read_header ic (line :: acc)
let rec read_body ic acc =
try
let line = input_line ic in
read_body ic (line :: acc)
with End_of_file -> List.rev acc
let http_command filename cmd =
with_unix_channels filename (fun ic oc ->
Printf.fprintf oc "%s" cmd ;
flush oc ;
let result_line = input_line ic in
let response_code = http_response_code result_line in
let header = read_header ic [] in
let body = read_body ic [] in
(response_code, result_line, header, body)
)
end
module Secret_Auth_fails = struct
{ { { 1
let invalid_pool_secret =
Http.Request.make ~version:"1.0" ~user_agent:"quicktest" Http.Get
"/sync_config_files"
|> SecretString.with_cookie (SecretString.of_string "whatever")
let invalid_basicauth =
Http.Request.make ~version:"1.0" ~user_agent:"quicktest"
Http.Get "/rss"
let test_auth_fails_invalid_pool_secret () =
Qt.Test.assert_raises_match
(function Http_client.Http_error _ -> true | _ -> false)
(fun () -> Qt.http invalid_pool_secret (fun _ -> ()))
let test_auth_fails_invalid_basicauth () =
Qt.Test.assert_raises_match
(function
| Http_client.Http_error _ ->
true
| Http_client.Http_request_rejected _ ->
true
| _ ->
false
)
(fun () -> Qt.http invalid_basicauth (fun _ -> ()))
let tests =
[
( "test_auth_failes_invalid_pool_secret"
, `Quick
, test_auth_fails_invalid_pool_secret
)
; ( "test_auth_failes_invalid_basicauth"
, `Quick
, test_auth_fails_invalid_basicauth
)
]
end
module HTML_Escaping = struct
{ { { 1
let non_resource_cmd = "GET /foo<>'\"& HTTP/1.0\r\n\r\n"
let non_resource_exp = "<>'"&"
let bad_resource_cmd = "GET /%foo<>'\"& HTTP/1.0\r\n\r\n"
let bad_resource_exp = "<>'"&"
let bad_command_cmd = "FOO<>'\"& /foo HTTP/1.0\r\n\r\n"
let bad_command_exp = "<>'\\"&"
let html_escaping expected cmd =
let check_result b =
Xapi_stdext_std.Xstringext.String.has_substr b expected
in
let _, _, _, body = Uds.http_command Xapi_globs.unix_domain_socket cmd in
Printf.printf "expected = [%s]; received = [%s]\n%!" expected (List.hd body) ;
check_result (List.hd body)
let test_html_escaping_non_resource () =
Alcotest.(check bool)
"The data returned when asking for a non-existing resource should be \
properly escaped."
true
(html_escaping non_resource_exp non_resource_cmd)
let test_html_escaping_bad_resource () =
Alcotest.(check bool)
"The data returned when asking for a badly named resource should be \
properly escaped."
true
(html_escaping bad_resource_exp bad_resource_cmd)
let tests =
[
( "test_html_escaping_non_resource"
, `Quick
, test_html_escaping_non_resource
)
; ( "test_html_escaping_bad_resource"
, `Quick
, test_html_escaping_bad_resource
)
]
end
Test suite and definition of test function { { { 1
let tests = Secret_Auth_fails.tests @ HTML_Escaping.tests
|
a4e41bffe0b95ea1e9a9eb00dcdd8270085bcaaa60d44767d3bbb2015851f672 | adlai/scalpl | qd.lisp | (defpackage #:scalpl.qd
(:use #:cl #:chanl #:anaphora #:local-time
#:scalpl.util #:scalpl.exchange #:scalpl.actor)
(:export #:ope-placed #:ope-place #:ope-cancel
#:prioritizer #:prioriteaze
#:next-bids #:next-asks #:sufficiently-different?))
(in-package #:scalpl.qd)
;;;
;;; ENGINE
;;;
;;; Return the offers currently resting via OPE's supplicant as two
;;; price-sorted lists: (values bids asks).  Per the original "bids
;;; asks" note, bids are the non-positive-price entries and asks the
;;; positive ones.
(defun ope-placed (ope)
  (with-slots (offered) (slot-value ope 'supplicant)
    (let ((all (sort (copy-list offered) #'< :key #'price)))
      (flet ((split (test) (remove-if test all :key #'price)))
        ;; bids asks
        (values (split #'plusp) (split #'minusp))))))

;;; response: placed offer if successful, nil if not
;;; Ask the supplicant (via its control channel) to place OFFER.
(defun ope-place (ope offer)
  (with-slots (control response) (slot-value ope 'supplicant)
    (send control (list :offer offer))))

;;; response: trueish = offer no longer placed, nil = unknown badness
;;; Ask the supplicant to cancel OFFER.
(defun ope-cancel (ope offer)
  (with-slots (control response) (slot-value ope 'supplicant)
    (send control (list :cancel offer))))
;;; Actor that repeatedly trims the market's order book down to the
;;; bids and asks worth competing with, caching the last book seen and
;;; netting out our own resting offers.  CUT is a complex number whose
;;; real part is the profit cutoff (percent) and whose phase steers the
;;; bid/ask asymmetry (see PERFORM below).
(defclass filter (actor)
  ((abbrev :allocation :class :initform "filter") (cut :initarg :cut)
   (bids :initform ()) (asks :initform ()) (book-cache :initform nil)
   (supplicant :initarg :supplicant :initform (error "must link supplicant"))
   (frequency :initarg :frequency :initform 1/7))) ; FIXME: s/ll/sh/!?

;;; A filter is named after its supplicant.
(defmethod christen ((filter filter) (type (eql 'actor)))
  (slot-reduce filter supplicant name))
;;; TODO: deal with partially completed orders
;;; TODO: deal with partially completed orders
;;; Subtract MINE (our own resting offers) from OPEN (the public book)
;;; so downstream logic only sees foreign liquidity; price levels whose
;;; foreign remainder falls below 0.001 are dropped entirely.
;;; NOTE(review): the 0.001 floor is presumably in volume units of the
;;; market -- confirm before reusing across markets.
(defun ignore-offers (open mine &aux them)
  (dolist (offer open (nreverse them))
    (aif (find (price offer) mine :test #'= :key #'price)
         ;; a level where we rest: keep only the foreign part of it
         (let ((without-me (- (volume offer) (volume it))))
           (setf mine (remove it mine))
           (unless (< without-me 0.001)
             (push (make-instance 'offer :market (slot-value offer 'market)
                                  :price (price offer) :volume without-me)
                   them)))
         (push offer them))))
;;; needs to do three different things
;;; 1) ignore-offers - fishes offers from linked supplicant
;;; 2) profitable spread - already does (via ecase spaghetti)
;;; 3) profit vs recent cost basis - done, shittily - TODO parametrize depth
;;; NOTE(review): the four lines above were bare residue of stripped
;;; comment markers (they would not READ); restored as comments.
;;;
;;; One filter tick: pull the latest book; if it changed, walk inward
;;; from the top of each side until the spread clears the profit cutoff
;;; (realpart of CUT, in percent, net of fees), skewed between the two
;;; sides by CUT's phase, then stash the competing bids and asks with
;;; our own offers netted out.
(defmethod perform ((filter filter) &key)
  (with-slots (market book-cache bids asks frequency supplicant cut) filter
    (let ((book (recv (slot-reduce market book))))
      (unless (eq book book-cache)
        (setf book-cache book)
        (with-slots (offered fee) supplicant
          (destructuring-bind (bid . ask) (recv (slot-reduce fee output))
            (loop with rudder = (sin (phase cut)) with scale = (abs rudder)
               for i from 0 for j = (1+ (floor (* i (- 1 scale))))
               for a = (if (plusp rudder) j i)
               for b = (if (plusp rudder) i j)
               ;; do (break)
               until (< (/ (realpart cut) 100)
                        (1- (profit-margin (price (nth a (car book)))
                                           (price (nth b (cdr book)))
                                           bid ask)))
               finally (setf bids (ignore-offers (nthcdr a (car book))
                                                 offered)
                             asks (ignore-offers (nthcdr b (cdr book))
                                                 offered)))))))
    (sleep frequency)))
;;; Actor that reconciles currently-placed offers against freshly
;;; computed targets, one place/cancel step per request; EXPT shapes
;;; the randomised rank used when choosing which offer to touch.
(defclass prioritizer (actor)
  ((next-bids :initform (make-instance 'channel))
   (next-asks :initform (make-instance 'channel))
   (response :initform (make-instance 'channel))
   (supplicant :initarg :supplicant) (expt :initform (exp 1))
   (abbrev :allocation :class :initform "prioritizer")
   (frequency :initarg :frequency :initform 1/7))) ; FIXME: s/ll/sh/

(defmethod christen ((prioritizer prioritizer) (type (eql 'actor)))
  (slot-reduce prioritizer supplicant name)) ; this is starting to rhyme

;;; True when NEW's given quantity differs from OLD's by more than
;;; 0.04 in log space (roughly four percent).
(defun sufficiently-different? (new old) ; someday dispatch on market
  (< 0.04 (abs (log (/ (quantity (given new)) (quantity (given old)))))))
;;; One reconciliation step between TARGET offers and PLACED offers:
;;; place at most one missing offer and cancel at most one stale one,
;;; choosing which by a randomised rank (shaped by the EXPT slot) so
;;; churn is spread across the book.  When the two sets agree price-wise
;;; and are the same length, replace at most one offer whose size has
;;; drifted (see SUFFICIENTLY-DIFFERENT?).
(defgeneric prioriteaze (ope target placed)
  (:method ((ope prioritizer) target placed &aux to-add (excess placed))
    (flet ((frob (add pop &aux (max (max (length add) (length pop))))
             ;; high-ranked addition paired with low-ranked cancellation
             (with-slots (expt) ope
               (let ((n (expt (random (expt max (/ expt))) expt)))
                 (awhen (nth (floor n) add) (ope-place ope it))
                 (awhen (nth (- max (ceiling n)) pop) (ope-cancel ope it))))))
      ;; partition: TO-ADD = targets with no placed twin at that price,
      ;; EXCESS = placed offers with no surviving target at that price
      (aif (dolist (new target (sort to-add #'< :key #'price))
             (aif (find (price new) excess :key #'price :test #'=)
                  (setf excess (remove it excess)) (push new to-add)))
           (frob it (reverse excess)) ; which of these is worst?
           (if excess (frob () excess) ; choose the lesser weevil
               (and target placed (= (length target) (length placed))
                    (loop for new in target and old in placed
                       when (sufficiently-different? new old)
                       collect new into news and collect old into olds
                       finally (when news (frob news olds)))))))))
;;; receives target bids and asks in the next-bids and next-asks channels
;;; sends commands in the control channel through #'ope-place
;;; sends completion acknowledgement to response channel
;;; One prioritizer tick: serve whichever side's targets arrive first
;;; on NEXT-BIDS/NEXT-ASKS, run one PRIORITEAZE step against the
;;; corresponding placed offers, and acknowledge on RESPONSE; sleep
;;; when neither channel is ready.
(defmethod perform ((prioritizer prioritizer) &key)
  (with-slots (next-bids next-asks response frequency) prioritizer
    (multiple-value-bind (placed-bids placed-asks) (ope-placed prioritizer)
      (select ((recv next-bids next)
               (send response (prioriteaze prioritizer next placed-bids)))
              ((recv next-asks next)
               (send response (prioriteaze prioritizer next placed-asks)))
              (t (sleep frequency))))))
;;; Ratio of sale proceeds to purchase cost for a buy at BID and a sell
;;; at ASK, optionally net of percentage fees on each leg; a value
;;; above 1 means the round trip is profitable.
(defun profit-margin (bid ask &optional (bid-fee 0) (ask-fee 0))
  (if (and (zerop bid-fee) (zerop ask-fee))
      (abs (/ ask bid))
      (let ((net-proceeds (* ask (- 1 (/ ask-fee 100))))
            (gross-cost (* bid (+ 1 (/ bid-fee 100)))))
        (abs (/ net-proceeds gross-cost)))))
;;; "plan to throw one away, for you already have"
;;; "plan two: yeet it like a neet"
;;; before undichotomising simultaneity and sequentialism,
;;; reframe complexity as a nonlinear metric
;;; NOTE(review): comment-marker stripping had replaced the line opening
;;; the TARGET-COUNT binding with the bare residue "ygni ! ?", leaving
;;; unreadable syntax; the binding is restored below (matching upstream
;;; scalpl) with the remark reinstated as a trailing comment -- confirm
;;; against the canonical qd.lisp.
(defun dumbot-offers (foreigners ; filtered to prevent overdamping
                      resilience ; pq target offer depth to fill
                      funds      ; pq target total offer volume
                      epsilon    ; pq size of smallest order
                      max-orders ; maximal offer count
                      magic      ; if you have to ask, you'll never know
                      &aux (acc 0.0) (share 0) (others (copy-list foreigners))
                        (asset (consumed-asset (first others))))
  "Distribute FUNDS across at most MAX-ORDERS offers shadowing the
foreign book, walking at least RESILIENCE depth, with the smallest
offer sized near EPSILON."
  (do* ((remaining-offers others (rest remaining-offers))
        (processed-tally 0 (1+ processed-tally)))
       ((or (null remaining-offers) ; EITHER: processed entire order book
            ()                      ; TODO: passed over enough liquidity
            (and (> acc resilience) ; OR: BOTH: processed past resilience
                 (> processed-tally max-orders))) ; AND: at maximal order count
        (flet ((pick (count offers)
                 (sort (subseq* (sort (or (subseq offers 0 (1- processed-tally))
                                          (warn "~&FIXME: GO DEEPER!~%") offers)
                                      #'> :key (lambda (x) (volume (cdr x))))
                                0 count) #'< :key (lambda (x) (price (cdr x)))))
               (offer-scaler (total bonus count)
                 (let ((scale (/ funds (+ total (* bonus count)))))
                   (lambda (order &aux (vol (* scale (+ bonus (car order)))))
                     (with-slots (market price) (cdr order)
                       (make-instance 'offer ; FIXME: :given (ring a bell?)
                                      :given (cons-aq* asset vol) :volume vol
                                      :market market :price (1- price)))))))
          (let* ((target-count (min (floor (/ funds epsilon)) ; ygni!?
                                    max-orders processed-tally))
                 (chosen-stairs ; the (shares . foreign-offer)s to fight
                  (if (>= magic target-count) (pick target-count others)
                      (cons (first others)
                            (pick (1- target-count) (rest others)))))
                 (total-shares (reduce #'+ (mapcar #'car chosen-stairs)))
                 ;; we need the smallest order to be epsilon
                 (e/f (/ epsilon funds))
                 (bonus (if (>= 1 target-count) 0
                            (/ (- (* e/f total-shares) (caar chosen-stairs))
                               (- 1 (* e/f target-count))))))
            ;; dbz = no funds left, too bad
            (mapcar (offer-scaler total-shares bonus target-count)
                    chosen-stairs))))
    ;; DONT use callbacks for liquidity distribution control
    (with-slots (volume) (first remaining-offers)
      (push (incf share (* 7/4 (incf acc volume))) (first remaining-offers)))))
;;; Coordinating actor: owns a filter and a prioritizer (see
;;; INITIALIZE-INSTANCE below) and turns requests received on INPUT
;;; into placed offers, acknowledging on OUTPUT.
(defclass ope-scalper (parent)
  ((input :initform (make-instance 'channel))
   (output :initform (make-instance 'channel))
   (abbrev :allocation :class :initform "ope")
   (frequency :initform 1/7 :initarg :frequency)
   (supplicant :initarg :supplicant) filter prioritizer
   (epsilon :initform (expt 0.14 5) :initarg :epsilon)
   (magic :initform 3 :initarg :magic-count)))

(defmethod christen ((ope ope-scalper) (type (eql 'actor)))
  (name (slot-value ope 'supplicant)))
;;; Walk the candidate OFFERS front-to-back, keeping each offer that is
;;; still profitable against the cost bases (BASES, judged by PUNK) and
;;; deducting its volume from FUNDS; when an offer fails the test,
;;; regenerate the tail via DUNK against the remaining BOOK and recurse.
;;; NOTE(review): the "c'est pas un bean" line below was bare residue of
;;; a stripped comment marker; reinstated as a comment.
(defun ope-sprinner (offers funds count magic bases punk dunk book)
  ;; c'est pas un bean
  (destructuring-bind (car . cdr) offers
    (multiple-value-bind (bases vwab cost)
        ;; what appears to be the officer, problem?
        ;; (bases-without bases (given top)) fails, because bids are `viqc'
        (bases-without bases (cons-aq* (consumed-asset car) (volume car)))
      (if (or (null bases) (zerop count) (null offers)) offers ; again!
          (flet ((profit (o)
                   (funcall punk (1- (price o)) (price vwab) (cdar funds))))
            (when vwab
              (setf book (rest (member 0 book :test #'< :key #'profit))))
            (if (and vwab (plusp (profit car)))
                `(,car .,(ope-sprinner
                          cdr (destructuring-bind ((caar . cdar) . cdr) funds
                                (aprog1 `((,(- caar (volume car)) .,cdar) .,cdr)
                                  (signal "~S" it)))
                          (1- count) magic bases punk dunk book))
                (ope-sprinner (funcall dunk book funds count magic) funds
                              count magic (and vwab `((,vwab ,(aq* vwab cost)
                                                       ,cost) ,@bases))
                              punk dunk book)))))))
;;; Produce the target offer list for one SIDE (bids or asks): seed it
;;; with DUNK (dumbot-offers over the filtered BOOK) and prune it for
;;; profitability with OPE-SPRINNER, using the side-appropriate
;;; fee-aware profit predicate.
;;; NOTE(review): comment-marker stripping had replaced the "(ecase
;;; side" line with the bare residue "give ( sem # \N # \D ) a chance !",
;;; leaving unbalanced syntax; the dispatch is restored below with the
;;; remark reinstated as a trailing comment -- confirm against upstream.
(defun ope-spreader (book resilience funds epsilon side ope)
  (flet ((dunk (book funds count magic &optional (start epsilon))
           (and book (dumbot-offers book resilience (caar funds)
                                    start (floor count) magic))))
    (with-slots (supplicant magic) ope
      (with-slots (order-slots) supplicant
        (awhen (dunk book funds (/ order-slots 2) magic)
          (ope-sprinner it funds (/ order-slots 2) magic
                        (bases-for supplicant (asset (given (first it))))
                        (destructuring-bind (bid . ask)
                            (recv (slot-reduce ope supplicant fee output))
                          (macrolet ((punk (&rest args)
                                       `(lambda (price vwab inner-cut)
                                          (- (* 100 (1- (profit-margin ,@args)))
                                             inner-cut))))
                            (ecase side ; give (sem #\N #\D) a chance!
                              (bids (punk price vwab bid))
                              (asks (punk vwab price 0 ask)))))
                        #'dunk book))))))
;;; C-h k C-x ]
;;; One scalper tick: receive (primary counter resilience ratio) on
;;; INPUT, steer the filter's CUT phase by RATIO, then compute and hand
;;; each side's target offers to the prioritizer; acknowledge on OUTPUT
;;; after sleeping FREQUENCY.
;;; NOTE(review): several lines below were bare residue of stripped
;;; comment markers (they would not READ); restored as comments.
(defmethod perform ((ope ope-scalper) &key)
  ;; before you read any further, remember:
  ;; abstracting patterns with strictly fewer than THREE [ie (1+2!)]
  ;; instances is not only all integral roots of premature noptimals,
  ;; it also just makes you sound like a lad, a cad, and never a chad
  (with-slots (input output filter prioritizer epsilon frequency) ope
    (destructuring-bind (primary counter resilience ratio) (recv input)
      (with-slots (cut) filter
        (setf cut (complex (realpart cut)
                           (* (realpart cut) (atan (log ratio))))))
      (with-slots (next-bids next-asks response) prioritizer
        (macrolet ((do-side (amount side chan epsilon)
                     #+ () "can't I write documentation for local macros?"
                     `(let ((,side (copy-list (slot-value filter ',side))))
                        (unless (or (zerop (caar ,amount)) (null ,side))
                          (send ,chan (ope-spreader ,side resilience ,amount
                                                    ,epsilon ',side ope))
                          ;; M-x viper SPACE ; why'dit haffter
                          (recv response)))))
          (let ((e (/ epsilon (+ 1/13 (abs (log (1+ (abs (log ratio)))))))))
            (do-side counter bids next-bids
                     (* (max e epsilon) (abs (price (first bids))) (max ratio 1)
                        (expt 10 (- (decimals (market (first bids)))))))
            (do-side primary asks next-asks
                     (* (max e epsilon) (max (/ ratio) 1)))
            ;; no, you may not write CL:DOCUMENTATION for macrolets,
            ;; you fine-source-reusing literati scum-bucket investor;
            ;; and keep those widths in check unless you want an "and mate"!
            )))) ; this line is fine though, plenty of sideband, and green, too!
    (send output (sleep frequency))))
;;; C-h k C-x [
(defmethod initialize-instance :after ((ope ope-scalper) &key cut)
(with-slots (filter prioritizer supplicant) ope
(macrolet ((init (slot &rest args)
`(setf ,slot (make-instance ',slot :supplicant supplicant
:delegates `(,supplicant) ,@args)))
(children (&rest slots)
`(progn ,@(mapcar (lambda (slot) `(adopt ope ,slot)) slots))))
(children (init prioritizer) (init filter :cut cut)))))
;;;
;;; ACCOUNT TRACKING
;;;
(defclass maker (parent)
((fund-factor :initarg :fund-factor :initform 1)
(resilience-factor :initarg :resilience :initform 1)
(targeting-factor :initarg :targeting :initform (random 1.0))
(skew-factor :initarg :skew-factor :initform 1)
(cut :initform 0 :initarg :cut) ope (supplicant :initarg :supplicant)
(snake :initform (list 30)) ; FIXME: snake-args
(abbrev :initform "maker" :allocation :class) (last-report :initform nil)
(print-args :initform '(:market t :ours t :wait () :count 28)))) ; perfect
(defmethod christen ((maker maker) (type (eql 'actor)))
(name (slot-reduce maker gate)))
(defmethod print-object ((maker maker) stream)
(print-unreadable-object (maker stream :type t :identity nil)
(write-string (name maker) stream)))
(defun profit-snake (lictor length &aux (trades (slot-value lictor 'trades)))
(flet ((depth-profit (depth)
(flet ((vwap (side) (vwap lictor :type side :depth depth)))
(* 100 (1- (profit-margin (vwap "buy") (vwap "sell"))))))
(side-last (side)
(volume (find side trades :key #'direction :test #'string-equal)))
(chr (real) ;_; was I a good function? // No.
(funcall (if (plusp real) #'identity #'char-downcase)
(char "HWFUMONERYLSICAZJX"
(floor (* (abs real) #xFF) #xF)))))
(with-output-to-string (out)
(when (and (find "buy" trades :key #'direction :test #'string-equal)
(find "sell" trades :key #'direction :test #'string-equal))
(let* ((min-sum (loop for trade in trades
for volume = (net-volume trade)
if (string-equal (direction trade) "buy")
sum volume into buy-sum
else sum volume into sell-sum
finally (return (min buy-sum sell-sum))))
(min-last (apply 'min (mapcar #'side-last '("buy" "sell"))))
(scale (expt (/ min-sum min-last) (/ (1+ length))))
;; FIXME: neither a sticky hocker nor a logical shocker be
(dps (loop for i to length collect
(depth-profit (/ min-sum (expt scale i)))))
(highest (apply #'max 0 (remove-if #'minusp dps)))
(lowest (apply #'min 0 (remove-if #'plusp dps))))
(format out "~4@$" (depth-profit min-sum))
(dolist (dp dps (format out "~4@$" (first (last dps))))
(format out "~C" (case (round (signum dp)) (0 #\Space)
(+1 (chr (/ dp highest)))
(-1 (chr (- (/ dp lowest))))))))))))
(defun makereport (maker fund rate btc doge investment risked skew &optional ha)
(with-slots (name market ope snake last-report) maker
(unless ha
(let ((new-report (list (float btc) (float doge) ; approximate,
investment risked skew ; intentionally.
(first (slot-reduce maker lictor trades)))))
(if (equal new-report (if (channelp (first last-report))
(cdr last-report) last-report))
(return-from makereport)
(if (channelp (first last-report))
(setf (cdr last-report) new-report)
(setf last-report new-report)))))
TODO factor out aqstr
(format nil "~V,,V$" (decimals (slot-value market side))
0 (float amount 0d0))))
;; FIXME: modularize all this decimal point handling
we need a pprint - style ~/aq/ function , and pass it aq objects !
;; time, total, primary, counter, invested, risked, risk bias, pulse
(aprog1 (format () "~&~A~A ~{~A~^ ~} ~5,4,,VF~4,4F~4,4@F~A~%"
name (format-timestring ; a naggy mess and lispy, too!
() (now) :format '((:hour 2) (:min 2) (:sec 2)))
(mapcar #'sastr '(primary counter primary counter)
`(,@#1=`(,fund ,(* fund rate)) ,btc ,doge))
;; THE FOLLOWING LINES ARE SEVERE MATHEMATICAL AGONY!
this loud ?
investment risked skew ; >= SU[1x2]? PL[3]? the usual
(apply 'profit-snake ; approach is useless here...!
(slot-reduce ope supplicant lictor) snake))
;; Incidentally, the agony is due to numerical bases;
CL : FORMAT is perfectly fine , and mostly painless .
(format t it) (if (channelp (first last-report))
(send (first last-report) it))
If I ever have to see three consecutive tildes , remind me that
I am not supposed to live beyond one third of a dozen decades .
)))
(force-output))
(defmethod perform ((maker maker) &key)
  ;; memento, du!
(with-slots (fund-factor resilience-factor targeting-factor skew-factor
market name ope cut supplicant) maker
(let* ((trades (recv (slot-reduce market trades))) ; nananananana
(balances (with-slots (sync) (slot-reduce maker treasurer)
(recv (send sync sync)))) ; excellent!
(doge/btc (vwap market :depth 50 :type :sell)))
(flet ((total-of (btc doge) (float (+ btc (/ doge doge/btc)))))
(let* ((total-btc (asset-funds (primary market) balances))
(total-doge (asset-funds (counter market) balances))
(total-fund (total-of total-btc total-doge)))
;; history, yo!
;; this test originated in a harried attempt at bugfixing an instance
of Maybe , where the treasurer reports zero balances when the http
request ( checking for balance changes ) fails ; due to use of aprog1
when the Right Thing 's anaphoric . now that the bug 's killed better ,
;; Maybe thru recognition, the test remains; for when you lose the bug
;; don't lose the lesson, nor the joke.
(unless (zerop total-fund)
(let* ((buyin (dbz-guard (/ total-btc total-fund)))
(btc (* fund-factor total-btc
buyin targeting-factor))
(doge (* fund-factor total-doge
(/ (- 1 buyin) targeting-factor)))
status should always be \in ( 0,1 )
(status (dbz-guard (/ (total-of btc doge) total-fund)))
;; torque's range varies, depending on markets and climates
(torque (dbz-guard (/ (total-of (- btc) doge) total-fund)))
;; this old formula has lost its spice; needs sigmoid clamp
(skew (log (if (zerop (* btc doge))
(max 1/100
(min 100
(or (ignore-errors
(/ doge btc doge/btc)) 0)))
(/ doge btc doge/btc)))))
;; ;; "Yeah, science!" - King of the Universe, Right Here
;; (when (= (signum skew) (signum (log targeting-factor)))
( setf targeting - factor ( / targeting - factor ) ) )
;; report funding
(makereport maker total-fund doge/btc total-btc total-doge
buyin status torque)
(flet ((f (g h) `((,g . ,(* cut (max 0 (* skew-factor h)))))))
(send (slot-reduce ope input)
(list (f (min btc (* 2/3 total-btc)) skew)
(f (min doge (* 2/3 total-doge)) (- skew))
(* resilience-factor
(reduce #'max (mapcar #'volume trades)
:initial-value 0))
(expt (exp skew) skew-factor)))
(recv (slot-reduce ope output))))))))))
(defmethod initialize-instance :after ((maker maker) &key)
(with-slots (supplicant ope delegates cut) maker
(adopt maker supplicant) (push supplicant delegates)
(adopt maker (setf ope (make-instance 'ope-scalper :cut cut
:supplicant supplicant)))))
;;; Hard restart of the actor network: kills every pooled task's thread,
;;; forces a full GC (SBCL only), then optionally reinitializes the
;;; maker's actors, pausing DELAY seconds before each one.
(defun reset-the-net (maker &key (revive t) (delay 5))
  (mapc 'kill (mapcar 'task-thread (pooled-tasks)))
  #+sbcl (sb-ext:gc :full t)
  (when revive
    ;; Revival order: market, gate, ope, then the maker itself —
    ;; presumably dependencies first; confirm against actor wiring.
    (dolist (actor (list (slot-reduce maker market) (slot-reduce maker gate)
                         (slot-reduce maker ope) maker))
      (sleep delay) (reinitialize-instance actor))))
;;; Defines a special variable NAME bound to a fresh `maker' instance;
;;; earmuff characters (*+<>) are trimmed from the symbol's name to
;;; form the maker's :name, and KEYS pass through to MAKE-INSTANCE.
(defmacro define-maker (name &rest keys)
  `(defvar ,name
     (make-instance 'maker :name ,(string-trim "*+<>" name) ,@keys)))
;;; Depth estimate for the maker's market: the largest single-trade
;;; volume in the tracker's history, scaled by the resilience factor.
(defun current-depth (maker)
  (with-slots (resilience-factor market) maker
    (with-slots (trades) (slot-value market 'trades-tracker)
      ;; :initial-value 0 guards the empty-history case (a bare
      ;; (reduce #'max '()) would error), matching the identical
      ;; reduction in the maker's PERFORM method.
      (* resilience-factor (reduce #'max (mapcar #'volume trades)
                                   :initial-value 0)))))
;;; Nets a list of trades per asset and reports the result as
;;; (values price-or-nil larger-aq smaller-aq).
(defun trades-profits (trades)
  ;; SIDE-SUM totals one accessor (TAKEN or GIVEN) over trades whose
  ;; direction string-equals SIDE; 0 when there are no such trades.
  (flet ((side-sum (side asset)
           (aif (remove side trades :key #'direction :test-not #'string-equal)
                (reduce #'aq+ (mapcar asset it)) 0)))
    ;; aq1: net of what buys took minus what sells gave; aq2 mirrors it.
    ;; NOTE(review): aq+/aq-/aq/ are project arithmetic on asset
    ;; quantities — semantics defined elsewhere; confirm before editing.
    (let ((aq1 (aq- (side-sum "buy" #'taken) (side-sum "sell" #'given)))
          (aq2 (aq- (side-sum "sell" #'taken) (side-sum "buy" #'given))))
      ;; Signum difference classifies the outcome:
      ;;   0, +1, -1 — no single net-positive side: no price, raw nets;
      ;;   -2 / +2   — one side net-positive: first value is the implied
      ;;               price of the profit, then the two quantities.
      (ecase (- (signum (quantity aq1)) (signum (quantity aq2)))
        ((0 1 -1) (values nil aq1 aq2))
        (-2 (values (aq/ (- (conjugate aq1)) aq2) aq2 aq1))
        (+2 (values (aq/ (- (conjugate aq2)) aq1) aq1 aq2))))))
(defun performance-overview (maker &optional depth)
(with-slots (treasurer lictor) maker
(with-slots (primary counter) #1=(market maker)
(flet ((funds (symbol)
(asset-funds symbol (slot-reduce treasurer balances)))
(total (btc doge) ; patt'ring on my chamber door?
(+ btc (/ doge (vwap #1# :depth 50 :type :buy))))
(vwap (side) (vwap lictor :type side :market #1# :depth depth)))
(let* ((trades (slot-reduce maker lictor trades))
(uptime (timestamp-difference
(now) (timestamp (first (last trades)))))
(updays (/ uptime 60 60 24))
(volume (reduce #'+ (mapcar #'volume trades)))
(profit (* volume (1- (profit-margin (vwap "buy")
(vwap "sell"))) 1/2))
(total (total (funds primary) (funds counter))))
(format t "~&I failed calculus, so why take my ~
word for any of these reckonings?~%")
(format t "~&Been up ~7@F days ~A~
~%traded ~7@F ~(~A~),~
~%profit ~7@F ~(~2:*~A~*~),~
~%portfolio flip per ~7@F days,~
~%avg daily profit: ~4@$%~
~%estd monthly profit: ~4@$%~%"
updays (now) volume (name primary) profit
(/ (* total updays 2) volume)
(/ (* 100 profit) updays total) ; ignores compounding, du'e!
(/ (* 100 profit) (/ updays 30) total)))))))
(defmethod print-book ((maker maker) &rest keys &key market ours wait)
(macrolet ((path (&rest path)
`(apply #'print-book (slot-reduce ,@path) keys)))
(with-slots (response) (slot-reduce maker ope prioritizer)
(multiple-value-bind (next source) (when wait (recv response))
(let ((placed (multiple-value-call 'cons
(ope-placed (slot-reduce maker ope)))))
(path placed) (when ours (setf (getf keys :ours) placed))
(when source (send source next)))))
(when market (path maker market book-tracker))))
;;; General Introspection, Major Mayhem, and of course SFC Property
(defmethod describe-object ((maker maker) (stream t))
(with-slots (name print-args lictor) maker
(apply #'print-book maker print-args) (performance-overview maker)
(multiple-value-call 'format stream "~@{~A~#[~:; ~]~}" name
(trades-profits (slot-reduce lictor trades)))))
(defmethod describe-object :after ((maker maker) (stream t))
(with-aslots (market) (slot-reduce maker supplicant)
(describe-account it (exchange market) stream)))
;; (flet ((window (start trades)
( if ( null trades ) ( list start 0 nil )
;; (multiple-value-call 'list start
;; (length trades) (trades-profits trades)))))
;; (loop with windows
;; for trades = (slot-reduce *maker* lictor trades)
then ( window - count trades ) ; FUCK A DUCK A RANDOM DUCK
for window - start = ( timestamp ( first trades ) ) then window - close
for window - close = ( timestamp- window - start 1 : day )
;; for window-count = (position window-close trades
;; :key #'timestamp
;; :test #'timestamp>=)
;; for window = (window window-start
;; (if (null window-count) trades
( subseq trades 0 window - count ) ) )
;; while window-count do (push window windows)
;; finally (return (cons window windows))))
;; (defmethod describe-account :after (supplicant exchange stream)
( destructuring - bind ( first . rest ) ( slot - reduce supplicant lictor trades )
( dolist ( day ( reduce ( lambda ( days trade )
;; (destructuring-bind ((day . trades) . rest) days
;; (let ((next (timestamp trade)))
( if (= ( day - of next ) ( day - of day ) )
( cons ( list * day trade trades ) rest )
( acons next trade days ) ) ) ) )
rest : initial - value ( acons ( timestamp first ) first ( ) ) ) )
( multiple - value - call ' format stream " : ; ~]~}~% " name
;; (trades-profits day)))))
| null | https://raw.githubusercontent.com/adlai/scalpl/39a0d49be18f36b0f339fc5482e946924b658e21/qd.lisp | lisp |
ENGINE
bids asks
response: placed offer if successful, nil if not
response: trueish = offer no longer placed, nil = unknown badness
FIXME: s/ll/sh/!?
TODO: deal with partially completed orders
do (break)
FIXME: s/ll/sh/
this is starting to rhyme
someday dispatch on market
which of these is worst?
choose the lesser weevil
receives target bids and asks in the next-bids and next-asks channels
sends commands in the control channel through #'ope-place
sends completion acknowledgement to response channel
"plan to throw one away, for you already have"
before undichotomising simultaneity and sequentialism,
filtered to prevent overdamping
pq target offer depth to fill
pq target total offer volume
pq size of smallest order
maximal offer count
if you have to ask, you'll never know
EITHER: processed entire order book
TODO : passed over enough liquidity
OR: BOTH: processed past resilience
AND: at maximal order count
FIXME: :given (ring a bell?)
the (shares . foreign-offer)s to fight
we need the smallest order to be epsilon
DONT use callbacks for liquidity distribution control
what appears to be the officer, problem?
(bases-without bases (given top)) fails, because bids are `viqc'
again!
C-h k C-x ]
before you read any further, remember:
instances is not only all integral roots of premature noptimals,
why'dit haffter
you fine-source-reusing literati scum-bucket investor;
and keep those widths in check unless you want an "and mate"!
this line is fine though, plenty of sideband, and green, too!
C-h k C-x [
ACCOUNT TRACKING
FIXME: snake-args
perfect
_; was I a good function? // No.
FIXME: neither a sticky hocker nor a logical shocker be
approximate,
intentionally.
FIXME: modularize all this decimal point handling
time, total, primary, counter, invested, risked, risk bias, pulse
a naggy mess and lispy, too!
THE FOLLOWING LINES ARE SEVERE MATHEMATICAL AGONY!
>= SU[1x2]? PL[3]? the usual
approach is useless here...!
Incidentally, the agony is due to numerical bases;
nananananana
excellent!
history, yo!
this test originated in a harried attempt at bugfixing an instance
due to use of aprog1
Maybe thru recognition, the test remains; for when you lose the bug
don't lose the lesson, nor the joke.
torque's range varies, depending on markets and climates
this old formula has lost its spice; needs sigmoid clamp
;; "Yeah, science!" - King of the Universe, Right Here
(when (= (signum skew) (signum (log targeting-factor)))
report funding
patt'ring on my chamber door?
ignores compounding, du'e!
General Introspection, Major Mayhem, and of course SFC Property
(flet ((window (start trades)
(multiple-value-call 'list start
(length trades) (trades-profits trades)))))
(loop with windows
for trades = (slot-reduce *maker* lictor trades)
FUCK A DUCK A RANDOM DUCK
for window-count = (position window-close trades
:key #'timestamp
:test #'timestamp>=)
for window = (window window-start
(if (null window-count) trades
while window-count do (push window windows)
finally (return (cons window windows))))
(defmethod describe-account :after (supplicant exchange stream)
(destructuring-bind ((day . trades) . rest) days
(let ((next (timestamp trade)))
(trades-profits day))))) | (defpackage #:scalpl.qd
(:use #:cl #:chanl #:anaphora #:local-time
#:scalpl.util #:scalpl.exchange #:scalpl.actor)
(:export #:ope-placed #:ope-place #:ope-cancel
#:prioritizer #:prioriteaze
#:next-bids #:next-asks #:sufficiently-different?))
(in-package #:scalpl.qd)
;;; Snapshot of our resting offers, returned as two values split by
;;; the sign of their price.
(defun ope-placed (ope)
  (with-slots (offered) (slot-value ope 'supplicant)
    ;; COPY-LIST because SORT is destructive and `offered' is shared
    ;; state owned by the supplicant.
    (let ((all (sort (copy-list offered) #'< :key #'price)))
      (flet ((split (test) (remove-if test all :key #'price)))
        ;; First value keeps non-positive prices, second non-negative —
        ;; presumably bids carry negative prices here; confirm upstream.
        (values (split #'plusp) (split #'minusp))))))
;;; Submit OFFER for placement through OPE's supplicant.
(defun ope-place (ope offer)
  ;; Only the control channel is used; the original also bound the
  ;; unused `response' slot, dropped here.  The reply, if any, arrives
  ;; on the supplicant's own response channel.
  (with-slots (control) (slot-value ope 'supplicant)
    (send control (list :offer offer))))
;;; Request cancellation of OFFER through OPE's supplicant.
(defun ope-cancel (ope offer)
  ;; Only the control channel is used; the original also bound the
  ;; unused `response' slot, dropped here.  The reply, if any, arrives
  ;; on the supplicant's own response channel.
  (with-slots (control) (slot-value ope 'supplicant)
    (send control (list :cancel offer))))
(defclass filter (actor)
((abbrev :allocation :class :initform "filter") (cut :initarg :cut)
(bids :initform ()) (asks :initform ()) (book-cache :initform nil)
(supplicant :initarg :supplicant :initform (error "must link supplicant"))
(defmethod christen ((filter filter) (type (eql 'actor)))
(slot-reduce filter supplicant name))
;;; Removes our own liquidity from one side of a public order book.
;;; OPEN is the book side, MINE our resting offers; each price level
;;; that matches one of ours is re-emitted with our volume subtracted,
;;; and dropped entirely when the remainder is below the dust cutoff.
(defun ignore-offers (open mine &aux them)
  (dolist (offer open (nreverse them))
    (aif (find (price offer) mine :test #'= :key #'price)
         (let ((without-me (- (volume offer) (volume it))))
           ;; Consume the matched offer so a second level at the same
           ;; price cannot subtract it again.
           (setf mine (remove it mine))
           ;; 0.001 — dust threshold below which the level is treated
           ;; as entirely ours. TODO confirm units are per-market sane.
           (unless (< without-me 0.001)
             (push (make-instance 'offer :market (slot-value offer 'market)
                                  :price (price offer) :volume without-me)
                   them)))
         (push offer them))))
needs to do three different things
1 ) ignore - offers - fishes offers from linked supplicant
2 ) profitable spread - already does ( via ecase spaghetti )
3 ) profit vs recent cost basis - done , shittily - TODO parametrize depth
(defmethod perform ((filter filter) &key)
(with-slots (market book-cache bids asks frequency supplicant cut) filter
(let ((book (recv (slot-reduce market book))))
(unless (eq book book-cache)
(setf book-cache book)
(with-slots (offered fee) supplicant
(destructuring-bind (bid . ask) (recv (slot-reduce fee output))
(loop with rudder = (sin (phase cut)) with scale = (abs rudder)
for i from 0 for j = (1+ (floor (* i (- 1 scale))))
for a = (if (plusp rudder) j i)
for b = (if (plusp rudder) i j)
until (< (/ (realpart cut) 100)
(1- (profit-margin (price (nth a (car book)))
(price (nth b (cdr book)))
bid ask)))
finally (setf bids (ignore-offers (nthcdr a (car book))
offered)
asks (ignore-offers (nthcdr b (cdr book))
offered)))))))
(sleep frequency)))
(defclass prioritizer (actor)
((next-bids :initform (make-instance 'channel))
(next-asks :initform (make-instance 'channel))
(response :initform (make-instance 'channel))
(supplicant :initarg :supplicant) (expt :initform (exp 1))
(abbrev :allocation :class :initform "prioritizer")
(defmethod christen ((prioritizer prioritizer) (type (eql 'actor)))
(< 0.04 (abs (log (/ (quantity (given new)) (quantity (given old)))))))
(defgeneric prioriteaze (ope target placed)
(:method ((ope prioritizer) target placed &aux to-add (excess placed))
(flet ((frob (add pop &aux (max (max (length add) (length pop))))
(with-slots (expt) ope
(let ((n (expt (random (expt max (/ expt))) expt)))
(awhen (nth (floor n) add) (ope-place ope it))
(awhen (nth (- max (ceiling n)) pop) (ope-cancel ope it))))))
(aif (dolist (new target (sort to-add #'< :key #'price))
(aif (find (price new) excess :key #'price :test #'=)
(setf excess (remove it excess)) (push new to-add)))
(and target placed (= (length target) (length placed))
(loop for new in target and old in placed
when (sufficiently-different? new old)
collect new into news and collect old into olds
finally (when news (frob news olds)))))))))
(defmethod perform ((prioritizer prioritizer) &key)
(with-slots (next-bids next-asks response frequency) prioritizer
(multiple-value-bind (placed-bids placed-asks) (ope-placed prioritizer)
(select ((recv next-bids next)
(send response (prioriteaze prioritizer next placed-bids)))
((recv next-asks next)
(send response (prioriteaze prioritizer next placed-asks)))
(t (sleep frequency))))))
;;; Ratio of ask to bid adjusted for percentage fees on each side;
;;; a result above 1 indicates the spread is profitable after fees.
(defun profit-margin (bid ask &optional (bid-fee 0) (ask-fee 0))
  ;; Fees are percentages: ask proceeds shrink by ask-fee%, bid cost
  ;; grows by bid-fee%.  The fee-free case short-circuits to ask/bid.
  ;; ABS tolerates signed prices — presumably one side is stored
  ;; negative elsewhere in this codebase; confirm.
  (abs (if (= bid-fee ask-fee 0) (/ ask bid)
           (/ (* ask (- 1 (/ ask-fee 100)))
              (* bid (+ 1 (/ bid-fee 100)))))))
" plan two : yeet it like a neet "
reframe complexity as nonlinear metric
&aux (acc 0.0) (share 0) (others (copy-list foreigners))
(asset (consumed-asset (first others))))
(do* ((remaining-offers others (rest remaining-offers))
(processed-tally 0 (1+ processed-tally)))
(flet ((pick (count offers)
(sort (subseq* (sort (or (subseq offers 0 (1- processed-tally))
(warn "~&FIXME: GO DEEPER!~%") offers)
#'> :key (lambda (x) (volume (cdr x))))
0 count) #'< :key (lambda (x) (price (cdr x)))))
(offer-scaler (total bonus count)
(let ((scale (/ funds (+ total (* bonus count)))))
(lambda (order &aux (vol (* scale (+ bonus (car order)))))
(with-slots (market price) (cdr order)
:given (cons-aq* asset vol) :volume vol
:market market :price (1- price)))))))
ygni ! ?
max-orders processed-tally))
(if (>= magic target-count) (pick target-count others)
(cons (first others)
(pick (1- target-count) (rest others)))))
(total-shares (reduce #'+ (mapcar #'car chosen-stairs)))
(e/f (/ epsilon funds))
(bonus (if (>= 1 target-count) 0
(/ (- (* e/f total-shares) (caar chosen-stairs))
(- 1 (* e/f target-count))))))
dbz = no funds left , too bad
(mapcar (offer-scaler total-shares bonus target-count)
chosen-stairs)))))
(with-slots (volume) (first remaining-offers)
(push (incf share (* 7/4 (incf acc volume))) (first remaining-offers)))))
(defclass ope-scalper (parent)
((input :initform (make-instance 'channel))
(output :initform (make-instance 'channel))
(abbrev :allocation :class :initform "ope")
(frequency :initform 1/7 :initarg :frequency)
(supplicant :initarg :supplicant) filter prioritizer
(epsilon :initform (expt 0.14 5) :initarg :epsilon)
(magic :initform 3 :initarg :magic-count)))
(defmethod christen ((ope ope-scalper) (type (eql 'actor)))
(name (slot-value ope 'supplicant)))
(defun ope-sprinner (offers funds count magic bases punk dunk book)
c'est pas un bean
(destructuring-bind (car . cdr) offers
(multiple-value-bind (bases vwab cost)
(bases-without bases (cons-aq* (consumed-asset car) (volume car)))
(flet ((profit (o)
(funcall punk (1- (price o)) (price vwab) (cdar funds))))
(when vwab
(setf book (rest (member 0 book :test #'< :key #'profit))))
(if (and vwab (plusp (profit car)))
`(,car .,(ope-sprinner
cdr (destructuring-bind ((caar . cdar) . cdr) funds
(aprog1 `((,(- caar (volume car)) .,cdar) .,cdr)
(signal "~S" it)))
(1- count) magic bases punk dunk book))
(ope-sprinner (funcall dunk book funds count magic) funds
count magic (and vwab `((,vwab ,(aq* vwab cost)
,cost) ,@bases))
punk dunk book))))))))
(defun ope-spreader (book resilience funds epsilon side ope)
(flet ((dunk (book funds count magic &optional (start epsilon))
(and book (dumbot-offers book resilience (caar funds)
start (floor count) magic))))
(with-slots (supplicant magic) ope
(with-slots (order-slots) supplicant
(awhen (dunk book funds (/ order-slots 2) magic)
(ope-sprinner it funds (/ order-slots 2) magic
(bases-for supplicant (asset (given (first it))))
(destructuring-bind (bid . ask)
(recv (slot-reduce ope supplicant fee output))
(macrolet ((punk (&rest args)
`(lambda (price vwab inner-cut)
(- (* 100 (1- (profit-margin ,@args)))
inner-cut))))
give ( sem # \N # \D ) a chance !
(bids (punk price vwab bid))
(asks (punk vwab price 0 ask)))))
#'dunk book))))))
(defmethod perform ((ope ope-scalper) &key)
abstracting patterns with strictly fewer than THREE [ ie ( 1 + 2 ! ) ]
it also just makes you sound like a lad , a cad , and never a chad
(with-slots (input output filter prioritizer epsilon frequency) ope
(destructuring-bind (primary counter resilience ratio) (recv input)
(with-slots (cut) filter
(setf cut (complex (realpart cut)
(* (realpart cut) (atan (log ratio))))))
(with-slots (next-bids next-asks response) prioritizer
(macrolet ((do-side (amount side chan epsilon)
#+ () "can't I write documentation for local macros?"
`(let ((,side (copy-list (slot-value filter ',side))))
(unless (or (zerop (caar ,amount)) (null ,side))
(send ,chan (ope-spreader ,side resilience ,amount
,epsilon ',side ope))
(recv response)))))
(let ((e (/ epsilon (+ 1/13 (abs (log (1+ (abs (log ratio)))))))))
(do-side counter bids next-bids
(* (max e epsilon) (abs (price (first bids))) (max ratio 1)
(expt 10 (- (decimals (market (first bids)))))))
(do-side primary asks next-asks (* (max e epsilon) (max (/ ratio) 1)))
no , you may not write CL : DOCUMENTATION for macrolets ,
(send output (sleep frequency))))
(defmethod initialize-instance :after ((ope ope-scalper) &key cut)
(with-slots (filter prioritizer supplicant) ope
(macrolet ((init (slot &rest args)
`(setf ,slot (make-instance ',slot :supplicant supplicant
:delegates `(,supplicant) ,@args)))
(children (&rest slots)
`(progn ,@(mapcar (lambda (slot) `(adopt ope ,slot)) slots))))
(children (init prioritizer) (init filter :cut cut)))))
(defclass maker (parent)
((fund-factor :initarg :fund-factor :initform 1)
(resilience-factor :initarg :resilience :initform 1)
(targeting-factor :initarg :targeting :initform (random 1.0))
(skew-factor :initarg :skew-factor :initform 1)
(cut :initform 0 :initarg :cut) ope (supplicant :initarg :supplicant)
(abbrev :initform "maker" :allocation :class) (last-report :initform nil)
(defmethod christen ((maker maker) (type (eql 'actor)))
(name (slot-reduce maker gate)))
(defmethod print-object ((maker maker) stream)
(print-unreadable-object (maker stream :type t :identity nil)
(write-string (name maker) stream)))
(defun profit-snake (lictor length &aux (trades (slot-value lictor 'trades)))
(flet ((depth-profit (depth)
(flet ((vwap (side) (vwap lictor :type side :depth depth)))
(* 100 (1- (profit-margin (vwap "buy") (vwap "sell"))))))
(side-last (side)
(volume (find side trades :key #'direction :test #'string-equal)))
(funcall (if (plusp real) #'identity #'char-downcase)
(char "HWFUMONERYLSICAZJX"
(floor (* (abs real) #xFF) #xF)))))
(with-output-to-string (out)
(when (and (find "buy" trades :key #'direction :test #'string-equal)
(find "sell" trades :key #'direction :test #'string-equal))
(let* ((min-sum (loop for trade in trades
for volume = (net-volume trade)
if (string-equal (direction trade) "buy")
sum volume into buy-sum
else sum volume into sell-sum
finally (return (min buy-sum sell-sum))))
(min-last (apply 'min (mapcar #'side-last '("buy" "sell"))))
(scale (expt (/ min-sum min-last) (/ (1+ length))))
(dps (loop for i to length collect
(depth-profit (/ min-sum (expt scale i)))))
(highest (apply #'max 0 (remove-if #'minusp dps)))
(lowest (apply #'min 0 (remove-if #'plusp dps))))
(format out "~4@$" (depth-profit min-sum))
(dolist (dp dps (format out "~4@$" (first (last dps))))
(format out "~C" (case (round (signum dp)) (0 #\Space)
(+1 (chr (/ dp highest)))
(-1 (chr (- (/ dp lowest))))))))))))
(defun makereport (maker fund rate btc doge investment risked skew &optional ha)
(with-slots (name market ope snake last-report) maker
(unless ha
(first (slot-reduce maker lictor trades)))))
(if (equal new-report (if (channelp (first last-report))
(cdr last-report) last-report))
(return-from makereport)
(if (channelp (first last-report))
(setf (cdr last-report) new-report)
(setf last-report new-report)))))
TODO factor out aqstr
(format nil "~V,,V$" (decimals (slot-value market side))
0 (float amount 0d0))))
we need a pprint - style ~/aq/ function , and pass it aq objects !
(aprog1 (format () "~&~A~A ~{~A~^ ~} ~5,4,,VF~4,4F~4,4@F~A~%"
() (now) :format '((:hour 2) (:min 2) (:sec 2)))
(mapcar #'sastr '(primary counter primary counter)
`(,@#1=`(,fund ,(* fund rate)) ,btc ,doge))
this loud ?
(slot-reduce ope supplicant lictor) snake))
CL : FORMAT is perfectly fine , and mostly painless .
(format t it) (if (channelp (first last-report))
(send (first last-report) it))
If I ever have to see three consecutive tildes , remind me that
I am not supposed to live beyond one third of a dozen decades .
)))
(force-output))
(defmethod perform ((maker maker) &key)
memento , du !
(with-slots (fund-factor resilience-factor targeting-factor skew-factor
market name ope cut supplicant) maker
(balances (with-slots (sync) (slot-reduce maker treasurer)
(doge/btc (vwap market :depth 50 :type :sell)))
(flet ((total-of (btc doge) (float (+ btc (/ doge doge/btc)))))
(let* ((total-btc (asset-funds (primary market) balances))
(total-doge (asset-funds (counter market) balances))
(total-fund (total-of total-btc total-doge)))
of Maybe , where the treasurer reports zero balances when the http
when the Right Thing 's anaphoric . now that the bug 's killed better ,
(unless (zerop total-fund)
(let* ((buyin (dbz-guard (/ total-btc total-fund)))
(btc (* fund-factor total-btc
buyin targeting-factor))
(doge (* fund-factor total-doge
(/ (- 1 buyin) targeting-factor)))
status should always be \in ( 0,1 )
(status (dbz-guard (/ (total-of btc doge) total-fund)))
(torque (dbz-guard (/ (total-of (- btc) doge) total-fund)))
(skew (log (if (zerop (* btc doge))
(max 1/100
(min 100
(or (ignore-errors
(/ doge btc doge/btc)) 0)))
(/ doge btc doge/btc)))))
( setf targeting - factor ( / targeting - factor ) ) )
(makereport maker total-fund doge/btc total-btc total-doge
buyin status torque)
(flet ((f (g h) `((,g . ,(* cut (max 0 (* skew-factor h)))))))
(send (slot-reduce ope input)
(list (f (min btc (* 2/3 total-btc)) skew)
(f (min doge (* 2/3 total-doge)) (- skew))
(* resilience-factor
(reduce #'max (mapcar #'volume trades)
:initial-value 0))
(expt (exp skew) skew-factor)))
(recv (slot-reduce ope output))))))))))
(defmethod initialize-instance :after ((maker maker) &key)
(with-slots (supplicant ope delegates cut) maker
(adopt maker supplicant) (push supplicant delegates)
(adopt maker (setf ope (make-instance 'ope-scalper :cut cut
:supplicant supplicant)))))
(defun reset-the-net (maker &key (revive t) (delay 5))
(mapc 'kill (mapcar 'task-thread (pooled-tasks)))
#+sbcl (sb-ext:gc :full t)
(when revive
(dolist (actor (list (slot-reduce maker market) (slot-reduce maker gate)
(slot-reduce maker ope) maker))
(sleep delay) (reinitialize-instance actor))))
(defmacro define-maker (name &rest keys)
`(defvar ,name
(make-instance 'maker :name ,(string-trim "*+<>" name) ,@keys)))
(defun current-depth (maker)
(with-slots (resilience-factor market) maker
(with-slots (trades) (slot-value market 'trades-tracker)
(* resilience-factor (reduce #'max (mapcar #'volume trades))))))
(defun trades-profits (trades)
(flet ((side-sum (side asset)
(aif (remove side trades :key #'direction :test-not #'string-equal)
(reduce #'aq+ (mapcar asset it)) 0)))
(let ((aq1 (aq- (side-sum "buy" #'taken) (side-sum "sell" #'given)))
(aq2 (aq- (side-sum "sell" #'taken) (side-sum "buy" #'given))))
(ecase (- (signum (quantity aq1)) (signum (quantity aq2)))
((0 1 -1) (values nil aq1 aq2))
(-2 (values (aq/ (- (conjugate aq1)) aq2) aq2 aq1))
(+2 (values (aq/ (- (conjugate aq2)) aq1) aq1 aq2))))))
(defun performance-overview (maker &optional depth)
(with-slots (treasurer lictor) maker
(with-slots (primary counter) #1=(market maker)
(flet ((funds (symbol)
(asset-funds symbol (slot-reduce treasurer balances)))
(+ btc (/ doge (vwap #1# :depth 50 :type :buy))))
(vwap (side) (vwap lictor :type side :market #1# :depth depth)))
(let* ((trades (slot-reduce maker lictor trades))
(uptime (timestamp-difference
(now) (timestamp (first (last trades)))))
(updays (/ uptime 60 60 24))
(volume (reduce #'+ (mapcar #'volume trades)))
(profit (* volume (1- (profit-margin (vwap "buy")
(vwap "sell"))) 1/2))
(total (total (funds primary) (funds counter))))
(format t "~&I failed calculus, so why take my ~
word for any of these reckonings?~%")
(format t "~&Been up ~7@F days ~A~
~%traded ~7@F ~(~A~),~
~%profit ~7@F ~(~2:*~A~*~),~
~%portfolio flip per ~7@F days,~
~%avg daily profit: ~4@$%~
~%estd monthly profit: ~4@$%~%"
updays (now) volume (name primary) profit
(/ (* total updays 2) volume)
(/ (* 100 profit) (/ updays 30) total)))))))
(defmethod print-book ((maker maker) &rest keys &key market ours wait)
(macrolet ((path (&rest path)
`(apply #'print-book (slot-reduce ,@path) keys)))
(with-slots (response) (slot-reduce maker ope prioritizer)
(multiple-value-bind (next source) (when wait (recv response))
(let ((placed (multiple-value-call 'cons
(ope-placed (slot-reduce maker ope)))))
(path placed) (when ours (setf (getf keys :ours) placed))
(when source (send source next)))))
(when market (path maker market book-tracker))))
(defmethod describe-object ((maker maker) (stream t))
(with-slots (name print-args lictor) maker
(apply #'print-book maker print-args) (performance-overview maker)
(multiple-value-call 'format stream "~@{~A~#[~:; ~]~}" name
(trades-profits (slot-reduce lictor trades)))))
(defmethod describe-object :after ((maker maker) (stream t))
(with-aslots (market) (slot-reduce maker supplicant)
(describe-account it (exchange market) stream)))
( if ( null trades ) ( list start 0 nil )
for window - start = ( timestamp ( first trades ) ) then window - close
for window - close = ( timestamp- window - start 1 : day )
( subseq trades 0 window - count ) ) )
( destructuring - bind ( first . rest ) ( slot - reduce supplicant lictor trades )
( dolist ( day ( reduce ( lambda ( days trade )
( if (= ( day - of next ) ( day - of day ) )
( cons ( list * day trade trades ) rest )
( acons next trade days ) ) ) ) )
rest : initial - value ( acons ( timestamp first ) first ( ) ) ) )
( multiple - value - call ' format stream " : ; ~]~}~% " name
|
c09db15dde6e7bbda644b3b78f1fbb89e9f61b7d585b91cbad83131ea285ab74 | exercism/erlang | grade_school_tests.erl | Generated with ' v0.2.0 '
%% Revision 1 of the exercises generator was used
%% -specifications/raw/7a8722ac4546baae28b4b2c1bdae14e04fdba88c/exercises/grade-school/canonical-data.json
%% This file is automatically generated from the exercises canonical data.
-module(grade_school_tests).
-include_lib("erl_exercism/include/exercism.hrl").
-include_lib("eunit/include/eunit.hrl").
%% A brand-new school has an empty roster.
'1_roster_is_empty_when_no_student_is_added_test_'() ->
    School = grade_school:new(),
    {"Roster is empty when no student is added",
     ?_assertEqual([], lists:sort(grade_school:get(School)))}.
%% Adding a single student makes them appear on the roster.
'2_student_is_added_to_the_roster_test_'() ->
    School = grade_school:add("Aimee", 2, grade_school:new()),
    {"Student is added to the roster",
     ?_assertEqual(["Aimee"],
                   lists:sort(grade_school:get(School)))}.
%% Several students may share a single grade.
'3_multiple_students_in_the_same_grade_are_added_to_the_roster_test_'() ->
    Enrol = fun({Name, Grade}, Acc) -> grade_school:add(Name, Grade, Acc) end,
    School = lists:foldl(Enrol, grade_school:new(),
                         [{"Blair", 2}, {"James", 2}, {"Paul", 2}]),
    {"Multiple students in the same grade "
     "are added to the roster",
     ?_assertEqual(["Blair", "James", "Paul"],
                   lists:sort(grade_school:get(School)))}.
%% Enrolling the same student in the same grade twice keeps one entry.
'4_student_not_added_to_same_grade_in_the_roster_more_than_once_test_'() ->
    Enrol = fun({Name, Grade}, Acc) -> grade_school:add(Name, Grade, Acc) end,
    School = lists:foldl(Enrol, grade_school:new(),
                         [{"Blair", 2}, {"James", 2}, {"James", 2}, {"Paul", 2}]),
    {"Student not added to same grade in the "
     "roster more than once",
     ?_assertEqual(["Blair", "James", "Paul"],
                   lists:sort(grade_school:get(School)))}.
%% Students may be spread across different grades.
'5_students_in_multiple_grades_are_added_to_the_roster_test_'() ->
    Enrol = fun({Name, Grade}, Acc) -> grade_school:add(Name, Grade, Acc) end,
    School = lists:foldl(Enrol, grade_school:new(),
                         [{"Chelsea", 3}, {"Logan", 7}]),
    {"Students in multiple grades are added "
     "to the roster",
     ?_assertEqual(["Chelsea", "Logan"],
                   lists:sort(grade_school:get(School)))}.
%% A student already enrolled keeps their first grade; the roster stays
%% duplicate-free.
'6_student_not_added_to_multiple_grades_in_the_roster_test_'() ->
    Enrol = fun({Name, Grade}, Acc) -> grade_school:add(Name, Grade, Acc) end,
    School = lists:foldl(Enrol, grade_school:new(),
                         [{"Blair", 2}, {"James", 2}, {"James", 3}, {"Paul", 3}]),
    {"Student not added to multiple grades "
     "in the roster",
     ?_assertEqual(["Blair", "James", "Paul"],
                   lists:sort(grade_school:get(School)))}.
%% The roster is ordered regardless of insertion order of the grades.
'7_students_are_sorted_by_grades_in_the_roster_test_'() ->
    Enrol = fun({Name, Grade}, Acc) -> grade_school:add(Name, Grade, Acc) end,
    School = lists:foldl(Enrol, grade_school:new(),
                         [{"Jim", 3}, {"Peter", 2}, {"Anna", 1}]),
    {"Students are sorted by grades in the "
     "roster",
     ?_assertEqual(["Anna", "Jim", "Peter"],
                   lists:sort(grade_school:get(School)))}.
%% Within one grade, names come back sorted.
'8_students_are_sorted_by_name_in_the_roster_test_'() ->
    Enrol = fun({Name, Grade}, Acc) -> grade_school:add(Name, Grade, Acc) end,
    School = lists:foldl(Enrol, grade_school:new(),
                         [{"Peter", 2}, {"Zoe", 2}, {"Alex", 2}]),
    {"Students are sorted by name in the roster",
     ?_assertEqual(["Alex", "Peter", "Zoe"],
                   lists:sort(grade_school:get(School)))}.
%% Combined ordering: grade first, then name within the grade.
'9_students_are_sorted_by_grades_and_then_by_name_in_the_roster_test_'() ->
    Enrol = fun({Name, Grade}, Acc) -> grade_school:add(Name, Grade, Acc) end,
    School = lists:foldl(Enrol, grade_school:new(),
                         [{"Peter", 2}, {"Anna", 1}, {"Barb", 1}, {"Zoe", 2},
                          {"Alex", 2}, {"Jim", 3}, {"Charlie", 1}]),
    {"Students are sorted by grades and then "
     "by name in the roster",
     ?_assertEqual(["Alex", "Anna", "Barb", "Charlie", "Jim",
                    "Peter", "Zoe"],
                   lists:sort(grade_school:get(School)))}.
%% Querying any grade of an empty school yields nothing.
'10_grade_is_empty_if_no_students_in_the_roster_test_'() ->
    School = grade_school:new(),
    {"Grade is empty if no students in the "
     "roster",
     ?_assertEqual([], lists:sort(grade_school:get(1, School)))}.
%% A populated school still reports an empty list for an unused grade.
'11_grade_is_empty_if_no_students_in_that_grade_test_'() ->
    Enrol = fun({Name, Grade}, Acc) -> grade_school:add(Name, Grade, Acc) end,
    School = lists:foldl(Enrol, grade_school:new(),
                         [{"Peter", 2}, {"Zoe", 2}, {"Alex", 2}, {"Jim", 3}]),
    {"Grade is empty if no students in that "
     "grade",
     ?_assertEqual([], lists:sort(grade_school:get(1, School)))}.
%% Duplicate enrolment is invisible when querying the grade directly.
'12_student_not_added_to_same_grade_more_than_once_test_'() ->
    Enrol = fun({Name, Grade}, Acc) -> grade_school:add(Name, Grade, Acc) end,
    School = lists:foldl(Enrol, grade_school:new(),
                         [{"Blair", 2}, {"James", 2}, {"James", 2}, {"Paul", 2}]),
    {"Student not added to same grade more "
     "than once",
     ?_assertEqual(["Blair", "James", "Paul"],
                   lists:sort(grade_school:get(2, School)))}.
%% James's second enrolment (grade 3) does not remove him from grade 2.
'13_student_not_added_to_multiple_grades_test_'() ->
    Enrol = fun({Name, Grade}, Acc) -> grade_school:add(Name, Grade, Acc) end,
    School = lists:foldl(Enrol, grade_school:new(),
                         [{"Blair", 2}, {"James", 2}, {"James", 3}, {"Paul", 3}]),
    {"Student not added to multiple grades",
     ?_assertEqual(["Blair", "James"],
                   lists:sort(grade_school:get(2, School)))}.
%% ...and he must not show up in grade 3 either.
'14_student_not_added_to_other_grade_for_multiple_grades_test_'() ->
    Enrol = fun({Name, Grade}, Acc) -> grade_school:add(Name, Grade, Acc) end,
    School = lists:foldl(Enrol, grade_school:new(),
                         [{"Blair", 2}, {"James", 2}, {"James", 3}, {"Paul", 3}]),
    {"Student not added to other grade for "
     "multiple grades",
     ?_assertEqual(["Paul"],
                   lists:sort(grade_school:get(3, School)))}.
%% A per-grade query returns that grade's students in name order.
'15_students_are_sorted_by_name_in_a_grade_test_'() ->
    Enrol = fun({Name, Grade}, Acc) -> grade_school:add(Name, Grade, Acc) end,
    School = lists:foldl(Enrol, grade_school:new(),
                         [{"Franklin", 5}, {"Bradley", 5}, {"Jeff", 1}]),
    {"Students are sorted by name in a grade",
     ?_assertEqual(["Bradley", "Franklin"],
                   lists:sort(grade_school:get(5, School)))}.
| null | https://raw.githubusercontent.com/exercism/erlang/9b3d3c14ef826e7efbc4fcd024fd20ed09332562/exercises/practice/grade-school/test/grade_school_tests.erl | erlang | Revision 1 of the exercises generator was used
-specifications/raw/7a8722ac4546baae28b4b2c1bdae14e04fdba88c/exercises/grade-school/canonical-data.json
This file is automatically generated from the exercises canonical data. | Generated with ' v0.2.0 '
-module(grade_school_tests).
-include_lib("erl_exercism/include/exercism.hrl").
-include_lib("eunit/include/eunit.hrl").
'1_roster_is_empty_when_no_student_is_added_test_'() ->
S0 = grade_school:new(),
{"Roster is empty when no student is added",
?_assertEqual([], lists:sort(grade_school:get(S0)))}.
'2_student_is_added_to_the_roster_test_'() ->
S0 = grade_school:new(),
S1 = grade_school:add("Aimee", 2, S0),
{"Student is added to the roster",
?_assertEqual(["Aimee"],
lists:sort(grade_school:get(S1)))}.
'3_multiple_students_in_the_same_grade_are_added_to_the_roster_test_'() ->
S0 = grade_school:new(),
S1 = grade_school:add("Blair", 2, S0),
S2 = grade_school:add("James", 2, S1),
S3 = grade_school:add("Paul", 2, S2),
{"Multiple students in the same grade "
"are added to the roster",
?_assertEqual(["Blair", "James", "Paul"],
lists:sort(grade_school:get(S3)))}.
'4_student_not_added_to_same_grade_in_the_roster_more_than_once_test_'() ->
S0 = grade_school:new(),
S1 = grade_school:add("Blair", 2, S0),
S2 = grade_school:add("James", 2, S1),
S3 = grade_school:add("James", 2, S2),
S4 = grade_school:add("Paul", 2, S3),
{"Student not added to same grade in the "
"roster more than once",
?_assertEqual(["Blair", "James", "Paul"],
lists:sort(grade_school:get(S4)))}.
'5_students_in_multiple_grades_are_added_to_the_roster_test_'() ->
S0 = grade_school:new(),
S1 = grade_school:add("Chelsea", 3, S0),
S2 = grade_school:add("Logan", 7, S1),
{"Students in multiple grades are added "
"to the roster",
?_assertEqual(["Chelsea", "Logan"],
lists:sort(grade_school:get(S2)))}.
'6_student_not_added_to_multiple_grades_in_the_roster_test_'() ->
S0 = grade_school:new(),
S1 = grade_school:add("Blair", 2, S0),
S2 = grade_school:add("James", 2, S1),
S3 = grade_school:add("James", 3, S2),
S4 = grade_school:add("Paul", 3, S3),
{"Student not added to multiple grades "
"in the roster",
?_assertEqual(["Blair", "James", "Paul"],
lists:sort(grade_school:get(S4)))}.
'7_students_are_sorted_by_grades_in_the_roster_test_'() ->
S0 = grade_school:new(),
S1 = grade_school:add("Jim", 3, S0),
S2 = grade_school:add("Peter", 2, S1),
S3 = grade_school:add("Anna", 1, S2),
{"Students are sorted by grades in the "
"roster",
?_assertEqual(["Anna", "Jim", "Peter"],
lists:sort(grade_school:get(S3)))}.
'8_students_are_sorted_by_name_in_the_roster_test_'() ->
S0 = grade_school:new(),
S1 = grade_school:add("Peter", 2, S0),
S2 = grade_school:add("Zoe", 2, S1),
S3 = grade_school:add("Alex", 2, S2),
{"Students are sorted by name in the roster",
?_assertEqual(["Alex", "Peter", "Zoe"],
lists:sort(grade_school:get(S3)))}.
'9_students_are_sorted_by_grades_and_then_by_name_in_the_roster_test_'() ->
S0 = grade_school:new(),
S1 = grade_school:add("Peter", 2, S0),
S2 = grade_school:add("Anna", 1, S1),
S3 = grade_school:add("Barb", 1, S2),
S4 = grade_school:add("Zoe", 2, S3),
S5 = grade_school:add("Alex", 2, S4),
S6 = grade_school:add("Jim", 3, S5),
S7 = grade_school:add("Charlie", 1, S6),
{"Students are sorted by grades and then "
"by name in the roster",
?_assertEqual(["Alex", "Anna", "Barb", "Charlie", "Jim",
"Peter", "Zoe"],
lists:sort(grade_school:get(S7)))}.
'10_grade_is_empty_if_no_students_in_the_roster_test_'() ->
S0 = grade_school:new(),
{"Grade is empty if no students in the "
"roster",
?_assertEqual([], lists:sort(grade_school:get(1, S0)))}.
'11_grade_is_empty_if_no_students_in_that_grade_test_'() ->
S0 = grade_school:new(),
S1 = grade_school:add("Peter", 2, S0),
S2 = grade_school:add("Zoe", 2, S1),
S3 = grade_school:add("Alex", 2, S2),
S4 = grade_school:add("Jim", 3, S3),
{"Grade is empty if no students in that "
"grade",
?_assertEqual([], lists:sort(grade_school:get(1, S4)))}.
'12_student_not_added_to_same_grade_more_than_once_test_'() ->
S0 = grade_school:new(),
S1 = grade_school:add("Blair", 2, S0),
S2 = grade_school:add("James", 2, S1),
S3 = grade_school:add("James", 2, S2),
S4 = grade_school:add("Paul", 2, S3),
{"Student not added to same grade more "
"than once",
?_assertEqual(["Blair", "James", "Paul"],
lists:sort(grade_school:get(2, S4)))}.
'13_student_not_added_to_multiple_grades_test_'() ->
S0 = grade_school:new(),
S1 = grade_school:add("Blair", 2, S0),
S2 = grade_school:add("James", 2, S1),
S3 = grade_school:add("James", 3, S2),
S4 = grade_school:add("Paul", 3, S3),
{"Student not added to multiple grades",
?_assertEqual(["Blair", "James"],
lists:sort(grade_school:get(2, S4)))}.
'14_student_not_added_to_other_grade_for_multiple_grades_test_'() ->
S0 = grade_school:new(),
S1 = grade_school:add("Blair", 2, S0),
S2 = grade_school:add("James", 2, S1),
S3 = grade_school:add("James", 3, S2),
S4 = grade_school:add("Paul", 3, S3),
{"Student not added to other grade for "
"multiple grades",
?_assertEqual(["Paul"],
lists:sort(grade_school:get(3, S4)))}.
'15_students_are_sorted_by_name_in_a_grade_test_'() ->
S0 = grade_school:new(),
S1 = grade_school:add("Franklin", 5, S0),
S2 = grade_school:add("Bradley", 5, S1),
S3 = grade_school:add("Jeff", 1, S2),
{"Students are sorted by name in a grade",
?_assertEqual(["Bradley", "Franklin"],
lists:sort(grade_school:get(5, S3)))}.
|
d8eb00f691b0d0ba1b797aacbafaa8ba6e0238d3548032c3a9548369cba79445 | janestreet/memtrace_viewer_with_deps | weak_pointer.ml | (* We implement a weak pointer using a [Weak_array.t]. *)
open! Core_kernel
type 'a t = 'a Weak_array.t
let create () = Weak_array.create ~len:1
We use a weak array of length 1 , so the weak pointer is at index 0 .
let index = 0
let get t = Weak_array.get t index
let sexp_of_t sexp_of_a t = [%sexp (get t : a Heap_block.t option)]
let is_none t = Weak_array.is_none t index
let is_some t = Weak_array.is_some t index
let set t block = Weak_array.set t index (Some block)
| null | https://raw.githubusercontent.com/janestreet/memtrace_viewer_with_deps/5a9e1f927f5f8333e2d71c8d3ca03a45587422c4/vendor/core_kernel/weak_pointer/src/weak_pointer.ml | ocaml | We implement a weak pointer using a [Weak_array.t]. |
open! Core_kernel
type 'a t = 'a Weak_array.t
let create () = Weak_array.create ~len:1
We use a weak array of length 1 , so the weak pointer is at index 0 .
let index = 0
let get t = Weak_array.get t index
let sexp_of_t sexp_of_a t = [%sexp (get t : a Heap_block.t option)]
let is_none t = Weak_array.is_none t index
let is_some t = Weak_array.is_some t index
let set t block = Weak_array.set t index (Some block)
|
339d662e5158c8a64310d7f22881ba889a39f568ed8829ac5194f84c54a45b65 | gfour/gic | exmh20.hs | result = apply four 3;
apply f x = f x;
four y = w y;
z y1 = y1 * y1;
w y2 = z y2 * z y2
| null | https://raw.githubusercontent.com/gfour/gic/d5f2e506b31a1a28e02ca54af9610b3d8d618e9a/Examples/Num/exmh20.hs | haskell | result = apply four 3;
apply f x = f x;
four y = w y;
z y1 = y1 * y1;
w y2 = z y2 * z y2
| |
126b6d186dc96e5d0f027313815125fbc396726bd8955c627c5fe2725a14bf6f | ocaml-opam/opam-compiler | main.ml | let () = Opam_compiler.Cli.main ()
| null | https://raw.githubusercontent.com/ocaml-opam/opam-compiler/5fa28d623f06d8be6856ac167a11fbf347caef19/bin/main.ml | ocaml | let () = Opam_compiler.Cli.main ()
| |
d7af35b021fac10ea8b623eb8f3fd624aa898b9475af56b0a4546051eda6a817 | senapk/funcional_arcade | solver.hs | concatmap :: (a -> [b]) -> [a] -> [b]
concatmap fn xs = foldl (++) [] (map fn xs)
| null | https://raw.githubusercontent.com/senapk/funcional_arcade/70fa04b4799d5a8c7e5add39d9f217f38f418600/base/067/solver.hs | haskell | concatmap :: (a -> [b]) -> [a] -> [b]
concatmap fn xs = foldl (++) [] (map fn xs)
| |
e2da88043a36bf6a0807827e287257ad2260dee708bf653d80c4f47bb2aed122 | con-kitty/categorifier | Client.hs | # LANGUAGE TemplateHaskell #
# LANGUAGE TypeFamilies #
# OPTIONS_GHC -Wno - orphans #
module Categorifier.LinearBase.Client
( HasRep (..),
deriveHasRep,
)
where
import Categorifier.Client (HasRep (..), deriveHasRep)
import Control.Functor.Linear (Data, ReaderT, StateT)
import Control.Optics.Linear (Optic_)
import Data.Arity.Linear (Peano)
import Data.Array.Destination (DArray)
import qualified Data.Array.Polarized.Pull as Pull
import Data.HashMap.Mutable.Linear (HashMap)
import Data.Monoid.Linear (Endo, NonLinear)
import Data.Num.Linear (Adding, Multiplying)
import Data.Profunctor.Kleisli.Linear (CoKleisli, Kleisli)
import Data.Profunctor.Linear (Exchange, Market)
import Data.Replicator.Linear (Replicator)
import Data.Set.Mutable.Linear (Set)
import Data.Unrestricted.Linear (AsMovable, MovableMonoid, Ur, UrT)
import Data.V.Linear (V)
import Data.Vector.Mutable.Linear (Vector)
import Foreign.Marshal.Pure (Box, Pool)
import Prelude.Linear.Generically (Generically, Generically1)
import Streaming.Linear (Of, Stream)
import Streaming.Prelude.Linear (Either3)
import System.IO.Resource.Linear (Handle, RIO, UnsafeResource)
deriveHasRep ''Adding
deriveHasRep ''AsMovable
deriveHasRep ''Box
deriveHasRep ''CoKleisli
deriveHasRep ''DArray
deriveHasRep ''Data
deriveHasRep ''Either3
deriveHasRep ''Endo
deriveHasRep ''Exchange
deriveHasRep ''Generically
deriveHasRep ''Generically1
deriveHasRep ''Handle
deriveHasRep ''HashMap
deriveHasRep ''Kleisli
deriveHasRep ''Market
deriveHasRep ''MovableMonoid
deriveHasRep ''Multiplying
deriveHasRep ''NonLinear
deriveHasRep ''Of
deriveHasRep ''Optic_
deriveHasRep ''Peano
deriveHasRep ''Pool
deriveHasRep ''Pull.Array
deriveHasRep ''RIO
deriveHasRep ''ReaderT
deriveHasRep ''Replicator
deriveHasRep ''Set
deriveHasRep ''StateT
deriveHasRep ''Stream
deriveHasRep ''UnsafeResource
deriveHasRep ''Ur
deriveHasRep ''UrT
deriveHasRep ''V
deriveHasRep ''Vector
| null | https://raw.githubusercontent.com/con-kitty/categorifier/d8dc1106c4600c2168889519d2c3f843db2e9410/integrations/linear-base/integration/Categorifier/LinearBase/Client.hs | haskell | # LANGUAGE TemplateHaskell #
# LANGUAGE TypeFamilies #
# OPTIONS_GHC -Wno - orphans #
module Categorifier.LinearBase.Client
( HasRep (..),
deriveHasRep,
)
where
import Categorifier.Client (HasRep (..), deriveHasRep)
import Control.Functor.Linear (Data, ReaderT, StateT)
import Control.Optics.Linear (Optic_)
import Data.Arity.Linear (Peano)
import Data.Array.Destination (DArray)
import qualified Data.Array.Polarized.Pull as Pull
import Data.HashMap.Mutable.Linear (HashMap)
import Data.Monoid.Linear (Endo, NonLinear)
import Data.Num.Linear (Adding, Multiplying)
import Data.Profunctor.Kleisli.Linear (CoKleisli, Kleisli)
import Data.Profunctor.Linear (Exchange, Market)
import Data.Replicator.Linear (Replicator)
import Data.Set.Mutable.Linear (Set)
import Data.Unrestricted.Linear (AsMovable, MovableMonoid, Ur, UrT)
import Data.V.Linear (V)
import Data.Vector.Mutable.Linear (Vector)
import Foreign.Marshal.Pure (Box, Pool)
import Prelude.Linear.Generically (Generically, Generically1)
import Streaming.Linear (Of, Stream)
import Streaming.Prelude.Linear (Either3)
import System.IO.Resource.Linear (Handle, RIO, UnsafeResource)
deriveHasRep ''Adding
deriveHasRep ''AsMovable
deriveHasRep ''Box
deriveHasRep ''CoKleisli
deriveHasRep ''DArray
deriveHasRep ''Data
deriveHasRep ''Either3
deriveHasRep ''Endo
deriveHasRep ''Exchange
deriveHasRep ''Generically
deriveHasRep ''Generically1
deriveHasRep ''Handle
deriveHasRep ''HashMap
deriveHasRep ''Kleisli
deriveHasRep ''Market
deriveHasRep ''MovableMonoid
deriveHasRep ''Multiplying
deriveHasRep ''NonLinear
deriveHasRep ''Of
deriveHasRep ''Optic_
deriveHasRep ''Peano
deriveHasRep ''Pool
deriveHasRep ''Pull.Array
deriveHasRep ''RIO
deriveHasRep ''ReaderT
deriveHasRep ''Replicator
deriveHasRep ''Set
deriveHasRep ''StateT
deriveHasRep ''Stream
deriveHasRep ''UnsafeResource
deriveHasRep ''Ur
deriveHasRep ''UrT
deriveHasRep ''V
deriveHasRep ''Vector
| |
a0293339f1b0341111e72d7176ad2308f62f271219e2ec5178dba726b6bf7294 | well-typed/cborg | PkgAesonGeneric.hs | # OPTIONS_GHC -fno - warn - orphans #
module Micro.PkgAesonGeneric where
import Micro.Types
import Data.Aeson as Aeson
import Data.ByteString.Lazy as BS
import Data.Maybe
serialise :: Tree -> BS.ByteString
serialise = Aeson.encode
deserialise :: BS.ByteString -> Tree
deserialise = fromJust . Aeson.decode'
instance ToJSON Tree
instance FromJSON Tree
| null | https://raw.githubusercontent.com/well-typed/cborg/9be3fd5437f9d2ec1df784d5d939efb9a85fd1fb/serialise/bench/micro/Micro/PkgAesonGeneric.hs | haskell | # OPTIONS_GHC -fno - warn - orphans #
module Micro.PkgAesonGeneric where
import Micro.Types
import Data.Aeson as Aeson
import Data.ByteString.Lazy as BS
import Data.Maybe
serialise :: Tree -> BS.ByteString
serialise = Aeson.encode
deserialise :: BS.ByteString -> Tree
deserialise = fromJust . Aeson.decode'
instance ToJSON Tree
instance FromJSON Tree
| |
684d960dc3841a1136ea74606e7f51d845b720716cc19c6502a3ddb483a1d715 | janestreet/hardcaml_circuits | rac.mli | * ROM - accumulator .
Evaluate [ a0.x0 + a1.x1 + ... + an.xn ] where the [ ai ] are constants , using
distributed arithmetic . The architecture uses a rom and add / shift circuit and
requires no multipliers .
The ROM - accumulator extends the idea of multiplication by adding and shifting .
[ a0.x0 ] can be the calculated by testing each bit of [ x ] and adding [ a ] to the shifted
accumulator . Similarly [ a0.x0 + a1.x1 ] can be calculated by forming an address vector
from each successive bit , [ b ] , of and x1 , i.e. :
[ [ x1.[b ] ; x0.[b ] ] ]
A pre - calculated rom stores all possibile additions of a0 and a1
[ [ 0 ; a0 ; a1 ; a0+a1 ] ]
Given n coefficients the required rom will be of size 2^n .
The address is looked up in the rom and added to ( or subtracted from ) the shifted
accumulator .
Evaluate [ a0.x0 + a1.x1 + ... + an.xn ] where the [ai] are constants, using
distributed arithmetic. The architecture uses a rom and add/shift circuit and
requires no multipliers.
The ROM-accumulator extends the idea of multiplication by adding and shifting.
[a0.x0] can be the calculated by testing each bit of [x] and adding [a] to the shifted
accumulator. Similarly [a0.x0 + a1.x1] can be calculated by forming an address vector
from each successive bit, [b], of x0 and x1, i.e.:
[[x1.[b]; x0.[b]]]
A pre-calculated rom stores all possibile additions of a0 and a1
[[ 0; a0; a1; a0+a1 ]]
Given n coefficients the required rom will be of size 2^n.
The address is looked up in the rom and added to (or subtracted from) the shifted
accumulator. *)
open Base
open Hardcaml
module Mode : sig
* In [ integer ] mode the coefficients and accumulator are treated as integers , the
internal shift registers shift msb first , and the accumulator shifts to the left .
This in turn specifies an exact result , so long as the accumulator is large enough
to hold it .
In fixed mode the coefficients and accumulator are treated as fixed point values ,
the shift regiters shift lsb first and the accumulator shifts to the right .
internal shift registers shift msb first, and the accumulator shifts to the left.
This in turn specifies an exact result, so long as the accumulator is large enough
to hold it.
In fixed mode the coefficients and accumulator are treated as fixed point values,
the shift regiters shift lsb first and the accumulator shifts to the right. *)
type t =
| Fixed
| Integer
[@@deriving sexp_of]
end
module type Config = sig
val mode : Mode.t
(** Width of the accumulator. *)
val accumulator_bits : int
(** Width of input data. *)
val data_bits : int
(** Number of coefficients. *)
val num_coefs : int
(** Extra least significant bits added to the accumulator. This can add extra
precision without extending the rom size. *)
val rom_shift : int
end
module Make (Config : Config) : sig
module I : sig
type 'a t =
{ clk : 'a
; clr : 'a
; en : 'a
; ld : 'a (* Load input data to internal shift registers *)
High on the msb ( if input data is signed ) .
; x : 'a array
}
[@@deriving sexp_of, hardcaml]
end
module O : sig
type 'a t = { q : 'a } [@@deriving sexp_of, hardcaml]
end
(** Create the Rom-accumulator. *)
val create : coefs:Bits.t array -> Signal.t I.t -> Signal.t O.t
end
| null | https://raw.githubusercontent.com/janestreet/hardcaml_circuits/a2c2d1ea3e6957c3cda4767d519e94c20f1172b2/src/rac.mli | ocaml | * Width of the accumulator.
* Width of input data.
* Number of coefficients.
* Extra least significant bits added to the accumulator. This can add extra
precision without extending the rom size.
Load input data to internal shift registers
* Create the Rom-accumulator. | * ROM - accumulator .
Evaluate [ a0.x0 + a1.x1 + ... + an.xn ] where the [ ai ] are constants , using
distributed arithmetic . The architecture uses a rom and add / shift circuit and
requires no multipliers .
The ROM - accumulator extends the idea of multiplication by adding and shifting .
[ a0.x0 ] can be the calculated by testing each bit of [ x ] and adding [ a ] to the shifted
accumulator . Similarly [ a0.x0 + a1.x1 ] can be calculated by forming an address vector
from each successive bit , [ b ] , of and x1 , i.e. :
[ [ x1.[b ] ; x0.[b ] ] ]
A pre - calculated rom stores all possibile additions of a0 and a1
[ [ 0 ; a0 ; a1 ; a0+a1 ] ]
Given n coefficients the required rom will be of size 2^n .
The address is looked up in the rom and added to ( or subtracted from ) the shifted
accumulator .
Evaluate [ a0.x0 + a1.x1 + ... + an.xn ] where the [ai] are constants, using
distributed arithmetic. The architecture uses a rom and add/shift circuit and
requires no multipliers.
The ROM-accumulator extends the idea of multiplication by adding and shifting.
[a0.x0] can be the calculated by testing each bit of [x] and adding [a] to the shifted
accumulator. Similarly [a0.x0 + a1.x1] can be calculated by forming an address vector
from each successive bit, [b], of x0 and x1, i.e.:
[[x1.[b]; x0.[b]]]
A pre-calculated rom stores all possibile additions of a0 and a1
[[ 0; a0; a1; a0+a1 ]]
Given n coefficients the required rom will be of size 2^n.
The address is looked up in the rom and added to (or subtracted from) the shifted
accumulator. *)
open Base
open Hardcaml
module Mode : sig
* In [ integer ] mode the coefficients and accumulator are treated as integers , the
internal shift registers shift msb first , and the accumulator shifts to the left .
This in turn specifies an exact result , so long as the accumulator is large enough
to hold it .
In fixed mode the coefficients and accumulator are treated as fixed point values ,
the shift regiters shift lsb first and the accumulator shifts to the right .
internal shift registers shift msb first, and the accumulator shifts to the left.
This in turn specifies an exact result, so long as the accumulator is large enough
to hold it.
In fixed mode the coefficients and accumulator are treated as fixed point values,
the shift regiters shift lsb first and the accumulator shifts to the right. *)
type t =
| Fixed
| Integer
[@@deriving sexp_of]
end
module type Config = sig
val mode : Mode.t
val accumulator_bits : int
val data_bits : int
val num_coefs : int
val rom_shift : int
end
module Make (Config : Config) : sig
module I : sig
type 'a t =
{ clk : 'a
; clr : 'a
; en : 'a
High on the msb ( if input data is signed ) .
; x : 'a array
}
[@@deriving sexp_of, hardcaml]
end
module O : sig
type 'a t = { q : 'a } [@@deriving sexp_of, hardcaml]
end
val create : coefs:Bits.t array -> Signal.t I.t -> Signal.t O.t
end
|
62e140aa01f0b2924b26848467e39b260e286a748617347474110608772bffda | nuvla/api-server | configuration_session_mitreid_token.clj | (ns sixsq.nuvla.server.resources.configuration-session-mitreid-token
(:require
[sixsq.nuvla.server.resources.common.std-crud :as std-crud]
[sixsq.nuvla.server.resources.common.utils :as u]
[sixsq.nuvla.server.resources.configuration :as p]
[sixsq.nuvla.server.resources.spec.configuration-template-session-mitreid-token :as cts-mitreid-token]))
(def ^:const service "session-mitreid-token")
;;
;; multimethods for validation
;;
(def validate-fn (u/create-spec-validation-fn ::cts-mitreid-token/schema))
(defmethod p/validate-subtype service
[resource]
(validate-fn resource))
(def create-validate-fn (u/create-spec-validation-fn ::cts-mitreid-token/schema-create))
(defmethod p/create-validate-subtype service
[resource]
(create-validate-fn resource))
;;
;; initialization
;;
(defn initialize
[]
(std-crud/initialize p/resource-type ::cts-mitreid-token/schema))
| null | https://raw.githubusercontent.com/nuvla/api-server/a64a61b227733f1a0a945003edf5abaf5150a15c/code/src/sixsq/nuvla/server/resources/configuration_session_mitreid_token.clj | clojure |
multimethods for validation
initialization
| (ns sixsq.nuvla.server.resources.configuration-session-mitreid-token
(:require
[sixsq.nuvla.server.resources.common.std-crud :as std-crud]
[sixsq.nuvla.server.resources.common.utils :as u]
[sixsq.nuvla.server.resources.configuration :as p]
[sixsq.nuvla.server.resources.spec.configuration-template-session-mitreid-token :as cts-mitreid-token]))
(def ^:const service "session-mitreid-token")
(def validate-fn (u/create-spec-validation-fn ::cts-mitreid-token/schema))
(defmethod p/validate-subtype service
[resource]
(validate-fn resource))
(def create-validate-fn (u/create-spec-validation-fn ::cts-mitreid-token/schema-create))
(defmethod p/create-validate-subtype service
[resource]
(create-validate-fn resource))
(defn initialize
[]
(std-crud/initialize p/resource-type ::cts-mitreid-token/schema))
|
0ccbb9c58abdfab966e6d32723a0ed44bb3ccde445b4b20ebe35cd6bdbc930e5 | juxt/site | nrepl.clj | Copyright © 2021 , JUXT LTD .
(ns juxt.site.alpha.nrepl
(:require
[clojure.tools.logging :as log]
[integrant.core :as ig]
[nrepl.server :refer [start-server stop-server]]))
(alias 'site (create-ns 'juxt.site.alpha))
(defmethod ig/init-key ::server [_ {::site/keys [port] :as opts}]
(log/infof "Starting nREPL server on port %d" port)
(start-server :port port))
(defmethod ig/halt-key! ::server [_ server]
(when server
(log/info "Stopping nREPL server")
(stop-server server)))
| null | https://raw.githubusercontent.com/juxt/site/f45326476e9793e812839f1f0a465e11d29e2bb4/src/juxt/site/alpha/nrepl.clj | clojure | Copyright © 2021 , JUXT LTD .
(ns juxt.site.alpha.nrepl
(:require
[clojure.tools.logging :as log]
[integrant.core :as ig]
[nrepl.server :refer [start-server stop-server]]))
(alias 'site (create-ns 'juxt.site.alpha))
(defmethod ig/init-key ::server [_ {::site/keys [port] :as opts}]
(log/infof "Starting nREPL server on port %d" port)
(start-server :port port))
(defmethod ig/halt-key! ::server [_ server]
(when server
(log/info "Stopping nREPL server")
(stop-server server)))
| |
cef8ecd378842e9cb4b2a482c429886b6725c7e0ec8289677916268ed9e34e96 | racket/rhombus-prototype | entry-point-macro.rkt | #lang racket/base
(require (for-syntax racket/base
syntax/parse/pre)
"space-provide.rkt"
"entry-point.rkt"
"space.rkt"
"name-root.rkt")
(define+provide-space entry_point rhombus/entry_point
#:fields
(macro))
(define-syntax macro
(lambda (stx)
(raise-syntax-error #f "not supported, yet" stx)))
| null | https://raw.githubusercontent.com/racket/rhombus-prototype/4e66c1361bdde51c2df9332644800baead49e86f/rhombus/private/entry-point-macro.rkt | racket | #lang racket/base
(require (for-syntax racket/base
syntax/parse/pre)
"space-provide.rkt"
"entry-point.rkt"
"space.rkt"
"name-root.rkt")
(define+provide-space entry_point rhombus/entry_point
#:fields
(macro))
(define-syntax macro
(lambda (stx)
(raise-syntax-error #f "not supported, yet" stx)))
| |
de4dc5644ab295a6cba808fccc9d7d97c71d0a29d36f8f5d22e54b6aa3335e4f | pallet/pallet | plugin.clj | (ns pallet.plugin (:require
[chiba.plugin :refer [plugins]]))
(defn load-plugins
"Load pallet plugins"
[]
(let [plugin-namespaces (plugins "pallet.plugin." #".*test.*")]
(doseq [plugin plugin-namespaces]
(require plugin)
(when-let [init (ns-resolve plugin 'init)]
(init)))
plugin-namespaces))
| null | https://raw.githubusercontent.com/pallet/pallet/30226008d243c1072dcfa1f27150173d6d71c36d/src/pallet/plugin.clj | clojure | (ns pallet.plugin (:require
[chiba.plugin :refer [plugins]]))
(defn load-plugins
"Load pallet plugins"
[]
(let [plugin-namespaces (plugins "pallet.plugin." #".*test.*")]
(doseq [plugin plugin-namespaces]
(require plugin)
(when-let [init (ns-resolve plugin 'init)]
(init)))
plugin-namespaces))
| |
380f581c20c4b0edb2c8f7b33788f318001c9ed87c2350a31b26414f97290c84 | haskus/haskus-system | Devices.hs | {-# LANGUAGE OverloadedStrings #-}
module Haskus.Tests.System.Devices
( testsDevices
)
where
import Test.Tasty
import Test.Tasty.QuickCheck as QC
import Test.QuickCheck.Monadic
import Haskus.System.Devices
import Data.Maybe
import qualified Data.Map as Map
import Control.Concurrent.STM
treeRoot :: IO DeviceTree
treeRoot = deviceTreeCreate Nothing Nothing Map.empty
treeXYZ :: IO DeviceTree
treeXYZ = deviceTreeCreate (Just "XYZ") Nothing Map.empty
treeABC :: IO DeviceTree
treeABC = deviceTreeCreate (Just "ABC") Nothing Map.empty
testsDevices :: TestTree
testsDevices = testGroup "Device tree"
[ testProperty "Insert/lookup" $ monadicIO $ do
let path = "/devices/xyz"
tree <- run $ do
s <- deviceTreeInsert path <$> treeXYZ <*> treeRoot
atomically s
let xyz = deviceTreeLookup path tree
assert (isJust xyz)
assert (deviceNodeSubsystem (fromJust xyz) == Just "XYZ")
, testProperty "Insert/remove" $ monadicIO $ do
let path = "/devices/xyz"
tree <- run $ do
s <- deviceTreeInsert path <$> treeXYZ <*> treeRoot
atomically s
let xyz = deviceTreeLookup path (deviceTreeRemove path tree)
assert (isNothing xyz)
, testProperty "Insert/lookup hierarchy" $ monadicIO $ do
let path0 = "/devices/xyz"
let path1 = "/devices/xyz/abc"
tree <- run $ do
xyz <- treeXYZ
abc <- treeABC
root <- treeRoot
atomically $ do
t1 <- deviceTreeInsert path0 xyz root
deviceTreeInsert path1 abc t1
let abc = deviceTreeLookup path1 tree
assert (isJust abc)
assert (deviceNodeSubsystem (fromJust abc) == Just "ABC")
]
| null | https://raw.githubusercontent.com/haskus/haskus-system/38b3a363c26bc4d82e3493d8638d46bc35678616/haskus-system/src/tests/Haskus/Tests/System/Devices.hs | haskell | # LANGUAGE OverloadedStrings # |
module Haskus.Tests.System.Devices
( testsDevices
)
where
import Test.Tasty
import Test.Tasty.QuickCheck as QC
import Test.QuickCheck.Monadic
import Haskus.System.Devices
import Data.Maybe
import qualified Data.Map as Map
import Control.Concurrent.STM
treeRoot :: IO DeviceTree
treeRoot = deviceTreeCreate Nothing Nothing Map.empty
treeXYZ :: IO DeviceTree
treeXYZ = deviceTreeCreate (Just "XYZ") Nothing Map.empty
treeABC :: IO DeviceTree
treeABC = deviceTreeCreate (Just "ABC") Nothing Map.empty
testsDevices :: TestTree
testsDevices = testGroup "Device tree"
[ testProperty "Insert/lookup" $ monadicIO $ do
let path = "/devices/xyz"
tree <- run $ do
s <- deviceTreeInsert path <$> treeXYZ <*> treeRoot
atomically s
let xyz = deviceTreeLookup path tree
assert (isJust xyz)
assert (deviceNodeSubsystem (fromJust xyz) == Just "XYZ")
, testProperty "Insert/remove" $ monadicIO $ do
let path = "/devices/xyz"
tree <- run $ do
s <- deviceTreeInsert path <$> treeXYZ <*> treeRoot
atomically s
let xyz = deviceTreeLookup path (deviceTreeRemove path tree)
assert (isNothing xyz)
, testProperty "Insert/lookup hierarchy" $ monadicIO $ do
let path0 = "/devices/xyz"
let path1 = "/devices/xyz/abc"
tree <- run $ do
xyz <- treeXYZ
abc <- treeABC
root <- treeRoot
atomically $ do
t1 <- deviceTreeInsert path0 xyz root
deviceTreeInsert path1 abc t1
let abc = deviceTreeLookup path1 tree
assert (isJust abc)
assert (deviceNodeSubsystem (fromJust abc) == Just "ABC")
]
|
4ace5f7c04d6251fcbba54dd187534b81ef5f83f4ce9a802c4c79103d5face21 | Opetushallitus/ataru | lomake.cljs | (ns ataru.virkailija.dev.lomake)
(def translations {:translations #js {}})
(def controller {:controller
(clj->js {:getCustomComponentTypeMapping (fn [] #js [])
:componentDidMount (fn [field value])
:createCustomComponent (fn [props])})})
(def text-field {:id "test-text-field"
:fieldType "textField"
:fieldClass "formField"
:label {:fi "Suomi"
:sv "Ruotsi"
:en "Englanti"}})
(def lomake-1 {:form {:content [text-field]}})
(defn field [field]
{:field field
:fieldType (:fieldType field)})
(def placeholder-content
{:content
[{:fieldClass "wrapperElement"
:id "applicant-fieldset"
:children
[{:fieldClass "formField"
:helpText
{:fi "Yhteyshenkilöllä tarkoitetaan hankkeen vastuuhenkilöä."
:sv "Med kontaktperson avses den projektansvariga i sökandeorganisationen."}
:label {:fi "Sukunimi", :sv "Efternamn"}
:id "applicant-firstname"
:validators ["required"]
:fieldType "textField"}
{:fieldClass "formField"
:label {:fi "Etunimi", :sv "Förnamn"}
:id "applicant-surname"
:validators ["required"]
:fieldType "textField"}]}]})
| null | https://raw.githubusercontent.com/Opetushallitus/ataru/2d8ef1d3f972621e301a3818567d4e11219d2e82/src/cljs/ataru/virkailija/dev/lomake.cljs | clojure | (ns ataru.virkailija.dev.lomake)
(def translations {:translations #js {}})
(def controller {:controller
(clj->js {:getCustomComponentTypeMapping (fn [] #js [])
:componentDidMount (fn [field value])
:createCustomComponent (fn [props])})})
(def text-field {:id "test-text-field"
:fieldType "textField"
:fieldClass "formField"
:label {:fi "Suomi"
:sv "Ruotsi"
:en "Englanti"}})
(def lomake-1 {:form {:content [text-field]}})
(defn field [field]
{:field field
:fieldType (:fieldType field)})
(def placeholder-content
{:content
[{:fieldClass "wrapperElement"
:id "applicant-fieldset"
:children
[{:fieldClass "formField"
:helpText
{:fi "Yhteyshenkilöllä tarkoitetaan hankkeen vastuuhenkilöä."
:sv "Med kontaktperson avses den projektansvariga i sökandeorganisationen."}
:label {:fi "Sukunimi", :sv "Efternamn"}
:id "applicant-firstname"
:validators ["required"]
:fieldType "textField"}
{:fieldClass "formField"
:label {:fi "Etunimi", :sv "Förnamn"}
:id "applicant-surname"
:validators ["required"]
:fieldType "textField"}]}]})
| |
3583df8b0c7fc3853a3eaa760f68dd33d597349ad4c2759a0b5cf6fdace1361f | 5HT/ant | Opaque.ml |
exception Type_error;
type opaque 'a =
{
data : !'b . 'b;
type_info : type_info 'a
}
and type_info 'a =
{
name : string;
apply : opaque 'a -> 'a -> 'a;
compare : opaque 'a -> opaque 'a -> bool;
unify : opaque 'a -> opaque 'a -> bool
};
value type_name x = x.type_info.name;
value same_type x y = (x.type_info == y.type_info);
value apply x y = x.type_info.apply x y;
value compare x y = x.type_info.compare x y;
value unify x y = x.type_info.unify x y;
value declare_type name apply cmp unify = do
{
let rec wrap x =
{
data = Obj.magic x;
type_info = ti
}
and unwrap x = do
{
if x.type_info == ti then
x.data
else
raise Type_error
}
and ti =
{
name = name;
apply = (fun x y -> apply (unwrap x) y);
compare = (fun x y -> cmp (unwrap x) (unwrap y));
unify = (fun x y -> unify (unwrap x) (unwrap y))
};
(wrap, unwrap)
};
| null | https://raw.githubusercontent.com/5HT/ant/6acf51f4c4ebcc06c52c595776e0293cfa2f1da4/VM/Opaque.ml | ocaml |
exception Type_error;
type opaque 'a =
{
data : !'b . 'b;
type_info : type_info 'a
}
and type_info 'a =
{
name : string;
apply : opaque 'a -> 'a -> 'a;
compare : opaque 'a -> opaque 'a -> bool;
unify : opaque 'a -> opaque 'a -> bool
};
value type_name x = x.type_info.name;
value same_type x y = (x.type_info == y.type_info);
value apply x y = x.type_info.apply x y;
value compare x y = x.type_info.compare x y;
value unify x y = x.type_info.unify x y;
value declare_type name apply cmp unify = do
{
let rec wrap x =
{
data = Obj.magic x;
type_info = ti
}
and unwrap x = do
{
if x.type_info == ti then
x.data
else
raise Type_error
}
and ti =
{
name = name;
apply = (fun x y -> apply (unwrap x) y);
compare = (fun x y -> cmp (unwrap x) (unwrap y));
unify = (fun x y -> unify (unwrap x) (unwrap y))
};
(wrap, unwrap)
};
| |
9d1ddf449c5f69be08ec74b5a984b8573f2aef42542bce1615e24c2e9e6eaf8f | FundingCircle/jackdaw | edn.clj | (ns jackdaw.serdes.edn
"DEPRECATION NOTICE:
This namespace is deprecated. Please use jackdaw.serdes/edn-serde.
The behavior of the new EDN serde is different. It does not print
the newline.
Implements an EDN SerDes (Serializer/Deserializer)."
{:license "BSD 3-Clause License <>"}
(:require [clojure.edn]
[jackdaw.serdes.fn :as jsfn])
(:import java.nio.charset.StandardCharsets
org.apache.kafka.common.serialization.Serdes))
(set! *warn-on-reflection* true)
(defn to-bytes
"Converts a string to a byte array."
[data]
(.getBytes ^String data StandardCharsets/UTF_8))
(defn from-bytes
"Converts a byte array to a string."
[^bytes data]
(String. data StandardCharsets/UTF_8))
(defn serializer
"Returns an EDN serializer."
[]
(jsfn/new-serializer {:serialize (fn [_ _ data]
(when data
(to-bytes
(binding [*print-length* false
*print-level* false]
(prn-str data)))))}))
(defn deserializer
"Returns an EDN deserializer."
([]
(deserializer {}))
([opts]
(let [opts (into {} opts)]
(jsfn/new-deserializer {:deserialize (fn [_ _ data]
(->> (from-bytes data)
(clojure.edn/read-string opts)))}))))
(defn serde
"Returns an EDN serde."
[& [opts]]
(Serdes/serdeFrom (serializer) (deserializer opts)))
| null | https://raw.githubusercontent.com/FundingCircle/jackdaw/e0c66d386277282219e070cfbd0fe2ffa3c9dca5/src/jackdaw/serdes/edn.clj | clojure | (ns jackdaw.serdes.edn
"DEPRECATION NOTICE:
This namespace is deprecated. Please use jackdaw.serdes/edn-serde.
The behavior of the new EDN serde is different. It does not print
the newline.
Implements an EDN SerDes (Serializer/Deserializer)."
{:license "BSD 3-Clause License <>"}
(:require [clojure.edn]
[jackdaw.serdes.fn :as jsfn])
(:import java.nio.charset.StandardCharsets
org.apache.kafka.common.serialization.Serdes))
(set! *warn-on-reflection* true)
(defn to-bytes
"Converts a string to a byte array."
[data]
(.getBytes ^String data StandardCharsets/UTF_8))
(defn from-bytes
"Converts a byte array to a string."
[^bytes data]
(String. data StandardCharsets/UTF_8))
(defn serializer
"Returns an EDN serializer."
[]
(jsfn/new-serializer {:serialize (fn [_ _ data]
(when data
(to-bytes
(binding [*print-length* false
*print-level* false]
(prn-str data)))))}))
(defn deserializer
"Returns an EDN deserializer."
([]
(deserializer {}))
([opts]
(let [opts (into {} opts)]
(jsfn/new-deserializer {:deserialize (fn [_ _ data]
(->> (from-bytes data)
(clojure.edn/read-string opts)))}))))
(defn serde
"Returns an EDN serde."
[& [opts]]
(Serdes/serdeFrom (serializer) (deserializer opts)))
| |
66b6d2f6be95989d1ad4cdb1d5edc5825d32eac02cc87b418936a02d1b86655d | emqx/emqx | emqx_pqueue_SUITE.erl | %%--------------------------------------------------------------------
Copyright ( c ) 2018 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_pqueue_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
-define(PQ, emqx_pqueue).
-define(SUITE, ?MODULE).
all() -> emqx_common_test_helpers:all(?SUITE).
t_is_queue(_) ->
Q = ?PQ:new(),
?assertEqual(true, ?PQ:is_queue(Q)),
Q1 = ?PQ:in(a, 1, Q),
?assertEqual(true, ?PQ:is_queue(Q1)),
?assertEqual(false, ?PQ:is_queue(bad_queue)).
t_is_empty(_) ->
Q = ?PQ:new(),
?assertEqual(true, ?PQ:is_empty(Q)),
?assertEqual(false, ?PQ:is_empty(?PQ:in(a, Q))).
t_len(_) ->
Q = ?PQ:new(),
Q1 = ?PQ:in(a, Q),
?assertEqual(1, ?PQ:len(Q1)),
Q2 = ?PQ:in(b, 1, Q1),
?assertEqual(2, ?PQ:len(Q2)).
t_plen(_) ->
Q = ?PQ:new(),
Q1 = ?PQ:in(a, Q),
?assertEqual(1, ?PQ:plen(0, Q1)),
?assertEqual(0, ?PQ:plen(1, Q1)),
Q2 = ?PQ:in(b, 1, Q1),
Q3 = ?PQ:in(c, 1, Q2),
?assertEqual(2, ?PQ:plen(1, Q3)),
?assertEqual(1, ?PQ:plen(0, Q3)),
?assertEqual(0, ?PQ:plen(0, {pqueue, []})).
t_to_list(_) ->
Q = ?PQ:new(),
?assertEqual([], ?PQ:to_list(Q)),
Q1 = ?PQ:in(a, Q),
L1 = ?PQ:to_list(Q1),
?assertEqual([{0, a}], L1),
Q2 = ?PQ:in(b, 1, Q1),
L2 = ?PQ:to_list(Q2),
?assertEqual([{1, b}, {0, a}], L2).
t_from_list(_) ->
Q = ?PQ:from_list([{1, c}, {1, d}, {0, a}, {0, b}]),
?assertEqual({pqueue, [{-1, {queue, [d], [c], 2}}, {0, {queue, [b], [a], 2}}]}, Q),
?assertEqual(true, ?PQ:is_queue(Q)),
?assertEqual(4, ?PQ:len(Q)).
t_in(_) ->
Q = ?PQ:new(),
Els = [a, b, {c, 1}, {d, 1}, {e, infinity}, {f, 2}],
Q1 = lists:foldl(
fun
({El, P}, Acc) ->
?PQ:in(El, P, Acc);
(El, Acc) ->
?PQ:in(El, Acc)
end,
Q,
Els
),
?assertEqual(
{pqueue, [
{infinity, {queue, [e], [], 1}},
{-2, {queue, [f], [], 1}},
{-1, {queue, [d], [c], 2}},
{0, {queue, [b], [a], 2}}
]},
Q1
).
t_out(_) ->
Q = ?PQ:new(),
{empty, Q} = ?PQ:out(Q),
{empty, Q} = ?PQ:out(0, Q),
try ?PQ:out(1, Q) of
_ -> ct:fail(should_throw_error)
catch
error:Reason ->
?assertEqual(Reason, badarg)
end,
{{value, a}, Q} = ?PQ:out(?PQ:from_list([{0, a}])),
{{value, a}, {queue, [], [b], 1}} = ?PQ:out(?PQ:from_list([{0, a}, {0, b}])),
{{value, a}, {queue, [], [], 0}} = ?PQ:out({queue, [], [a], 1}),
{{value, a}, {queue, [c], [b], 2}} = ?PQ:out({queue, [c, b], [a], 3}),
{{value, a}, {queue, [e, d], [b, c], 4}} = ?PQ:out({queue, [e, d, c, b], [a], 5}),
{{value, a}, {queue, [c], [b], 2}} = ?PQ:out({queue, [c, b, a], [], 3}),
{{value, a}, {queue, [d, c], [b], 3}} = ?PQ:out({queue, [d, c], [a, b], 4}),
{{value, a}, {queue, [], [], 0}} = ?PQ:out(?PQ:from_list([{1, a}])),
{{value, a}, {queue, [c], [b], 2}} = ?PQ:out(?PQ:from_list([{1, a}, {0, b}, {0, c}])),
{{value, a}, {pqueue, [{-1, {queue, [b], [], 1}}]}} = ?PQ:out(?PQ:from_list([{1, b}, {2, a}])),
{{value, a}, {pqueue, [{-1, {queue, [], [b], 1}}]}} = ?PQ:out(?PQ:from_list([{1, a}, {1, b}])).
t_out_2(_) ->
{empty, {pqueue, [{-1, {queue, [a], [], 1}}]}} = ?PQ:out(0, ?PQ:from_list([{1, a}])),
{{value, a}, {queue, [], [], 0}} = ?PQ:out(1, ?PQ:from_list([{1, a}])),
{{value, a}, {pqueue, [{-1, {queue, [], [b], 1}}]}} =
?PQ:out(1, ?PQ:from_list([{1, a}, {1, b}])),
{{value, a}, {queue, [b], [], 1}} = ?PQ:out(1, ?PQ:from_list([{1, a}, {0, b}])).
t_out_p(_) ->
{empty, {queue, [], [], 0}} = ?PQ:out_p(?PQ:new()),
{{value, a, 1}, {queue, [b], [], 1}} = ?PQ:out_p(?PQ:from_list([{1, a}, {0, b}])).
t_join(_) ->
Q = ?PQ:in(a, ?PQ:new()),
Q = ?PQ:join(Q, ?PQ:new()),
Q = ?PQ:join(?PQ:new(), Q),
Q1 = ?PQ:in(a, ?PQ:new()),
Q2 = ?PQ:in(b, Q1),
Q3 = ?PQ:in(c, Q2),
{queue, [c, b], [a], 3} = Q3,
Q4 = ?PQ:in(x, ?PQ:new()),
Q5 = ?PQ:in(y, Q4),
Q6 = ?PQ:in(z, Q5),
{queue, [z, y], [x], 3} = Q6,
{queue, [z, y], [a, b, c, x], 6} = ?PQ:join(Q3, Q6),
PQueue1 = ?PQ:from_list([{1, c}, {1, d}]),
PQueue2 = ?PQ:from_list([{1, c}, {1, d}, {0, a}, {0, b}]),
PQueue3 = ?PQ:from_list([{1, c}, {1, d}, {-1, a}, {-1, b}]),
{pqueue, [
{-1, {queue, [d], [c], 2}},
{0, {queue, [z, y], [x], 3}}
]} = ?PQ:join(PQueue1, Q6),
{pqueue, [
{-1, {queue, [d], [c], 2}},
{0, {queue, [z, y], [x], 3}}
]} = ?PQ:join(Q6, PQueue1),
{pqueue, [
{-1, {queue, [d], [c], 2}},
{0, {queue, [z, y], [a, b, x], 5}}
]} = ?PQ:join(PQueue2, Q6),
{pqueue, [
{-1, {queue, [d], [c], 2}},
{0, {queue, [b], [x, y, z, a], 5}}
]} = ?PQ:join(Q6, PQueue2),
{pqueue, [
{-1, {queue, [d], [c], 2}},
{0, {queue, [z, y], [x], 3}},
{1, {queue, [b], [a], 2}}
]} = ?PQ:join(PQueue3, Q6),
{pqueue, [
{-1, {queue, [d], [c], 2}},
{0, {queue, [z, y], [x], 3}},
{1, {queue, [b], [a], 2}}
]} = ?PQ:join(Q6, PQueue3),
PQueue4 = ?PQ:from_list([{1, c}, {1, d}]),
PQueue5 = ?PQ:from_list([{2, a}, {2, b}]),
{pqueue, [
{-2, {queue, [b], [a], 2}},
{-1, {queue, [d], [c], 2}}
]} = ?PQ:join(PQueue4, PQueue5).
t_filter(_) ->
{pqueue, [
{-2, {queue, [10], [4], 2}},
{-1, {queue, [2], [], 1}}
]} =
?PQ:filter(
fun
(V) when V rem 2 =:= 0 ->
true;
(_) ->
false
end,
?PQ:from_list([{0, 1}, {0, 3}, {1, 2}, {2, 4}, {2, 10}])
).
t_highest(_) ->
empty = ?PQ:highest(?PQ:new()),
0 = ?PQ:highest(?PQ:from_list([{0, a}, {0, b}])),
2 = ?PQ:highest(?PQ:from_list([{0, a}, {0, b}, {1, c}, {2, d}, {2, e}])).
| null | https://raw.githubusercontent.com/emqx/emqx/dbc10c2eed3df314586c7b9ac6292083204f1f68/apps/emqx/test/emqx_pqueue_SUITE.erl | erlang | --------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-------------------------------------------------------------------- | Copyright ( c ) 2018 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(emqx_pqueue_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
-define(PQ, emqx_pqueue).
-define(SUITE, ?MODULE).
all() -> emqx_common_test_helpers:all(?SUITE).
t_is_queue(_) ->
Q = ?PQ:new(),
?assertEqual(true, ?PQ:is_queue(Q)),
Q1 = ?PQ:in(a, 1, Q),
?assertEqual(true, ?PQ:is_queue(Q1)),
?assertEqual(false, ?PQ:is_queue(bad_queue)).
t_is_empty(_) ->
Q = ?PQ:new(),
?assertEqual(true, ?PQ:is_empty(Q)),
?assertEqual(false, ?PQ:is_empty(?PQ:in(a, Q))).
t_len(_) ->
Q = ?PQ:new(),
Q1 = ?PQ:in(a, Q),
?assertEqual(1, ?PQ:len(Q1)),
Q2 = ?PQ:in(b, 1, Q1),
?assertEqual(2, ?PQ:len(Q2)).
t_plen(_) ->
Q = ?PQ:new(),
Q1 = ?PQ:in(a, Q),
?assertEqual(1, ?PQ:plen(0, Q1)),
?assertEqual(0, ?PQ:plen(1, Q1)),
Q2 = ?PQ:in(b, 1, Q1),
Q3 = ?PQ:in(c, 1, Q2),
?assertEqual(2, ?PQ:plen(1, Q3)),
?assertEqual(1, ?PQ:plen(0, Q3)),
?assertEqual(0, ?PQ:plen(0, {pqueue, []})).
t_to_list(_) ->
Q = ?PQ:new(),
?assertEqual([], ?PQ:to_list(Q)),
Q1 = ?PQ:in(a, Q),
L1 = ?PQ:to_list(Q1),
?assertEqual([{0, a}], L1),
Q2 = ?PQ:in(b, 1, Q1),
L2 = ?PQ:to_list(Q2),
?assertEqual([{1, b}, {0, a}], L2).
t_from_list(_) ->
Q = ?PQ:from_list([{1, c}, {1, d}, {0, a}, {0, b}]),
?assertEqual({pqueue, [{-1, {queue, [d], [c], 2}}, {0, {queue, [b], [a], 2}}]}, Q),
?assertEqual(true, ?PQ:is_queue(Q)),
?assertEqual(4, ?PQ:len(Q)).
t_in(_) ->
Q = ?PQ:new(),
Els = [a, b, {c, 1}, {d, 1}, {e, infinity}, {f, 2}],
Q1 = lists:foldl(
fun
({El, P}, Acc) ->
?PQ:in(El, P, Acc);
(El, Acc) ->
?PQ:in(El, Acc)
end,
Q,
Els
),
?assertEqual(
{pqueue, [
{infinity, {queue, [e], [], 1}},
{-2, {queue, [f], [], 1}},
{-1, {queue, [d], [c], 2}},
{0, {queue, [b], [a], 2}}
]},
Q1
).
t_out(_) ->
Q = ?PQ:new(),
{empty, Q} = ?PQ:out(Q),
{empty, Q} = ?PQ:out(0, Q),
try ?PQ:out(1, Q) of
_ -> ct:fail(should_throw_error)
catch
error:Reason ->
?assertEqual(Reason, badarg)
end,
{{value, a}, Q} = ?PQ:out(?PQ:from_list([{0, a}])),
{{value, a}, {queue, [], [b], 1}} = ?PQ:out(?PQ:from_list([{0, a}, {0, b}])),
{{value, a}, {queue, [], [], 0}} = ?PQ:out({queue, [], [a], 1}),
{{value, a}, {queue, [c], [b], 2}} = ?PQ:out({queue, [c, b], [a], 3}),
{{value, a}, {queue, [e, d], [b, c], 4}} = ?PQ:out({queue, [e, d, c, b], [a], 5}),
{{value, a}, {queue, [c], [b], 2}} = ?PQ:out({queue, [c, b, a], [], 3}),
{{value, a}, {queue, [d, c], [b], 3}} = ?PQ:out({queue, [d, c], [a, b], 4}),
{{value, a}, {queue, [], [], 0}} = ?PQ:out(?PQ:from_list([{1, a}])),
{{value, a}, {queue, [c], [b], 2}} = ?PQ:out(?PQ:from_list([{1, a}, {0, b}, {0, c}])),
{{value, a}, {pqueue, [{-1, {queue, [b], [], 1}}]}} = ?PQ:out(?PQ:from_list([{1, b}, {2, a}])),
{{value, a}, {pqueue, [{-1, {queue, [], [b], 1}}]}} = ?PQ:out(?PQ:from_list([{1, a}, {1, b}])).
t_out_2(_) ->
{empty, {pqueue, [{-1, {queue, [a], [], 1}}]}} = ?PQ:out(0, ?PQ:from_list([{1, a}])),
{{value, a}, {queue, [], [], 0}} = ?PQ:out(1, ?PQ:from_list([{1, a}])),
{{value, a}, {pqueue, [{-1, {queue, [], [b], 1}}]}} =
?PQ:out(1, ?PQ:from_list([{1, a}, {1, b}])),
{{value, a}, {queue, [b], [], 1}} = ?PQ:out(1, ?PQ:from_list([{1, a}, {0, b}])).
t_out_p(_) ->
{empty, {queue, [], [], 0}} = ?PQ:out_p(?PQ:new()),
{{value, a, 1}, {queue, [b], [], 1}} = ?PQ:out_p(?PQ:from_list([{1, a}, {0, b}])).
t_join(_) ->
Q = ?PQ:in(a, ?PQ:new()),
Q = ?PQ:join(Q, ?PQ:new()),
Q = ?PQ:join(?PQ:new(), Q),
Q1 = ?PQ:in(a, ?PQ:new()),
Q2 = ?PQ:in(b, Q1),
Q3 = ?PQ:in(c, Q2),
{queue, [c, b], [a], 3} = Q3,
Q4 = ?PQ:in(x, ?PQ:new()),
Q5 = ?PQ:in(y, Q4),
Q6 = ?PQ:in(z, Q5),
{queue, [z, y], [x], 3} = Q6,
{queue, [z, y], [a, b, c, x], 6} = ?PQ:join(Q3, Q6),
PQueue1 = ?PQ:from_list([{1, c}, {1, d}]),
PQueue2 = ?PQ:from_list([{1, c}, {1, d}, {0, a}, {0, b}]),
PQueue3 = ?PQ:from_list([{1, c}, {1, d}, {-1, a}, {-1, b}]),
{pqueue, [
{-1, {queue, [d], [c], 2}},
{0, {queue, [z, y], [x], 3}}
]} = ?PQ:join(PQueue1, Q6),
{pqueue, [
{-1, {queue, [d], [c], 2}},
{0, {queue, [z, y], [x], 3}}
]} = ?PQ:join(Q6, PQueue1),
{pqueue, [
{-1, {queue, [d], [c], 2}},
{0, {queue, [z, y], [a, b, x], 5}}
]} = ?PQ:join(PQueue2, Q6),
{pqueue, [
{-1, {queue, [d], [c], 2}},
{0, {queue, [b], [x, y, z, a], 5}}
]} = ?PQ:join(Q6, PQueue2),
{pqueue, [
{-1, {queue, [d], [c], 2}},
{0, {queue, [z, y], [x], 3}},
{1, {queue, [b], [a], 2}}
]} = ?PQ:join(PQueue3, Q6),
{pqueue, [
{-1, {queue, [d], [c], 2}},
{0, {queue, [z, y], [x], 3}},
{1, {queue, [b], [a], 2}}
]} = ?PQ:join(Q6, PQueue3),
PQueue4 = ?PQ:from_list([{1, c}, {1, d}]),
PQueue5 = ?PQ:from_list([{2, a}, {2, b}]),
{pqueue, [
{-2, {queue, [b], [a], 2}},
{-1, {queue, [d], [c], 2}}
]} = ?PQ:join(PQueue4, PQueue5).
t_filter(_) ->
{pqueue, [
{-2, {queue, [10], [4], 2}},
{-1, {queue, [2], [], 1}}
]} =
?PQ:filter(
fun
(V) when V rem 2 =:= 0 ->
true;
(_) ->
false
end,
?PQ:from_list([{0, 1}, {0, 3}, {1, 2}, {2, 4}, {2, 10}])
).
t_highest(_) ->
empty = ?PQ:highest(?PQ:new()),
0 = ?PQ:highest(?PQ:from_list([{0, a}, {0, b}])),
2 = ?PQ:highest(?PQ:from_list([{0, a}, {0, b}, {1, c}, {2, d}, {2, e}])).
|
6189856fc4dc2e9ee984d5c16bb7545893f5478d8e95d5675973cfa7a21160f3 | andrenth/srsly | milter_callbacks.mli | type ops =
{ is_remote_sender : (string -> bool Lwt.t)
; choose_forward_domain : (string list -> string option Lwt.t)
}
val init : ops -> unit
val connect : Milter.ctx -> string option -> Unix.sockaddr option -> Milter.stat
val helo : Milter.ctx -> string -> Milter.stat
val envfrom : Milter.ctx -> string -> string list -> Milter.stat
val envrcpt : Milter.ctx -> string -> string list -> Milter.stat
val eom : Milter.ctx -> Milter.stat
val abort : Milter.ctx -> Milter.stat
val close : Milter.ctx -> Milter.stat
val negotiate : Milter.ctx
-> Milter.flag list
-> Milter.step list
-> Milter.stat * Milter.flag list * Milter.step list
| null | https://raw.githubusercontent.com/andrenth/srsly/fce92645781a6a6037512be4d35116ec53737b17/src/milter_callbacks.mli | ocaml | type ops =
{ is_remote_sender : (string -> bool Lwt.t)
; choose_forward_domain : (string list -> string option Lwt.t)
}
val init : ops -> unit
val connect : Milter.ctx -> string option -> Unix.sockaddr option -> Milter.stat
val helo : Milter.ctx -> string -> Milter.stat
val envfrom : Milter.ctx -> string -> string list -> Milter.stat
val envrcpt : Milter.ctx -> string -> string list -> Milter.stat
val eom : Milter.ctx -> Milter.stat
val abort : Milter.ctx -> Milter.stat
val close : Milter.ctx -> Milter.stat
val negotiate : Milter.ctx
-> Milter.flag list
-> Milter.step list
-> Milter.stat * Milter.flag list * Milter.step list
| |
63f5ba8d056ee803532b0dba4b52d10d13c4edb5843d1aff20e2201b60651195 | mark-gerarts/nature-of-code | sketch.lisp | (defpackage :nature-of-code.vectors.example-5
(:export :start-sketch)
(:use :cl :trivial-gamekit)
(:import-from :cl-bodge.engine :vector-length))
(in-package :nature-of-code.vectors.example-5)
(defvar *width* 800)
(defvar *height* 600)
(defvar *black* (vec4 0 0 0 1))
(defun draw-magnitude (magnitude)
(let ((rect-height 10))
(draw-rect (vec2 0 (- *height* rect-height))
magnitude
rect-height
:fill-paint *black*)))
(defgame sketch ()
((center
:initform (vec2 (/ *width* 2) (/ *height* 2))
:accessor center)
(mouse-position
:initform (vec2 0 0)
:accessor mouse-position))
(:viewport-width *width*)
(:viewport-height *height*)
(:viewport-title "Vector magnitude"))
(defmethod post-initialize ((this sketch))
(bind-cursor (lambda (x y)
(setf (mouse-position this) (vec2 x y)))))
(defmethod draw ((this sketch))
(let* ((center (center this))
(sub (subt (mouse-position this) center)))
(with-pushed-canvas ()
(translate-canvas (x center) (y center))
(draw-line (vec2 0 0) sub *black* :thickness 2)) ; The vector
(draw-magnitude (vector-length sub)))) ; The magnitude
(defun start-sketch ()
(start 'sketch))
| null | https://raw.githubusercontent.com/mark-gerarts/nature-of-code/4f8612e18a2a62012e44f08a8e0a4f8aec21a1ec/01.%20Vectors/Example%201.5%20-%20Vector%20magnitude/sketch.lisp | lisp | The vector
The magnitude | (defpackage :nature-of-code.vectors.example-5
(:export :start-sketch)
(:use :cl :trivial-gamekit)
(:import-from :cl-bodge.engine :vector-length))
(in-package :nature-of-code.vectors.example-5)
(defvar *width* 800)
(defvar *height* 600)
(defvar *black* (vec4 0 0 0 1))
(defun draw-magnitude (magnitude)
(let ((rect-height 10))
(draw-rect (vec2 0 (- *height* rect-height))
magnitude
rect-height
:fill-paint *black*)))
(defgame sketch ()
((center
:initform (vec2 (/ *width* 2) (/ *height* 2))
:accessor center)
(mouse-position
:initform (vec2 0 0)
:accessor mouse-position))
(:viewport-width *width*)
(:viewport-height *height*)
(:viewport-title "Vector magnitude"))
(defmethod post-initialize ((this sketch))
(bind-cursor (lambda (x y)
(setf (mouse-position this) (vec2 x y)))))
(defmethod draw ((this sketch))
(let* ((center (center this))
(sub (subt (mouse-position this) center)))
(with-pushed-canvas ()
(translate-canvas (x center) (y center))
(defun start-sketch ()
(start 'sketch))
|
2b74d363cc6cc9251f9b7a190ff8daee78d37d910d3841008c9867627704406b | kuenishi/riak_scr_jp | otp_intro_sup.erl |
-module(otp_intro_sup).
-behaviour(supervisor).
%% API
-export([start_link/0]).
%% Supervisor callbacks
-export([init/1]).
%% Helper macro for declaring children of supervisor
-define(CHILD(I, Type), {I, {I, start_link, []}, permanent, 5000, Type, [I]}).
%% ===================================================================
%% API functions
%% ===================================================================
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
%% ===================================================================
%% Supervisor callbacks
%% ===================================================================
init([]) ->
{ok, { {one_for_one, 5, 10}, [?CHILD(otp_echo_server, worker)]} }.
| null | https://raw.githubusercontent.com/kuenishi/riak_scr_jp/7b556227b4439bb18f5eb53822a4247fcbe0ecdd/talks/%235/sample/src/otp_intro_sup.erl | erlang | API
Supervisor callbacks
Helper macro for declaring children of supervisor
===================================================================
API functions
===================================================================
===================================================================
Supervisor callbacks
=================================================================== |
-module(otp_intro_sup).
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
-define(CHILD(I, Type), {I, {I, start_link, []}, permanent, 5000, Type, [I]}).
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
init([]) ->
{ok, { {one_for_one, 5, 10}, [?CHILD(otp_echo_server, worker)]} }.
|
e31a2ef14d693b71cbed843811d80e5936a708bc5ed0df9a16e7a4fee4bb4f71 | the-real-blackh/sodium-2d-game-engine | Cache.hs | module FRP.Sodium.GameEngine2D.Cache (
Cache,
readCache,
writeCache,
newCache,
flipCache
) where
import FRP.Sodium.GameEngine2D.Geometry
import FRP.Sodium.GameEngine2D.Platform (Key)
import Control.Applicative
import Control.Monad
import Data.IORef
import Data.Map (Map)
import qualified Data.Map as M
data Cache args = Cache {
ccTableRef :: IORef (Map Key (Entry args))
}
data Entry args = Entry {
eDraw :: args -> IO (),
eCleanup :: IO (),
eTouched :: Bool
}
newCache :: IO (Cache args)
newCache = Cache <$> newIORef M.empty
readCache :: Cache args -> Key -> IO (Maybe (args -> IO ()))
readCache cache key = do
table <- readIORef (ccTableRef cache)
return $ eDraw `fmap` M.lookup key table
writeCache :: Cache args -> Key -> IO (args -> IO (), IO ()) -> IO ()
writeCache cache key mkDraw = do
table <- readIORef (ccTableRef cache)
case M.lookup key table of
Just entry -> do
writeIORef (ccTableRef cache) $ M.insert key (entry { eTouched = True }) table
Nothing -> do
(draw, cleanup) <- mkDraw
let entry = Entry {
eDraw = draw,
eCleanup = cleanup,
eTouched = True
}
writeIORef (ccTableRef cache) $ M.insert key entry table
flipCache :: Cache args -> IO ()
flipCache cache = do
table <- readIORef (ccTableRef cache)
let (table', toClean) = M.partition eTouched table
writeIORef (ccTableRef cache) $ M.map unTouch table'
--when (not $ M.null toClean) $ print (M.keys toClean)
case M.elems toClean of
[] -> return ()
items -> do
" clean " + + show ( length items )
forM_ items $ \e -> eCleanup e
where
unTouch e = e { eTouched = False }
| null | https://raw.githubusercontent.com/the-real-blackh/sodium-2d-game-engine/fb765eb50d39f0297274c2d24cd71dcec2dc2c2a/FRP/Sodium/GameEngine2D/Cache.hs | haskell | when (not $ M.null toClean) $ print (M.keys toClean) | module FRP.Sodium.GameEngine2D.Cache (
Cache,
readCache,
writeCache,
newCache,
flipCache
) where
import FRP.Sodium.GameEngine2D.Geometry
import FRP.Sodium.GameEngine2D.Platform (Key)
import Control.Applicative
import Control.Monad
import Data.IORef
import Data.Map (Map)
import qualified Data.Map as M
data Cache args = Cache {
ccTableRef :: IORef (Map Key (Entry args))
}
data Entry args = Entry {
eDraw :: args -> IO (),
eCleanup :: IO (),
eTouched :: Bool
}
newCache :: IO (Cache args)
newCache = Cache <$> newIORef M.empty
readCache :: Cache args -> Key -> IO (Maybe (args -> IO ()))
readCache cache key = do
table <- readIORef (ccTableRef cache)
return $ eDraw `fmap` M.lookup key table
writeCache :: Cache args -> Key -> IO (args -> IO (), IO ()) -> IO ()
writeCache cache key mkDraw = do
table <- readIORef (ccTableRef cache)
case M.lookup key table of
Just entry -> do
writeIORef (ccTableRef cache) $ M.insert key (entry { eTouched = True }) table
Nothing -> do
(draw, cleanup) <- mkDraw
let entry = Entry {
eDraw = draw,
eCleanup = cleanup,
eTouched = True
}
writeIORef (ccTableRef cache) $ M.insert key entry table
flipCache :: Cache args -> IO ()
flipCache cache = do
table <- readIORef (ccTableRef cache)
let (table', toClean) = M.partition eTouched table
writeIORef (ccTableRef cache) $ M.map unTouch table'
case M.elems toClean of
[] -> return ()
items -> do
" clean " + + show ( length items )
forM_ items $ \e -> eCleanup e
where
unTouch e = e { eTouched = False }
|
aeea8b29e98b6f79f082a2481f6d6d3d7e110c208b460e7f998c6e6221171524 | ivanperez-keera/Yampa | TestsCommon.hs | -- |
Module : TestsCommon
-- Description : Common definitions for the regression test modules.
Copyright : Yale University , 2003
Authors : and
module TestsCommon where
import FRP.Yampa
-- * Rough equality with instances
-- Rough equality. Only intended to be good enough for test cases in this
-- module.
class REq a where
(~=) :: a -> a -> Bool
epsilon :: Fractional a => a
epsilon = 0.0001
instance REq Float where
x ~= y = abs (x - y) < epsilon -- A relative measure should be used.
instance REq Double where
x ~= y = abs (x - y) < epsilon -- A relative measure should be used.
instance REq Int where
(~=) = (==)
instance REq Integer where
(~=) = (==)
instance REq Bool where
(~=) = (==)
instance REq Char where
(~=) = (==)
instance REq () where
() ~= () = True
instance (REq a, REq b) => REq (a,b) where
(x1,x2) ~= (y1,y2) = x1 ~= y1 && x2 ~= y2
instance (REq a, REq b, REq c) => REq (a,b,c) where
(x1,x2,x3) ~= (y1,y2,y3) = x1 ~= y1 && x2 ~= y2 && x3 ~= y3
instance (REq a, REq b, REq c, REq d) => REq (a,b,c,d) where
(x1,x2,x3,x4) ~= (y1,y2,y3,y4) = x1 ~= y1
&& x2 ~= y2
&& x3 ~= y3
&& x4 ~= y4
instance (REq a, REq b, REq c, REq d, REq e) => REq (a,b,c,d,e) where
(x1,x2,x3,x4,x5) ~= (y1,y2,y3,y4,y5) = x1 ~= y1
&& x2 ~= y2
&& x3 ~= y3
&& x4 ~= y4
&& x5 ~= y5
instance REq a => REq (Maybe a) where
Nothing ~= Nothing = True
(Just x) ~= (Just y) = x ~= y
_ ~= _ = False
instance REq a => REq (Event a) where
NoEvent ~= NoEvent = True
(Event x) ~= (Event y) = x ~= y
_ ~= _ = False
instance (REq a, REq b) => REq (Either a b) where
(Left x) ~= (Left y) = x ~= y
(Right x) ~= (Right y) = x ~= y
_ ~= _ = False
instance REq a => REq [a] where
[] ~= [] = True
(x:xs) ~= (y:ys) = x ~= y && xs ~= ys
_ ~= _ = False
------------------------------------------------------------------------------
-- Testing utilities
------------------------------------------------------------------------------
testSF1 :: SF Double a -> [a]
testSF1 sf = take 25 (embed sf (deltaEncodeBy (~=) 0.25 [0.0..]))
testSF2 :: SF Double a -> [a]
testSF2 sf = take 25 (embed sf (deltaEncodeBy (~=) 0.25 input))
where
The initial 0.0 is just for result compatibility with an older
-- version.
input = 0.0 : [ fromIntegral (b `div` freq) | b <- [1..] :: [Int] ]
freq = 5
------------------------------------------------------------------------------
-- Some utilities used for testing laws
------------------------------------------------------------------------------
fun_prod f g = \(x,y) -> (f x, g y)
assoc :: ((a,b),c) -> (a,(b,c))
assoc ((a,b),c) = (a,(b,c))
assocInv :: (a,(b,c)) -> ((a,b),c)
assocInv (a,(b,c)) = ((a,b),c)
| null | https://raw.githubusercontent.com/ivanperez-keera/Yampa/af3a3393e7e059ffb813dc4381e91b2ffab7efc6/yampa-test/tests/TestsCommon.hs | haskell | |
Description : Common definitions for the regression test modules.
* Rough equality with instances
Rough equality. Only intended to be good enough for test cases in this
module.
A relative measure should be used.
A relative measure should be used.
----------------------------------------------------------------------------
Testing utilities
----------------------------------------------------------------------------
version.
----------------------------------------------------------------------------
Some utilities used for testing laws
---------------------------------------------------------------------------- | Module : TestsCommon
Copyright : Yale University , 2003
Authors : and
module TestsCommon where
import FRP.Yampa
class REq a where
(~=) :: a -> a -> Bool
epsilon :: Fractional a => a
epsilon = 0.0001
instance REq Float where
instance REq Double where
instance REq Int where
(~=) = (==)
instance REq Integer where
(~=) = (==)
instance REq Bool where
(~=) = (==)
instance REq Char where
(~=) = (==)
instance REq () where
() ~= () = True
instance (REq a, REq b) => REq (a,b) where
(x1,x2) ~= (y1,y2) = x1 ~= y1 && x2 ~= y2
instance (REq a, REq b, REq c) => REq (a,b,c) where
(x1,x2,x3) ~= (y1,y2,y3) = x1 ~= y1 && x2 ~= y2 && x3 ~= y3
instance (REq a, REq b, REq c, REq d) => REq (a,b,c,d) where
(x1,x2,x3,x4) ~= (y1,y2,y3,y4) = x1 ~= y1
&& x2 ~= y2
&& x3 ~= y3
&& x4 ~= y4
instance (REq a, REq b, REq c, REq d, REq e) => REq (a,b,c,d,e) where
(x1,x2,x3,x4,x5) ~= (y1,y2,y3,y4,y5) = x1 ~= y1
&& x2 ~= y2
&& x3 ~= y3
&& x4 ~= y4
&& x5 ~= y5
instance REq a => REq (Maybe a) where
Nothing ~= Nothing = True
(Just x) ~= (Just y) = x ~= y
_ ~= _ = False
instance REq a => REq (Event a) where
NoEvent ~= NoEvent = True
(Event x) ~= (Event y) = x ~= y
_ ~= _ = False
instance (REq a, REq b) => REq (Either a b) where
(Left x) ~= (Left y) = x ~= y
(Right x) ~= (Right y) = x ~= y
_ ~= _ = False
instance REq a => REq [a] where
[] ~= [] = True
(x:xs) ~= (y:ys) = x ~= y && xs ~= ys
_ ~= _ = False
testSF1 :: SF Double a -> [a]
testSF1 sf = take 25 (embed sf (deltaEncodeBy (~=) 0.25 [0.0..]))
testSF2 :: SF Double a -> [a]
testSF2 sf = take 25 (embed sf (deltaEncodeBy (~=) 0.25 input))
where
The initial 0.0 is just for result compatibility with an older
input = 0.0 : [ fromIntegral (b `div` freq) | b <- [1..] :: [Int] ]
freq = 5
fun_prod f g = \(x,y) -> (f x, g y)
assoc :: ((a,b),c) -> (a,(b,c))
assoc ((a,b),c) = (a,(b,c))
assocInv :: (a,(b,c)) -> ((a,b),c)
assocInv (a,(b,c)) = ((a,b),c)
|
53fa021627116112f924af521ce544f15f2d6ff94c107dde121a1a2a4ca0bd90 | michalkonecny/aern2 | Conversions.hs | # OPTIONS_GHC -Wno - orphans #
|
Module : AERN2.MP.Float . Conversions
Description : Conversions and comparisons of arbitrary precision floats
Copyright : ( c ) : :
Stability : experimental
Portability : portable
Conversions and comparisons of arbitrary precision floating point numbers
Module : AERN2.MP.Float.Conversions
Description : Conversions and comparisons of arbitrary precision floats
Copyright : (c) Michal Konecny
License : BSD3
Maintainer :
Stability : experimental
Portability : portable
Conversions and comparisons of arbitrary precision floating point numbers
-}
module AERN2.MP.Float.Conversions
(
* MPFloat to other types ( see also instances )
toDouble
* MPFloat constructors ( see also instances )
, CanBeMPFloat, mpFloat
, fromIntegerCEDU
, fromRationalCEDU
-- * comparisons and constants (see also instances)
, zero, one, two
, nan, infinity
)
where
import MixedTypesNumPrelude
import qualified Prelude as P
import Data.Ratio
import Data . Convertible
-- import AERN2.Norm
import AERN2.MP.Precision
import qualified Data.CDAR as MPLow
import AERN2.MP.Float.Auxi
import AERN2.MP.Float.Type
import AERN2.MP.Float.Arithmetic
conversions to MPFloat
type CanBeMPFloat t = ConvertibleExactly t MPFloat
mpFloat :: (CanBeMPFloat t) => t -> MPFloat
mpFloat = convertExactly
instance ConvertibleExactly Integer MPFloat where
safeConvertExactly =
Right . MPFloat . P.fromInteger
instance ConvertibleExactly Int MPFloat where
safeConvertExactly = safeConvertExactly . integer
fromIntegerCEDU :: Precision -> Integer -> BoundsCEDU MPFloat
fromIntegerCEDU pp =
setPrecisionCEDU pp . MPFloat . P.fromInteger
fromRationalCEDU :: Precision -> Rational -> BoundsCEDU MPFloat
fromRationalCEDU pp =
setPrecisionCEDU pp . (MPFloat . MPLow.toApproxMB (p2cdarPrec pp))
conversions from MPFloat
instance ConvertibleExactly MPLow.Approx Rational where
safeConvertExactly = Right . P.toRational
instance ConvertibleExactly MPFloat Rational where
safeConvertExactly = safeConvertExactly . unMPFloat
toDouble :: MPFloat -> Double
toDouble = P.fromRational . rational
instance Convertible MPFloat Double where
safeConvert x
| isFinite dbl = Right dbl
| otherwise = convError "conversion to double: out of bounds" x
where
dbl = toDouble x
instance CanRound MPFloat where
properFraction (MPFloat x) = (n,f)
where
r = rational x
n = (numerator r) `P.quot` (denominator r)
f = ceduCentre $ (MPFloat x) `subCEDU` (MPFloat $ P.fromInteger n)
{- comparisons -}
instance HasEqAsymmetric MPLow.Approx MPLow.Approx
instance HasEqAsymmetric MPFloat MPFloat where
equalTo = lift2R equalTo
instance HasEqAsymmetric MPFloat Integer where
equalTo = convertSecond equalTo
instance HasEqAsymmetric Integer MPFloat where
equalTo = convertFirst equalTo
instance HasEqAsymmetric MPFloat Int where
equalTo = convertSecond equalTo
instance HasEqAsymmetric Int MPFloat where
equalTo = convertFirst equalTo
instance HasEqAsymmetric MPFloat Rational where
equalTo = convertFirst equalTo
instance HasEqAsymmetric Rational MPFloat where
equalTo = convertSecond equalTo
instance CanTestZero MPFloat
instance HasOrderAsymmetric MPLow.Approx MPLow.Approx
instance HasOrderAsymmetric MPFloat MPFloat where
lessThan = lift2R lessThan
leq = lift2R leq
instance HasOrderAsymmetric MPFloat Integer where
lessThan = convertSecond lessThan
leq = convertSecond leq
instance HasOrderAsymmetric Integer MPFloat where
lessThan = convertFirst lessThan
leq = convertFirst leq
instance HasOrderAsymmetric MPFloat Int where
lessThan = convertSecond lessThan
leq = convertSecond leq
instance HasOrderAsymmetric Int MPFloat where
lessThan = convertFirst lessThan
leq = convertFirst leq
instance HasOrderAsymmetric Rational MPFloat where
lessThan = convertSecond lessThan
leq = convertSecond leq
instance HasOrderAsymmetric MPFloat Rational where
lessThan = convertFirst lessThan
leq = convertFirst leq
instance CanTestPosNeg MPFloat
min ,
instance CanMinMaxAsymmetric MPFloat MPFloat where
type MinMaxType MPFloat MPFloat = MPFloat
max x y
| isNaN x = x
| isNaN y = y
| otherwise = lift2 P.max x y
min x y
| isNaN x = x
| isNaN y = y
| otherwise = lift2 P.min x y
{- constants -}
zero, one, two :: MPFloat
zero = mpFloat 0
one = mpFloat 1
two = mpFloat 2
nan, infinity :: MPFloat
nan = MPFloat MPLow.Bottom
infinity = nan
-- | Whether the float is the CDAR bottom element, which this module uses
-- to represent NaN.
itisNaN :: MPFloat -> Bool
itisNaN (MPFloat MPLow.Bottom) = True
itisNaN _ = False
-- NOTE: 'infinity' is defined above as the same 'Bottom' value as 'nan',
-- so infinity tests as infinite/non-finite via the same predicate.
instance CanTestFinite MPFloat where
isInfinite = itisNaN
isFinite = not . itisNaN
instance CanTestNaN MPFloat where
isNaN = itisNaN
| null | https://raw.githubusercontent.com/michalkonecny/aern2/f42a9cc59006bdeda270ac969e1e7b414ad2223d/aern2-mp/src/AERN2/MP/Float/Conversions.hs | haskell | * comparisons and constants (see also instances)
import AERN2.Norm
comparisons
constants | # OPTIONS_GHC -Wno - orphans #
|
Module : AERN2.MP.Float . Conversions
Description : Conversions and comparisons of arbitrary precision floats
Copyright : ( c ) : :
Stability : experimental
Portability : portable
Conversions and comparisons of arbitrary precision floating point numbers
Module : AERN2.MP.Float.Conversions
Description : Conversions and comparisons of arbitrary precision floats
Copyright : (c) Michal Konecny
License : BSD3
Maintainer :
Stability : experimental
Portability : portable
Conversions and comparisons of arbitrary precision floating point numbers
-}
module AERN2.MP.Float.Conversions
(
* MPFloat to other types ( see also instances )
toDouble
* MPFloat constructors ( see also instances )
, CanBeMPFloat, mpFloat
, fromIntegerCEDU
, fromRationalCEDU
, zero, one, two
, nan, infinity
)
where
import MixedTypesNumPrelude
import qualified Prelude as P
import Data.Ratio
import Data . Convertible
import AERN2.MP.Precision
import qualified Data.CDAR as MPLow
import AERN2.MP.Float.Auxi
import AERN2.MP.Float.Type
import AERN2.MP.Float.Arithmetic
conversions to MPFloat
type CanBeMPFloat t = ConvertibleExactly t MPFloat
mpFloat :: (CanBeMPFloat t) => t -> MPFloat
mpFloat = convertExactly
instance ConvertibleExactly Integer MPFloat where
safeConvertExactly =
Right . MPFloat . P.fromInteger
instance ConvertibleExactly Int MPFloat where
safeConvertExactly = safeConvertExactly . integer
fromIntegerCEDU :: Precision -> Integer -> BoundsCEDU MPFloat
fromIntegerCEDU pp =
setPrecisionCEDU pp . MPFloat . P.fromInteger
fromRationalCEDU :: Precision -> Rational -> BoundsCEDU MPFloat
fromRationalCEDU pp =
setPrecisionCEDU pp . (MPFloat . MPLow.toApproxMB (p2cdarPrec pp))
conversions from MPFloat
instance ConvertibleExactly MPLow.Approx Rational where
safeConvertExactly = Right . P.toRational
instance ConvertibleExactly MPFloat Rational where
safeConvertExactly = safeConvertExactly . unMPFloat
toDouble :: MPFloat -> Double
toDouble = P.fromRational . rational
instance Convertible MPFloat Double where
safeConvert x
| isFinite dbl = Right dbl
| otherwise = convError "conversion to double: out of bounds" x
where
dbl = toDouble x
instance CanRound MPFloat where
properFraction (MPFloat x) = (n,f)
where
r = rational x
n = (numerator r) `P.quot` (denominator r)
f = ceduCentre $ (MPFloat x) `subCEDU` (MPFloat $ P.fromInteger n)
instance HasEqAsymmetric MPLow.Approx MPLow.Approx
instance HasEqAsymmetric MPFloat MPFloat where
equalTo = lift2R equalTo
instance HasEqAsymmetric MPFloat Integer where
equalTo = convertSecond equalTo
instance HasEqAsymmetric Integer MPFloat where
equalTo = convertFirst equalTo
instance HasEqAsymmetric MPFloat Int where
equalTo = convertSecond equalTo
instance HasEqAsymmetric Int MPFloat where
equalTo = convertFirst equalTo
instance HasEqAsymmetric MPFloat Rational where
equalTo = convertFirst equalTo
instance HasEqAsymmetric Rational MPFloat where
equalTo = convertSecond equalTo
instance CanTestZero MPFloat
instance HasOrderAsymmetric MPLow.Approx MPLow.Approx
instance HasOrderAsymmetric MPFloat MPFloat where
lessThan = lift2R lessThan
leq = lift2R leq
instance HasOrderAsymmetric MPFloat Integer where
lessThan = convertSecond lessThan
leq = convertSecond leq
instance HasOrderAsymmetric Integer MPFloat where
lessThan = convertFirst lessThan
leq = convertFirst leq
instance HasOrderAsymmetric MPFloat Int where
lessThan = convertSecond lessThan
leq = convertSecond leq
instance HasOrderAsymmetric Int MPFloat where
lessThan = convertFirst lessThan
leq = convertFirst leq
instance HasOrderAsymmetric Rational MPFloat where
lessThan = convertSecond lessThan
leq = convertSecond leq
instance HasOrderAsymmetric MPFloat Rational where
lessThan = convertFirst lessThan
leq = convertFirst leq
instance CanTestPosNeg MPFloat
min ,
instance CanMinMaxAsymmetric MPFloat MPFloat where
type MinMaxType MPFloat MPFloat = MPFloat
max x y
| isNaN x = x
| isNaN y = y
| otherwise = lift2 P.max x y
min x y
| isNaN x = x
| isNaN y = y
| otherwise = lift2 P.min x y
zero, one, two :: MPFloat
zero = mpFloat 0
one = mpFloat 1
two = mpFloat 2
nan, infinity :: MPFloat
nan = MPFloat MPLow.Bottom
infinity = nan
itisNaN :: MPFloat -> Bool
itisNaN (MPFloat MPLow.Bottom) = True
itisNaN _ = False
instance CanTestFinite MPFloat where
isInfinite = itisNaN
isFinite = not . itisNaN
instance CanTestNaN MPFloat where
isNaN = itisNaN
|
f9c0e1285005c7560879330ce2cf9a7cb7bca641f89bfc51b2cd8c614bd3445b | IndecisionTree/adventofcode2022 | Tree.hs | module Utils.Tree (
BTree (..),
Context (..),
Zipper,
mkZipper,
unZipper,
up,
down,
topmost,
insert,
findChild,
modify
) where
import Data.Bifunctor (first)
import Data.List (elemIndex, findIndex)
-- | A rose tree: an interior 'Node' with a payload and children, or a
-- 'Leaf' carrying just a payload.
data BTree a = Node a [BTree a] | Leaf a
deriving (Eq, Show, Functor)
-- | One-hole context for a zipper: 'Root' marks the top of the tree;
-- 'Branch' records the parent's payload, the siblings to the left and
-- right of the hole, and the parent's own context.
data Context a = Root | Branch a [BTree a] [BTree a] (Context a)
deriving (Eq, Show)
-- | A focused subtree together with its surrounding context.
type Zipper a = (BTree a, Context a)
-- | Focus on the root of a tree.
mkZipper :: BTree a -> Zipper a
mkZipper t = (t, Root)
-- | Rebuild the whole tree from any zipper position.
unZipper :: Zipper a -> BTree a
unZipper = fst . topmost
-- | Move the focus one level up, reinstalling the focused subtree between
-- its recorded siblings. At the root this is the identity.
up :: Zipper a -> Zipper a
up z@(_, Root) = z
up (focus, Branch parent ls rs ctx) = (Node parent (ls ++ focus : rs), ctx)
-- | Move the focus to the i-th child (0-based). A 'Leaf' focus is
-- returned unchanged; an out-of-range index is an 'error'.
down :: Int -> Zipper a -> Zipper a
down _ z@(Leaf _, _) = z
down i (Node parent cs, ctx)
  | i < 0 || i >= length cs = error "'i' out of range"
  | otherwise               = (focus, Branch parent ls rs ctx)
  where
    (ls, focus : rs) = splitAt i cs
-- | Walk all the way back up to the root of the tree.
topmost :: Zipper a -> Zipper a
topmost z = case z of
  (_, Root) -> z
  _         -> topmost (up z)
-- | Prepend a subtree as the first child of the focused node. Calls
-- 'error' (via 'prependChild') if the focus is a 'Leaf'.
insert :: BTree a -> Zipper a -> Zipper a
insert t = first (prependChild t)
-- | Add a child in front of a node's existing children; a 'Leaf' cannot
-- take children.
prependChild :: BTree a -> BTree a -> BTree a
prependChild _ (Leaf _) = error "cannot append a child node to a Leaf"
prependChild child (Node x children) = Node x (child:children)
-- | Index of the first child of the focus whose payload satisfies the
-- predicate; 'Nothing' for a 'Leaf' focus or when no child matches.
findChild :: (a -> Bool) -> Zipper a -> Maybe Int
findChild _ (Leaf _, _) = Nothing
findChild p (Node _ cs, _) = findIndex (p . value) cs
-- | The payload stored at a tree's root.
value :: BTree a -> a
value (Leaf x) = x
value (Node x _) = x
-- | Apply a function to the payload at the focus, leaving any children
-- and the surrounding context intact.
modify :: (a -> a) -> Zipper a -> Zipper a
modify f (Node x cs, ctx) = (Node (f x) cs, ctx)
modify f (Leaf x, ctx)    = (Leaf (f x), ctx)
| null | https://raw.githubusercontent.com/IndecisionTree/adventofcode2022/3ef082dc306acca983ff1c4e9845c0638cc6f390/solutions/Utils/Tree.hs | haskell | module Utils.Tree (
BTree (..),
Context (..),
Zipper,
mkZipper,
unZipper,
up,
down,
topmost,
insert,
findChild,
modify
) where
import Data.Bifunctor (first)
import Data.List (elemIndex, findIndex)
data BTree a = Node a [BTree a] | Leaf a
deriving (Eq, Show, Functor)
data Context a = Root | Branch a [BTree a] [BTree a] (Context a)
deriving (Eq, Show)
type Zipper a = (BTree a, Context a)
mkZipper :: BTree a -> Zipper a
mkZipper t = (t, Root)
unZipper :: Zipper a -> BTree a
unZipper = fst . topmost
up :: Zipper a -> Zipper a
up z@(_, Root) = z
up (tree, Branch x before after ctx) =
(Node x (before ++ tree:after), ctx)
down :: Int -> Zipper a -> Zipper a
down _ z@(Leaf _, _) = z
down i (Node p children, ctx)
| i < 0 || i >= length children = error "'i' out of range"
| otherwise =
let (before, x:after) = splitAt i children in
(x, Branch p before after ctx)
topmost :: Zipper a -> Zipper a
topmost z@(_, Root) = z
topmost z = topmost $ up z
insert :: BTree a -> Zipper a -> Zipper a
insert t = first (prependChild t)
prependChild :: BTree a -> BTree a -> BTree a
prependChild _ (Leaf _) = error "cannot append a child node to a Leaf"
prependChild child (Node x children) = Node x (child:children)
findChild :: (a -> Bool) -> Zipper a -> Maybe Int
findChild _ (Leaf _, _) = Nothing
findChild f (Node _ children, _) = findIndex f (value <$> children)
value :: BTree a -> a
value (Leaf x) = x
value (Node x _) = x
modify :: (a -> a) -> Zipper a -> Zipper a
modify f (tree, ctx) = case tree of
Node x children -> (Node (f x) children, ctx)
Leaf x -> (Leaf (f x), ctx)
| |
e056d90c7d91fd4472c5b82ba1a474518092614ad96b2b85c297df13eee5f947 | MarcosPividori/push-notify | Types.hs | # LANGUAGE OverloadedStrings , DeriveGeneric #
| This Module defines the main data types for the Push Service .
module Network.PushNotify.General.Types
( -- * Push Settings
Device(..)
, PushServiceConfig(..)
, RegisterResult(..)
, GCMConfig(..)
, PushConfig(..)
-- * Push Manager
, PushManager(..)
-- * Push Message
, PushNotification(..)
, generalNotif
-- * Push Result
, PushResult(..)
, IsPushResult(..)
) where
import Network.PushNotify.Gcm
import Network.PushNotify.Apns
import Network.PushNotify.Mpns
import Network.PushNotify.Ccs
import Network.HTTP.Conduit
import Network.HTTP.Types.Status
import Control.Exception
import qualified Data.Map as M
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString as BS
import qualified Data.Text.Encoding as E
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
import Data.Aeson
import Data.Default
import Data.Hashable
import Data.List
import Data.Monoid
import Data.Text (Text,pack)
import Text.XML
-- | Unique identifier of an app/device.
^ An Android app .
| APNS DeviceToken -- ^ An iOS app.
^ A WPhone app .
deriving (Show, Read, Eq)
instance Hashable Device where
hashWithSalt s (GCM n) = s `hashWithSalt`
(0::Int) `hashWithSalt` n
hashWithSalt s (MPNS n) = s `hashWithSalt`
(1::Int) `hashWithSalt` n
hashWithSalt s (APNS n) = s `hashWithSalt`
(2::Int) `hashWithSalt` n
-- | General notification to be sent.
data PushNotification = PushNotification {
apnsNotif :: Maybe APNSmessage
, gcmNotif :: Maybe GCMmessage
, mpnsNotif :: Maybe MPNSmessage
} deriving Show
instance Default PushNotification where
def = PushNotification Nothing Nothing Nothing
-- | 'generalNotif' builds a general notification from JSON data.
--
-- If the encoded data length exceeds 256 bytes (max payload limit for
-- APNS) it will fail.
--
-- For MPNS, the data will be XML-labeled as \"jsonData\".
generalNotif :: Object -> IO PushNotification
generalNotif dat = do
    let msg = PushNotification
          { apnsNotif = Just def{ rest = Just dat }
          , gcmNotif  = Just def{ data_object = Just dat }
          , mpnsNotif = Just def{ restXML =
                Document (Prologue [] Nothing [])
                         (Element (Name "jsonData" Nothing Nothing)
                                  M.empty
                                  [NodeContent $ E.decodeUtf8 $
                                     BS.concat . BL.toChunks $ encode dat])
                         [] }
          }
    -- Size check uses the APNS payload, the most restrictive of the three.
    if ((BL.length . encode . apnsNotif) msg > 256)
      then fail "Too long payload"
      else return msg
-- | Settings for GCM service.
data GCMConfig = Http GCMHttpConfig | Ccs GCMCcsConfig
| Main settings for the different Push Services . @Nothing@ means the service wo n't be used .
data PushConfig = PushConfig{
gcmConfig :: Maybe GCMConfig
, apnsConfig :: Maybe APNSConfig
, mpnsConfig :: Maybe MPNSConfig
}
instance Default PushConfig where
def = PushConfig Nothing Nothing Nothing
| ' RegisterResult ' represents the result of a device attempting to register
data RegisterResult = SuccessfulReg | ErrorReg Text
| Main settings for the Push Service .
data PushServiceConfig = PushServiceConfig {
pushConfig :: PushConfig -- ^ Main configuration.
, newMessageCallback :: Device -> Value -> IO () -- ^ The callback function to be called when receiving messages from devices
-- (This means through the CCS connection or as POST requests
to the Yesod subsite ) .
, newDeviceCallback :: Device -> Value -> IO RegisterResult -- ^ The callback function to be called when
-- a new device try to register on server.
, unRegisteredCallback :: Device -> IO () -- ^ The callback function to be called when a device unregisters.
, newIdCallback :: (Device,Device) -> IO () -- ^ The callback function to be called when a device's identifier changes.
}
instance Default PushServiceConfig where
def = PushServiceConfig {
pushConfig = def
, newMessageCallback = \_ _ -> return ()
, newDeviceCallback = \_ _ -> return SuccessfulReg
, unRegisteredCallback = \_ -> return ()
, newIdCallback = \_ -> return ()
}
| Main manager for the Push Service .
--
This ' PushManager ' will be used to send notifications and also can be added as a subsite to a Yesod app
-- in order to receive registrations and messages from devices as HTTP POST requests.
data PushManager = PushManager {
^ Conduit manager for sending push notifications though POST requests .
^ Apns manager for sending push notifications though APNS servers .
, ccsManager :: Maybe CCSManager -- ^ Ccs manager for communicating with GCM through Cloud Connection Server.
, serviceConfig :: PushServiceConfig -- ^ Main configuration.
}
| PushResult represents a general result after communicating with a Push Server .
data PushResult = PushResult {
successful :: HS.HashSet Device -- ^ Notifications that were successfully sent.
, failed :: HM.HashMap Device (Either Text SomeException) -- ^ Notifications that were not successfully sent.
, toResend :: HS.HashSet Device -- ^ Failed notifications that you need to resend,
-- because servers were not available or there was a problem with the connection.
, unRegistered :: HS.HashSet Device -- ^ Set of unregistered devices.
, newIds :: HM.HashMap Device Device -- ^ Map of devices which have changed their identifiers. (old -> new)
} deriving Show
instance Default PushResult where
def = PushResult HS.empty HM.empty HS.empty HS.empty HM.empty
instance Monoid PushResult where
mempty = def
mappend (PushResult a1 b1 c1 d1 e1)
(PushResult a2 b2 c2 d2 e2) = PushResult (HS.union a1 a2) (HM.union b1 b2)
(HS.union c1 c2) (HS.union d1 d2) (HM.union e1 e2)
-- | This class represent the translation from a specific service's result into a general Push result.
class IsPushResult a where
toPushResult :: a -> PushResult
instance IsPushResult GCMresult where
toPushResult r = def {
successful = HS.map GCM $ HS.fromList $ HM.keys $ messagesIds r
, failed = HM.fromList $ map (\(x,y) -> (GCM x,Left y)) (HM.toList $ errorRest r)
<> map (\x -> (GCM x,Left "UnregisteredError")) (HS.toList $ errorUnRegistered r)
<> map (\x -> (GCM x,Left "InternalError")) (HS.toList $ errorToReSend r)
, toResend = HS.map GCM $ errorToReSend r
, unRegistered = HS.map GCM $ errorUnRegistered r <> (HS.fromList . HM.keys . errorRest) r
I decide to unregister all regId with error different to Unregistered or Unavailable . ( errorRest )
-- Because these are non-recoverable error.
, newIds = HM.fromList $ map (\(x,y) -> (GCM x,GCM y)) $ HM.toList $ newRegids r
}
instance IsPushResult APNSresult where
toPushResult r = def {
successful = HS.map APNS $ successfulTokens r
, failed = HM.fromList $ map (\x -> (APNS x , Left "CommunicatingError")) $ HS.toList $ toReSendTokens r
, toResend = HS.map APNS $ toReSendTokens r
}
instance IsPushResult APNSFeedBackresult where
toPushResult r = def {
unRegistered = HS.fromList $ map APNS $ HM.keys $ unRegisteredTokens r
}
instance IsPushResult MPNSresult where
toPushResult r = let (successList,failureList) = partition ((== Just Received) . notificationStatus . snd ) $
HM.toList $ successfullResults r
in def {
successful = HS.fromList $ map (MPNS . fst) successList
, failed = (HM.fromList $ map (\(x,y) -> (MPNS x , Right y)) (HM.toList $ errorException r))
<> (HM.fromList $ map (\(x,y) -> (MPNS x , Left $ pack $ show $ notificationStatus y)) failureList)
, toResend = HS.map MPNS . HS.fromList . HM.keys . HM.filter error500 $ errorException r
, unRegistered = HS.map MPNS . HS.fromList . HM.keys . HM.filter ((== Just Expired) . subscriptionStatus) $ successfullResults r
} where
error500 :: SomeException -> Bool
error500 e = case (fromException e) :: Maybe HttpException of
Just (StatusCodeException status _ _) -> (statusCode status) >= 500
_ -> False
| null | https://raw.githubusercontent.com/MarcosPividori/push-notify/4c023c3fd731178d1d114774993a5e337225baa1/push-notify-general/Network/PushNotify/General/Types.hs | haskell | * Push Settings
* Push Manager
* Push Message
* Push Result
| Unique identifier of an app/device.
^ An iOS app.
| General notification to be sent.
| 'generalNotif' Builds a general notification from JSON data.
| Settings for GCM service.
^ Main configuration.
^ The callback function to be called when receiving messages from devices
(This means through the CCS connection or as POST requests
^ The callback function to be called when
a new device try to register on server.
^ The callback function to be called when a device unregisters.
^ The callback function to be called when a device's identifier changes.
in order to receive registrations and messages from devices as HTTP POST requests.
^ Ccs manager for communicating with GCM through Cloud Connection Server.
^ Main configuration.
^ Notifications that were successfully sent.
^ Notifications that were not successfully sent.
^ Failed notifications that you need to resend,
because servers were not available or there was a problem with the connection.
^ Set of unregistered devices.
^ Map of devices which have changed their identifiers. (old -> new)
| This class represent the translation from a specific service's result into a general Push result.
Because these are non-recoverable error. | # LANGUAGE OverloadedStrings , DeriveGeneric #
| This Module defines the main data types for the Push Service .
module Network.PushNotify.General.Types
Device(..)
, PushServiceConfig(..)
, RegisterResult(..)
, GCMConfig(..)
, PushConfig(..)
, PushManager(..)
, PushNotification(..)
, generalNotif
, PushResult(..)
, IsPushResult(..)
) where
import Network.PushNotify.Gcm
import Network.PushNotify.Apns
import Network.PushNotify.Mpns
import Network.PushNotify.Ccs
import Network.HTTP.Conduit
import Network.HTTP.Types.Status
import Control.Exception
import qualified Data.Map as M
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString as BS
import qualified Data.Text.Encoding as E
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
import Data.Aeson
import Data.Default
import Data.Hashable
import Data.List
import Data.Monoid
import Data.Text (Text,pack)
import Text.XML
^ An Android app .
^ A WPhone app .
deriving (Show, Read, Eq)
instance Hashable Device where
hashWithSalt s (GCM n) = s `hashWithSalt`
(0::Int) `hashWithSalt` n
hashWithSalt s (MPNS n) = s `hashWithSalt`
(1::Int) `hashWithSalt` n
hashWithSalt s (APNS n) = s `hashWithSalt`
(2::Int) `hashWithSalt` n
data PushNotification = PushNotification {
apnsNotif :: Maybe APNSmessage
, gcmNotif :: Maybe GCMmessage
, mpnsNotif :: Maybe MPNSmessage
} deriving Show
instance Default PushNotification where
def = PushNotification Nothing Nothing Nothing
If data length exceeds 256 bytes ( max payload limit for APNS ) it will fails .
For MPNS , data will be XML - labeled as " .
generalNotif :: Object -> IO PushNotification
generalNotif dat = do
let msg = PushNotification {
apnsNotif = Just def{ rest = Just dat}
, gcmNotif = Just def{ data_object = Just dat}
, mpnsNotif = Just def{ restXML = Document (Prologue [] Nothing [])
(Element (Name "jsonData" Nothing Nothing)
M.empty
[NodeContent $ E.decodeUtf8 $
BS.concat . BL.toChunks $ encode dat])
[]
}
}
if ((BL.length . encode . apnsNotif) msg > 256)
then fail "Too long payload"
else return msg
data GCMConfig = Http GCMHttpConfig | Ccs GCMCcsConfig
| Main settings for the different Push Services . @Nothing@ means the service wo n't be used .
data PushConfig = PushConfig{
gcmConfig :: Maybe GCMConfig
, apnsConfig :: Maybe APNSConfig
, mpnsConfig :: Maybe MPNSConfig
}
instance Default PushConfig where
def = PushConfig Nothing Nothing Nothing
| ' RegisterResult ' represents the result of a device attempting to register
data RegisterResult = SuccessfulReg | ErrorReg Text
| Main settings for the Push Service .
data PushServiceConfig = PushServiceConfig {
to the Yesod subsite ) .
}
instance Default PushServiceConfig where
def = PushServiceConfig {
pushConfig = def
, newMessageCallback = \_ _ -> return ()
, newDeviceCallback = \_ _ -> return SuccessfulReg
, unRegisteredCallback = \_ -> return ()
, newIdCallback = \_ -> return ()
}
| Main manager for the Push Service .
This ' PushManager ' will be used to send notifications and also can be added as a subsite to a Yesod app
data PushManager = PushManager {
^ Conduit manager for sending push notifications though POST requests .
^ Apns manager for sending push notifications though APNS servers .
}
| PushResult represents a general result after communicating with a Push Server .
data PushResult = PushResult {
} deriving Show
instance Default PushResult where
def = PushResult HS.empty HM.empty HS.empty HS.empty HM.empty
instance Monoid PushResult where
mempty = def
mappend (PushResult a1 b1 c1 d1 e1)
(PushResult a2 b2 c2 d2 e2) = PushResult (HS.union a1 a2) (HM.union b1 b2)
(HS.union c1 c2) (HS.union d1 d2) (HM.union e1 e2)
class IsPushResult a where
toPushResult :: a -> PushResult
instance IsPushResult GCMresult where
toPushResult r = def {
successful = HS.map GCM $ HS.fromList $ HM.keys $ messagesIds r
, failed = HM.fromList $ map (\(x,y) -> (GCM x,Left y)) (HM.toList $ errorRest r)
<> map (\x -> (GCM x,Left "UnregisteredError")) (HS.toList $ errorUnRegistered r)
<> map (\x -> (GCM x,Left "InternalError")) (HS.toList $ errorToReSend r)
, toResend = HS.map GCM $ errorToReSend r
, unRegistered = HS.map GCM $ errorUnRegistered r <> (HS.fromList . HM.keys . errorRest) r
I decide to unregister all regId with error different to Unregistered or Unavailable . ( errorRest )
, newIds = HM.fromList $ map (\(x,y) -> (GCM x,GCM y)) $ HM.toList $ newRegids r
}
instance IsPushResult APNSresult where
toPushResult r = def {
successful = HS.map APNS $ successfulTokens r
, failed = HM.fromList $ map (\x -> (APNS x , Left "CommunicatingError")) $ HS.toList $ toReSendTokens r
, toResend = HS.map APNS $ toReSendTokens r
}
instance IsPushResult APNSFeedBackresult where
toPushResult r = def {
unRegistered = HS.fromList $ map APNS $ HM.keys $ unRegisteredTokens r
}
instance IsPushResult MPNSresult where
toPushResult r = let (successList,failureList) = partition ((== Just Received) . notificationStatus . snd ) $
HM.toList $ successfullResults r
in def {
successful = HS.fromList $ map (MPNS . fst) successList
, failed = (HM.fromList $ map (\(x,y) -> (MPNS x , Right y)) (HM.toList $ errorException r))
<> (HM.fromList $ map (\(x,y) -> (MPNS x , Left $ pack $ show $ notificationStatus y)) failureList)
, toResend = HS.map MPNS . HS.fromList . HM.keys . HM.filter error500 $ errorException r
, unRegistered = HS.map MPNS . HS.fromList . HM.keys . HM.filter ((== Just Expired) . subscriptionStatus) $ successfullResults r
} where
error500 :: SomeException -> Bool
error500 e = case (fromException e) :: Maybe HttpException of
Just (StatusCodeException status _ _) -> (statusCode status) >= 500
_ -> False
|
001056b5646aebf78a9ef750cb215e63ff39b8412b9b352f5954ed73459b8b04 | part-cw/lambdanative | graph-ccode.scm | ;; graph c code snippets
(c-declare #<<end-of-c-declare
#include <stdio.h>
#include <string.h>
#include <math.h>
#define TRUE 1
#define FALSE 0
#define DOWN 0
#define UP 1
#define EVEN 2
/* Compact an exponent-formatted number string in place: drop a trailing
 * '.' immediately before the exponent marker, drop the '+' sign after it,
 * and strip leading zeroes from the exponent digits. Ept is the index of
 * the exponent marker on entry. Returns the same buffer.
 * After removing the '+', Ept is decremented so that Ept+2 again
 * addresses the first exponent digit (matching the position after a '-'
 * sign when no '+' was present). */
static char *graph_wc_linecomp(char *line, int Ept){
int count;
if (line[Ept-1] == '.'){ /* ERASE THE DECIMAL POINT */
count=Ept-1;
--Ept;
while (line[count] != '\0'){ /* Shift the string back by one */
line[count]= line[count+1];
++count;
}
}
if (line[Ept+1] == '+'){ /* ERASE the PLUS sign AFTER the E */
count=Ept+1;
--Ept;
while (line[count] != '\0'){ /* Shift the string back by one */
line[count]= line[count+1];
++count;
}
}
while (line[Ept+2] == '0'){ /* ERASE ANY UNNECESSARY ZEROES */
count=Ept+2;
while (line[count] != '\0'){ /* Shift the string back by one */
line[count]= line[count+1];
++count;
}
}
return( line );
}
/* First labelled y value: start from cgldecy * multiplier / 10 and scale
 * up by powers of ten until the value reaches the axis minimum cgymi. */
static float graph_wc_ybottom(int multiplier, float cgymi, float cgldecy){
    float b;
    b = cgldecy*((float)multiplier)/10.;
    while (b < cgymi)
        b *= 10.;
    return(b);
}
/* First labelled x value: start from cgldecx * multiplier / 10 and scale
 * up by powers of ten until the value reaches the axis minimum cgxmi. */
static float graph_wc_xbottom(int multiplier, float cgxmi, float cgldecx){
    float b;
    b = cgldecx*((float)multiplier)/10.;
    while (b < cgxmi)
        b *= 10.;
    return(b);
}
/* Round fvalue to one significant digit, upwards (UP -> ceil), downwards
 * (DOWN -> floor) or to the nearest (any other value, e.g. EVEN)
 * according to updown. Works by normalising the value into [1,10) via
 * its decade (10^floor(log10 v)), rounding the mantissa, and rescaling.
 * NOTE(review): log10 requires fvalue > 0 -- confirm callers guarantee
 * a positive argument. */
static float graph_wc_trunc(float fvalue, int updown){
float decade,result;
double dublog;
dublog= log10(fvalue);
dublog= floor(dublog);
decade= pow(10.0, dublog);
result= fvalue/decade;
if (updown == UP)
result = ceil(result);
else if (updown == DOWN)
result = floor(result);
else result= floor(result + 0.5);
result *= decade;
return(result);
}
static int graph_findsigdec(float min, float range, int numsep, float ticks, int cgprecision, int cglinnumoff){
float var;
int expon,count,decpt,sigdec;
char line[30];
sigdec=0;
for (var= cglinnumoff*ticks;var<=range;var += numsep*ticks) {
if((var +min) < pow(10.0,(float)cgprecision))
sprintf(line, "%.*g", cgprecision, var+min);
else sprintf(line, "%.*e", cgprecision, var+min);
expon= FALSE;
count=0;
decpt=0;
while (line[count] != '\0') {
if (line[++count] == 'e' ) { /* search for exponent marker */
expon=TRUE;
}
if (line[count] == '.' ) /* locate decimal point */
decpt=count;
}
/ * is end of number * /
sigdec=((count-decpt-1)>sigdec)?(count-decpt-1):sigdec;
} /* end for() */
return(sigdec);
}
/* Format an axis label: print value with cgprecision significant digits
 * (%#g keeps the decimal point), compact any exponent notation via
 * graph_wc_linecomp, then truncate the fractional part to sigdec digits
 * (dropping the '.' entirely when sigdec is 0). Returns a pointer to a
 * static buffer -- not reentrant. */
static char *graph_formaxnum(float value, int sigdec, int cgprecision){
static char line[30];
int expon, count, decpt, ept=0;
sprintf(line, "%#.*g", cgprecision, value);
/* convert number to string form */
expon= FALSE;
count=0;
decpt=0;
/* scan for the exponent marker and the decimal point */
while (line[count] != '\0') {
if (line[++count] == 'e' ) {
expon=TRUE;
ept=count;
}
if (line[count] == '.' )
decpt=count;
}
if (expon == TRUE) { // need to clean up gcvt's exp notation
graph_wc_linecomp(line,ept);
count= -1;
while (line[++count] != '\0');
}
if(decpt) {
if (sigdec) {
if (sigdec<=cgprecision||expon) {
line[sigdec+decpt+1]='\0'; // keep sigdec fractional digits
} else {
line[cgprecision+decpt+1]='\0';
}
}
else line[decpt]='\0';
}
return line;
}
static char *graph_fix_string(char *text,int fix_minus){
char *sptr, *dptr;
char ch;
static char out[128];
sptr = text;
dptr = out;
while( (ch = *sptr++) != '\0'){
if( ch == '-'){
if(fix_minus){
/ * subst with octal 261 * /
*dptr++ = '2'; /* "-" --> "\261" */
*dptr++ = '6';
*dptr++ = '1';
}
}
else if( ch == '\\' || ch == '(' || ch == ')' ){
*dptr++ = '\\';
*dptr++ = ch;
}
else /* normal characters */
*dptr++ = ch; /* just copy */
}
*dptr++ = '\0'; /* string terminator */
return(out); /* return ptr to the fixed string */
}
end-of-c-declare
)
(define (graphaux:findsigdec arg1 arg2 arg3 arg4 arg5 arg6)
((c-lambda (float float int float int int) int "graph_findsigdec")
(flo arg1) (flo arg2) (fix arg3) (flo arg4) (fix arg5) (fix arg6)))
(define (graphaux:formaxnum arg1 arg2 arg3)
((c-lambda (float int int) char-string "graph_formaxnum") (flo arg1) (fix arg2) (fix arg3)))
(define (graphaux:fixstring arg1 arg2)
((c-lambda (char-string int) char-string "graph_fix_string") arg1 (fix arg2)))
(define (graphaux:wctrunc arg1 arg2)
((c-lambda (float int) float "graph_wc_trunc") (flo arg1) (fix arg2)))
(define (graphaux:wcxbottom arg1 arg2 arg3)
((c-lambda (int float float) float "graph_wc_xbottom") (fix arg1) (flo arg2) (flo arg3)))
(define (graphaux:wcybottom arg1 arg2 arg3)
((c-lambda (int float float) float "graph_wc_ybottom") (fix arg1) (flo arg2) (flo arg3)))
eof
| null | https://raw.githubusercontent.com/part-cw/lambdanative/74ec19dddf2f2ff787ee70ad677bc13b9dfafc29/modules/graph/graph-ccode.scm | scheme | graph c code snippets
var<=range;var += numsep*ticks) {
// original
/* "-" --> "\261" */
/* just copy */
/* string terminator */
/* return ptr to the fixed string */ | (c-declare #<<end-of-c-declare
#include <stdio.h>
#include <string.h>
#include <math.h>
#define TRUE 1
#define FALSE 0
#define DOWN 0
#define UP 1
#define EVEN 2
static char *graph_wc_linecomp(char *line, int Ept){
if (line[Ept-1] == '.'){ /* ERASE THE DECIMAL POINT */
while (line[count] != '\0'){ /* Shift the string back by one */
}
}
if (line[Ept+1] == '+'){ /* ERASE the PLUS sign AFTER the E */
while (line[count] != '\0'){ /* Shift the string back by one */
}
}
while (line[Ept+2] == '0'){ /* ERASE ANY UNNECESSARY ZEROES */
while (line[count] != '\0'){ /* Shift the string back by one */
}
}
}
static float graph_wc_ybottom(int multiplier, float cgymi, float cgldecy){
}
static float graph_wc_xbottom(int multiplier, float cgxmi, float cgldecx){
}
static float graph_wc_trunc(float fvalue, int updown){
if (updown == UP)
else if (updown == DOWN)
}
static int graph_findsigdec(float min, float range, int numsep, float ticks, int cgprecision, int cglinnumoff){
if((var +min) < pow(10.0,(float)cgprecision))
while (line[count] != '\0') {
if (line[++count] == 'e' ) { /* search for exponent marker */
}
if (line[count] == '.' ) /* locate decimal point */
}
/ * is end of number * /
} /* end for() */
}
static char *graph_formaxnum(float value, int sigdec, int cgprecision){
/* convert number to string form */
while (line[count] != '\0') {
if (line[++count] == 'e' ) {
}
if (line[count] == '.' )
}
if (expon == TRUE) { // need to clean up gcvt's exp notation
}
if(decpt) {
if (sigdec) {
if (sigdec<=cgprecision||expon) {
} else {
}
}
}
}
static char *graph_fix_string(char *text,int fix_minus){
while( (ch = *sptr++) != '\0'){
if( ch == '-'){
if(fix_minus){
/ * subst with octal 261 * /
}
}
else if( ch == '\\' || ch == '(' || ch == ')' ){
}
else /* normal characters */
}
}
end-of-c-declare
)
(define (graphaux:findsigdec arg1 arg2 arg3 arg4 arg5 arg6)
((c-lambda (float float int float int int) int "graph_findsigdec")
(flo arg1) (flo arg2) (fix arg3) (flo arg4) (fix arg5) (fix arg6)))
(define (graphaux:formaxnum arg1 arg2 arg3)
((c-lambda (float int int) char-string "graph_formaxnum") (flo arg1) (fix arg2) (fix arg3)))
(define (graphaux:fixstring arg1 arg2)
((c-lambda (char-string int) char-string "graph_fix_string") arg1 (fix arg2)))
(define (graphaux:wctrunc arg1 arg2)
((c-lambda (float int) float "graph_wc_trunc") (flo arg1) (fix arg2)))
(define (graphaux:wcxbottom arg1 arg2 arg3)
((c-lambda (int float float) float "graph_wc_xbottom") (fix arg1) (flo arg2) (flo arg3)))
(define (graphaux:wcybottom arg1 arg2 arg3)
((c-lambda (int float float) float "graph_wc_ybottom") (fix arg1) (flo arg2) (flo arg3)))
eof
|
957cf1e9a4b008052b5fc0f253b6a75489928f535a888cb67250b882f0e21995 | lspitzner/brittany | Test501.hs | -- brittany { lconfig_columnAlignMode: { tag: ColumnAlignModeDisabled }, lconfig_indentPolicy: IndentPolicyLeft }
func = do
Foo
{ _lstate_indent = _lstate_indent lkasdlkjalsdjlakjsdlkjasldkjalskdjlkajsd
, _lstate_foo = _lstate_foo lkasdlkjalsdjlakjsdlkjasldkjalskdjlkajsd
}
| null | https://raw.githubusercontent.com/lspitzner/brittany/a15eed5f3608bf1fa7084fcf008c6ecb79542562/data/Test501.hs | haskell | brittany { lconfig_columnAlignMode: { tag: ColumnAlignModeDisabled }, lconfig_indentPolicy: IndentPolicyLeft } | func = do
Foo
{ _lstate_indent = _lstate_indent lkasdlkjalsdjlakjsdlkjasldkjalskdjlkajsd
, _lstate_foo = _lstate_foo lkasdlkjalsdjlakjsdlkjasldkjalskdjlkajsd
}
|
90dd471c754c0b3fa092ab1b22304a1066431a306881222c488dc5c157ebb305 | lingnand/VIMonad | InsertPosition.hs | module XMonad.Vim.InsertPosition
(
InsertPosition(..)
, insertPositionKeys
) where
data InsertPosition = Before | After | Head | Last deriving Eq
instance Show InsertPosition where
show Before = "insert before"
show After = "insert after"
show Head = "insert head"
show Last = "insert last"
insertPositionKeys = [ ("i" , Before)
, ("S-i", Head)
, ("a" , After)
, ("S-a", Last) ]
| null | https://raw.githubusercontent.com/lingnand/VIMonad/048e419fc4ef57a5235dbaeef8890faf6956b574/XMonadContrib/XMonad/Vim/InsertPosition.hs | haskell | module XMonad.Vim.InsertPosition
(
InsertPosition(..)
, insertPositionKeys
) where
data InsertPosition = Before | After | Head | Last deriving Eq
instance Show InsertPosition where
show Before = "insert before"
show After = "insert after"
show Head = "insert head"
show Last = "insert last"
insertPositionKeys = [ ("i" , Before)
, ("S-i", Head)
, ("a" , After)
, ("S-a", Last) ]
| |
335e422f17b1001e5675de79db777d44091e2075f4dee76f3784a1f1eee24857 | hannesm/gmap | gmap.ml | ( c ) 2017 , 2018 , all rights reserved
this code would n't exist without , thanks for the help !
module Order = struct
type (_,_) t =
| Lt : ('a, 'b) t
| Eq : ('a, 'a) t
| Gt : ('a, 'b) t
end
module type KEY = sig
type _ t
val compare : 'a t -> 'b t -> ('a, 'b) Order.t
end
module type S = sig
type 'a key
type t
val empty : t
val singleton : 'a key -> 'a -> t
val is_empty : t -> bool
val cardinal : t -> int
val mem : 'a key -> t -> bool
val find : 'a key -> t -> 'a option
val get : 'a key -> t -> 'a
val add_unless_bound : 'a key -> 'a -> t -> t option
val add : 'a key -> 'a -> t -> t
val remove : 'a key -> t -> t
val update : 'a key -> ('a option -> 'a option) -> t -> t
type b = B : 'a key * 'a -> b
val min_binding : t -> b option
val max_binding : t -> b option
val any_binding : t -> b option
val bindings : t -> b list
type eq = { f : 'a . 'a key -> 'a -> 'a -> bool }
val equal : eq -> t -> t -> bool
type mapper = { f : 'a. 'a key -> 'a -> 'a }
val map : mapper -> t -> t
val iter : (b -> unit) -> t -> unit
val fold : (b -> 'a -> 'a) -> t -> 'a -> 'a
val for_all : (b -> bool) -> t -> bool
val exists : (b -> bool) -> t -> bool
val filter : (b -> bool) -> t -> t
type 'a fold2 = { f : 'b. 'b key -> 'b option -> 'b option -> 'a -> 'a }
val fold2 : 'a fold2 -> t -> t -> 'a -> 'a
type merger = { f : 'a. 'a key -> 'a option -> 'a option -> 'a option }
val merge : merger -> t -> t -> t
type unionee = { f : 'a. 'a key -> 'a -> 'a -> 'a option }
val union : unionee -> t -> t -> t
end
module Make (Key : KEY) : S with type 'a key = 'a Key.t = struct
type 'a key = 'a Key.t
type k = K : 'a key -> k
type b = B : 'a key * 'a -> b
module M = Map.Make(struct
type t = k
let compare (K a) (K b) = match Key.compare a b with
| Order.Lt -> -1 | Order.Eq -> 0 | Order.Gt -> 1
end)
type t = b M.t
let empty = M.empty
let singleton k v = M.singleton (K k) (B (k, v))
let is_empty = M.is_empty
let mem k m = M.mem (K k) m
let add k v m = M.add (K k) (B (k, v)) m
let add_unless_bound k v m = if mem k m then None else Some (add k v m)
let remove k m = M.remove (K k) m
let get : type a. a key -> t -> a = fun k m ->
match M.find (K k) m with
| B (k', v) ->
TODO this compare ( and further below similar ones ) is only needed for
the type checker ( to get the k = k ' proof ) , because the invariant
foreach k . t [ K k ] = B ( k ' , v ) - > k = k ' is preserved by this library
it could be replaced by :
- Obj.magic
- vendor and slight modification of . Map
- using integers as key - > compare can be a single instruction
Stay better safe than sorry ( at least for now )
the type checker (to get the k = k' proof), because the invariant
foreach k . t [K k] = B (k', v) -> k = k' is preserved by this library
it could be replaced by:
- Obj.magic
- vendor and slight modification of Stdlib.Map
- using integers as key -> compare can be a single instruction
Stay better safe than sorry (at least for now) *)
match Key.compare k k' with
| Order.Eq -> v
| _ -> assert false
let find : type a. a key -> t -> a option = fun k m ->
try Some (get k m) with Not_found -> None
let update k f m =
match f (find k m) with
| None -> remove k m
| Some v -> add k v m
let any_binding m = try Some (snd (M.choose m)) with Not_found -> None
let min_binding m = try Some (snd (M.min_binding m)) with Not_found -> None
let max_binding m = try Some (snd (M.max_binding m)) with Not_found -> None
let bindings m = snd (List.split (M.bindings m))
let cardinal m = M.cardinal m
let for_all p m = M.for_all (fun _ b -> p b) m
let exists p m = M.exists (fun _ b -> p b) m
let iter f m = M.iter (fun _ b -> f b) m
let fold f m acc = M.fold (fun _ b acc -> f b acc) m acc
let filter p m = M.filter (fun _ b -> p b) m
type mapper = { f : 'a. 'a key -> 'a -> 'a }
let map f m = M.map (fun (B (k, v)) -> B (k, f.f k v)) m
type merger = { f : 'a. 'a key -> 'a option -> 'a option -> 'a option }
let merge f m m' =
let callf : type x y. x key -> x option -> y key -> y option -> b option =
fun k v k' v' ->
see above comment in get about this useless Key.compare
match Key.compare k k' with
| Order.Eq ->
(match f.f k v v' with
| None -> None
| Some v'' -> Some (B (k, v'')))
| _ -> assert false
in
M.merge (fun (K key) b b' ->
match b, b' with
(* Map.merge never calls f None None, just for the types *)
| None, None -> None
| None, Some B (k', v') -> callf key None k' (Some v')
| Some B (k, v), None -> callf k (Some v) key None
| Some B (k, v), Some B (k', v') -> callf k (Some v) k' (Some v')
)
m m'
type 'a fold2 = { f : 'b. 'b key -> 'b option -> 'b option -> 'a -> 'a }
let fold2 f m m' acc =
let local = ref acc in
let f k v1 v2 = local := f.f k v1 v2 !local; None in
ignore (merge { f } m m');
!local
type unionee = { f : 'a. 'a key -> 'a -> 'a -> 'a option }
let union f m m' =
M.union
(fun (K k) (B (k', v)) (B (k'', v')) ->
(* see above comment about compare *)
match Key.compare k k', Key.compare k k'' with
| Order.Eq, Order.Eq ->
(match f.f k v v' with None -> None | Some v'' -> Some (B (k, v'')))
| _ -> assert false)
m m'
type eq = { f : 'a . 'a key -> 'a -> 'a -> bool }
let equal cmp m m' =
M.equal (fun (B (k, v)) (B (k', v')) ->
(* see above comment about compare *)
match Key.compare k k' with
| Order.Eq -> cmp.f k v v'
| _ -> assert false)
m m'
end
| null | https://raw.githubusercontent.com/hannesm/gmap/10496da1ff70c5872fc5c7e28ee8cb73720dfae6/gmap.ml | ocaml | Map.merge never calls f None None, just for the types
see above comment about compare
see above comment about compare | ( c ) 2017 , 2018 , all rights reserved
this code would n't exist without , thanks for the help !
module Order = struct
type (_,_) t =
| Lt : ('a, 'b) t
| Eq : ('a, 'a) t
| Gt : ('a, 'b) t
end
module type KEY = sig
type _ t
val compare : 'a t -> 'b t -> ('a, 'b) Order.t
end
module type S = sig
type 'a key
type t
val empty : t
val singleton : 'a key -> 'a -> t
val is_empty : t -> bool
val cardinal : t -> int
val mem : 'a key -> t -> bool
val find : 'a key -> t -> 'a option
val get : 'a key -> t -> 'a
val add_unless_bound : 'a key -> 'a -> t -> t option
val add : 'a key -> 'a -> t -> t
val remove : 'a key -> t -> t
val update : 'a key -> ('a option -> 'a option) -> t -> t
type b = B : 'a key * 'a -> b
val min_binding : t -> b option
val max_binding : t -> b option
val any_binding : t -> b option
val bindings : t -> b list
type eq = { f : 'a . 'a key -> 'a -> 'a -> bool }
val equal : eq -> t -> t -> bool
type mapper = { f : 'a. 'a key -> 'a -> 'a }
val map : mapper -> t -> t
val iter : (b -> unit) -> t -> unit
val fold : (b -> 'a -> 'a) -> t -> 'a -> 'a
val for_all : (b -> bool) -> t -> bool
val exists : (b -> bool) -> t -> bool
val filter : (b -> bool) -> t -> t
type 'a fold2 = { f : 'b. 'b key -> 'b option -> 'b option -> 'a -> 'a }
val fold2 : 'a fold2 -> t -> t -> 'a -> 'a
type merger = { f : 'a. 'a key -> 'a option -> 'a option -> 'a option }
val merge : merger -> t -> t -> t
type unionee = { f : 'a. 'a key -> 'a -> 'a -> 'a option }
val union : unionee -> t -> t -> t
end
module Make (Key : KEY) : S with type 'a key = 'a Key.t = struct
type 'a key = 'a Key.t
type k = K : 'a key -> k
type b = B : 'a key * 'a -> b
module M = Map.Make(struct
type t = k
let compare (K a) (K b) = match Key.compare a b with
| Order.Lt -> -1 | Order.Eq -> 0 | Order.Gt -> 1
end)
type t = b M.t
let empty = M.empty
let singleton k v = M.singleton (K k) (B (k, v))
let is_empty = M.is_empty
let mem k m = M.mem (K k) m
let add k v m = M.add (K k) (B (k, v)) m
let add_unless_bound k v m = if mem k m then None else Some (add k v m)
let remove k m = M.remove (K k) m
let get : type a. a key -> t -> a = fun k m ->
match M.find (K k) m with
| B (k', v) ->
TODO this compare ( and further below similar ones ) is only needed for
the type checker ( to get the k = k ' proof ) , because the invariant
foreach k . t [ K k ] = B ( k ' , v ) - > k = k ' is preserved by this library
it could be replaced by :
- Obj.magic
- vendor and slight modification of . Map
- using integers as key - > compare can be a single instruction
Stay better safe than sorry ( at least for now )
the type checker (to get the k = k' proof), because the invariant
foreach k . t [K k] = B (k', v) -> k = k' is preserved by this library
it could be replaced by:
- Obj.magic
- vendor and slight modification of Stdlib.Map
- using integers as key -> compare can be a single instruction
Stay better safe than sorry (at least for now) *)
match Key.compare k k' with
| Order.Eq -> v
| _ -> assert false
let find : type a. a key -> t -> a option = fun k m ->
try Some (get k m) with Not_found -> None
let update k f m =
match f (find k m) with
| None -> remove k m
| Some v -> add k v m
let any_binding m = try Some (snd (M.choose m)) with Not_found -> None
let min_binding m = try Some (snd (M.min_binding m)) with Not_found -> None
let max_binding m = try Some (snd (M.max_binding m)) with Not_found -> None
let bindings m = snd (List.split (M.bindings m))
let cardinal m = M.cardinal m
let for_all p m = M.for_all (fun _ b -> p b) m
let exists p m = M.exists (fun _ b -> p b) m
let iter f m = M.iter (fun _ b -> f b) m
let fold f m acc = M.fold (fun _ b acc -> f b acc) m acc
let filter p m = M.filter (fun _ b -> p b) m
type mapper = { f : 'a. 'a key -> 'a -> 'a }
let map f m = M.map (fun (B (k, v)) -> B (k, f.f k v)) m
type merger = { f : 'a. 'a key -> 'a option -> 'a option -> 'a option }
let merge f m m' =
let callf : type x y. x key -> x option -> y key -> y option -> b option =
fun k v k' v' ->
see above comment in get about this useless Key.compare
match Key.compare k k' with
| Order.Eq ->
(match f.f k v v' with
| None -> None
| Some v'' -> Some (B (k, v'')))
| _ -> assert false
in
M.merge (fun (K key) b b' ->
match b, b' with
| None, None -> None
| None, Some B (k', v') -> callf key None k' (Some v')
| Some B (k, v), None -> callf k (Some v) key None
| Some B (k, v), Some B (k', v') -> callf k (Some v) k' (Some v')
)
m m'
type 'a fold2 = { f : 'b. 'b key -> 'b option -> 'b option -> 'a -> 'a }
let fold2 f m m' acc =
let local = ref acc in
let f k v1 v2 = local := f.f k v1 v2 !local; None in
ignore (merge { f } m m');
!local
type unionee = { f : 'a. 'a key -> 'a -> 'a -> 'a option }
let union f m m' =
M.union
(fun (K k) (B (k', v)) (B (k'', v')) ->
match Key.compare k k', Key.compare k k'' with
| Order.Eq, Order.Eq ->
(match f.f k v v' with None -> None | Some v'' -> Some (B (k, v'')))
| _ -> assert false)
m m'
type eq = { f : 'a . 'a key -> 'a -> 'a -> bool }
let equal cmp m m' =
M.equal (fun (B (k, v)) (B (k', v')) ->
match Key.compare k k' with
| Order.Eq -> cmp.f k v v'
| _ -> assert false)
m m'
end
|
5e62388041a42a02037d75f85c2cc5b318268b0245cd1d399cce7f865f49b603 | alavrik/piqi-erlang | piqic_erlang_out.erl | Copyright 2009 , 2010 , 2011 , 2012 , 2013 , 2014
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
generation of gen_*/1,2,3 functions for Protocol Buffers , JSON , XML and Piq
% deserialization
-module(piqic_erlang_out).
-compile([export_all, nowarn_export_all]).
-include("piqic.hrl").
-define(DEBUG,1).
-include("debug.hrl").
gen_piqi(Context) ->
Piqi = Context#context.piqi,
gen_typedefs(Context, Piqi#piqi.typedef).
gen_typedefs(Context, Typedefs) ->
iod("\n\n",
[gen_typedef(Context, X) || X <- Typedefs]
++
[gen_typedef_1(Context, X) || X <- Typedefs]
++
case piqic:is_protobuf_only(Context) of
true -> [];
false ->
[gen_typedef_multiformat(Context, X) || X <- Typedefs]
end
).
% field serializer
gen_typedef(Context, Typedef = {Type, X}) ->
Spec = gen_spec(Context, Typedef),
GenFun =
case Type of
piqi_record ->
gen_record(Context, X);
variant ->
gen_variant(Context, X);
piqi_list ->
gen_list(Context, X);
enum ->
gen_enum(Context, X);
alias ->
gen_alias(Context, X)
end,
[
Spec, "\n",
GenFun
].
Protobuf serializer
gen_typedef_1(Context, Typedef) ->
Spec = gen_spec_1(Context, Typedef),
ErlName = typedef_erlname(Typedef),
[
Spec, "\n",
"gen_", ErlName, "(X) ->\n",
" ", "field_gen_", ErlName, "('undefined', X).\n"
].
gen_spec(Context, Typedef) ->
[
"-spec field_gen_", typedef_erlname(Typedef), "(",
"Code :: piqirun_code(), "
"X :: ", gen_output_type_name(Context, Typedef), ") -> iolist().\n"
].
generate gen_<name>/1 spec
gen_spec_1(Context, Typedef) ->
[
"-spec gen_", typedef_erlname(Typedef), "(",
"X :: ", gen_output_type_name(Context, Typedef), ") -> iolist().\n"
].
gen_output_type_name(Context, Typedef) ->
piqic_erlang_types:gen_out_typedef_type(Context, Typedef).
% mutliformat serializers: gen_*/2, gen_*/3
% TODO: generate -specs
gen_typedef_multiformat(Context, Typedef) ->
ScopedName = piqic:typedef_scoped_name(Context, Typedef),
ErlName = typedef_erlname(Typedef),
[
gen_typedef_2(ScopedName, ErlName),
"\n\n",
gen_typedef_3(ScopedName, ErlName)
].
mutliformat serializer
gen_typedef_2(_Name, ErlName) ->
[
"gen_", ErlName, "(X, Format) ->\n",
" ", "gen_", ErlName, "(X, Format, []).\n"
].
mutliformat serializer with additional options
gen_typedef_3(Name, ErlName) ->
[
"gen_", ErlName, "(X, Format, Options) ->\n",
" Iolist = gen_", ErlName, "(X),\n",
" ", gen_convert(Name, "'pb'", "Format", "iolist_to_binary(Iolist), Options"), ".\n"
].
gen_convert(ScopedName, InputFormat, OutputFormat, Data) ->
[
"piqirun_ext:convert(?MODULE, ",
iod(", ", [
["<<\"", ScopedName, "\">>"], InputFormat, OutputFormat, Data
]),
")"
].
gen_alias(Context, X) ->
case piqic:can_be_protobuf_packed(Context, {alias, X}) of
false ->
gen_unpacked_alias(Context, X);
true ->
if a value can be packed , we need to generate two functions : one
for generating regular ( unpacked ) representation , and another one
% for generating packed form
iod("\n\n", [
gen_unpacked_alias(Context, X),
gen_packed_alias(Context, X)
])
end.
gen_unpacked_alias(Context, X) ->
[
"field_gen_", X#alias.erlang_name, "(Code, X) ->\n"
" ",
gen_alias_type(Context, X, X#alias.protobuf_wire_type, _IsPacked = false),
"(Code, ",
piqic:gen_convert_value(Context, X#alias.erlang_type, "_to_", X#alias.type, "X"),
").\n"
].
gen_packed_alias(Context, X) ->
packed_field_gen _ * has arity 1 , because values of such fields can
% not be encoded independently: all values for a repeated packed field must
% be encoded all at once
[
"packed_field_gen_", X#alias.erlang_name, "(X) ->\n"
" ",
gen_alias_type(Context, X, X#alias.protobuf_wire_type, _IsPacked = true),
"(",
piqic:gen_convert_value(Context, X#alias.erlang_type, "_to_", X#alias.type, "X"),
").\n"
].
gen_list(Context, X) ->
IsPacked = X#piqi_list.protobuf_packed,
PackedPrefix = ?if_true(IsPacked, "packed_", ""),
TypeName = X#piqi_list.type,
[
"field_gen_", X#piqi_list.erlang_name, "(Code, X) ->\n",
" ", ?PIQIRUN, "gen_", PackedPrefix, "list(Code, ",
"fun ", gen_type(Context, TypeName, IsPacked), "/", ?if_true(IsPacked, "1", "2"), % arity
", X).\n"
].
gen_enum(Context, X) ->
generate two functions : one for parsing normal value ; another one -- for
% packed value
iod("\n\n", [
gen_unpacked_enum(Context, X),
gen_packed_enum(Context, X)
]).
gen_unpacked_enum(Context, X) ->
Consts = gen_consts(X#enum.option),
[
"field_gen_", X#enum.erlang_name, "(Code, X) ->\n",
" ", ?PIQIRUN, "integer_to_signed_varint(Code,\n",
Consts,
" ).\n"
].
gen_packed_enum(Context, X) ->
Consts = gen_consts(X#enum.option),
[
"packed_field_gen_", X#enum.erlang_name, "(X) ->\n",
" ", ?PIQIRUN, "integer_to_packed_signed_varint(\n",
Consts,
" ).\n"
].
gen_consts(Consts) ->
Clauses = [gen_const(C) || C <- Consts],
[
" ", "case X of\n",
" ", iod(";\n ", Clauses), "\n"
" ", "end\n"
].
gen_const(X) ->
[
X#option.erlang_name, " -> ", gen_code(X#option.code)
].
gen_variant(Context, X) ->
L = [gen_option(Context, O) || O <- X#variant.option],
Options = lists:append(L), % flatten
[
"field_gen_", X#variant.erlang_name, "(Code, X) ->\n",
" ", ?PIQIRUN, "gen_variant(Code,\n",
" ", "case X of\n",
" ", iod(";\n ", Options), "\n"
" ", "end\n"
" ).\n"
].
gen_option(Context, X) ->
gen_option(Context, X, _OuterOption = 'undefined').
gen_option(Context, X, OuterOption) ->
Name = erlname_of_option(Context, X),
Code = gen_code(X#option.code),
case X#option.type of
'undefined' ->
case OuterOption =/= 'undefined' of
true ->
gen_inner_option(Name, OuterOption);
false ->
Clause = [
Name, " -> ", ?PIQIRUN, "gen_bool_field(", Code, ", true)"
],
[Clause]
end;
TypeName ->
{ParentPiqi, Typedef} = resolve_type_name(Context, TypeName),
case Typedef of
{Type, Y} when X#option.erlang_name =:= 'undefined', (Type =:= variant orelse Type =:= enum) ->
% handle variant and enum subtyping cases by lifting their
% labels and clauses to the top level
Options =
case Type of
variant -> Y#variant.option;
enum -> Y#enum.option
end,
% recursively generate cases from "included" variants and
% enums
OuterOption2 = ?defined(OuterOption, {Context, X}),
ParentContext = piqic:switch_context(Context, ParentPiqi),
L = [gen_option(ParentContext, O, OuterOption2) || O <- Options],
lists:append(L); % flatten
_ ->
% general case
case OuterOption =/= 'undefined' of
true ->
Pattern = ["{", Name, ", _}"],
gen_inner_option(Pattern, OuterOption);
false ->
Res = [
"{", Name, ", Y} -> ",
gen_type(Context, TypeName), "(", Code, ", Y)"
],
[Res]
end
end
end.
gen_inner_option(Pattern, {Context, X}) ->
Code = gen_code(X#option.code),
Clause = [
Pattern, " -> ", gen_type(Context, X#option.type), "(", Code, ", X)"
],
[Clause].
gen_record(Context, X) ->
% order fields by their codes
Fields = lists:sort(
fun (A, B) -> A#field.code =< B#field.code end,
X#piqi_record.field
),
Name = X#piqi_record.erlang_name,
ScopedName = piqic:scoped_erlname(Context, Name),
UnknownFields =
case piqic:get_option(Context, gen_preserve_unknown_fields) of
false -> [];
true ->
["[", ?PIQIRUN, "gen_parsed_field(F) || F <- X#", ScopedName, ".piqi_unknown_pb]"]
end,
GeneratorsCode =
case Fields of
[] when UnknownFields =:= [] ->
"[]";
[] ->
UnknownFields;
_ ->
FieldGenerators = [gen_field(Context, ScopedName, F) || F <- Fields],
case UnknownFields of
[] ->
[
"[\n",
" ", iod(",\n ", FieldGenerators), "\n",
" ", "]"
];
_ ->
[
"[\n",
" ", iod(",\n ", FieldGenerators), "\n",
" |", UnknownFields, "]"
]
end
end,
prevent Erlang warning on unused variable
ArgVariable =
case Fields of
[] when UnknownFields =:= [] ->
["#", ScopedName, "{}"];
_ ->
"X"
end,
[
"field_gen_", Name, "(Code, ", ArgVariable, ") ->\n",
" ", ?PIQIRUN, "gen_record(Code, ", GeneratorsCode, ").\n"
].
gen_field(Context, RecordName, X) ->
Name = erlname_of_field(Context, X),
ScopedName = [
"X#", RecordName, ".", Name
],
Mode = piqic:gen_field_mode(X),
Code = gen_code(X#field.code),
IsPacked = X#field.protobuf_packed,
case X#field.type of
% TODO: remove eventually -- keeping for backward compatibility with
older piqi which expects flags to only be true if present , and never
% false; see piqic:transform_flag(X) for details
'undefined' -> % flag, i.e. field w/o type
[
?PIQIRUN, "gen_flag(", Code, ", ", ScopedName, ")"
];
TypeName ->
[
?PIQIRUN, "gen_", Mode, "_field(", Code, ", ",
"fun ", gen_type(Context, TypeName, IsPacked), "/", ?if_true(IsPacked, "1", "2"), % arity
", ",
ScopedName,
")"
]
end.
gen_type(Context, TypeName) ->
gen_type(Context, TypeName, _IsPacked = false).
gen_type(Context, TypeName, IsPacked) ->
{ParentPiqi, Typedef} = resolve_type_name(Context, TypeName),
ParentMod = piqic:gen_parent_mod(Context, ParentPiqi),
PackedPrefix = ?if_true(IsPacked, "packed_", ""),
[
ParentMod, PackedPrefix, "field_gen_", typedef_erlname(Typedef)
].
gen_alias_type(Context, Alias, WireType, IsPacked) ->
case Alias#alias.type of
'undefined' -> % we are dealing with built-in type
gen_builtin_type(Context,
Alias#alias.piqi_type,
Alias#alias.erlang_type,
WireType, IsPacked);
TypeName ->
{ParentPiqi, Typedef} = resolve_type_name(Context, TypeName),
case Typedef of
{alias, A} when WireType =/= 'undefined' ->
% need special handing in case when higher-level alias
% overrides protobuf_wire_type
ParentContext = piqic:switch_context(Context, ParentPiqi),
gen_alias_type(ParentContext, A, WireType, IsPacked);
_ ->
gen_type(Context, TypeName, IsPacked)
end
end.
gen_builtin_type(Context, PiqiType, ErlType, WireType, IsPacked) ->
case PiqiType of
any ->
"field_gen_piqi_any";
_ ->
PackedPrefix = ?if_true(IsPacked, "packed_", ""),
TypeName = piqic:gen_builtin_type_name(PiqiType, ErlType),
WireTypeName = piqic:gen_wire_type_name(PiqiType, WireType),
[
?PIQIRUN, TypeName, "_to_", PackedPrefix, WireTypeName
]
end.
| null | https://raw.githubusercontent.com/alavrik/piqi-erlang/063ecc4f8fd54543acf01953b0a63b2b7ebf17a9/src/piqic_erlang_out.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
deserialization
field serializer
mutliformat serializers: gen_*/2, gen_*/3
TODO: generate -specs
for generating packed form
not be encoded independently: all values for a repeated packed field must
be encoded all at once
arity
packed value
flatten
handle variant and enum subtyping cases by lifting their
labels and clauses to the top level
recursively generate cases from "included" variants and
enums
flatten
general case
order fields by their codes
TODO: remove eventually -- keeping for backward compatibility with
false; see piqic:transform_flag(X) for details
flag, i.e. field w/o type
arity
we are dealing with built-in type
need special handing in case when higher-level alias
overrides protobuf_wire_type | Copyright 2009 , 2010 , 2011 , 2012 , 2013 , 2014
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
generation of gen_*/1,2,3 functions for Protocol Buffers , JSON , XML and Piq
-module(piqic_erlang_out).
-compile([export_all, nowarn_export_all]).
-include("piqic.hrl").
-define(DEBUG,1).
-include("debug.hrl").
gen_piqi(Context) ->
Piqi = Context#context.piqi,
gen_typedefs(Context, Piqi#piqi.typedef).
gen_typedefs(Context, Typedefs) ->
iod("\n\n",
[gen_typedef(Context, X) || X <- Typedefs]
++
[gen_typedef_1(Context, X) || X <- Typedefs]
++
case piqic:is_protobuf_only(Context) of
true -> [];
false ->
[gen_typedef_multiformat(Context, X) || X <- Typedefs]
end
).
gen_typedef(Context, Typedef = {Type, X}) ->
Spec = gen_spec(Context, Typedef),
GenFun =
case Type of
piqi_record ->
gen_record(Context, X);
variant ->
gen_variant(Context, X);
piqi_list ->
gen_list(Context, X);
enum ->
gen_enum(Context, X);
alias ->
gen_alias(Context, X)
end,
[
Spec, "\n",
GenFun
].
Protobuf serializer
gen_typedef_1(Context, Typedef) ->
Spec = gen_spec_1(Context, Typedef),
ErlName = typedef_erlname(Typedef),
[
Spec, "\n",
"gen_", ErlName, "(X) ->\n",
" ", "field_gen_", ErlName, "('undefined', X).\n"
].
gen_spec(Context, Typedef) ->
[
"-spec field_gen_", typedef_erlname(Typedef), "(",
"Code :: piqirun_code(), "
"X :: ", gen_output_type_name(Context, Typedef), ") -> iolist().\n"
].
generate gen_<name>/1 spec
gen_spec_1(Context, Typedef) ->
[
"-spec gen_", typedef_erlname(Typedef), "(",
"X :: ", gen_output_type_name(Context, Typedef), ") -> iolist().\n"
].
gen_output_type_name(Context, Typedef) ->
piqic_erlang_types:gen_out_typedef_type(Context, Typedef).
gen_typedef_multiformat(Context, Typedef) ->
ScopedName = piqic:typedef_scoped_name(Context, Typedef),
ErlName = typedef_erlname(Typedef),
[
gen_typedef_2(ScopedName, ErlName),
"\n\n",
gen_typedef_3(ScopedName, ErlName)
].
mutliformat serializer
gen_typedef_2(_Name, ErlName) ->
[
"gen_", ErlName, "(X, Format) ->\n",
" ", "gen_", ErlName, "(X, Format, []).\n"
].
mutliformat serializer with additional options
gen_typedef_3(Name, ErlName) ->
[
"gen_", ErlName, "(X, Format, Options) ->\n",
" Iolist = gen_", ErlName, "(X),\n",
" ", gen_convert(Name, "'pb'", "Format", "iolist_to_binary(Iolist), Options"), ".\n"
].
gen_convert(ScopedName, InputFormat, OutputFormat, Data) ->
[
"piqirun_ext:convert(?MODULE, ",
iod(", ", [
["<<\"", ScopedName, "\">>"], InputFormat, OutputFormat, Data
]),
")"
].
gen_alias(Context, X) ->
case piqic:can_be_protobuf_packed(Context, {alias, X}) of
false ->
gen_unpacked_alias(Context, X);
true ->
if a value can be packed , we need to generate two functions : one
for generating regular ( unpacked ) representation , and another one
iod("\n\n", [
gen_unpacked_alias(Context, X),
gen_packed_alias(Context, X)
])
end.
gen_unpacked_alias(Context, X) ->
[
"field_gen_", X#alias.erlang_name, "(Code, X) ->\n"
" ",
gen_alias_type(Context, X, X#alias.protobuf_wire_type, _IsPacked = false),
"(Code, ",
piqic:gen_convert_value(Context, X#alias.erlang_type, "_to_", X#alias.type, "X"),
").\n"
].
gen_packed_alias(Context, X) ->
packed_field_gen _ * has arity 1 , because values of such fields can
[
"packed_field_gen_", X#alias.erlang_name, "(X) ->\n"
" ",
gen_alias_type(Context, X, X#alias.protobuf_wire_type, _IsPacked = true),
"(",
piqic:gen_convert_value(Context, X#alias.erlang_type, "_to_", X#alias.type, "X"),
").\n"
].
gen_list(Context, X) ->
IsPacked = X#piqi_list.protobuf_packed,
PackedPrefix = ?if_true(IsPacked, "packed_", ""),
TypeName = X#piqi_list.type,
[
"field_gen_", X#piqi_list.erlang_name, "(Code, X) ->\n",
" ", ?PIQIRUN, "gen_", PackedPrefix, "list(Code, ",
", X).\n"
].
gen_enum(Context, X) ->
generate two functions : one for parsing normal value ; another one -- for
iod("\n\n", [
gen_unpacked_enum(Context, X),
gen_packed_enum(Context, X)
]).
gen_unpacked_enum(Context, X) ->
Consts = gen_consts(X#enum.option),
[
"field_gen_", X#enum.erlang_name, "(Code, X) ->\n",
" ", ?PIQIRUN, "integer_to_signed_varint(Code,\n",
Consts,
" ).\n"
].
gen_packed_enum(Context, X) ->
Consts = gen_consts(X#enum.option),
[
"packed_field_gen_", X#enum.erlang_name, "(X) ->\n",
" ", ?PIQIRUN, "integer_to_packed_signed_varint(\n",
Consts,
" ).\n"
].
gen_consts(Consts) ->
Clauses = [gen_const(C) || C <- Consts],
[
" ", "case X of\n",
" ", iod(";\n ", Clauses), "\n"
" ", "end\n"
].
gen_const(X) ->
[
X#option.erlang_name, " -> ", gen_code(X#option.code)
].
gen_variant(Context, X) ->
L = [gen_option(Context, O) || O <- X#variant.option],
[
"field_gen_", X#variant.erlang_name, "(Code, X) ->\n",
" ", ?PIQIRUN, "gen_variant(Code,\n",
" ", "case X of\n",
" ", iod(";\n ", Options), "\n"
" ", "end\n"
" ).\n"
].
gen_option(Context, X) ->
gen_option(Context, X, _OuterOption = 'undefined').
gen_option(Context, X, OuterOption) ->
Name = erlname_of_option(Context, X),
Code = gen_code(X#option.code),
case X#option.type of
'undefined' ->
case OuterOption =/= 'undefined' of
true ->
gen_inner_option(Name, OuterOption);
false ->
Clause = [
Name, " -> ", ?PIQIRUN, "gen_bool_field(", Code, ", true)"
],
[Clause]
end;
TypeName ->
{ParentPiqi, Typedef} = resolve_type_name(Context, TypeName),
case Typedef of
{Type, Y} when X#option.erlang_name =:= 'undefined', (Type =:= variant orelse Type =:= enum) ->
Options =
case Type of
variant -> Y#variant.option;
enum -> Y#enum.option
end,
OuterOption2 = ?defined(OuterOption, {Context, X}),
ParentContext = piqic:switch_context(Context, ParentPiqi),
L = [gen_option(ParentContext, O, OuterOption2) || O <- Options],
_ ->
case OuterOption =/= 'undefined' of
true ->
Pattern = ["{", Name, ", _}"],
gen_inner_option(Pattern, OuterOption);
false ->
Res = [
"{", Name, ", Y} -> ",
gen_type(Context, TypeName), "(", Code, ", Y)"
],
[Res]
end
end
end.
gen_inner_option(Pattern, {Context, X}) ->
Code = gen_code(X#option.code),
Clause = [
Pattern, " -> ", gen_type(Context, X#option.type), "(", Code, ", X)"
],
[Clause].
gen_record(Context, X) ->
Fields = lists:sort(
fun (A, B) -> A#field.code =< B#field.code end,
X#piqi_record.field
),
Name = X#piqi_record.erlang_name,
ScopedName = piqic:scoped_erlname(Context, Name),
UnknownFields =
case piqic:get_option(Context, gen_preserve_unknown_fields) of
false -> [];
true ->
["[", ?PIQIRUN, "gen_parsed_field(F) || F <- X#", ScopedName, ".piqi_unknown_pb]"]
end,
GeneratorsCode =
case Fields of
[] when UnknownFields =:= [] ->
"[]";
[] ->
UnknownFields;
_ ->
FieldGenerators = [gen_field(Context, ScopedName, F) || F <- Fields],
case UnknownFields of
[] ->
[
"[\n",
" ", iod(",\n ", FieldGenerators), "\n",
" ", "]"
];
_ ->
[
"[\n",
" ", iod(",\n ", FieldGenerators), "\n",
" |", UnknownFields, "]"
]
end
end,
prevent Erlang warning on unused variable
ArgVariable =
case Fields of
[] when UnknownFields =:= [] ->
["#", ScopedName, "{}"];
_ ->
"X"
end,
[
"field_gen_", Name, "(Code, ", ArgVariable, ") ->\n",
" ", ?PIQIRUN, "gen_record(Code, ", GeneratorsCode, ").\n"
].
gen_field(Context, RecordName, X) ->
Name = erlname_of_field(Context, X),
ScopedName = [
"X#", RecordName, ".", Name
],
Mode = piqic:gen_field_mode(X),
Code = gen_code(X#field.code),
IsPacked = X#field.protobuf_packed,
case X#field.type of
older piqi which expects flags to only be true if present , and never
[
?PIQIRUN, "gen_flag(", Code, ", ", ScopedName, ")"
];
TypeName ->
[
?PIQIRUN, "gen_", Mode, "_field(", Code, ", ",
", ",
ScopedName,
")"
]
end.
gen_type(Context, TypeName) ->
gen_type(Context, TypeName, _IsPacked = false).
gen_type(Context, TypeName, IsPacked) ->
{ParentPiqi, Typedef} = resolve_type_name(Context, TypeName),
ParentMod = piqic:gen_parent_mod(Context, ParentPiqi),
PackedPrefix = ?if_true(IsPacked, "packed_", ""),
[
ParentMod, PackedPrefix, "field_gen_", typedef_erlname(Typedef)
].
gen_alias_type(Context, Alias, WireType, IsPacked) ->
case Alias#alias.type of
gen_builtin_type(Context,
Alias#alias.piqi_type,
Alias#alias.erlang_type,
WireType, IsPacked);
TypeName ->
{ParentPiqi, Typedef} = resolve_type_name(Context, TypeName),
case Typedef of
{alias, A} when WireType =/= 'undefined' ->
ParentContext = piqic:switch_context(Context, ParentPiqi),
gen_alias_type(ParentContext, A, WireType, IsPacked);
_ ->
gen_type(Context, TypeName, IsPacked)
end
end.
gen_builtin_type(Context, PiqiType, ErlType, WireType, IsPacked) ->
case PiqiType of
any ->
"field_gen_piqi_any";
_ ->
PackedPrefix = ?if_true(IsPacked, "packed_", ""),
TypeName = piqic:gen_builtin_type_name(PiqiType, ErlType),
WireTypeName = piqic:gen_wire_type_name(PiqiType, WireType),
[
?PIQIRUN, TypeName, "_to_", PackedPrefix, WireTypeName
]
end.
|
3924560b64d4f9726e14c45f267cf2c7ff95da38fb43b28d64f89309c44a2ea0 | lexi-lambda/hackett | require.rkt | #lang racket/base
(require (for-syntax racket/base)
racket/require
racket/require-syntax
syntax/parse/define)
(provide postfix-in)
(define-for-syntax ((add-postfix postfix) str)
(string-append str postfix))
(define-require-syntax postfix-in
(syntax-parser
[(_ post-id:id require-spec)
#:with post-str (symbol->string (syntax-e #'post-id))
#'(filtered-in (add-postfix 'post-str) require-spec)]))
| null | https://raw.githubusercontent.com/lexi-lambda/hackett/e90ace9e4a056ec0a2a267f220cb29b756cbefce/hackett-lib/hackett/private/util/require.rkt | racket | #lang racket/base
(require (for-syntax racket/base)
racket/require
racket/require-syntax
syntax/parse/define)
(provide postfix-in)
(define-for-syntax ((add-postfix postfix) str)
(string-append str postfix))
(define-require-syntax postfix-in
(syntax-parser
[(_ post-id:id require-spec)
#:with post-str (symbol->string (syntax-e #'post-id))
#'(filtered-in (add-postfix 'post-str) require-spec)]))
| |
d70e0b765b3cce4bf33509f5d034050ce8d2df44f79a7384101440d0d74c666a | srid/neuron | Query.hs | # LANGUAGE FlexibleContexts #
{-# LANGUAGE GADTs #-}
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE NoImplicitPrelude #
module Neuron.CLI.Query
( runQuery,
)
where
import Colog (WithLog)
import Data.Aeson (ToJSON)
import qualified Data.Aeson as Aeson
import qualified Data.Aeson.Encode.Pretty as AesonPretty
import qualified Data.Set as Set
import Data.Some (withSome)
import qualified Data.TagTree as TagTree
import Neuron.CLI.Logging (Message)
import Neuron.CLI.Types
import qualified Neuron.Cache as Cache
import qualified Neuron.Cache.Type as Cache
import qualified Neuron.Plugin.Plugins.Tags as Tags
import qualified Neuron.Reactor as Reactor
import qualified Neuron.Zettelkasten.Graph as G
import qualified Neuron.Zettelkasten.Query as Q
import Relude
runQuery :: forall m env. (MonadApp m, MonadFail m, MonadApp m, MonadIO m, WithLog env Message m) => QueryCommand -> m ()
runQuery QueryCommand {..} = do
Cache.NeuronCache {..} <-
fmap Cache.stripCache $
if cached
then Cache.getCache
else do
Reactor.loadZettelkasten >>= \case
Left e -> fail $ toString e
Right (ch, _, _) -> pure ch
case query of
CliQuery_ById zid -> do
let result = G.getZettel zid neuroncacheGraph
bool printPrettyJson (printJsonLine . maybeToList) jsonl result
CliQuery_Zettels -> do
let result = G.getZettels neuroncacheGraph
bool printPrettyJson printJsonLine jsonl result
CliQuery_Tags -> do
let result = Set.unions $ Tags.getZettelTags <$> G.getZettels neuroncacheGraph
bool printPrettyJson (printJsonLine . Set.toList) jsonl result
CliQuery_ByTag tag -> do
let q = TagTree.mkDefaultTagQuery $ one $ TagTree.mkTagPatternFromTag tag
zs = G.getZettels neuroncacheGraph
result = Tags.zettelsByTag Tags.getZettelTags zs q
bool printPrettyJson printJsonLine jsonl result
CliQuery_Graph someQ ->
withSome someQ $ \q -> do
result <- either (fail . show) pure $ Q.runGraphQuery neuroncacheGraph q
bool printPrettyJson printJsonLine jsonl [Q.graphQueryResultJson q result neuroncacheErrors]
where
printJsonLine :: ToJSON a => [a] -> m ()
printJsonLine = mapM_ (putLBSLn . Aeson.encode)
printPrettyJson :: ToJSON a => a -> m ()
printPrettyJson =
putLBSLn
. AesonPretty.encodePretty'
AesonPretty.defConfig
{ -- Sort hash map by keys for consistency
AesonPretty.confCompare = compare
}
| null | https://raw.githubusercontent.com/srid/neuron/bfa276ee6eb7b146408e16653b2188d974ee993e/src/Neuron/CLI/Query.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
Sort hash map by keys for consistency | # LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE NoImplicitPrelude #
module Neuron.CLI.Query
( runQuery,
)
where
import Colog (WithLog)
import Data.Aeson (ToJSON)
import qualified Data.Aeson as Aeson
import qualified Data.Aeson.Encode.Pretty as AesonPretty
import qualified Data.Set as Set
import Data.Some (withSome)
import qualified Data.TagTree as TagTree
import Neuron.CLI.Logging (Message)
import Neuron.CLI.Types
import qualified Neuron.Cache as Cache
import qualified Neuron.Cache.Type as Cache
import qualified Neuron.Plugin.Plugins.Tags as Tags
import qualified Neuron.Reactor as Reactor
import qualified Neuron.Zettelkasten.Graph as G
import qualified Neuron.Zettelkasten.Query as Q
import Relude
runQuery :: forall m env. (MonadApp m, MonadFail m, MonadApp m, MonadIO m, WithLog env Message m) => QueryCommand -> m ()
runQuery QueryCommand {..} = do
Cache.NeuronCache {..} <-
fmap Cache.stripCache $
if cached
then Cache.getCache
else do
Reactor.loadZettelkasten >>= \case
Left e -> fail $ toString e
Right (ch, _, _) -> pure ch
case query of
CliQuery_ById zid -> do
let result = G.getZettel zid neuroncacheGraph
bool printPrettyJson (printJsonLine . maybeToList) jsonl result
CliQuery_Zettels -> do
let result = G.getZettels neuroncacheGraph
bool printPrettyJson printJsonLine jsonl result
CliQuery_Tags -> do
let result = Set.unions $ Tags.getZettelTags <$> G.getZettels neuroncacheGraph
bool printPrettyJson (printJsonLine . Set.toList) jsonl result
CliQuery_ByTag tag -> do
let q = TagTree.mkDefaultTagQuery $ one $ TagTree.mkTagPatternFromTag tag
zs = G.getZettels neuroncacheGraph
result = Tags.zettelsByTag Tags.getZettelTags zs q
bool printPrettyJson printJsonLine jsonl result
CliQuery_Graph someQ ->
withSome someQ $ \q -> do
result <- either (fail . show) pure $ Q.runGraphQuery neuroncacheGraph q
bool printPrettyJson printJsonLine jsonl [Q.graphQueryResultJson q result neuroncacheErrors]
where
printJsonLine :: ToJSON a => [a] -> m ()
printJsonLine = mapM_ (putLBSLn . Aeson.encode)
printPrettyJson :: ToJSON a => a -> m ()
printPrettyJson =
putLBSLn
. AesonPretty.encodePretty'
AesonPretty.defConfig
AesonPretty.confCompare = compare
}
|
b50c8b40ee4161db9e6e92bd85b9485b7ad9aca5a6ae3e42ca0d8dad7d97aa03 | jesperes/aoc_erlang | aoc2020_day11.erl | %%%=============================================================================
%%% @doc Advent of code puzzle solution
%%% @end
%%%=============================================================================
-module(aoc2020_day11).
-behavior(aoc_puzzle).
-export([ parse/1
, solve1/1
, solve2/1
, info/0
]).
-include("aoc_puzzle.hrl").
%%------------------------------------------------------------------------------
%% @doc info/0
%% Returns info about this puzzle.
%% @end
%%------------------------------------------------------------------------------
-spec info() -> aoc_puzzle().
info() ->
#aoc_puzzle{ module = ?MODULE
, year = 2020
, day = 11
, name = "Seating System"
, expected = {2093, 1862}
, has_input_file = true
}.
%%==============================================================================
%% Types
%%==============================================================================
-type coord() :: { X :: integer()
, Y :: integer()
}.
-type grid() :: #{coord() => integer()}.
-type input_type() :: grid().
-type result1_type() :: any().
-type result2_type() :: result1_type().
%%------------------------------------------------------------------------------
%% @doc parse/1
%% Parses input file.
%% @end
%%------------------------------------------------------------------------------
-spec parse(Input :: binary()) -> input_type().
parse(Input) ->
to_map(string:tokens(binary_to_list(Input), "\n\r")).
%%------------------------------------------------------------------------------
%% @doc solve1/1
Solves part 1 . Receives parsed input as returned from parse/1 .
%% @end
%%------------------------------------------------------------------------------
-spec solve1(Grid :: input_type()) -> result1_type().
solve1(Grid) ->
iterate_until_same(Grid, fun compute_next1/3).
%%------------------------------------------------------------------------------
%% @doc solve2/1
Solves part 2 . Receives parsed input as returned from parse/1 .
%% @end
%%------------------------------------------------------------------------------
-spec solve2(Grid :: input_type()) -> result2_type().
solve2(Grid) ->
iterate_until_same(Grid, fun compute_next2/3).
%%==============================================================================
%% Internals
%%==============================================================================
%% Iterate until the grid does not change
iterate_until_same(Grid, Fun) ->
Next = iterate_one_step(Grid, Fun),
case Next =:= Grid of
true ->
maps:fold(fun(_, $#, Acc) -> Acc + 1;
(_, _, Acc) -> Acc
end, 0, Next);
false ->
iterate_until_same(Next, Fun)
end.
iterate_one_step(Grid, Fun) ->
maps:fold(
fun(K, V, Acc) ->
maps:put(K, Fun(K, V, Grid), Acc)
end, #{}, Grid).
%% Compute the next state of cell `V' at coordinate `Coord'.
compute_next1(Coord, V, OldGrid) ->
OccupiedAdj = occupied_adjacents(Coord, OldGrid),
case V of
$L when OccupiedAdj == 0 -> $#; % become occupied
$# when OccupiedAdj >= 4 -> $L; % become free
_ -> V % unchanged
end.
occupied_adjacents({X, Y}, Grid) ->
Deltas = [{-1, -1}, {0, -1}, {1, -1},
{-1, 0}, {1, 0},
{-1, 1}, {0, 1}, {1, 1}],
lists:foldl(
fun({Dx, Dy}, Acc) ->
case maps:get({X + Dx, Y + Dy}, Grid, undefined) of
$# -> Acc + 1;
_ -> Acc
end
end, 0, Deltas).
%% Compute the next state of cell `V' at coordinate `Coord'.
compute_next2(Coord, V, OldGrid) ->
VisibleAdj = visible_adjacents(Coord, OldGrid),
case V of
$L when VisibleAdj == 0 -> $#; % become occupied
$# when VisibleAdj >= 5 -> $L; % become free
_ -> V % unchanged
end.
visible_adjacents(Coord, Grid) ->
Deltas = [{-1, -1}, {0, -1}, {1, -1},
{-1, 0}, {1, 0},
{-1, 1}, {0, 1}, {1, 1}],
lists:foldl(
fun(Delta, Acc) ->
case find_first_in_direction(Coord, Delta, 1, Grid) of
{_, _} -> Acc + 1;
false -> Acc
end
end, 0, Deltas).
find_first_in_direction({X, Y} = Coord, {Dx, Dy} = Delta, Dist, Grid) ->
VisibleCoord = {X + Dx * Dist, Y + Dy * Dist},
case maps:get(VisibleCoord, Grid, undefined) of
$# -> VisibleCoord;
$. -> find_first_in_direction(Coord, Delta, Dist + 1, Grid);
_ -> false
end.
Parse input lines to a map
-spec to_map([string()]) -> grid().
to_map(Lines) ->
{_, Grid} =
lists:foldl(
fun(L, {Y, Map}) ->
{_, MapOut} =
lists:foldl(
fun(C, {X, Acc}) ->
{X + 1, maps:put({X, Y}, C, Acc)}
end, {0, Map}, L),
{Y + 1, MapOut}
end, {0, #{}}, Lines),
Grid.
%%%_* Emacs ====================================================================
%%% Local Variables:
%%% allout-layout: t
erlang - indent - level : 2
%%% End:
| null | https://raw.githubusercontent.com/jesperes/aoc_erlang/cd92fe4ae4203876e6d7d3e8ea55c73544faac78/src/2020/aoc2020_day11.erl | erlang | =============================================================================
@doc Advent of code puzzle solution
@end
=============================================================================
------------------------------------------------------------------------------
@doc info/0
Returns info about this puzzle.
@end
------------------------------------------------------------------------------
==============================================================================
Types
==============================================================================
------------------------------------------------------------------------------
@doc parse/1
Parses input file.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc solve1/1
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc solve2/1
@end
------------------------------------------------------------------------------
==============================================================================
Internals
==============================================================================
Iterate until the grid does not change
Compute the next state of cell `V' at coordinate `Coord'.
become occupied
become free
unchanged
Compute the next state of cell `V' at coordinate `Coord'.
become occupied
become free
unchanged
_* Emacs ====================================================================
Local Variables:
allout-layout: t
End: | -module(aoc2020_day11).
-behavior(aoc_puzzle).
-export([ parse/1
, solve1/1
, solve2/1
, info/0
]).
-include("aoc_puzzle.hrl").
-spec info() -> aoc_puzzle().
info() ->
#aoc_puzzle{ module = ?MODULE
, year = 2020
, day = 11
, name = "Seating System"
, expected = {2093, 1862}
, has_input_file = true
}.
-type coord() :: { X :: integer()
, Y :: integer()
}.
-type grid() :: #{coord() => integer()}.
-type input_type() :: grid().
-type result1_type() :: any().
-type result2_type() :: result1_type().
-spec parse(Input :: binary()) -> input_type().
parse(Input) ->
to_map(string:tokens(binary_to_list(Input), "\n\r")).
Solves part 1 . Receives parsed input as returned from parse/1 .
-spec solve1(Grid :: input_type()) -> result1_type().
solve1(Grid) ->
iterate_until_same(Grid, fun compute_next1/3).
Solves part 2 . Receives parsed input as returned from parse/1 .
-spec solve2(Grid :: input_type()) -> result2_type().
solve2(Grid) ->
iterate_until_same(Grid, fun compute_next2/3).
iterate_until_same(Grid, Fun) ->
Next = iterate_one_step(Grid, Fun),
case Next =:= Grid of
true ->
maps:fold(fun(_, $#, Acc) -> Acc + 1;
(_, _, Acc) -> Acc
end, 0, Next);
false ->
iterate_until_same(Next, Fun)
end.
iterate_one_step(Grid, Fun) ->
maps:fold(
fun(K, V, Acc) ->
maps:put(K, Fun(K, V, Grid), Acc)
end, #{}, Grid).
compute_next1(Coord, V, OldGrid) ->
OccupiedAdj = occupied_adjacents(Coord, OldGrid),
case V of
end.
occupied_adjacents({X, Y}, Grid) ->
Deltas = [{-1, -1}, {0, -1}, {1, -1},
{-1, 0}, {1, 0},
{-1, 1}, {0, 1}, {1, 1}],
lists:foldl(
fun({Dx, Dy}, Acc) ->
case maps:get({X + Dx, Y + Dy}, Grid, undefined) of
$# -> Acc + 1;
_ -> Acc
end
end, 0, Deltas).
compute_next2(Coord, V, OldGrid) ->
VisibleAdj = visible_adjacents(Coord, OldGrid),
case V of
end.
visible_adjacents(Coord, Grid) ->
Deltas = [{-1, -1}, {0, -1}, {1, -1},
{-1, 0}, {1, 0},
{-1, 1}, {0, 1}, {1, 1}],
lists:foldl(
fun(Delta, Acc) ->
case find_first_in_direction(Coord, Delta, 1, Grid) of
{_, _} -> Acc + 1;
false -> Acc
end
end, 0, Deltas).
find_first_in_direction({X, Y} = Coord, {Dx, Dy} = Delta, Dist, Grid) ->
VisibleCoord = {X + Dx * Dist, Y + Dy * Dist},
case maps:get(VisibleCoord, Grid, undefined) of
$# -> VisibleCoord;
$. -> find_first_in_direction(Coord, Delta, Dist + 1, Grid);
_ -> false
end.
Parse input lines to a map
-spec to_map([string()]) -> grid().
to_map(Lines) ->
{_, Grid} =
lists:foldl(
fun(L, {Y, Map}) ->
{_, MapOut} =
lists:foldl(
fun(C, {X, Acc}) ->
{X + 1, maps:put({X, Y}, C, Acc)}
end, {0, Map}, L),
{Y + 1, MapOut}
end, {0, #{}}, Lines),
Grid.
erlang - indent - level : 2
|
5aa34f3ecf73eb27491dca94e9a7726304da6e16a61303451c19909796b63d91 | racket/old-plt | update-binding-counts.scm | ;; This library is used by match.ss
(define-values (update-binding-counts update-binding-count)
(letrec
(
;;!(function update-binding-count
;; (form (update-binding-count render-list) -> list)
;; (contract list -> list))
;; This function is normally executed for its side effect of
;; setting the count for the number of times an expression used in
;; a test if found in the rest of the list of tests. This does
;; not only count occurrances of the exp in other tests but
;; whether the expression is also a sub expression in the other tests.
;; Arg:
;; render-list - a list of test structs
(update-binding-count
(lambda (render-list)
(define (inc-bind-count test)
(set-test-bind-count! test
(add1 (test-bind-count test))))
(if (null? render-list)
'()
(let ((cur-test (car render-list)))
(update-binding-count
(let loop ((l (cdr render-list)))
(cond ((null? l) '())
((>= (test-bind-count cur-test) 2) l)
((and (valid-for-let-binding (test-bind-exp cur-test))
(equal? (test-bind-exp cur-test)
(test-bind-exp (car l))))
(begin
(inc-bind-count cur-test)
(loop (cdr l))))
((sub-exp-contains (test-bind-exp cur-test)
(test-bind-exp (car l)))
(begin
(inc-bind-count cur-test)
(cons (car l) (loop (cdr l)))))
(else (cons (car l) (loop (cdr l)))))))))))
;;!(function valid-for-let-binding
;; (form (valid-for-let-binding exp) -> bool)
;; (contract s-exp -> bool)
;; (example (valid-for-let-binding 'x) -> #f))
This function is a predicate that if an expression
;; should be considered for let binding.
(valid-for-let-binding
(lambda (exp)
;; it must be a pair
;; the index must be an integer
'(match exp
(('vector-ref _ n) (number? n))
((? pair?) #t)
(_ #f))
;; the following is expanded fromt the above match expression
(let ((x exp))
(if (pair? x)
(if (and (equal? (car x) 'vector-ref)
(pair? (cdr x))
(pair? (cdr (cdr x)))
(null? (cdr (cdr (cdr x)))))
((lambda (n) (number? n)) (car (cdr (cdr x))))
((lambda () #t)))
((lambda () #f))))))
;;!(function sub-exp-contains
( form ( sub - exp - contains exp1 exp2 ) - > bool )
;; (contract (s-exp s-exp) -> bool)
( example ( sub - exp - contains ' ( cdr x ) ' ( car ) ) ) - > # t ) )
This function returns true if exp2 contains a sub - expression
;; that is equal? to exp1. For this function to work the subexp
must always be in the second position in a exp . This is a
;; convention that is followed throughout the match program.
(sub-exp-contains
(lambda (exp1 exp2)
'(match exp2
(() #f)
((_ sub-exp _ ...)
(if (and (valid-for-let-binding sub-exp)
(equal? sub-exp exp1))
#t
(sub-exp-contains exp1 sub-exp)))
(_ #f))
;; The following was expanded from the above match expression
(let ((x exp2))
(if (null? x)
((lambda () #f))
(if (and (pair? x) (pair? (cdr x)) (list? (cdr (cdr x))))
((lambda (sub-exp)
(if (and (pair? sub-exp)
(equal? sub-exp exp1))
#t
(sub-exp-contains exp1 sub-exp)))
(car (cdr x)))
((lambda () #f)))))))
;;!(function update-binding-counts
;; (form (update-binding-counts render-lists) -> list)
;; (contract list -> list))
;; This function calls update-binding-count for each render list
;; in the list of render lists. This is used mainly for its side
;; affects. The result is of no consequence.
(update-binding-counts
(lambda (render-lists)
(map update-binding-count (map car render-lists))))
)
(values update-binding-counts update-binding-count)))
| null | https://raw.githubusercontent.com/racket/old-plt/a580d75deae2a0d2f3d8a93bc3c4f8f1f619b5b7/collects/mzlib/private/plt-match/update-binding-counts.scm | scheme | This library is used by match.ss
!(function update-binding-count
(form (update-binding-count render-list) -> list)
(contract list -> list))
This function is normally executed for its side effect of
setting the count for the number of times an expression used in
a test if found in the rest of the list of tests. This does
not only count occurrances of the exp in other tests but
whether the expression is also a sub expression in the other tests.
Arg:
render-list - a list of test structs
!(function valid-for-let-binding
(form (valid-for-let-binding exp) -> bool)
(contract s-exp -> bool)
(example (valid-for-let-binding 'x) -> #f))
should be considered for let binding.
it must be a pair
the index must be an integer
the following is expanded fromt the above match expression
!(function sub-exp-contains
(contract (s-exp s-exp) -> bool)
that is equal? to exp1. For this function to work the subexp
convention that is followed throughout the match program.
The following was expanded from the above match expression
!(function update-binding-counts
(form (update-binding-counts render-lists) -> list)
(contract list -> list))
This function calls update-binding-count for each render list
in the list of render lists. This is used mainly for its side
affects. The result is of no consequence. |
(define-values (update-binding-counts update-binding-count)
(letrec
(
(update-binding-count
(lambda (render-list)
(define (inc-bind-count test)
(set-test-bind-count! test
(add1 (test-bind-count test))))
(if (null? render-list)
'()
(let ((cur-test (car render-list)))
(update-binding-count
(let loop ((l (cdr render-list)))
(cond ((null? l) '())
((>= (test-bind-count cur-test) 2) l)
((and (valid-for-let-binding (test-bind-exp cur-test))
(equal? (test-bind-exp cur-test)
(test-bind-exp (car l))))
(begin
(inc-bind-count cur-test)
(loop (cdr l))))
((sub-exp-contains (test-bind-exp cur-test)
(test-bind-exp (car l)))
(begin
(inc-bind-count cur-test)
(cons (car l) (loop (cdr l)))))
(else (cons (car l) (loop (cdr l)))))))))))
This function is a predicate that if an expression
(valid-for-let-binding
(lambda (exp)
'(match exp
(('vector-ref _ n) (number? n))
((? pair?) #t)
(_ #f))
(let ((x exp))
(if (pair? x)
(if (and (equal? (car x) 'vector-ref)
(pair? (cdr x))
(pair? (cdr (cdr x)))
(null? (cdr (cdr (cdr x)))))
((lambda (n) (number? n)) (car (cdr (cdr x))))
((lambda () #t)))
((lambda () #f))))))
( form ( sub - exp - contains exp1 exp2 ) - > bool )
( example ( sub - exp - contains ' ( cdr x ) ' ( car ) ) ) - > # t ) )
This function returns true if exp2 contains a sub - expression
must always be in the second position in a exp . This is a
(sub-exp-contains
(lambda (exp1 exp2)
'(match exp2
(() #f)
((_ sub-exp _ ...)
(if (and (valid-for-let-binding sub-exp)
(equal? sub-exp exp1))
#t
(sub-exp-contains exp1 sub-exp)))
(_ #f))
(let ((x exp2))
(if (null? x)
((lambda () #f))
(if (and (pair? x) (pair? (cdr x)) (list? (cdr (cdr x))))
((lambda (sub-exp)
(if (and (pair? sub-exp)
(equal? sub-exp exp1))
#t
(sub-exp-contains exp1 sub-exp)))
(car (cdr x)))
((lambda () #f)))))))
(update-binding-counts
(lambda (render-lists)
(map update-binding-count (map car render-lists))))
)
(values update-binding-counts update-binding-count)))
|
aec93583d0bc458596627041d9f45bf91f994c31f9afe0a8d941ff8a12537487 | neilprosser/mr-maestro | setup_test.clj | (ns maestro.setup-test
(:require [cheshire.core :as json]
[clj-time.core :as time]
[maestro.setup :refer :all]
[midje.sweet :refer :all]))
(fact "that we configurate Joda correctly"
(configure-joda)
(json/generate-string (time/now)) => truthy)
| null | https://raw.githubusercontent.com/neilprosser/mr-maestro/469790fd712262016729c1d83d4b4e11869237a2/test/maestro/setup_test.clj | clojure | (ns maestro.setup-test
(:require [cheshire.core :as json]
[clj-time.core :as time]
[maestro.setup :refer :all]
[midje.sweet :refer :all]))
(fact "that we configurate Joda correctly"
(configure-joda)
(json/generate-string (time/now)) => truthy)
| |
a21342b034a6e1357d4ae5f1e4445dedfb3cfa5d4e6bb00ce7731b1dcd765a22 | fujita-y/ypsilon | parameters.scm | #!nobacktrace
(library (srfi :39 parameters) (export make-parameter parameterize) (import (srfi srfi-39)))
| null | https://raw.githubusercontent.com/fujita-y/ypsilon/820aa1b0258eb1854172c7909aef7462bb0e2adb/sitelib/srfi/%253a39/parameters.scm | scheme | #!nobacktrace
(library (srfi :39 parameters) (export make-parameter parameterize) (import (srfi srfi-39)))
| |
801bef20eb2a6bdf22d4570f0dfad64ce79169289593404f7f06bf4e136f3df2 | yomimono/stitchcraft | notty_canvas.ml | open Stitchy.Types
open Canvas
let input =
let doc = "file from which to read. -, the default, is stdin." in
Cmdliner.Arg.(value & pos 0 string "-" & info [] ~doc)
let start_view = { Canvas__Controls.x_off = 0; y_off = 0; zoom = 1; block_display = `Symbol }
let disp input =
let open Lwt.Infix in
let initialize_pattern input =
match String.compare input "-" with
| 0 -> begin
try Yojson.Safe.from_channel stdin, false
with _ -> failwith "couldn't read input from stdin"
end
| _ ->
try Yojson.Safe.from_file input, true
with _ -> failwith "couldn't read input file"
in
let update_pattern input =
try Yojson.Safe.from_file input |> pattern_of_yojson
with _ -> failwith "reading updated file failed"
in
let aux () : unit Lwt.t =
let term = Notty_lwt.Term.create () in
Lwt_inotify.create () >>= fun inotify ->
let user_input_stream = Lwt_stream.map
(fun event -> `Terminal event)
(Notty_lwt.Term.events term)
in
let create_streams = function
| false -> Lwt.return user_input_stream
| true ->
Lwt_inotify.add_watch inotify input Inotify.([S_Close_write]) >>= fun _watch ->
let file_watch_stream = Lwt_stream.map
(fun _ -> `Pattern)
(Lwt_stream.from @@
fun () -> Lwt_inotify.read inotify >|= fun e -> Some e)
in
Lwt.return @@
Lwt_stream.choose [
user_input_stream;
file_watch_stream;
]
in
let (start_state, watch_input) = initialize_pattern input in
match start_state |> pattern_of_yojson with
| Error e -> failwith (Printf.sprintf "failed to parse input json: %s" e)
| Ok pattern ->
(* we don't care about the fabric part of the estimate,
* so it's OK to pass 0 for margin inches here *)
let thread_totals = Estimator.((materials ~margin_inches:0.
pattern).threads |> thread_totals) in
Notty_lwt.Term.image term @@ main_view pattern start_view thread_totals (Notty_lwt.Term.size term) >>= fun () ->
create_streams watch_input >>= fun stream ->
let rec loop (pattern : pattern) (view : Canvas__Controls.view) =
(Lwt_stream.last_new stream) >>= function
| `Pattern -> begin
match update_pattern input with
| Error _ -> loop pattern view
| Ok pattern ->
let size = Notty_lwt.Term.size term in
let thread_totals = Estimator.((materials ~margin_inches:0.
pattern).threads |> thread_totals) in
Notty_lwt.Term.image term (main_view pattern view thread_totals size) >>= fun () ->
loop pattern view
end
| `Terminal event ->
let size = Notty_lwt.Term.size term in
match step pattern view size event with
| None ->
Notty_lwt.Term.release term >>= fun () ->
Lwt_inotify.close inotify
| Some (refresh_pattern, view) ->
let thread_totals = Estimator.((materials ~margin_inches:0.
pattern).threads |> thread_totals) in
let pattern =
if refresh_pattern then begin
match update_pattern input with
| Ok new_pattern -> new_pattern
| Error _ -> pattern
end
else pattern
in
Notty_lwt.Term.image term (main_view pattern view thread_totals size) >>= fun () ->
loop pattern view
in
loop pattern start_view
in
Lwt_main.run @@ aux ()
let info =
let doc = "display/explore a cross-stitch pattern on the terminal" in
Cmdliner.Cmd.info "notty_canvas" ~doc
let disp_t = Cmdliner.Term.(const disp $ input)
let () =
exit @@ Cmdliner.Cmd.eval @@ Cmdliner.Cmd.v info disp_t
| null | https://raw.githubusercontent.com/yomimono/stitchcraft/6b57efa8a145541e976c9cb1f04ffde984b318bd/notty_canvas/src/notty_canvas.ml | ocaml | we don't care about the fabric part of the estimate,
* so it's OK to pass 0 for margin inches here | open Stitchy.Types
open Canvas
let input =
let doc = "file from which to read. -, the default, is stdin." in
Cmdliner.Arg.(value & pos 0 string "-" & info [] ~doc)
let start_view = { Canvas__Controls.x_off = 0; y_off = 0; zoom = 1; block_display = `Symbol }
let disp input =
let open Lwt.Infix in
let initialize_pattern input =
match String.compare input "-" with
| 0 -> begin
try Yojson.Safe.from_channel stdin, false
with _ -> failwith "couldn't read input from stdin"
end
| _ ->
try Yojson.Safe.from_file input, true
with _ -> failwith "couldn't read input file"
in
let update_pattern input =
try Yojson.Safe.from_file input |> pattern_of_yojson
with _ -> failwith "reading updated file failed"
in
let aux () : unit Lwt.t =
let term = Notty_lwt.Term.create () in
Lwt_inotify.create () >>= fun inotify ->
let user_input_stream = Lwt_stream.map
(fun event -> `Terminal event)
(Notty_lwt.Term.events term)
in
let create_streams = function
| false -> Lwt.return user_input_stream
| true ->
Lwt_inotify.add_watch inotify input Inotify.([S_Close_write]) >>= fun _watch ->
let file_watch_stream = Lwt_stream.map
(fun _ -> `Pattern)
(Lwt_stream.from @@
fun () -> Lwt_inotify.read inotify >|= fun e -> Some e)
in
Lwt.return @@
Lwt_stream.choose [
user_input_stream;
file_watch_stream;
]
in
let (start_state, watch_input) = initialize_pattern input in
match start_state |> pattern_of_yojson with
| Error e -> failwith (Printf.sprintf "failed to parse input json: %s" e)
| Ok pattern ->
let thread_totals = Estimator.((materials ~margin_inches:0.
pattern).threads |> thread_totals) in
Notty_lwt.Term.image term @@ main_view pattern start_view thread_totals (Notty_lwt.Term.size term) >>= fun () ->
create_streams watch_input >>= fun stream ->
let rec loop (pattern : pattern) (view : Canvas__Controls.view) =
(Lwt_stream.last_new stream) >>= function
| `Pattern -> begin
match update_pattern input with
| Error _ -> loop pattern view
| Ok pattern ->
let size = Notty_lwt.Term.size term in
let thread_totals = Estimator.((materials ~margin_inches:0.
pattern).threads |> thread_totals) in
Notty_lwt.Term.image term (main_view pattern view thread_totals size) >>= fun () ->
loop pattern view
end
| `Terminal event ->
let size = Notty_lwt.Term.size term in
match step pattern view size event with
| None ->
Notty_lwt.Term.release term >>= fun () ->
Lwt_inotify.close inotify
| Some (refresh_pattern, view) ->
let thread_totals = Estimator.((materials ~margin_inches:0.
pattern).threads |> thread_totals) in
let pattern =
if refresh_pattern then begin
match update_pattern input with
| Ok new_pattern -> new_pattern
| Error _ -> pattern
end
else pattern
in
Notty_lwt.Term.image term (main_view pattern view thread_totals size) >>= fun () ->
loop pattern view
in
loop pattern start_view
in
Lwt_main.run @@ aux ()
let info =
let doc = "display/explore a cross-stitch pattern on the terminal" in
Cmdliner.Cmd.info "notty_canvas" ~doc
let disp_t = Cmdliner.Term.(const disp $ input)
let () =
exit @@ Cmdliner.Cmd.eval @@ Cmdliner.Cmd.v info disp_t
|
5d85430a97605c263fa18345df89c38d0073094c4775b98c43a8bf2dd1db480d | geneweb/geneweb | dag.mli | TODOCP
module Pset : sig
type t = Gwdb.iper list
type elt = Gwdb.iper
val add : 'a -> 'a list -> 'a list
val empty : 'a list
val elements : 'a list -> 'a list
val mem : 'a -> 'a list -> bool
end
val get_dag_elems : Config.config -> Gwdb.base -> Gwdb.iper list
type ('a, 'b) sum = ('a, 'b) Def.choice
val make_dag :
Config.config ->
Gwdb.base ->
Gwdb.iper list ->
(Gwdb.iper, int) Def.choice Dag2html.dag
| null | https://raw.githubusercontent.com/geneweb/geneweb/747f43da396a706bd1da60d34c04493a190edf0f/lib/dag.mli | ocaml | TODOCP
module Pset : sig
type t = Gwdb.iper list
type elt = Gwdb.iper
val add : 'a -> 'a list -> 'a list
val empty : 'a list
val elements : 'a list -> 'a list
val mem : 'a -> 'a list -> bool
end
val get_dag_elems : Config.config -> Gwdb.base -> Gwdb.iper list
type ('a, 'b) sum = ('a, 'b) Def.choice
val make_dag :
Config.config ->
Gwdb.base ->
Gwdb.iper list ->
(Gwdb.iper, int) Def.choice Dag2html.dag
| |
68c0ccc4535853d88b6b02a26b9f10e3a50a2622d9cead7fcb544533c8db26a1 | rd--/hsc3 | numBuffers.help.hs | numBuffers ; the number of audio buffers available at the server ( by default 1024 , printing only )
poll (impulse kr 1 0) numBuffers 0 (label "numBuffers")
numBuffers
let f = 110 + numBuffers in sinOsc ar f 0 * 0.1
| null | https://raw.githubusercontent.com/rd--/hsc3/60cb422f0e2049f00b7e15076b2667b85ad8f638/Help/Ugen/numBuffers.help.hs | haskell | numBuffers ; the number of audio buffers available at the server ( by default 1024 , printing only )
poll (impulse kr 1 0) numBuffers 0 (label "numBuffers")
numBuffers
let f = 110 + numBuffers in sinOsc ar f 0 * 0.1
| |
a65b13748e912a69ee1349a0ffa25e731aa76a64d0952afd898a85248d8cef06 | philopon/apiary-benchmark | apiary.hs | # LANGUAGE QuasiQuotes #
# LANGUAGE CPP #
{-# LANGUAGE OverloadedStrings #-}
import System.Environment
import Web.Apiary
import Control.Concurrent (runInUnboundThread)
import Network.Wai.Handler.Warp (run)
import qualified Data.ByteString as S
import Control.Monad
#define SIMPLE(r) [capture|/deep/foo/bar/baz/r|] . method GET . action $ contentType "text/plain" >> bytes "deep"
main :: IO ()
main = do
port:_ <- getArgs
runApiary (runInUnboundThread . run (read port)) def $ do
[capture|/echo/hello-world|] . method GET . action $
contentType "text/plain" >> bytes "Hello World"
[capture|/echo/plain/s::S.ByteString/i::Int|] . method GET . action $ do
(s, i) <- [params|s,i|]
contentType "text/plain"
replicateM_ i (appendBytes s)
SIMPLE(0)
SIMPLE(1)
SIMPLE(2)
SIMPLE(3)
SIMPLE(4)
SIMPLE(5)
SIMPLE(6)
SIMPLE(7)
SIMPLE(8)
SIMPLE(9)
SIMPLE(10)
SIMPLE(11)
SIMPLE(12)
SIMPLE(13)
SIMPLE(14)
SIMPLE(15)
SIMPLE(16)
SIMPLE(17)
SIMPLE(18)
SIMPLE(19)
SIMPLE(20)
SIMPLE(21)
SIMPLE(22)
SIMPLE(23)
SIMPLE(24)
SIMPLE(25)
SIMPLE(26)
SIMPLE(27)
SIMPLE(28)
SIMPLE(29)
SIMPLE(30)
SIMPLE(31)
SIMPLE(32)
SIMPLE(33)
SIMPLE(34)
SIMPLE(35)
SIMPLE(36)
SIMPLE(37)
SIMPLE(38)
SIMPLE(39)
SIMPLE(40)
SIMPLE(41)
SIMPLE(42)
SIMPLE(43)
SIMPLE(44)
SIMPLE(45)
SIMPLE(46)
SIMPLE(47)
SIMPLE(48)
SIMPLE(49)
SIMPLE(50)
SIMPLE(51)
SIMPLE(52)
SIMPLE(53)
SIMPLE(54)
SIMPLE(55)
SIMPLE(56)
SIMPLE(57)
SIMPLE(58)
SIMPLE(59)
SIMPLE(60)
SIMPLE(61)
SIMPLE(62)
SIMPLE(63)
SIMPLE(64)
SIMPLE(65)
SIMPLE(66)
SIMPLE(67)
SIMPLE(68)
SIMPLE(69)
SIMPLE(70)
SIMPLE(71)
SIMPLE(72)
SIMPLE(73)
SIMPLE(74)
SIMPLE(75)
SIMPLE(76)
SIMPLE(77)
SIMPLE(78)
SIMPLE(79)
SIMPLE(80)
SIMPLE(81)
SIMPLE(82)
SIMPLE(83)
SIMPLE(84)
SIMPLE(85)
SIMPLE(86)
SIMPLE(87)
SIMPLE(88)
SIMPLE(89)
SIMPLE(90)
SIMPLE(91)
SIMPLE(92)
SIMPLE(93)
SIMPLE(94)
SIMPLE(95)
SIMPLE(96)
SIMPLE(97)
SIMPLE(98)
SIMPLE(99)
SIMPLE(100)
[capture|/after|] . method GET . action $
contentType "text/plain" >> bytes "after"
| null | https://raw.githubusercontent.com/philopon/apiary-benchmark/015322b87d1789682a8b9b91182ac2eaa2ba17e9/src/apiary.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE QuasiQuotes #
# LANGUAGE CPP #
import System.Environment
import Web.Apiary
import Control.Concurrent (runInUnboundThread)
import Network.Wai.Handler.Warp (run)
import qualified Data.ByteString as S
import Control.Monad
#define SIMPLE(r) [capture|/deep/foo/bar/baz/r|] . method GET . action $ contentType "text/plain" >> bytes "deep"
main :: IO ()
main = do
port:_ <- getArgs
runApiary (runInUnboundThread . run (read port)) def $ do
[capture|/echo/hello-world|] . method GET . action $
contentType "text/plain" >> bytes "Hello World"
[capture|/echo/plain/s::S.ByteString/i::Int|] . method GET . action $ do
(s, i) <- [params|s,i|]
contentType "text/plain"
replicateM_ i (appendBytes s)
SIMPLE(0)
SIMPLE(1)
SIMPLE(2)
SIMPLE(3)
SIMPLE(4)
SIMPLE(5)
SIMPLE(6)
SIMPLE(7)
SIMPLE(8)
SIMPLE(9)
SIMPLE(10)
SIMPLE(11)
SIMPLE(12)
SIMPLE(13)
SIMPLE(14)
SIMPLE(15)
SIMPLE(16)
SIMPLE(17)
SIMPLE(18)
SIMPLE(19)
SIMPLE(20)
SIMPLE(21)
SIMPLE(22)
SIMPLE(23)
SIMPLE(24)
SIMPLE(25)
SIMPLE(26)
SIMPLE(27)
SIMPLE(28)
SIMPLE(29)
SIMPLE(30)
SIMPLE(31)
SIMPLE(32)
SIMPLE(33)
SIMPLE(34)
SIMPLE(35)
SIMPLE(36)
SIMPLE(37)
SIMPLE(38)
SIMPLE(39)
SIMPLE(40)
SIMPLE(41)
SIMPLE(42)
SIMPLE(43)
SIMPLE(44)
SIMPLE(45)
SIMPLE(46)
SIMPLE(47)
SIMPLE(48)
SIMPLE(49)
SIMPLE(50)
SIMPLE(51)
SIMPLE(52)
SIMPLE(53)
SIMPLE(54)
SIMPLE(55)
SIMPLE(56)
SIMPLE(57)
SIMPLE(58)
SIMPLE(59)
SIMPLE(60)
SIMPLE(61)
SIMPLE(62)
SIMPLE(63)
SIMPLE(64)
SIMPLE(65)
SIMPLE(66)
SIMPLE(67)
SIMPLE(68)
SIMPLE(69)
SIMPLE(70)
SIMPLE(71)
SIMPLE(72)
SIMPLE(73)
SIMPLE(74)
SIMPLE(75)
SIMPLE(76)
SIMPLE(77)
SIMPLE(78)
SIMPLE(79)
SIMPLE(80)
SIMPLE(81)
SIMPLE(82)
SIMPLE(83)
SIMPLE(84)
SIMPLE(85)
SIMPLE(86)
SIMPLE(87)
SIMPLE(88)
SIMPLE(89)
SIMPLE(90)
SIMPLE(91)
SIMPLE(92)
SIMPLE(93)
SIMPLE(94)
SIMPLE(95)
SIMPLE(96)
SIMPLE(97)
SIMPLE(98)
SIMPLE(99)
SIMPLE(100)
[capture|/after|] . method GET . action $
contentType "text/plain" >> bytes "after"
|
4c9130f671d82c23038f47463bb52b60287789eac416c477998e5ec90ae46e84 | LambdaHack/LambdaHack | DungeonGen.hs | # LANGUAGE TupleSections #
-- | The dungeon generation routine. It creates empty dungeons, without
-- actors and without items, either lying on the floor or embedded inside tiles.
module Game.LambdaHack.Server.DungeonGen
( FreshDungeon(..), dungeonGen
#ifdef EXPOSE_INTERNAL
-- * Internal operations
, convertTileMaps, buildTileMap, anchorDown, buildLevel
, snapToStairList, placeDownStairs, levelFromCave
#endif
) where
import Prelude ()
import Game.LambdaHack.Core.Prelude
import qualified Control.Monad.Trans.State.Strict as St
import Data.Either (rights)
import qualified Data.EnumMap.Strict as EM
import qualified Data.IntMap.Strict as IM
import qualified Data.Text as T
import qualified Data.Text.IO as T
import System.IO (hFlush, stdout)
import System.IO.Unsafe (unsafePerformIO)
import qualified System.Random.SplitMix32 as SM
import Game.LambdaHack.Common.Area
import Game.LambdaHack.Common.Kind
import Game.LambdaHack.Common.Level
import Game.LambdaHack.Common.Point
import qualified Game.LambdaHack.Common.PointArray as PointArray
import qualified Game.LambdaHack.Common.Tile as Tile
import Game.LambdaHack.Common.Time
import Game.LambdaHack.Common.Types
import Game.LambdaHack.Content.CaveKind
import Game.LambdaHack.Content.ModeKind
import qualified Game.LambdaHack.Content.PlaceKind as PK
import Game.LambdaHack.Content.RuleKind
import Game.LambdaHack.Content.TileKind (TileKind)
import qualified Game.LambdaHack.Content.TileKind as TK
import qualified Game.LambdaHack.Core.Dice as Dice
import Game.LambdaHack.Core.Random
import Game.LambdaHack.Definition.Defs
import qualified Game.LambdaHack.Definition.DefsInternal as DefsInternal
import Game.LambdaHack.Server.DungeonGen.AreaRnd
import Game.LambdaHack.Server.DungeonGen.Cave
import Game.LambdaHack.Server.DungeonGen.Place
import Game.LambdaHack.Server.ServerOptions
convertTileMaps :: COps -> Bool -> Rnd (ContentId TileKind)
-> Maybe (Rnd (ContentId TileKind)) -> Area -> TileMapEM
-> Rnd TileMap
convertTileMaps COps{ corule=RuleContent{rWidthMax, rHeightMax}
, cotile
, coTileSpeedup }
areAllWalkable cdefTile mpickPassable darea ltile = do
let outerId = ouniqGroup cotile TK.S_UNKNOWN_OUTER_FENCE
runCdefTile :: (SM.SMGen, (Int, [(Int, ContentId TileKind)]))
-> ( ContentId TileKind
, (SM.SMGen, (Int, [(Int, ContentId TileKind)])) )
runCdefTile (gen1, (pI, assocs)) =
let p = toEnum pI
in if inside darea p
then case assocs of
(p2, t2) : rest | p2 == pI -> (t2, (gen1, (pI + 1, rest)))
_ -> let (tile, gen2) = St.runState cdefTile gen1
in (tile, (gen2, (pI + 1, assocs)))
else (outerId, (gen1, (pI + 1, assocs)))
runUnfold gen =
let (gen1, gen2) = SM.splitSMGen gen
in (PointArray.unfoldrNA
rWidthMax rHeightMax runCdefTile
(gen1, (0, IM.assocs $ EM.enumMapToIntMap ltile)), gen2)
converted1 <- St.state runUnfold
case mpickPassable of
_ | areAllWalkable -> return converted1 -- all walkable; passes OK
Nothing -> return converted1 -- no walkable tiles for filling the map
Just pickPassable -> do -- some tiles walkable, so ensure connectivity
let passes p array =
Tile.isWalkable coTileSpeedup (array PointArray.! p)
-- If no point blocks on both ends, then I can eventually go
-- from bottom to top of the map and from left to right
-- unless there are disconnected areas inside rooms).
blocksHorizontal (Point x y) array =
not (passes (Point (x + 1) y) array
|| passes (Point (x - 1) y) array)
blocksVertical (Point x y) array =
not (passes (Point x (y + 1)) array
|| passes (Point x (y - 1)) array)
activeArea = fromMaybe (error $ "" `showFailure` darea) $ shrink darea
connect included blocks walkableTile array =
let g p c = if inside activeArea p
&& included p
&& not (Tile.isEasyOpen coTileSpeedup c)
&& p `EM.notMember` ltile
&& blocks p array
then walkableTile
else c
in PointArray.imapA g array
walkable2 <- pickPassable
let converted2 = connect (even . px) blocksHorizontal walkable2 converted1
walkable3 <- pickPassable
let converted3 = connect (even . py) blocksVertical walkable3 converted2
walkable4 <- pickPassable
let converted4 =
connect (odd . px) blocksHorizontal walkable4 converted3
walkable5 <- pickPassable
let converted5 =
connect (odd . py) blocksVertical walkable5 converted4
return converted5
buildTileMap :: COps -> Cave -> Rnd TileMap
buildTileMap cops@COps{cotile, cocave} Cave{dkind, darea, dmap} = do
let CaveKind{cpassable, cdefTile} = okind cocave dkind
pickDefTile = fromMaybe (error $ "" `showFailure` cdefTile)
<$> opick cotile cdefTile (const True)
wcond = Tile.isEasyOpenKind
mpickPassable =
if cpassable
then Just $ fromMaybe (error $ "" `showFailure` cdefTile)
<$> opick cotile cdefTile wcond
else Nothing
nwcond = not . Tile.kindHasFeature TK.Walkable
areAllWalkable <- isNothing <$> opick cotile cdefTile nwcond
convertTileMaps cops areAllWalkable pickDefTile mpickPassable darea dmap
anchorDown :: Y
not 4 , asymmetric vs up , for staircase variety ;
symmetry kept for @cfenceApart@ caves , to save real estate
-- Create a level from a cave.
buildLevel :: COps -> ServerOptions
-> LevelId -> ContentId CaveKind -> CaveKind -> Int -> Int
-> Dice.AbsDepth -> [(Point, Text)]
-> Rnd (Level, [(Point, Text)])
buildLevel cops@COps{coplace, corule=RuleContent{..}} serverOptions
lid dkind kc doubleDownStairs singleDownStairs
totalDepth stairsFromUp = do
let d = if cfenceApart kc then 1 else 0
Simple rule for now : level has depth ( difficulty ) @abs lid@.
ldepth = Dice.AbsDepth $ abs $ fromEnum lid
darea =
let (lxPrev, lyPrev) = unzip $ map ((px &&& py) . fst) stairsFromUp
-- Stairs take some space, hence the additions.
lxMin = max 0
$ -4 - d + minimum (rWidthMax - 1 : lxPrev)
lxMax = min (rWidthMax - 1)
$ 4 + d + maximum (0 : lxPrev)
lyMin = max 0
$ -3 - d + minimum (rHeightMax - 1 : lyPrev)
lyMax = min (rHeightMax - 1)
$ 3 + d + maximum (0 : lyPrev)
-- Pick minimal cave size that fits all previous stairs.
xspan = max (lxMax - lxMin + 1) $ cXminSize kc
yspan = max (lyMax - lyMin + 1) $ cYminSize kc
x0 = min lxMin (rWidthMax - xspan)
y0 = min lyMin (rHeightMax - yspan)
in fromMaybe (error $ "" `showFailure` kc)
$ toArea (x0, y0, x0 + xspan - 1, y0 + yspan - 1)
(lstairsDouble, lstairsSingleUp) = splitAt doubleDownStairs stairsFromUp
pstairsSingleUp = map fst lstairsSingleUp
pstairsDouble = map fst lstairsDouble
pallUpStairs = pstairsDouble ++ pstairsSingleUp
boot = let (x0, y0, x1, y1) = fromArea darea
in rights $ map (snapToStairList 0 pallUpStairs)
[ Point (x0 + 4 + d) (y0 + 3 + d)
, Point (x1 - 4 - d) (y1 - anchorDown + 1) ]
fixedEscape <- case cescapeFreq kc of
[] -> return []
escapeFreq -> do
-- Escapes don't extend to other levels, so corners not harmful
-- (actually neither are the other restrictions inherited from stairs
-- placement, but we respect them to keep a uniform visual layout).
-- Allowing corners and generating before stars, because they are more
important that stairs ( except the first stairs , but they are guaranteed
-- unless the level has no incoming stairs, but if so, plenty of space).
mepos <- placeDownStairs "escape" True serverOptions lid
kc darea pallUpStairs boot
case mepos of
Just epos -> return [(epos, escapeFreq)]
Nothing -> return [] -- with some luck, there is an escape elsewhere
let pescape = map fst fixedEscape
pallUpAndEscape = pescape ++ pallUpStairs
addSingleDown :: [Point] -> Int -> Rnd [Point]
addSingleDown acc 0 = return acc
addSingleDown acc k = do
mpos <- placeDownStairs "stairs" False serverOptions lid
kc darea (pallUpAndEscape ++ acc) boot
case mpos of
Just pos -> addSingleDown (pos : acc) (k - 1)
Nothing -> return acc -- calling again won't change anything
pstairsSingleDown <- addSingleDown [] singleDownStairs
let freqDouble carried =
filter (\(gn, _) ->
carried `elem` T.words (DefsInternal.fromGroupName gn))
$ cstairFreq kc ++ cstairAllowed kc
fixedStairsDouble = map (second freqDouble) lstairsDouble
freqUp carried = renameFreqs (<+> "up") $ freqDouble carried
fixedStairsUp = map (second freqUp) lstairsSingleUp
freqDown = renameFreqs (<+> "down") $ cstairFreq kc
fixedStairsDown = map (, freqDown) pstairsSingleDown
pallExits = pallUpAndEscape ++ pstairsSingleDown
fixedCenters = EM.fromList $
fixedEscape ++ fixedStairsDouble ++ fixedStairsUp ++ fixedStairsDown
-- Avoid completely uniform levels (e.g., uniformly merged places).
bootExtra <- if EM.null fixedCenters then do
mpointExtra <-
placeDownStairs "extra boot" False serverOptions lid
kc darea pallExits boot
-- With sane content, @Nothing@ should never appear.
return $! maybeToList mpointExtra
else return []
let posUp Point{..} = Point (px - 1) py
posDn Point{..} = Point (px + 1) py
-- This and other places ensure there is always a continuous
-- staircase from bottom to top. This makes moving between empty
-- level much less boring. For new levels, it may be blocked by enemies
-- or not offer enough cover, so other staircases may be preferable.
lstair = ( map posUp $ pstairsDouble ++ pstairsSingleUp
, map posDn $ pstairsDouble ++ pstairsSingleDown )
cellSize <- castDiceXY ldepth totalDepth $ ccellSize kc
let subArea = fromMaybe (error $ "" `showFailure` kc) $ shrink darea
area = if cfenceApart kc then subArea else darea
(lgr, gs) = grid fixedCenters (boot ++ bootExtra) area cellSize
dsecret <- randomWord32
cave <- buildCave cops ldepth totalDepth darea dsecret dkind lgr gs bootExtra
cmap <- buildTileMap cops cave
-- The bang is needed to prevent caves memory drag until levels used.
let !lvl = levelFromCave cops cave ldepth cmap lstair pescape
stairCarried p0 =
let Place{qkind} = dstairs cave EM.! p0
freq = map (first $ T.words . tshow)
(PK.pfreq $ okind coplace qkind)
carriedAll = filter (\t -> any (\(ws, _) -> t `elem` ws) freq)
rstairWordCarried
in case carriedAll of
[t] -> (p0, t)
_ -> error $ "wrong carried stair word"
`showFailure` (freq, carriedAll, kc)
return (lvl, lstairsDouble ++ map stairCarried pstairsSingleDown)
snapToStairList :: Int -> [Point] -> Point -> Either Point Point
snapToStairList _ [] p = Right p
snapToStairList a (pos : rest) p =
let nx = if px pos > px p + 5 + a || px pos < px p - 5 - a
then px p
else px pos
ny = if py pos > py p + 3 + a || py pos < py p - 3 - a
then py p
else py pos
np = Point nx ny
in if np == pos then Left np else snapToStairList a rest np
-- Places yet another staircase (or escape), taking into account only
-- the already existing stairs.
placeDownStairs :: Text -> Bool -> ServerOptions -> LevelId
-> CaveKind -> Area -> [Point] -> [Point]
-> Rnd (Maybe Point)
placeDownStairs object cornerPermitted serverOptions lid
CaveKind{cminStairDist, cfenceApart} darea ps boot = do
let dist cmin p = all (\pos -> chessDist p pos > cmin) ps
(x0, y0, x1, y1) = fromArea darea
-- Stairs in corners often enlarge next caves, so refrain from
-- generating stairs, if only corner available (escapes special-cased).
-- The bottom-right corner is exempt, becuase far from messages
-- Also, avoid generating stairs at all on upper and left margins
-- to keep subsequent small levels away from messages on top-right.
rx = 9 -- enough to fit smallest stairs
ry = 6 -- enough to fit smallest stairs
wx = x1 - x0 + 1
wy = y1 - y0 + 1
notInCornerEtc Point{..} =
cornerPermitted
|| wx < 3 * rx + 3 || wy < 3 * ry + 3 -- everything is a corner
|| px > x0 + (wx - 3) `div` 3
&& py > y0 + (wy - 3) `div` 3
inCorner Point{..} = (px <= x0 + rx || px >= x1 - rx)
&& (py <= y0 + ry || py >= y1 - ry)
gpreference = if cornerPermitted then inCorner else notInCornerEtc
f p = case snapToStairList 0 ps p of
Left{} -> Nothing
Right np -> let nnp = either id id $ snapToStairList 0 boot np
in if notInCornerEtc nnp then Just nnp else Nothing
g p = case snapToStairList 2 ps p of
Left{} -> Nothing
Right np -> let nnp = either id id $ snapToStairList 2 boot np
in if gpreference nnp && dist cminStairDist nnp
then Just nnp
else Nothing
focusArea = let d = if cfenceApart then 1 else 0
in fromMaybe (error $ "" `showFailure` darea)
$ toArea ( x0 + 4 + d, y0 + 3 + d
, x1 - 4 - d, y1 - anchorDown + 1 )
mpos <- findPointInArea focusArea g 500 f
-- The message fits this debugging level:
let !_ = if isNothing mpos && sdumpInitRngs serverOptions
then unsafePerformIO $ do
T.hPutStrLn stdout $
"Failed to place" <+> object <+> "on level"
<+> tshow lid <> ", in" <+> tshow darea
hFlush stdout
-- Not really expensive, but shouldn't disrupt normal testing nor play.
#ifdef WITH_EXPENSIVE_ASSERTIONS
error "possible, but unexpected; alarm!"
#endif
else ()
return mpos
-- Build rudimentary level from a cave kind.
levelFromCave :: COps -> Cave -> Dice.AbsDepth
-> TileMap -> ([Point], [Point]) -> [Point]
-> Level
levelFromCave COps{coTileSpeedup} Cave{..} ldepth ltile lstair lescape =
let f n t | Tile.isExplorable coTileSpeedup t = n + 1
| otherwise = n
lexpl = PointArray.foldlA' f 0 ltile
in Level
{ lkind = dkind
, ldepth
, lfloor = EM.empty
, lembed = EM.empty
, lbig = EM.empty
, lproj = EM.empty
, ltile
, lentry = dentry
, larea = darea
, lsmell = EM.empty
, lstair
, lescape
, lseen = 0
, lexpl
, ltime = timeZero
, lnight = dnight
}
-- | Freshly generated and not yet populated dungeon.
data FreshDungeon = FreshDungeon
{ freshDungeon :: Dungeon -- ^ maps for all levels
, freshTotalDepth :: Dice.AbsDepth -- ^ absolute dungeon depth
}
-- | Generate the dungeon for a new game.
dungeonGen :: COps -> ServerOptions -> Caves -> Rnd FreshDungeon
dungeonGen cops@COps{cocave} serverOptions caves = do
let shuffleSegment :: ([Int], [GroupName CaveKind])
-> Rnd [(Int, GroupName CaveKind)]
shuffleSegment (ns, l) = assert (length ns == length l) $ do
lShuffled <- shuffle l
return $! zip ns lShuffled
cavesShuffled <- mapM shuffleSegment caves
let cavesFlat = concat cavesShuffled
absKeys = map (abs . fst) cavesFlat
freshTotalDepth = Dice.AbsDepth $ maximum $ 10 : absKeys
getCaveKindNum :: (Int, GroupName CaveKind)
-> Rnd ((LevelId, ContentId CaveKind, CaveKind), Int)
getCaveKindNum (ln, genName) = do
dkind <- fromMaybe (error $ "" `showFailure` genName)
<$> opick cocave genName (const True)
let kc = okind cocave dkind
ldepth = Dice.AbsDepth $ abs ln
maxStairsNum <- castDice ldepth freshTotalDepth $ cmaxStairsNum kc
return ((toEnum ln, dkind, kc), maxStairsNum)
caveKindNums <- mapM getCaveKindNum cavesFlat
let (caveKinds, caveNums) = unzip caveKindNums
caveNumNexts = zip caveNums $ drop 1 caveNums ++ [0]
placeStairs :: ([(Int, Int, Int)], Int)
-> (Int, Int)
-> ([(Int, Int, Int)], Int)
placeStairs (acc, nstairsFromUp) (maxStairsNum, maxStairsNumNext) =
let !_A1 = assert (nstairsFromUp <= maxStairsNum) ()
-- Any stairs coming from above are kept and if they exceed
-- @maxStairsNumNext@, the remainder ends here.
-- If they don't exceed the minimum of @maxStairsNum@
-- and @maxStairsNumNext@, the difference is filled up
-- with single downstairs. The computation below maximizes
-- the number of stairs at the cost of breaking some long
staircases , except for the first one , which is always kept .
-- Even without this exception, sometimes @maxStairsNum@
-- could not be reached.
doubleKept =
minimum [1, nstairsFromUp, maxStairsNum, maxStairsNumNext]
nstairsFromUp1 = nstairsFromUp - doubleKept
maxStairsNum1 = maxStairsNum - doubleKept
maxStairsNumNext1 = maxStairsNumNext - doubleKept
singleDownStairs =
min maxStairsNumNext1 $ maxStairsNum1 - nstairsFromUp1
remainingNext = maxStairsNumNext1 - singleDownStairs
doubleDownStairs = doubleKept
+ min nstairsFromUp1 remainingNext
!_A2 = assert (singleDownStairs >= 0) ()
!_A3 = assert (doubleDownStairs >= doubleKept) ()
in ( (nstairsFromUp, doubleDownStairs, singleDownStairs) : acc
, doubleDownStairs + singleDownStairs )
(caveStairs, nstairsFromUpLast) = foldl' placeStairs ([], 0) caveNumNexts
caveZipped = assert (nstairsFromUpLast == 0)
$ zip caveKinds (reverse caveStairs)
placeCaveKind :: ([(LevelId, Level)], [(Point, Text)])
-> ( (LevelId, ContentId CaveKind, CaveKind)
, (Int, Int, Int) )
-> Rnd ([(LevelId, Level)], [(Point, Text)])
placeCaveKind (lvls, stairsFromUp)
( (lid, dkind, kc)
, (nstairsFromUp, doubleDownStairs, singleDownStairs) ) = do
let !_A = assert (length stairsFromUp == nstairsFromUp) ()
(newLevel, ldown2) <-
-- lstairUp for the next level is lstairDown for the current level
buildLevel cops serverOptions
lid dkind kc doubleDownStairs singleDownStairs
freshTotalDepth stairsFromUp
return ((lid, newLevel) : lvls, ldown2)
(levels, stairsFromUpLast) <- foldlM' placeCaveKind ([], []) caveZipped
let freshDungeon = assert (null stairsFromUpLast) $ EM.fromList levels
return $! FreshDungeon{..}
| null | https://raw.githubusercontent.com/LambdaHack/LambdaHack/824351574a4e7cd68aa4eec4d7ec406ec6b22486/engine-src/Game/LambdaHack/Server/DungeonGen.hs | haskell | | The dungeon generation routine. It creates empty dungeons, without
actors and without items, either lying on the floor or embedded inside tiles.
* Internal operations
all walkable; passes OK
no walkable tiles for filling the map
some tiles walkable, so ensure connectivity
If no point blocks on both ends, then I can eventually go
from bottom to top of the map and from left to right
unless there are disconnected areas inside rooms).
Create a level from a cave.
Stairs take some space, hence the additions.
Pick minimal cave size that fits all previous stairs.
Escapes don't extend to other levels, so corners not harmful
(actually neither are the other restrictions inherited from stairs
placement, but we respect them to keep a uniform visual layout).
Allowing corners and generating before stars, because they are more
unless the level has no incoming stairs, but if so, plenty of space).
with some luck, there is an escape elsewhere
calling again won't change anything
Avoid completely uniform levels (e.g., uniformly merged places).
With sane content, @Nothing@ should never appear.
This and other places ensure there is always a continuous
staircase from bottom to top. This makes moving between empty
level much less boring. For new levels, it may be blocked by enemies
or not offer enough cover, so other staircases may be preferable.
The bang is needed to prevent caves memory drag until levels used.
Places yet another staircase (or escape), taking into account only
the already existing stairs.
Stairs in corners often enlarge next caves, so refrain from
generating stairs, if only corner available (escapes special-cased).
The bottom-right corner is exempt, becuase far from messages
Also, avoid generating stairs at all on upper and left margins
to keep subsequent small levels away from messages on top-right.
enough to fit smallest stairs
enough to fit smallest stairs
everything is a corner
The message fits this debugging level:
Not really expensive, but shouldn't disrupt normal testing nor play.
Build rudimentary level from a cave kind.
| Freshly generated and not yet populated dungeon.
^ maps for all levels
^ absolute dungeon depth
| Generate the dungeon for a new game.
Any stairs coming from above are kept and if they exceed
@maxStairsNumNext@, the remainder ends here.
If they don't exceed the minimum of @maxStairsNum@
and @maxStairsNumNext@, the difference is filled up
with single downstairs. The computation below maximizes
the number of stairs at the cost of breaking some long
Even without this exception, sometimes @maxStairsNum@
could not be reached.
lstairUp for the next level is lstairDown for the current level | # LANGUAGE TupleSections #
module Game.LambdaHack.Server.DungeonGen
( FreshDungeon(..), dungeonGen
#ifdef EXPOSE_INTERNAL
, convertTileMaps, buildTileMap, anchorDown, buildLevel
, snapToStairList, placeDownStairs, levelFromCave
#endif
) where
import Prelude ()
import Game.LambdaHack.Core.Prelude
import qualified Control.Monad.Trans.State.Strict as St
import Data.Either (rights)
import qualified Data.EnumMap.Strict as EM
import qualified Data.IntMap.Strict as IM
import qualified Data.Text as T
import qualified Data.Text.IO as T
import System.IO (hFlush, stdout)
import System.IO.Unsafe (unsafePerformIO)
import qualified System.Random.SplitMix32 as SM
import Game.LambdaHack.Common.Area
import Game.LambdaHack.Common.Kind
import Game.LambdaHack.Common.Level
import Game.LambdaHack.Common.Point
import qualified Game.LambdaHack.Common.PointArray as PointArray
import qualified Game.LambdaHack.Common.Tile as Tile
import Game.LambdaHack.Common.Time
import Game.LambdaHack.Common.Types
import Game.LambdaHack.Content.CaveKind
import Game.LambdaHack.Content.ModeKind
import qualified Game.LambdaHack.Content.PlaceKind as PK
import Game.LambdaHack.Content.RuleKind
import Game.LambdaHack.Content.TileKind (TileKind)
import qualified Game.LambdaHack.Content.TileKind as TK
import qualified Game.LambdaHack.Core.Dice as Dice
import Game.LambdaHack.Core.Random
import Game.LambdaHack.Definition.Defs
import qualified Game.LambdaHack.Definition.DefsInternal as DefsInternal
import Game.LambdaHack.Server.DungeonGen.AreaRnd
import Game.LambdaHack.Server.DungeonGen.Cave
import Game.LambdaHack.Server.DungeonGen.Place
import Game.LambdaHack.Server.ServerOptions
convertTileMaps :: COps -> Bool -> Rnd (ContentId TileKind)
-> Maybe (Rnd (ContentId TileKind)) -> Area -> TileMapEM
-> Rnd TileMap
convertTileMaps COps{ corule=RuleContent{rWidthMax, rHeightMax}
, cotile
, coTileSpeedup }
areAllWalkable cdefTile mpickPassable darea ltile = do
let outerId = ouniqGroup cotile TK.S_UNKNOWN_OUTER_FENCE
runCdefTile :: (SM.SMGen, (Int, [(Int, ContentId TileKind)]))
-> ( ContentId TileKind
, (SM.SMGen, (Int, [(Int, ContentId TileKind)])) )
runCdefTile (gen1, (pI, assocs)) =
let p = toEnum pI
in if inside darea p
then case assocs of
(p2, t2) : rest | p2 == pI -> (t2, (gen1, (pI + 1, rest)))
_ -> let (tile, gen2) = St.runState cdefTile gen1
in (tile, (gen2, (pI + 1, assocs)))
else (outerId, (gen1, (pI + 1, assocs)))
runUnfold gen =
let (gen1, gen2) = SM.splitSMGen gen
in (PointArray.unfoldrNA
rWidthMax rHeightMax runCdefTile
(gen1, (0, IM.assocs $ EM.enumMapToIntMap ltile)), gen2)
converted1 <- St.state runUnfold
case mpickPassable of
let passes p array =
Tile.isWalkable coTileSpeedup (array PointArray.! p)
blocksHorizontal (Point x y) array =
not (passes (Point (x + 1) y) array
|| passes (Point (x - 1) y) array)
blocksVertical (Point x y) array =
not (passes (Point x (y + 1)) array
|| passes (Point x (y - 1)) array)
activeArea = fromMaybe (error $ "" `showFailure` darea) $ shrink darea
connect included blocks walkableTile array =
let g p c = if inside activeArea p
&& included p
&& not (Tile.isEasyOpen coTileSpeedup c)
&& p `EM.notMember` ltile
&& blocks p array
then walkableTile
else c
in PointArray.imapA g array
walkable2 <- pickPassable
let converted2 = connect (even . px) blocksHorizontal walkable2 converted1
walkable3 <- pickPassable
let converted3 = connect (even . py) blocksVertical walkable3 converted2
walkable4 <- pickPassable
let converted4 =
connect (odd . px) blocksHorizontal walkable4 converted3
walkable5 <- pickPassable
let converted5 =
connect (odd . py) blocksVertical walkable5 converted4
return converted5
buildTileMap :: COps -> Cave -> Rnd TileMap
buildTileMap cops@COps{cotile, cocave} Cave{dkind, darea, dmap} = do
let CaveKind{cpassable, cdefTile} = okind cocave dkind
pickDefTile = fromMaybe (error $ "" `showFailure` cdefTile)
<$> opick cotile cdefTile (const True)
wcond = Tile.isEasyOpenKind
mpickPassable =
if cpassable
then Just $ fromMaybe (error $ "" `showFailure` cdefTile)
<$> opick cotile cdefTile wcond
else Nothing
nwcond = not . Tile.kindHasFeature TK.Walkable
areAllWalkable <- isNothing <$> opick cotile cdefTile nwcond
convertTileMaps cops areAllWalkable pickDefTile mpickPassable darea dmap
anchorDown :: Y
not 4 , asymmetric vs up , for staircase variety ;
symmetry kept for @cfenceApart@ caves , to save real estate
buildLevel :: COps -> ServerOptions
-> LevelId -> ContentId CaveKind -> CaveKind -> Int -> Int
-> Dice.AbsDepth -> [(Point, Text)]
-> Rnd (Level, [(Point, Text)])
buildLevel cops@COps{coplace, corule=RuleContent{..}} serverOptions
lid dkind kc doubleDownStairs singleDownStairs
totalDepth stairsFromUp = do
let d = if cfenceApart kc then 1 else 0
Simple rule for now : level has depth ( difficulty ) @abs lid@.
ldepth = Dice.AbsDepth $ abs $ fromEnum lid
darea =
let (lxPrev, lyPrev) = unzip $ map ((px &&& py) . fst) stairsFromUp
lxMin = max 0
$ -4 - d + minimum (rWidthMax - 1 : lxPrev)
lxMax = min (rWidthMax - 1)
$ 4 + d + maximum (0 : lxPrev)
lyMin = max 0
$ -3 - d + minimum (rHeightMax - 1 : lyPrev)
lyMax = min (rHeightMax - 1)
$ 3 + d + maximum (0 : lyPrev)
xspan = max (lxMax - lxMin + 1) $ cXminSize kc
yspan = max (lyMax - lyMin + 1) $ cYminSize kc
x0 = min lxMin (rWidthMax - xspan)
y0 = min lyMin (rHeightMax - yspan)
in fromMaybe (error $ "" `showFailure` kc)
$ toArea (x0, y0, x0 + xspan - 1, y0 + yspan - 1)
(lstairsDouble, lstairsSingleUp) = splitAt doubleDownStairs stairsFromUp
pstairsSingleUp = map fst lstairsSingleUp
pstairsDouble = map fst lstairsDouble
pallUpStairs = pstairsDouble ++ pstairsSingleUp
boot = let (x0, y0, x1, y1) = fromArea darea
in rights $ map (snapToStairList 0 pallUpStairs)
[ Point (x0 + 4 + d) (y0 + 3 + d)
, Point (x1 - 4 - d) (y1 - anchorDown + 1) ]
fixedEscape <- case cescapeFreq kc of
[] -> return []
escapeFreq -> do
important that stairs ( except the first stairs , but they are guaranteed
mepos <- placeDownStairs "escape" True serverOptions lid
kc darea pallUpStairs boot
case mepos of
Just epos -> return [(epos, escapeFreq)]
let pescape = map fst fixedEscape
pallUpAndEscape = pescape ++ pallUpStairs
addSingleDown :: [Point] -> Int -> Rnd [Point]
addSingleDown acc 0 = return acc
addSingleDown acc k = do
mpos <- placeDownStairs "stairs" False serverOptions lid
kc darea (pallUpAndEscape ++ acc) boot
case mpos of
Just pos -> addSingleDown (pos : acc) (k - 1)
pstairsSingleDown <- addSingleDown [] singleDownStairs
let freqDouble carried =
filter (\(gn, _) ->
carried `elem` T.words (DefsInternal.fromGroupName gn))
$ cstairFreq kc ++ cstairAllowed kc
fixedStairsDouble = map (second freqDouble) lstairsDouble
freqUp carried = renameFreqs (<+> "up") $ freqDouble carried
fixedStairsUp = map (second freqUp) lstairsSingleUp
freqDown = renameFreqs (<+> "down") $ cstairFreq kc
fixedStairsDown = map (, freqDown) pstairsSingleDown
pallExits = pallUpAndEscape ++ pstairsSingleDown
fixedCenters = EM.fromList $
fixedEscape ++ fixedStairsDouble ++ fixedStairsUp ++ fixedStairsDown
bootExtra <- if EM.null fixedCenters then do
mpointExtra <-
placeDownStairs "extra boot" False serverOptions lid
kc darea pallExits boot
return $! maybeToList mpointExtra
else return []
let posUp Point{..} = Point (px - 1) py
posDn Point{..} = Point (px + 1) py
lstair = ( map posUp $ pstairsDouble ++ pstairsSingleUp
, map posDn $ pstairsDouble ++ pstairsSingleDown )
cellSize <- castDiceXY ldepth totalDepth $ ccellSize kc
let subArea = fromMaybe (error $ "" `showFailure` kc) $ shrink darea
area = if cfenceApart kc then subArea else darea
(lgr, gs) = grid fixedCenters (boot ++ bootExtra) area cellSize
dsecret <- randomWord32
cave <- buildCave cops ldepth totalDepth darea dsecret dkind lgr gs bootExtra
cmap <- buildTileMap cops cave
let !lvl = levelFromCave cops cave ldepth cmap lstair pescape
stairCarried p0 =
let Place{qkind} = dstairs cave EM.! p0
freq = map (first $ T.words . tshow)
(PK.pfreq $ okind coplace qkind)
carriedAll = filter (\t -> any (\(ws, _) -> t `elem` ws) freq)
rstairWordCarried
in case carriedAll of
[t] -> (p0, t)
_ -> error $ "wrong carried stair word"
`showFailure` (freq, carriedAll, kc)
return (lvl, lstairsDouble ++ map stairCarried pstairsSingleDown)
snapToStairList :: Int -> [Point] -> Point -> Either Point Point
snapToStairList _ [] p = Right p
snapToStairList a (pos : rest) p =
let nx = if px pos > px p + 5 + a || px pos < px p - 5 - a
then px p
else px pos
ny = if py pos > py p + 3 + a || py pos < py p - 3 - a
then py p
else py pos
np = Point nx ny
in if np == pos then Left np else snapToStairList a rest np
placeDownStairs :: Text -> Bool -> ServerOptions -> LevelId
-> CaveKind -> Area -> [Point] -> [Point]
-> Rnd (Maybe Point)
placeDownStairs object cornerPermitted serverOptions lid
CaveKind{cminStairDist, cfenceApart} darea ps boot = do
let dist cmin p = all (\pos -> chessDist p pos > cmin) ps
(x0, y0, x1, y1) = fromArea darea
wx = x1 - x0 + 1
wy = y1 - y0 + 1
notInCornerEtc Point{..} =
cornerPermitted
|| px > x0 + (wx - 3) `div` 3
&& py > y0 + (wy - 3) `div` 3
inCorner Point{..} = (px <= x0 + rx || px >= x1 - rx)
&& (py <= y0 + ry || py >= y1 - ry)
gpreference = if cornerPermitted then inCorner else notInCornerEtc
f p = case snapToStairList 0 ps p of
Left{} -> Nothing
Right np -> let nnp = either id id $ snapToStairList 0 boot np
in if notInCornerEtc nnp then Just nnp else Nothing
g p = case snapToStairList 2 ps p of
Left{} -> Nothing
Right np -> let nnp = either id id $ snapToStairList 2 boot np
in if gpreference nnp && dist cminStairDist nnp
then Just nnp
else Nothing
focusArea = let d = if cfenceApart then 1 else 0
in fromMaybe (error $ "" `showFailure` darea)
$ toArea ( x0 + 4 + d, y0 + 3 + d
, x1 - 4 - d, y1 - anchorDown + 1 )
mpos <- findPointInArea focusArea g 500 f
let !_ = if isNothing mpos && sdumpInitRngs serverOptions
then unsafePerformIO $ do
T.hPutStrLn stdout $
"Failed to place" <+> object <+> "on level"
<+> tshow lid <> ", in" <+> tshow darea
hFlush stdout
#ifdef WITH_EXPENSIVE_ASSERTIONS
error "possible, but unexpected; alarm!"
#endif
else ()
return mpos
levelFromCave :: COps -> Cave -> Dice.AbsDepth
-> TileMap -> ([Point], [Point]) -> [Point]
-> Level
levelFromCave COps{coTileSpeedup} Cave{..} ldepth ltile lstair lescape =
let f n t | Tile.isExplorable coTileSpeedup t = n + 1
| otherwise = n
lexpl = PointArray.foldlA' f 0 ltile
in Level
{ lkind = dkind
, ldepth
, lfloor = EM.empty
, lembed = EM.empty
, lbig = EM.empty
, lproj = EM.empty
, ltile
, lentry = dentry
, larea = darea
, lsmell = EM.empty
, lstair
, lescape
, lseen = 0
, lexpl
, ltime = timeZero
, lnight = dnight
}
data FreshDungeon = FreshDungeon
}
dungeonGen :: COps -> ServerOptions -> Caves -> Rnd FreshDungeon
dungeonGen cops@COps{cocave} serverOptions caves = do
let shuffleSegment :: ([Int], [GroupName CaveKind])
-> Rnd [(Int, GroupName CaveKind)]
shuffleSegment (ns, l) = assert (length ns == length l) $ do
lShuffled <- shuffle l
return $! zip ns lShuffled
cavesShuffled <- mapM shuffleSegment caves
let cavesFlat = concat cavesShuffled
absKeys = map (abs . fst) cavesFlat
freshTotalDepth = Dice.AbsDepth $ maximum $ 10 : absKeys
getCaveKindNum :: (Int, GroupName CaveKind)
-> Rnd ((LevelId, ContentId CaveKind, CaveKind), Int)
getCaveKindNum (ln, genName) = do
dkind <- fromMaybe (error $ "" `showFailure` genName)
<$> opick cocave genName (const True)
let kc = okind cocave dkind
ldepth = Dice.AbsDepth $ abs ln
maxStairsNum <- castDice ldepth freshTotalDepth $ cmaxStairsNum kc
return ((toEnum ln, dkind, kc), maxStairsNum)
caveKindNums <- mapM getCaveKindNum cavesFlat
let (caveKinds, caveNums) = unzip caveKindNums
caveNumNexts = zip caveNums $ drop 1 caveNums ++ [0]
placeStairs :: ([(Int, Int, Int)], Int)
-> (Int, Int)
-> ([(Int, Int, Int)], Int)
placeStairs (acc, nstairsFromUp) (maxStairsNum, maxStairsNumNext) =
let !_A1 = assert (nstairsFromUp <= maxStairsNum) ()
staircases , except for the first one , which is always kept .
doubleKept =
minimum [1, nstairsFromUp, maxStairsNum, maxStairsNumNext]
nstairsFromUp1 = nstairsFromUp - doubleKept
maxStairsNum1 = maxStairsNum - doubleKept
maxStairsNumNext1 = maxStairsNumNext - doubleKept
singleDownStairs =
min maxStairsNumNext1 $ maxStairsNum1 - nstairsFromUp1
remainingNext = maxStairsNumNext1 - singleDownStairs
doubleDownStairs = doubleKept
+ min nstairsFromUp1 remainingNext
!_A2 = assert (singleDownStairs >= 0) ()
!_A3 = assert (doubleDownStairs >= doubleKept) ()
in ( (nstairsFromUp, doubleDownStairs, singleDownStairs) : acc
, doubleDownStairs + singleDownStairs )
(caveStairs, nstairsFromUpLast) = foldl' placeStairs ([], 0) caveNumNexts
caveZipped = assert (nstairsFromUpLast == 0)
$ zip caveKinds (reverse caveStairs)
placeCaveKind :: ([(LevelId, Level)], [(Point, Text)])
-> ( (LevelId, ContentId CaveKind, CaveKind)
, (Int, Int, Int) )
-> Rnd ([(LevelId, Level)], [(Point, Text)])
placeCaveKind (lvls, stairsFromUp)
( (lid, dkind, kc)
, (nstairsFromUp, doubleDownStairs, singleDownStairs) ) = do
let !_A = assert (length stairsFromUp == nstairsFromUp) ()
(newLevel, ldown2) <-
buildLevel cops serverOptions
lid dkind kc doubleDownStairs singleDownStairs
freshTotalDepth stairsFromUp
return ((lid, newLevel) : lvls, ldown2)
(levels, stairsFromUpLast) <- foldlM' placeCaveKind ([], []) caveZipped
let freshDungeon = assert (null stairsFromUpLast) $ EM.fromList levels
return $! FreshDungeon{..}
|
4bd589e08962d684397722e67f97432cad2990c22e5be5e87274c13f3f2d64cd | MastodonC/kixi.datastore | dynamodb.clj | (ns kixi.datastore.metadatastore.dynamodb
(:require [com.stuartsierra.component :as component]
[kixi.comms :as c]
[kixi.datastore
[communication-specs :as cs]
[dynamodb :as db :refer [migrate]]
[metadatastore :as md :refer [MetaDataStore]]]
[taoensso
[encore :refer [get-subvector]]
[timbre :as timbre :refer [info error warn]]]
[kixi.datastore.metadatastore :as ms]
[taoensso.timbre :as log])
(:import com.amazonaws.services.dynamodbv2.model.ConditionalCheckFailedException))
(def id-col (db/dynamo-col ::md/id))
(def all-sharing-columns
(mapv
#(db/dynamo-col [::md/sharing %])
md/activities))
(defn primary-metadata-table
[profile]
(str profile "-kixi.datastore-metadatastore"))
(defn activity-metadata-table
[profile]
(str profile "-kixi.datastore-metadatastore.activity"))
(defn activity-metadata-created-index
[]
"provenance-created")
(def activity-table-pk :groupid-activity)
(defn activity-table-id
[group-id activity]
(str group-id "_" activity))
(def activity-table-projection
#{::md/id ::md/provenance ::md/schema ::md/file-type ::md/name ::md/description})
(defn sharing-columns->sets
[md]
(reduce
(fn [acc a]
(if (get-in acc [::md/sharing a])
(update-in acc [::md/sharing a] set)
acc))
md
md/activities))
(defmulti update-metadata-processor
(fn [conn update-event]
(::cs/file-metadata-update-type update-event)))
(defn insert-activity-row
[conn group-id activity metadata]
(let [id (activity-table-id group-id activity)
projection (assoc
(select-keys metadata activity-table-projection)
activity-table-pk id)]
(try
(db/put-item conn
(activity-metadata-table (:profile conn))
projection)
(catch ConditionalCheckFailedException e
(warn e "Activity row already exists: " id projection)))))
(defn remove-activity-row
[conn group-id activity md-id]
(let [id (activity-table-id group-id activity)]
(try
(db/delete-item conn
(activity-metadata-table (:profile conn))
{activity-table-pk id
id-col md-id})
(catch Exception e
(error e "Failed to delete activity row: " id)))))
(defmethod update-metadata-processor ::cs/file-metadata-created
[conn update-event]
(let [metadata (::md/file-metadata update-event)]
(info "Create: " metadata)
(try
(db/insert-data conn
(primary-metadata-table (:profile conn))
id-col
(sharing-columns->sets
metadata))
(catch ConditionalCheckFailedException e
(log/warn e "Metadata already exists: " metadata)))
(doseq [activity (keys (::md/sharing metadata))]
(doseq [group-id (get-in metadata [::md/sharing activity])]
(insert-activity-row conn group-id activity metadata)))))
(defmethod update-metadata-processor ::cs/file-metadata-structural-validation-checked
[conn update-event]
(info "Update: " update-event)
(db/merge-data conn
(primary-metadata-table (:profile conn))
id-col
(::md/id update-event)
(select-keys update-event
[::md/structural-validation])))
(defmethod update-metadata-processor ::cs/file-metadata-segmentation-add
[conn update-event]
(info "Update: " update-event)
(comment "This implementation is not idempotent, need a check to prevent repeat segement adds"
(db/append-list conn
(primary-metadata-table (:profile conn))
id-col
(::md/id update-event)
:add
[::md/segmentations]
(:kixi.group/id update-event))))
(defn update-sharing
[conn update-event]
(info "Update Share: " update-event)
(let [update-fn (case (::md/sharing-update update-event)
::md/sharing-conj :conj
::md/sharing-disj :disj)
metadata-id (::md/id update-event)]
(db/update-set conn
(primary-metadata-table (:profile conn))
id-col
metadata-id
update-fn
[::md/sharing (::md/activity update-event)]
(:kixi.group/id update-event))
(case (::md/sharing-update update-event)
::md/sharing-conj (let [metadata (db/get-item-ignore-tombstone
conn
(primary-metadata-table (:profile conn))
id-col
metadata-id)]
(insert-activity-row conn (:kixi.group/id update-event) (::md/activity update-event) metadata))
::md/sharing-disj (remove-activity-row conn (:kixi.group/id update-event) (::md/activity update-event) metadata-id))))
(defmethod update-metadata-processor ::cs/file-metadata-sharing-updated
[conn update-event]
(update-sharing conn update-event))
(defn dissoc-nonupdates
[md]
(reduce
(fn [acc [k v]]
(if (and (namespace k) (clojure.string/index-of (namespace k) ".update"))
(assoc acc k v)
acc))
{}
md))
(defmethod update-metadata-processor ::cs/file-metadata-update
[conn update-event]
(info "Update: " update-event)
(db/update-data conn
(primary-metadata-table (:profile conn))
id-col
(::md/id update-event)
(dissoc-nonupdates update-event)))
(defn sharing-changed-handler
[client]
(fn [event]
(update-sharing client event)))
(defn file-deleted-handler
[client]
(fn [event]
(info "Deleting file: " event)
(db/delete-data client
(primary-metadata-table (:profile client))
id-col
(::md/id event)
(:kixi.event/created-at event))))
(defn bundle-deleted-handler
[client]
(fn [event]
(info "Deleting bundle: " event)
(db/delete-data client
(primary-metadata-table (:profile client))
id-col
(::md/id event)
(:kixi.event/created-at event))))
(defn files-added-to-bundle-handler
[client]
(fn [event]
(info "Added files to bundle: " event)
(db/update-set client
(primary-metadata-table (:profile client))
id-col
(::md/id event)
:conj
[::md/bundled-ids]
(::md/bundled-ids event))))
(defn files-removed-from-bundle-handler
[client]
(fn [event]
(info "Removed files from bundle: " event)
(db/update-set client
(primary-metadata-table (:profile client))
id-col
(::md/id event)
:disj
[::md/bundled-ids]
(::md/bundled-ids event))))
(def sort-order->dynamo-comp
{"asc" :asc
"desc" :desc})
(defn comp-id-created-maps
[a b]
(cond
(nil? a) -1
(nil? b) 1
:else (let [c-comp (.compareTo ^String (get-in a [::md/provenance ::md/created])
^String (get-in b [::md/provenance ::md/created]))]
(if (zero? c-comp)
(.compareTo ^String (get a ::md/id)
^String (get b ::md/id))
c-comp))))
(def sort-order->knit-comp
{"asc" comp-id-created-maps
"desc" (fn [a b]
(comp-id-created-maps b a))})
(defn keep-if-at-least
"Collection must be ordered"
[cnt coll]
(letfn [(enough? [acc] (= cnt acc))
(process [remaining acc candidate]
(if (enough? acc)
(cons candidate
(let [remain (drop-while #{candidate} remaining)
new-candidate (first remain)]
(when new-candidate
(lazy-seq
(process (rest remain) 1 new-candidate)))))
(when-let [t (first remaining)]
(if (= t candidate)
(recur (rest remaining) (inc acc) candidate)
(recur (rest remaining) 1 t)))))]
(when (first coll)
(process (rest coll) 1 (first coll)))))
(def enforce-and-sematics keep-if-at-least)
(defn knit-ordered-data
[comparitor seqs-l]
(letfn [(head-element [firsts]
(first
(sort-by second comparitor
(map-indexed (fn [dex v] (vector dex v))
firsts))))
(more-data [seqs]
(some identity (map first seqs)))
(process [seqs]
(when (more-data seqs)
(let [firsts (mapv first seqs)
[head-dex head-val] (head-element firsts)]
(cons head-val
(lazy-seq
(process (update seqs head-dex rest)))))))]
(process (vec seqs-l))))
(defn all-ids-ordered
[client group-ids activities sort-cols sort-order]
(->> (for [g group-ids
a activities]
(db/query-index client
(activity-metadata-table (:profile client))
(activity-metadata-created-index)
{activity-table-pk
(activity-table-id g a)}
[sort-cols ::md/id]
(get sort-order->dynamo-comp sort-order)))
(knit-ordered-data (get sort-order->knit-comp sort-order))
(mapv ::md/id)
(enforce-and-sematics (count activities))
vec))
(defn criteria->activities
[criteria]
(->> criteria
::md/activities
(cons ::md/meta-read)
set))
(defn response-event
[r]
nil)
(defmulti default-values ::ms/type)
(defmethod default-values
:default
[_] {})
(defmethod default-values
"bundle"
[_]
{::ms/bundled-ids #{}})
(defrecord DynamoDb
[communications profile endpoint
client get-item]
MetaDataStore
(authorised
[this action id user-groups]
(when-let [item (get-item id {:projection all-sharing-columns})]
(not-empty (clojure.set/intersection (set (get-in item [::md/sharing action]))
(set user-groups)))))
(exists [this id]
(get-item id {:projection [id-col]}))
(retrieve [this id]
(when-let [item (get-item id)]
(merge (default-values item)
item)))
(query [this criteria from-index cnt sort-cols sort-order]
(when-not (= [::md/provenance ::md/created]
sort-cols)
(throw (new Exception "Only created timestamp sort supported")))
(let [group-ids (:kixi.user/groups criteria)
activities (criteria->activities criteria)
all-ids-ordered (all-ids-ordered client group-ids activities sort-cols sort-order)
target-ids (get-subvector all-ids-ordered from-index cnt)
items (if (not-empty target-ids)
(db/get-bulk-ordered client
(primary-metadata-table profile)
id-col
target-ids)
[])]
{:items items
:paging {:total (count all-ids-ordered)
:count (count items)
:index from-index}}))
component/Lifecycle
(start [component]
(if-not client
(let [client (assoc (select-keys component
db/client-kws)
:profile profile)
joplin-conf {:migrators {:migrator "joplin/kixi/datastore/metadatastore/migrators/dynamodb"}
:databases {:dynamodb (merge
{:type :dynamo
:migration-table (str profile "-kixi.datastore-metadatastore.migrations")}
client)}
:environments {:env [{:db :dynamodb :migrator :migrator}]}}]
(info "Starting File Metadata DynamoDb Store - " profile)
(migrate :env joplin-conf)
(c/attach-event-handler! communications
:kixi.datastore/metadatastore
:kixi.datastore.file-metadata/updated
"1.0.0"
(comp response-event (partial update-metadata-processor client) :kixi.comms.event/payload))
(c/attach-validating-event-handler! communications
:kixi.datastore/metadatastore
:kixi.datastore/sharing-changed
"1.0.0"
(sharing-changed-handler client))
(c/attach-validating-event-handler! communications
:kixi.datastore/metadatastore-file-delete
:kixi.datastore/file-deleted
"1.0.0"
(file-deleted-handler client))
(c/attach-validating-event-handler! communications
:kixi.datastore/metadatastore-bundle-delete
:kixi.datastore/bundle-deleted
"1.0.0"
(bundle-deleted-handler client))
(c/attach-validating-event-handler! communications
:kixi.datastore/metadatastore-add-files-to-bundle
:kixi.datastore/files-added-to-bundle
"1.0.0"
(files-added-to-bundle-handler client))
(c/attach-validating-event-handler! communications
:kixi.datastore/metadatastore-remove-files-from-bundle
:kixi.datastore/files-removed-from-bundle
"1.0.0"
(files-removed-from-bundle-handler client))
(assoc component
:client client
:get-item (partial db/get-item client (primary-metadata-table profile) id-col)))
component))
(stop [component]
(if client
(do (info "Destroying File Metadata DynamoDb Store")
(dissoc component :client :get-item))
component)))
| null | https://raw.githubusercontent.com/MastodonC/kixi.datastore/f33bba4b1fdd8c56cc7ac0f559ffe35254c9ca99/src/kixi/datastore/metadatastore/dynamodb.clj | clojure | (ns kixi.datastore.metadatastore.dynamodb
(:require [com.stuartsierra.component :as component]
[kixi.comms :as c]
[kixi.datastore
[communication-specs :as cs]
[dynamodb :as db :refer [migrate]]
[metadatastore :as md :refer [MetaDataStore]]]
[taoensso
[encore :refer [get-subvector]]
[timbre :as timbre :refer [info error warn]]]
[kixi.datastore.metadatastore :as ms]
[taoensso.timbre :as log])
(:import com.amazonaws.services.dynamodbv2.model.ConditionalCheckFailedException))
(def id-col (db/dynamo-col ::md/id))
(def all-sharing-columns
(mapv
#(db/dynamo-col [::md/sharing %])
md/activities))
(defn primary-metadata-table
[profile]
(str profile "-kixi.datastore-metadatastore"))
(defn activity-metadata-table
[profile]
(str profile "-kixi.datastore-metadatastore.activity"))
(defn activity-metadata-created-index
[]
"provenance-created")
(def activity-table-pk :groupid-activity)
(defn activity-table-id
[group-id activity]
(str group-id "_" activity))
(def activity-table-projection
#{::md/id ::md/provenance ::md/schema ::md/file-type ::md/name ::md/description})
(defn sharing-columns->sets
[md]
(reduce
(fn [acc a]
(if (get-in acc [::md/sharing a])
(update-in acc [::md/sharing a] set)
acc))
md
md/activities))
(defmulti update-metadata-processor
(fn [conn update-event]
(::cs/file-metadata-update-type update-event)))
(defn insert-activity-row
[conn group-id activity metadata]
(let [id (activity-table-id group-id activity)
projection (assoc
(select-keys metadata activity-table-projection)
activity-table-pk id)]
(try
(db/put-item conn
(activity-metadata-table (:profile conn))
projection)
(catch ConditionalCheckFailedException e
(warn e "Activity row already exists: " id projection)))))
(defn remove-activity-row
[conn group-id activity md-id]
(let [id (activity-table-id group-id activity)]
(try
(db/delete-item conn
(activity-metadata-table (:profile conn))
{activity-table-pk id
id-col md-id})
(catch Exception e
(error e "Failed to delete activity row: " id)))))
(defmethod update-metadata-processor ::cs/file-metadata-created
[conn update-event]
(let [metadata (::md/file-metadata update-event)]
(info "Create: " metadata)
(try
(db/insert-data conn
(primary-metadata-table (:profile conn))
id-col
(sharing-columns->sets
metadata))
(catch ConditionalCheckFailedException e
(log/warn e "Metadata already exists: " metadata)))
(doseq [activity (keys (::md/sharing metadata))]
(doseq [group-id (get-in metadata [::md/sharing activity])]
(insert-activity-row conn group-id activity metadata)))))
(defmethod update-metadata-processor ::cs/file-metadata-structural-validation-checked
[conn update-event]
(info "Update: " update-event)
(db/merge-data conn
(primary-metadata-table (:profile conn))
id-col
(::md/id update-event)
(select-keys update-event
[::md/structural-validation])))
(defmethod update-metadata-processor ::cs/file-metadata-segmentation-add
[conn update-event]
(info "Update: " update-event)
(comment "This implementation is not idempotent, need a check to prevent repeat segement adds"
(db/append-list conn
(primary-metadata-table (:profile conn))
id-col
(::md/id update-event)
:add
[::md/segmentations]
(:kixi.group/id update-event))))
(defn update-sharing
[conn update-event]
(info "Update Share: " update-event)
(let [update-fn (case (::md/sharing-update update-event)
::md/sharing-conj :conj
::md/sharing-disj :disj)
metadata-id (::md/id update-event)]
(db/update-set conn
(primary-metadata-table (:profile conn))
id-col
metadata-id
update-fn
[::md/sharing (::md/activity update-event)]
(:kixi.group/id update-event))
(case (::md/sharing-update update-event)
::md/sharing-conj (let [metadata (db/get-item-ignore-tombstone
conn
(primary-metadata-table (:profile conn))
id-col
metadata-id)]
(insert-activity-row conn (:kixi.group/id update-event) (::md/activity update-event) metadata))
::md/sharing-disj (remove-activity-row conn (:kixi.group/id update-event) (::md/activity update-event) metadata-id))))
(defmethod update-metadata-processor ::cs/file-metadata-sharing-updated
[conn update-event]
(update-sharing conn update-event))
(defn dissoc-nonupdates
[md]
(reduce
(fn [acc [k v]]
(if (and (namespace k) (clojure.string/index-of (namespace k) ".update"))
(assoc acc k v)
acc))
{}
md))
(defmethod update-metadata-processor ::cs/file-metadata-update
[conn update-event]
(info "Update: " update-event)
(db/update-data conn
(primary-metadata-table (:profile conn))
id-col
(::md/id update-event)
(dissoc-nonupdates update-event)))
(defn sharing-changed-handler
[client]
(fn [event]
(update-sharing client event)))
(defn file-deleted-handler
[client]
(fn [event]
(info "Deleting file: " event)
(db/delete-data client
(primary-metadata-table (:profile client))
id-col
(::md/id event)
(:kixi.event/created-at event))))
(defn bundle-deleted-handler
[client]
(fn [event]
(info "Deleting bundle: " event)
(db/delete-data client
(primary-metadata-table (:profile client))
id-col
(::md/id event)
(:kixi.event/created-at event))))
(defn files-added-to-bundle-handler
[client]
(fn [event]
(info "Added files to bundle: " event)
(db/update-set client
(primary-metadata-table (:profile client))
id-col
(::md/id event)
:conj
[::md/bundled-ids]
(::md/bundled-ids event))))
(defn files-removed-from-bundle-handler
[client]
(fn [event]
(info "Removed files from bundle: " event)
(db/update-set client
(primary-metadata-table (:profile client))
id-col
(::md/id event)
:disj
[::md/bundled-ids]
(::md/bundled-ids event))))
(def sort-order->dynamo-comp
{"asc" :asc
"desc" :desc})
(defn comp-id-created-maps
[a b]
(cond
(nil? a) -1
(nil? b) 1
:else (let [c-comp (.compareTo ^String (get-in a [::md/provenance ::md/created])
^String (get-in b [::md/provenance ::md/created]))]
(if (zero? c-comp)
(.compareTo ^String (get a ::md/id)
^String (get b ::md/id))
c-comp))))
(def sort-order->knit-comp
{"asc" comp-id-created-maps
"desc" (fn [a b]
(comp-id-created-maps b a))})
(defn keep-if-at-least
"Collection must be ordered"
[cnt coll]
(letfn [(enough? [acc] (= cnt acc))
(process [remaining acc candidate]
(if (enough? acc)
(cons candidate
(let [remain (drop-while #{candidate} remaining)
new-candidate (first remain)]
(when new-candidate
(lazy-seq
(process (rest remain) 1 new-candidate)))))
(when-let [t (first remaining)]
(if (= t candidate)
(recur (rest remaining) (inc acc) candidate)
(recur (rest remaining) 1 t)))))]
(when (first coll)
(process (rest coll) 1 (first coll)))))
(def enforce-and-sematics keep-if-at-least)
(defn knit-ordered-data
[comparitor seqs-l]
(letfn [(head-element [firsts]
(first
(sort-by second comparitor
(map-indexed (fn [dex v] (vector dex v))
firsts))))
(more-data [seqs]
(some identity (map first seqs)))
(process [seqs]
(when (more-data seqs)
(let [firsts (mapv first seqs)
[head-dex head-val] (head-element firsts)]
(cons head-val
(lazy-seq
(process (update seqs head-dex rest)))))))]
(process (vec seqs-l))))
(defn all-ids-ordered
[client group-ids activities sort-cols sort-order]
(->> (for [g group-ids
a activities]
(db/query-index client
(activity-metadata-table (:profile client))
(activity-metadata-created-index)
{activity-table-pk
(activity-table-id g a)}
[sort-cols ::md/id]
(get sort-order->dynamo-comp sort-order)))
(knit-ordered-data (get sort-order->knit-comp sort-order))
(mapv ::md/id)
(enforce-and-sematics (count activities))
vec))
(defn criteria->activities
[criteria]
(->> criteria
::md/activities
(cons ::md/meta-read)
set))
(defn response-event
[r]
nil)
(defmulti default-values ::ms/type)
(defmethod default-values
:default
[_] {})
(defmethod default-values
"bundle"
[_]
{::ms/bundled-ids #{}})
(defrecord DynamoDb
[communications profile endpoint
client get-item]
MetaDataStore
(authorised
[this action id user-groups]
(when-let [item (get-item id {:projection all-sharing-columns})]
(not-empty (clojure.set/intersection (set (get-in item [::md/sharing action]))
(set user-groups)))))
(exists [this id]
(get-item id {:projection [id-col]}))
(retrieve [this id]
(when-let [item (get-item id)]
(merge (default-values item)
item)))
(query [this criteria from-index cnt sort-cols sort-order]
(when-not (= [::md/provenance ::md/created]
sort-cols)
(throw (new Exception "Only created timestamp sort supported")))
(let [group-ids (:kixi.user/groups criteria)
activities (criteria->activities criteria)
all-ids-ordered (all-ids-ordered client group-ids activities sort-cols sort-order)
target-ids (get-subvector all-ids-ordered from-index cnt)
items (if (not-empty target-ids)
(db/get-bulk-ordered client
(primary-metadata-table profile)
id-col
target-ids)
[])]
{:items items
:paging {:total (count all-ids-ordered)
:count (count items)
:index from-index}}))
component/Lifecycle
(start [component]
(if-not client
(let [client (assoc (select-keys component
db/client-kws)
:profile profile)
joplin-conf {:migrators {:migrator "joplin/kixi/datastore/metadatastore/migrators/dynamodb"}
:databases {:dynamodb (merge
{:type :dynamo
:migration-table (str profile "-kixi.datastore-metadatastore.migrations")}
client)}
:environments {:env [{:db :dynamodb :migrator :migrator}]}}]
(info "Starting File Metadata DynamoDb Store - " profile)
(migrate :env joplin-conf)
(c/attach-event-handler! communications
:kixi.datastore/metadatastore
:kixi.datastore.file-metadata/updated
"1.0.0"
(comp response-event (partial update-metadata-processor client) :kixi.comms.event/payload))
(c/attach-validating-event-handler! communications
:kixi.datastore/metadatastore
:kixi.datastore/sharing-changed
"1.0.0"
(sharing-changed-handler client))
(c/attach-validating-event-handler! communications
:kixi.datastore/metadatastore-file-delete
:kixi.datastore/file-deleted
"1.0.0"
(file-deleted-handler client))
(c/attach-validating-event-handler! communications
:kixi.datastore/metadatastore-bundle-delete
:kixi.datastore/bundle-deleted
"1.0.0"
(bundle-deleted-handler client))
(c/attach-validating-event-handler! communications
:kixi.datastore/metadatastore-add-files-to-bundle
:kixi.datastore/files-added-to-bundle
"1.0.0"
(files-added-to-bundle-handler client))
(c/attach-validating-event-handler! communications
:kixi.datastore/metadatastore-remove-files-from-bundle
:kixi.datastore/files-removed-from-bundle
"1.0.0"
(files-removed-from-bundle-handler client))
(assoc component
:client client
:get-item (partial db/get-item client (primary-metadata-table profile) id-col)))
component))
(stop [component]
(if client
(do (info "Destroying File Metadata DynamoDb Store")
(dissoc component :client :get-item))
component)))
| |
0397565fce929cbefe713844e990b128b5d6af0ab9ca0ae9d1be4d720e0735be | modular-macros/ocaml-macros | w51.ml |
let rec fact = function
| 1 -> 1
| n -> n * (fact [@tailcall]) (n-1)
;;
| null | https://raw.githubusercontent.com/modular-macros/ocaml-macros/05372c7248b5a7b1aa507b3c581f710380f17fcd/testsuite/tests/warnings/w51.ml | ocaml |
let rec fact = function
| 1 -> 1
| n -> n * (fact [@tailcall]) (n-1)
;;
| |
5c4713820d807e6c0996337a7a93e55f9b06a89c023b6d012750e2e03e009514 | graninas/Pragmatic-Type-Level-Design | GameOfLife.hs | module TCA2.GameOfLife where
import CPrelude
import qualified Data.Map as Map
import qualified Data.Vector as V
import TCA2.Types
import TCA2.Automaton
data GoLRule
instance Dim2Automaton GoLRule TwoStateCell where
emptyCell _ = Dead
step = golStep
-- TODO: the actual logic
golStep :: Dim2Board GoLRule TwoStateCell -> Dim2Board GoLRule TwoStateCell
golStep Dim2Board {cells, xSize, ySize} = newBoard
where
newCells = cells
newBoard = Dim2Board newCells xSize ySize
| null | https://raw.githubusercontent.com/graninas/Pragmatic-Type-Level-Design/54b346bd0949f5d89407fd02e9477e07d9f6cfdc/demo-apps/type-class-automaton2/src/TCA2/GameOfLife.hs | haskell | TODO: the actual logic | module TCA2.GameOfLife where
import CPrelude
import qualified Data.Map as Map
import qualified Data.Vector as V
import TCA2.Types
import TCA2.Automaton
data GoLRule
instance Dim2Automaton GoLRule TwoStateCell where
emptyCell _ = Dead
step = golStep
golStep :: Dim2Board GoLRule TwoStateCell -> Dim2Board GoLRule TwoStateCell
golStep Dim2Board {cells, xSize, ySize} = newBoard
where
newCells = cells
newBoard = Dim2Board newCells xSize ySize
|
ad49cb878e95540ea54e3ab62b2741e2ef485e6c67f38c8661c6c9ee4bf21242 | Jyothsnasrinivas/eta-android-2048 | GameModel.hs | module GameModel ( emptyBoard
, initialGameState
, rotateBoard
, setTile
, tileToInt
, intToTile
, tilesWithCoordinates
, readTile
) where
import System.Random (StdGen)
import Data.List (transpose)
import Types
| Given a Board we return a tile which can be found on a given row and
-- column
readTile :: (Row, Column) -> Board -> Tile
readTile (row, column) (Board b) = (b !! row) !! column
-- | Set tile on a given board to a given row and column
setTile :: (Row, Column) -> Board -> Tile -> Board
setTile (row, column) (Board b) tile =
let r = b !! row
nr = take column r ++ [tile] ++ drop (column + 1) r
in Board $ take row b ++ [nr] ++ drop (row + 1) b
-- | Convert a tile to the int it represents. Empty tile is treated like 0
tileToInt :: Tile -> Int
tileToInt tile = case tile of
Number v -> v
Empty -> 0
-- | Convert an int into a tile representing it. 0 is treated like Empty
-- tile
intToTile :: Int -> Tile
intToTile n = case n of
0 -> Empty
_ -> Number n
-- | Convert a board into a list of all tiles with their respective
-- coordinates
tilesWithCoordinates :: Board -> [(Tile, Row, Column)]
tilesWithCoordinates (Board b) = concat
$ zipWith (\rowIndex row -> fmap (\(tile, columnIndex) -> (tile, rowIndex, columnIndex)) row) [0..]
$ fmap (\row -> zip row [0..])
b
| Rotate given board clockwise by 90 degrees
rotateBoard :: Board -> Board
rotateBoard (Board b) = Board $ reverse <$> transpose b
-- | A board of empty tiles
emptyBoard :: Board
emptyBoard = Board $ replicate 4 $ replicate 4 Empty
| Default starting game state without 2 initial tiles
initialGameState :: StdGen -> GameState
initialGameState g = GameState { board = emptyBoard
, score = 0
, status = InProgress
, gen = g
}
| null | https://raw.githubusercontent.com/Jyothsnasrinivas/eta-android-2048/82dfefeeb36129d480609bd2997822878f99f589/app/src/main/eta/GameModel.hs | haskell | column
| Set tile on a given board to a given row and column
| Convert a tile to the int it represents. Empty tile is treated like 0
| Convert an int into a tile representing it. 0 is treated like Empty
tile
| Convert a board into a list of all tiles with their respective
coordinates
| A board of empty tiles | module GameModel ( emptyBoard
, initialGameState
, rotateBoard
, setTile
, tileToInt
, intToTile
, tilesWithCoordinates
, readTile
) where
import System.Random (StdGen)
import Data.List (transpose)
import Types
| Given a Board we return a tile which can be found on a given row and
readTile :: (Row, Column) -> Board -> Tile
readTile (row, column) (Board b) = (b !! row) !! column
setTile :: (Row, Column) -> Board -> Tile -> Board
setTile (row, column) (Board b) tile =
let r = b !! row
nr = take column r ++ [tile] ++ drop (column + 1) r
in Board $ take row b ++ [nr] ++ drop (row + 1) b
tileToInt :: Tile -> Int
tileToInt tile = case tile of
Number v -> v
Empty -> 0
intToTile :: Int -> Tile
intToTile n = case n of
0 -> Empty
_ -> Number n
tilesWithCoordinates :: Board -> [(Tile, Row, Column)]
tilesWithCoordinates (Board b) = concat
$ zipWith (\rowIndex row -> fmap (\(tile, columnIndex) -> (tile, rowIndex, columnIndex)) row) [0..]
$ fmap (\row -> zip row [0..])
b
| Rotate given board clockwise by 90 degrees
rotateBoard :: Board -> Board
rotateBoard (Board b) = Board $ reverse <$> transpose b
emptyBoard :: Board
emptyBoard = Board $ replicate 4 $ replicate 4 Empty
| Default starting game state without 2 initial tiles
initialGameState :: StdGen -> GameState
initialGameState g = GameState { board = emptyBoard
, score = 0
, status = InProgress
, gen = g
}
|
0bd29696512be9b0d369146cfbf586a37585ae371dc313b2bde6636dc5ce5362 | mentat-collective/functional-numerics | golden_test.cljc | ;;
Copyright © 2017 .
This work is based on the Scmutils system of MIT / GNU Scheme :
Copyright © 2002 Massachusetts Institute of Technology
;;
;; This is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;; your option) any later version.
;;
;; This software is distributed in the hope that it will be useful, but
;; WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;; General Public License for more details.
;;
You should have received a copy of the GNU General Public License
;; along with this code; if not, see </>.
;;
(ns sicmutils.numerical.unimin.golden-test
(:require [clojure.test :refer [is deftest testing]]
[clojure.test.check.generators :as gen]
[com.gfredericks.test.chuck.clojure-test :refer [checking]
#?@(:cljs [:include-macros true])]
[same :refer [ish? zeroish? with-comparator]
#?@(:cljs [:include-macros true])]
[sicmutils.generic :as g]
[sicmutils.value :as v]
[sicmutils.numerical.unimin.bracket :as b]
[sicmutils.numerical.unimin.golden :as ug]))
(deftest golden-ratio-tests
(testing "constants work as defined"
(is (ish? ug/inv-phi (/ 1 ug/phi)))
(is (ish? ug/inv-phi2 (/ 1 (* ug/phi ug/phi))))
(is (ish? ug/inv-phi2 (/ ug/inv-phi ug/phi)))))
(deftest golden-cut-tests
(with-comparator (v/within 1e-8)
(checking "golden-cut"
100
[l gen/small-integer, r gen/small-integer]
(let [lo (min l r)
hi (max l r)
cut (ug/golden-cut l r)]
(is (<= lo cut hi) "golden-cut is always between (or
equal to) the two numbers."))
(when (not= l r)
(is (ish? (/ (- (ug/golden-cut l r) l)
(- r l))
(/ (- (ug/golden-cut r l) l)
(- (ug/golden-cut l r) l)))
"`golden-cut` returns the golden-ratioed point closer to the
right side. So the ratio of this point relative to the
original segment has to be equal to the ratio between the
shorter and longer cuts."))
(is (zeroish?
(+ (- (ug/golden-cut l r) l)
(- (ug/golden-cut r l) r)))
"ug/golden-cut returns the point between the two inputs such
that they add up to the original interval.")
(is (ish? (ug/golden-cut l (ug/golden-cut l r))
(ug/golden-cut r l))
"Golden-cutting twice in one direction is equivalent to
cutting once in the reverse direction."))
(checking "extend-pt vs golden-cut"
100
[x gen/small-integer, away-from gen/small-integer]
(let [x' (ug/extend-pt x away-from)]
(is (ish? x (ug/golden-cut x' away-from))
"Extending x away from a point should place x in the
golden-ratioed spot between them.")))))
(defn golden-checker
"Takes a description string, function of an offset, a bracketer and min/max
optimizer and runs a bunch of tests."
[description f bracket-fn optimizer]
(with-comparator (v/within 1e-5)
(checking description
100
[lower gen/large-integer
upper gen/large-integer
offset gen/small-integer]
(let [f (f offset)
upper (if (= lower upper) (inc lower) upper)
{:keys [lo hi]} (bracket-fn f {:xa lower :xb upper})
{:keys [result value converged? iterations fncalls] :as m}
(optimizer f lo hi
{:fn-tolerance 1e-10
:callback (fn [[xa] [xl] [xr] [xb] _]
(is (< xa xl xr xb)
"the l and r points
stay fully within
the bounds."))})]
(is (ish? result offset) "The result converges to the supplied offset.")
(is (= fncalls (+ 2 iterations))
"The bound search takes care of 2 fncalls, so we only need 2
additional (for the first interior points) in addition to 1
per iteration.")))))
(deftest golden-section-tests
(golden-checker "golden-min finds a quadratic min with help from bracket-min."
(fn [offset] (fn [x] (g/square (- x offset))))
b/bracket-min
ug/golden-section-min)
(golden-checker "golden-max finds a quadratic max with help from bracket-max."
(fn [offset] (fn [x] (- (g/square (- x offset)))))
b/bracket-max
ug/golden-section-max)
(with-comparator (v/within 1e-5)
(testing "with-helper"
(let [f (fn [x] (* x x))
{:keys [lo hi]} (b/bracket-min f {:xa -100 :xb -99})]
(is (ish?
{:result 0
:value 0
:converged? true
:iterations 35
:fncalls 37}
(ug/golden-section-min f lo hi {:fn-tolerance 1e-10}))
"Converges on 0, AND reuses the two function evaluations from the
bracketing process.")))
(testing "minimize"
(is (ish? {:result 2
:value 0
:converged? true
:iterations 26
:fncalls 30}
(-> (fn [x] (g/square (- x 2)))
(ug/golden-section-min 1 5 {:fn-tolerance 1e-10}))))
(is (ish? {:result 1.5
:value -0.8
:converged? true
:iterations 29
:fncalls 33}
(-> (fn [x] (- (g/square (- x 1.5)) 0.8))
(ug/golden-section-min -15 5 {:fn-tolerance 1e-10})))))))
| null | https://raw.githubusercontent.com/mentat-collective/functional-numerics/44856b0e3cd1f0dd9f8ebb2f67f4e85a68aa8380/test/numerical/unimin/golden_test.cljc | clojure |
This is free software; you can redistribute it and/or modify
either version 3 of the License , or ( at
your option) any later version.
This software is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
along with this code; if not, see </>.
| Copyright © 2017 .
This work is based on the Scmutils system of MIT / GNU Scheme :
Copyright © 2002 Massachusetts Institute of Technology
it under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
(ns sicmutils.numerical.unimin.golden-test
(:require [clojure.test :refer [is deftest testing]]
[clojure.test.check.generators :as gen]
[com.gfredericks.test.chuck.clojure-test :refer [checking]
#?@(:cljs [:include-macros true])]
[same :refer [ish? zeroish? with-comparator]
#?@(:cljs [:include-macros true])]
[sicmutils.generic :as g]
[sicmutils.value :as v]
[sicmutils.numerical.unimin.bracket :as b]
[sicmutils.numerical.unimin.golden :as ug]))
(deftest golden-ratio-tests
(testing "constants work as defined"
(is (ish? ug/inv-phi (/ 1 ug/phi)))
(is (ish? ug/inv-phi2 (/ 1 (* ug/phi ug/phi))))
(is (ish? ug/inv-phi2 (/ ug/inv-phi ug/phi)))))
(deftest golden-cut-tests
(with-comparator (v/within 1e-8)
(checking "golden-cut"
100
[l gen/small-integer, r gen/small-integer]
(let [lo (min l r)
hi (max l r)
cut (ug/golden-cut l r)]
(is (<= lo cut hi) "golden-cut is always between (or
equal to) the two numbers."))
(when (not= l r)
(is (ish? (/ (- (ug/golden-cut l r) l)
(- r l))
(/ (- (ug/golden-cut r l) l)
(- (ug/golden-cut l r) l)))
"`golden-cut` returns the golden-ratioed point closer to the
right side. So the ratio of this point relative to the
original segment has to be equal to the ratio between the
shorter and longer cuts."))
(is (zeroish?
(+ (- (ug/golden-cut l r) l)
(- (ug/golden-cut r l) r)))
"ug/golden-cut returns the point between the two inputs such
that they add up to the original interval.")
(is (ish? (ug/golden-cut l (ug/golden-cut l r))
(ug/golden-cut r l))
"Golden-cutting twice in one direction is equivalent to
cutting once in the reverse direction."))
(checking "extend-pt vs golden-cut"
100
[x gen/small-integer, away-from gen/small-integer]
(let [x' (ug/extend-pt x away-from)]
(is (ish? x (ug/golden-cut x' away-from))
"Extending x away from a point should place x in the
golden-ratioed spot between them.")))))
(defn golden-checker
"Takes a description string, function of an offset, a bracketer and min/max
optimizer and runs a bunch of tests."
[description f bracket-fn optimizer]
(with-comparator (v/within 1e-5)
(checking description
100
[lower gen/large-integer
upper gen/large-integer
offset gen/small-integer]
(let [f (f offset)
upper (if (= lower upper) (inc lower) upper)
{:keys [lo hi]} (bracket-fn f {:xa lower :xb upper})
{:keys [result value converged? iterations fncalls] :as m}
(optimizer f lo hi
{:fn-tolerance 1e-10
:callback (fn [[xa] [xl] [xr] [xb] _]
(is (< xa xl xr xb)
"the l and r points
stay fully within
the bounds."))})]
(is (ish? result offset) "The result converges to the supplied offset.")
(is (= fncalls (+ 2 iterations))
"The bound search takes care of 2 fncalls, so we only need 2
additional (for the first interior points) in addition to 1
per iteration.")))))
(deftest golden-section-tests
(golden-checker "golden-min finds a quadratic min with help from bracket-min."
(fn [offset] (fn [x] (g/square (- x offset))))
b/bracket-min
ug/golden-section-min)
(golden-checker "golden-max finds a quadratic max with help from bracket-max."
(fn [offset] (fn [x] (- (g/square (- x offset)))))
b/bracket-max
ug/golden-section-max)
(with-comparator (v/within 1e-5)
(testing "with-helper"
(let [f (fn [x] (* x x))
{:keys [lo hi]} (b/bracket-min f {:xa -100 :xb -99})]
(is (ish?
{:result 0
:value 0
:converged? true
:iterations 35
:fncalls 37}
(ug/golden-section-min f lo hi {:fn-tolerance 1e-10}))
"Converges on 0, AND reuses the two function evaluations from the
bracketing process.")))
(testing "minimize"
(is (ish? {:result 2
:value 0
:converged? true
:iterations 26
:fncalls 30}
(-> (fn [x] (g/square (- x 2)))
(ug/golden-section-min 1 5 {:fn-tolerance 1e-10}))))
(is (ish? {:result 1.5
:value -0.8
:converged? true
:iterations 29
:fncalls 33}
(-> (fn [x] (- (g/square (- x 1.5)) 0.8))
(ug/golden-section-min -15 5 {:fn-tolerance 1e-10})))))))
|
659114e35472edd1bd506974230391dfa15c7f7c203892199e4ee30c997b8778 | BranchTaken/Hemlock | test_bit_not.ml | open! Basis.Rudiments
open! Basis
open U128
let test () =
let rec test = function
| [] -> ()
| x :: xs' -> begin
File.Fmt.stdout
|> Fmt.fmt "bit_not "
|> fmt ~alt:true ~zpad:true ~width:32L ~radix:Radix.Hex ~pretty:true x
|> Fmt.fmt " -> "
|> fmt ~alt:true ~zpad:true ~width:32L ~radix:Radix.Hex ~pretty:true (bit_not x)
|> Fmt.fmt "\n"
|> ignore;
test xs'
end
in
let xs = [
of_string "0";
of_string "0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff"
] in
test xs
let _ = test ()
| null | https://raw.githubusercontent.com/BranchTaken/Hemlock/a07e362d66319108c1478a4cbebab765c1808b1a/bootstrap/test/basis/u128/test_bit_not.ml | ocaml | open! Basis.Rudiments
open! Basis
open U128
let test () =
let rec test = function
| [] -> ()
| x :: xs' -> begin
File.Fmt.stdout
|> Fmt.fmt "bit_not "
|> fmt ~alt:true ~zpad:true ~width:32L ~radix:Radix.Hex ~pretty:true x
|> Fmt.fmt " -> "
|> fmt ~alt:true ~zpad:true ~width:32L ~radix:Radix.Hex ~pretty:true (bit_not x)
|> Fmt.fmt "\n"
|> ignore;
test xs'
end
in
let xs = [
of_string "0";
of_string "0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff"
] in
test xs
let _ = test ()
| |
fae33b18ae75caaf2d5a77bc8ba4d1f98fc52e74f562a90d621c17a3db29a3f3 | thiagozg/GitHubJobs-Clojure-Service | job.clj | (ns github-jobs.logic.job
(:require [github-jobs.model.job :as model-job]
[schema.core :as s]
[github-jobs.schemata.job :as schemata]
[schema.coerce :as coerce])
(:import (java.util UUID)))
(s/defn datom-job->wire :- schemata/JobReference
[{:job/keys [github-id title url category]}]
{:id (.toString github-id)
:title title
:url url
:category category})
(s/defn wire->new-dto :- model-job/NewDto
[{:keys [id title url category]} :- schemata/JobReference]
{:job/id (UUID/randomUUID)
:job/github-id (coerce/string->uuid id)
:job/title title
:job/url url
:job/category category})
(s/defn wire->update-dto :- model-job/UpdateDto
[{:keys [title url category]} :- schemata/JobUpdate]
(cond-> {}
title (assoc :job/title title)
url (assoc :job/url url)
category (assoc :job/category category)))
| null | https://raw.githubusercontent.com/thiagozg/GitHubJobs-Clojure-Service/9b17da56ebe773f6aed96337ac516307d91a1403/src/github_jobs/logic/job.clj | clojure | (ns github-jobs.logic.job
(:require [github-jobs.model.job :as model-job]
[schema.core :as s]
[github-jobs.schemata.job :as schemata]
[schema.coerce :as coerce])
(:import (java.util UUID)))
(s/defn datom-job->wire :- schemata/JobReference
[{:job/keys [github-id title url category]}]
{:id (.toString github-id)
:title title
:url url
:category category})
(s/defn wire->new-dto :- model-job/NewDto
[{:keys [id title url category]} :- schemata/JobReference]
{:job/id (UUID/randomUUID)
:job/github-id (coerce/string->uuid id)
:job/title title
:job/url url
:job/category category})
(s/defn wire->update-dto :- model-job/UpdateDto
[{:keys [title url category]} :- schemata/JobUpdate]
(cond-> {}
title (assoc :job/title title)
url (assoc :job/url url)
category (assoc :job/category category)))
| |
b292c139d0807aacc0a923e209b81775fb96c11136730bccec5be26c79e09e15 | data61/Mirza | Client.hs | # LANGUAGE FlexibleContexts #
# LANGUAGE TypeApplications #
module Mirza.SupplyChain.Tests.Client where
import Control.Exception (bracket)
import Data.Either (fromRight, isRight)
import Data.UUID (nil)
import Test.Tasty
import Test.Tasty.Hspec
import Test.Tasty.HUnit
import qualified Mirza.OrgRegistry.Types as ORT
import Mirza.SupplyChain.Types as ST
import qualified Mirza.OrgRegistry.Client.Servant as ORClient
import Mirza.SupplyChain.Client.Servant
import Mirza.Common.Utils (mockURI, readJWK)
import Mirza.Common.Tests.InitClient (TestData (..),
TokenTestSuite (..),
endApps, runApps)
import Mirza.SupplyChain.Database.Schema as Schema
import Mirza.OrgRegistry.Client.Servant (addPublicKey)
import Mirza.OrgRegistry.Tests.Utils (goodRsaPrivateKey,
goodRsaPublicKey)
import Mirza.Common.Tests.ServantUtils
import Mirza.Common.Tests.Utils
import Mirza.SupplyChain.Tests.Dummies
import Data.GS1.EventId as EvId
import Control.Monad.Except
import Control.Monad.Identity
import Crypto.JOSE (Alg (RS256), newJWSHeader,
signJWS)
import qualified Crypto.JOSE as JOSE
import Crypto.JOSE.Types (Base64Octets (..))
import Data.GS1.EPC (GS1CompanyPrefix (..))
-- === SCS Client tests
clientSpec :: IO TestTree
clientSpec = do
let eventInsertionTests = testCaseSteps "Can add single events" $ \step ->
bracket runApps endApps $ \testData -> do
let scsUrl = scsBaseUrl testData
http = runClient scsUrl
step "Can insert Object events"
TODO : Events need their EventId returned to user
http (insertGS1Event dummyObjEvent)
`shouldSatisfyIO` isRight
step "Can insert Aggregation events"
http (insertGS1Event dummyAggEvent)
`shouldSatisfyIO` isRight
step "Can insert Transaction events"
http (insertGS1Event dummyTransactEvent)
`shouldSatisfyIO` isRight
step "Can insert Transformation events"
http (insertGS1Event dummyTransfEvent)
`shouldSatisfyIO` isRight
step "Can insert Association event"
http (insertGS1Event dummyAssocEvent) `shouldSatisfyIO` isRight
let _eventSignTests = testCaseSteps "eventSign" $ \step ->
bracket runApps endApps $ \testData -> do
let scsUrl = scsBaseUrl testData
orUrl = orBaseUrl testData
httpSCS = runClient scsUrl
httpOR = runClient orUrl
token = testToken $ orTestTokenData testData
step "Adding a user to OR"
let prefix = GS1CompanyPrefix "1000001"
-- TODO: Create a user for associating with tests.
--let userOR = ORT.NewUser "EventSign Test Same User OAuthSub"
-- TODO Note: The org will now be associated with the token
-- and not created user, expect this may have to be fixed when we
-- devise a means of authing test users.
let orgName = "Org Name"
org = ORT.PartialNewOrg orgName (mockURI orgName)
httpOR (ORClient.addOrg token prefix org)
`shouldSatisfyIO` isRight
-- TODO: Create a user for associating with tests.
--httpOR (ORClient.addUser token userOR) `shouldSatisfyIO` isRight
step "Tying the user with a good key"
Just goodPubKey <- goodRsaPublicKey
Just goodPrivKey <- goodRsaPrivateKey
keyIdResponse <- httpOR (addPublicKey token prefix goodPubKey Nothing)
keyIdResponse `shouldSatisfy` isRight
let keyId = fromRight (ORKeyId nil) keyIdResponse
step "Inserting the object event"
objInsertionResponse <- httpSCS (insertGS1Event dummyObjEvent)
objInsertionResponse `shouldSatisfy` isRight
let (EventInfo _ (Base64Octets to_sign_event) _, (Schema.EventId eventId)) = fromRight (error "Should be right") objInsertionResponse
step "Signing the key"
Right mySig <- runExceptT @JOSE.Error (
signJWS to_sign_event (Identity (newJWSHeader ((), RS256),goodPrivKey))
)
let mySignedEvent = SignedEvent (EvId.EventId eventId) keyId mySig
httpSCS (eventSign mySignedEvent) `shouldSatisfyIO` isRight
let _transactionEventTest = testCaseSteps
"Signing and counter-signing a transaction event" $ \step ->
bracket runApps endApps $ \testData -> do
let scsUrl = scsBaseUrl testData
orUrl = orBaseUrl testData
httpSCS = runClient scsUrl
httpOR = runClient orUrl
token = testToken $ orTestTokenData testData
-- ===============================================
-- Giving user
-- ===============================================
-- TODO Note: The org will now be associated with the token
-- and not created user, expect this may have to be fixed when we
-- devise a means of authing test users.
step "Adding org for the Giver"
let prefixGiver = GS1CompanyPrefix "1000001"
orgGiverName = "Giver Org"
orgGiver = ORT.PartialNewOrg orgGiverName (mockURI orgGiverName)
httpOR (ORClient.addOrg token prefixGiver orgGiver)
`shouldSatisfyIO` isRight
-- TODO: Create a user for associating with tests.
--step "Adding the giver user to OR"
--let userORGiver = ORT.NewUser "EventSign Test Giver OAuthSub"
--httpOR (ORClient.addUser token userORGiver) `shouldSatisfyIO` isRight
step "Tying the giver user with a good key"
Just goodPubKeyGiver <- readJWK "./test/Mirza/Common/TestData/testKeys/goodJWKs/4096bit_rsa_pub.json"
Just goodPrivKeyGiver <- readJWK "./test/Mirza/Common/TestData/testKeys/goodJWKs/4096bit_rsa.json"
keyIdResponseGiver <- httpOR (addPublicKey token prefixGiver goodPubKeyGiver Nothing)
keyIdResponseGiver `shouldSatisfy` isRight
let keyIdGiver = fromRight (ORKeyId nil) keyIdResponseGiver
-- TODO Note: The org will now be associated with the token
-- and not created user, expect this will have to be fixed when we
-- devise a means of authing test users.
step "Adding org for receiver"
let prefixReceiver = GS1CompanyPrefix "1000002"
orgReceiverName = "Receiving Org"
orgReceiver = ORT.PartialNewOrg orgReceiverName (mockURI orgReceiverName)
httpOR (ORClient.addOrg token prefixReceiver orgReceiver)
`shouldSatisfyIO` isRight
-- TODO: Create a user for associating with tests.
--step "Adding the receiving user to OR"
--let userORReceiver = ORT.NewUser "EventSign Test Reciever OAuthSub"
--httpOR (ORClient.addUser token userORReceiver) `shouldSatisfyIO` isRight
step " Signing the event with the second user "
step "Tying the receiver user with a good key"
Just goodPubKeyReceiver <- readJWK "./test/Mirza/Common/TestData/testKeys/goodJWKs/16384bit_rsa_pub.json"
Just goodPrivKeyReceiver <- readJWK "./test/Mirza/Common/TestData/testKeys/goodJWKs/16384bit_rsa.json"
keyIdResponseReceiver <- httpOR (addPublicKey token prefixReceiver goodPubKeyReceiver Nothing)
keyIdResponseReceiver `shouldSatisfy` isRight
let keyIdReceiver = fromRight (ORKeyId nil) keyIdResponseReceiver
step "Inserting the transaction event with the giver user"
transactInsertionResponse <- httpSCS (insertGS1Event dummyTransactEvent)
transactInsertionResponse `shouldSatisfy` isRight
let (_transactEvInfo@(EventInfo insertedTransactEvent (Base64Octets to_sign_transact_event) _), (Schema.EventId transactEvId)) =
fromRight (error "Should be right") transactInsertionResponse
transactEventId = EvId.EventId transactEvId
step "Retrieving the event info"
eventInfoResult <- httpSCS (eventInfo transactEventId)
eventInfoResult `shouldSatisfy` isRight
let (Right eInfo) = eventInfoResult
step "Checking that we got the correct event back"
let retrievedTransactEvent = (eventInfoEvent eInfo)
retrievedTransactEvent `shouldBe` insertedTransactEvent
step "Checking event blockchain status"
let eventStatus = (eventInfoBlockChainStatus eInfo)
eventStatus `shouldBe` NeedMoreSignatures
step "Signing the transaction event with Giver"
Right giverSigTransact <- runExceptT @JOSE.Error $
signJWS to_sign_transact_event (Identity (newJWSHeader ((), RS256), goodPrivKeyGiver))
let myTransactSignedEvent = SignedEvent transactEventId keyIdGiver giverSigTransact
httpSCS (eventSign myTransactSignedEvent) `shouldSatisfyIO` isRight
step "Signing the transaction event with the receiver user"
Right receiverSig <- runExceptT @JOSE.Error $
signJWS to_sign_transact_event (Identity (newJWSHeader ((), RS256),goodPrivKeyReceiver))
let receiverSignedEvent = SignedEvent transactEventId keyIdReceiver receiverSig
httpSCS (eventSign receiverSignedEvent) `shouldSatisfyIO` isRight
step "Retrieving the event info again"
eventInfoResult2 <- httpSCS (eventInfo transactEventId)
eventInfoResult2 `shouldSatisfy` isRight
step "Checking that the status of the event has changed to Ready"
let (Right eInfo2) = eventInfoResult2
let eventStatus2 = (eventInfoBlockChainStatus eInfo2)
eventStatus2 `shouldBe` ReadyAndWaiting
let healthTests = testCaseSteps "Provides health status" $ \step ->
bracket runApps endApps $ \testData-> do
let baseurl = scsBaseUrl testData
http = runClient baseurl
step "Status results in 200"
healthResult <- http health
healthResult `shouldSatisfy` isRight
healthResult `shouldBe` (Right HealthResponse)
pure $ testGroup "Supply Chain Service Client Tests"
[ eventInsertionTests
-- TODO: Reinclude the following test cases which fail because we have not sorted out auth for test cases yet.
--, eventSignTests
, transactionEventTest
, healthTests
]
| null | https://raw.githubusercontent.com/data61/Mirza/24e5ccddfc307cceebcc5ce26d35e91020b8ee10/projects/or_scs/test/Mirza/SupplyChain/Tests/Client.hs | haskell | === SCS Client tests
TODO: Create a user for associating with tests.
let userOR = ORT.NewUser "EventSign Test Same User OAuthSub"
TODO Note: The org will now be associated with the token
and not created user, expect this may have to be fixed when we
devise a means of authing test users.
TODO: Create a user for associating with tests.
httpOR (ORClient.addUser token userOR) `shouldSatisfyIO` isRight
===============================================
Giving user
===============================================
TODO Note: The org will now be associated with the token
and not created user, expect this may have to be fixed when we
devise a means of authing test users.
TODO: Create a user for associating with tests.
step "Adding the giver user to OR"
let userORGiver = ORT.NewUser "EventSign Test Giver OAuthSub"
httpOR (ORClient.addUser token userORGiver) `shouldSatisfyIO` isRight
TODO Note: The org will now be associated with the token
and not created user, expect this will have to be fixed when we
devise a means of authing test users.
TODO: Create a user for associating with tests.
step "Adding the receiving user to OR"
let userORReceiver = ORT.NewUser "EventSign Test Reciever OAuthSub"
httpOR (ORClient.addUser token userORReceiver) `shouldSatisfyIO` isRight
TODO: Reinclude the following test cases which fail because we have not sorted out auth for test cases yet.
, eventSignTests | # LANGUAGE FlexibleContexts #
# LANGUAGE TypeApplications #
module Mirza.SupplyChain.Tests.Client where
import Control.Exception (bracket)
import Data.Either (fromRight, isRight)
import Data.UUID (nil)
import Test.Tasty
import Test.Tasty.Hspec
import Test.Tasty.HUnit
import qualified Mirza.OrgRegistry.Types as ORT
import Mirza.SupplyChain.Types as ST
import qualified Mirza.OrgRegistry.Client.Servant as ORClient
import Mirza.SupplyChain.Client.Servant
import Mirza.Common.Utils (mockURI, readJWK)
import Mirza.Common.Tests.InitClient (TestData (..),
TokenTestSuite (..),
endApps, runApps)
import Mirza.SupplyChain.Database.Schema as Schema
import Mirza.OrgRegistry.Client.Servant (addPublicKey)
import Mirza.OrgRegistry.Tests.Utils (goodRsaPrivateKey,
goodRsaPublicKey)
import Mirza.Common.Tests.ServantUtils
import Mirza.Common.Tests.Utils
import Mirza.SupplyChain.Tests.Dummies
import Data.GS1.EventId as EvId
import Control.Monad.Except
import Control.Monad.Identity
import Crypto.JOSE (Alg (RS256), newJWSHeader,
signJWS)
import qualified Crypto.JOSE as JOSE
import Crypto.JOSE.Types (Base64Octets (..))
import Data.GS1.EPC (GS1CompanyPrefix (..))
clientSpec :: IO TestTree
clientSpec = do
let eventInsertionTests = testCaseSteps "Can add single events" $ \step ->
bracket runApps endApps $ \testData -> do
let scsUrl = scsBaseUrl testData
http = runClient scsUrl
step "Can insert Object events"
TODO : Events need their EventId returned to user
http (insertGS1Event dummyObjEvent)
`shouldSatisfyIO` isRight
step "Can insert Aggregation events"
http (insertGS1Event dummyAggEvent)
`shouldSatisfyIO` isRight
step "Can insert Transaction events"
http (insertGS1Event dummyTransactEvent)
`shouldSatisfyIO` isRight
step "Can insert Transformation events"
http (insertGS1Event dummyTransfEvent)
`shouldSatisfyIO` isRight
step "Can insert Association event"
http (insertGS1Event dummyAssocEvent) `shouldSatisfyIO` isRight
let _eventSignTests = testCaseSteps "eventSign" $ \step ->
bracket runApps endApps $ \testData -> do
let scsUrl = scsBaseUrl testData
orUrl = orBaseUrl testData
httpSCS = runClient scsUrl
httpOR = runClient orUrl
token = testToken $ orTestTokenData testData
step "Adding a user to OR"
let prefix = GS1CompanyPrefix "1000001"
let orgName = "Org Name"
org = ORT.PartialNewOrg orgName (mockURI orgName)
httpOR (ORClient.addOrg token prefix org)
`shouldSatisfyIO` isRight
step "Tying the user with a good key"
Just goodPubKey <- goodRsaPublicKey
Just goodPrivKey <- goodRsaPrivateKey
keyIdResponse <- httpOR (addPublicKey token prefix goodPubKey Nothing)
keyIdResponse `shouldSatisfy` isRight
let keyId = fromRight (ORKeyId nil) keyIdResponse
step "Inserting the object event"
objInsertionResponse <- httpSCS (insertGS1Event dummyObjEvent)
objInsertionResponse `shouldSatisfy` isRight
let (EventInfo _ (Base64Octets to_sign_event) _, (Schema.EventId eventId)) = fromRight (error "Should be right") objInsertionResponse
step "Signing the key"
Right mySig <- runExceptT @JOSE.Error (
signJWS to_sign_event (Identity (newJWSHeader ((), RS256),goodPrivKey))
)
let mySignedEvent = SignedEvent (EvId.EventId eventId) keyId mySig
httpSCS (eventSign mySignedEvent) `shouldSatisfyIO` isRight
let _transactionEventTest = testCaseSteps
"Signing and counter-signing a transaction event" $ \step ->
bracket runApps endApps $ \testData -> do
let scsUrl = scsBaseUrl testData
orUrl = orBaseUrl testData
httpSCS = runClient scsUrl
httpOR = runClient orUrl
token = testToken $ orTestTokenData testData
step "Adding org for the Giver"
let prefixGiver = GS1CompanyPrefix "1000001"
orgGiverName = "Giver Org"
orgGiver = ORT.PartialNewOrg orgGiverName (mockURI orgGiverName)
httpOR (ORClient.addOrg token prefixGiver orgGiver)
`shouldSatisfyIO` isRight
step "Tying the giver user with a good key"
Just goodPubKeyGiver <- readJWK "./test/Mirza/Common/TestData/testKeys/goodJWKs/4096bit_rsa_pub.json"
Just goodPrivKeyGiver <- readJWK "./test/Mirza/Common/TestData/testKeys/goodJWKs/4096bit_rsa.json"
keyIdResponseGiver <- httpOR (addPublicKey token prefixGiver goodPubKeyGiver Nothing)
keyIdResponseGiver `shouldSatisfy` isRight
let keyIdGiver = fromRight (ORKeyId nil) keyIdResponseGiver
step "Adding org for receiver"
let prefixReceiver = GS1CompanyPrefix "1000002"
orgReceiverName = "Receiving Org"
orgReceiver = ORT.PartialNewOrg orgReceiverName (mockURI orgReceiverName)
httpOR (ORClient.addOrg token prefixReceiver orgReceiver)
`shouldSatisfyIO` isRight
step " Signing the event with the second user "
step "Tying the receiver user with a good key"
Just goodPubKeyReceiver <- readJWK "./test/Mirza/Common/TestData/testKeys/goodJWKs/16384bit_rsa_pub.json"
Just goodPrivKeyReceiver <- readJWK "./test/Mirza/Common/TestData/testKeys/goodJWKs/16384bit_rsa.json"
keyIdResponseReceiver <- httpOR (addPublicKey token prefixReceiver goodPubKeyReceiver Nothing)
keyIdResponseReceiver `shouldSatisfy` isRight
let keyIdReceiver = fromRight (ORKeyId nil) keyIdResponseReceiver
step "Inserting the transaction event with the giver user"
transactInsertionResponse <- httpSCS (insertGS1Event dummyTransactEvent)
transactInsertionResponse `shouldSatisfy` isRight
let (_transactEvInfo@(EventInfo insertedTransactEvent (Base64Octets to_sign_transact_event) _), (Schema.EventId transactEvId)) =
fromRight (error "Should be right") transactInsertionResponse
transactEventId = EvId.EventId transactEvId
step "Retrieving the event info"
eventInfoResult <- httpSCS (eventInfo transactEventId)
eventInfoResult `shouldSatisfy` isRight
let (Right eInfo) = eventInfoResult
step "Checking that we got the correct event back"
let retrievedTransactEvent = (eventInfoEvent eInfo)
retrievedTransactEvent `shouldBe` insertedTransactEvent
step "Checking event blockchain status"
let eventStatus = (eventInfoBlockChainStatus eInfo)
eventStatus `shouldBe` NeedMoreSignatures
step "Signing the transaction event with Giver"
Right giverSigTransact <- runExceptT @JOSE.Error $
signJWS to_sign_transact_event (Identity (newJWSHeader ((), RS256), goodPrivKeyGiver))
let myTransactSignedEvent = SignedEvent transactEventId keyIdGiver giverSigTransact
httpSCS (eventSign myTransactSignedEvent) `shouldSatisfyIO` isRight
step "Signing the transaction event with the receiver user"
Right receiverSig <- runExceptT @JOSE.Error $
signJWS to_sign_transact_event (Identity (newJWSHeader ((), RS256),goodPrivKeyReceiver))
let receiverSignedEvent = SignedEvent transactEventId keyIdReceiver receiverSig
httpSCS (eventSign receiverSignedEvent) `shouldSatisfyIO` isRight
step "Retrieving the event info again"
eventInfoResult2 <- httpSCS (eventInfo transactEventId)
eventInfoResult2 `shouldSatisfy` isRight
step "Checking that the status of the event has changed to Ready"
let (Right eInfo2) = eventInfoResult2
let eventStatus2 = (eventInfoBlockChainStatus eInfo2)
eventStatus2 `shouldBe` ReadyAndWaiting
let healthTests = testCaseSteps "Provides health status" $ \step ->
bracket runApps endApps $ \testData-> do
let baseurl = scsBaseUrl testData
http = runClient baseurl
step "Status results in 200"
healthResult <- http health
healthResult `shouldSatisfy` isRight
healthResult `shouldBe` (Right HealthResponse)
pure $ testGroup "Supply Chain Service Client Tests"
[ eventInsertionTests
, transactionEventTest
, healthTests
]
|
94b6c69786f6b9c88b092338a6e589e868ba457029d92375cf91aab34853244c | bia-technologies/statsbit | user.clj | Copyright 2020 BIA - Technologies Limited Liability Company
;;
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; -2.0
;;
;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
(ns user
(:require
[com.stuartsierra.component :as component]
[ru.bia-tech.statsbit.init]
[ru.bia-tech.statsbit.system :as system]
[ru.bia-tech.statsbit.migration :as migration]))
;; The dev system instance, built once per REPL session. `defonce` keeps the
;; running system alive across namespace reloads.
(defonce system (system/build :dev))
(defn start
  "Start every component of the dev `system` in place, rebinding the var
  to the started system."
  []
  (alter-var-root (var system) component/start-system))
(defn stop
  "Stop every component of the dev `system` in place, rebinding the var
  to the stopped system."
  []
  (alter-var-root (var system) component/stop-system))
(defn migrate
  "Run pending database migrations for both the :dev and :test environments."
  []
  (migration/migrate :dev)
  (migration/migrate :test))
| null | https://raw.githubusercontent.com/bia-technologies/statsbit/4102ca5e5d39b1c06541b49615c6de83e7f4ef36/backend/dev/user.clj | clojure |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | Copyright 2020 BIA - Technologies Limited Liability Company
distributed under the License is distributed on an " AS IS " BASIS ,
(ns user
(:require
[com.stuartsierra.component :as component]
[ru.bia-tech.statsbit.init]
[ru.bia-tech.statsbit.system :as system]
[ru.bia-tech.statsbit.migration :as migration]))
(defonce system (system/build :dev))
(defn start []
(alter-var-root #'system component/start-system))
(defn stop []
(alter-var-root #'system component/stop-system))
(defn migrate []
(migration/migrate :dev)
(migration/migrate :test))
|
04200334869e3383e2c76b569f3bc013a7b8e9e4ba9c4c14ce98c0376ad09229 | CloudI/CloudI | hackney_request.erl | %%% -*- erlang -*-
%%%
This file is part of hackney released under the Apache 2 license .
%%% See the NOTICE for more information.
%%%
%% @doc module handling the request
-module(hackney_request).
-include("hackney.hrl").
-include("hackney_lib.hrl").
-include_lib("hackney_internal.hrl").
-export([perform/2,
location/1,
send/2, send_chunk/2,
sendfile/3,
stream_body/2, end_stream_body/1,
stream_multipart/2,
encode_form/1,
default_ua/0]).
-export([is_default_port/1]).
-export([make_multipart_stream/2]).
64 MB is the default
%% @doc Send an HTTP request on the client's connection and, unless the
%% caller streams the body itself, start reading the response.
%%
%% The visible flow: normalize method and path, build default headers
%% (User-Agent, optional Basic auth and cookies), pick the framing
%% (normal vs chunked) with req_type/2, add a Host header, derive the
%% final headers/body with handle_body/4 or handle_multipart_body/3..5,
%% then write everything — in a single send when possible (perform_all/6),
%% otherwise headers first with the body streamed afterwards.
%%
%% Returns {ok, Client} when Body =:= stream (the caller will use
%% stream_body/2 and end_stream_body/1), otherwise the result of
%% hackney_response:start_response/1, or {error, Reason}.
perform(Client0, {Method0, Path0, Headers0, Body0}) ->
    Method = hackney_bstr:to_upper(hackney_bstr:to_binary(Method0)),
    %% an empty path is not a valid request target; use "/"
    Path = case Path0 of
        <<"">> -> <<"/">>;
        _ -> Path0
    end,
    #client{options=Options} = Client0,
    %% basic & Cookies authorization handling
    Cookies = proplists:get_value(cookie, Options, []),
    DefaultHeaders = case proplists:get_value(basic_auth, Options) of
        undefined ->
            maybe_add_cookies(Cookies, [{<<"User-Agent">>, default_ua()}]);
        {User, Pwd} ->
            User1 = hackney_bstr:to_binary(User),
            Pwd1 = hackney_bstr:to_binary(Pwd),
            Credentials = base64:encode(<< User1/binary, ":", Pwd1/binary >>),
            maybe_add_cookies(
              Cookies,
              [{<<"User-Agent">>, default_ua()},
               {<<"Authorization">>, <<"Basic ", Credentials/binary>>}]
            )
    end,
    %% detect the request type: normal or chunked
    {Headers1, ReqType0} = req_type(
        hackney_headers_new:merge(Headers0, hackney_headers_new:new(DefaultHeaders)),
        Body0
    ),
    %% add host eventually
    Headers2 = maybe_add_host(Headers1, Client0#client.netloc),
    %% get expect headers
    Expect = expectation(Headers2),
    %% build headers with the body.
    {FinalHeaders, ReqType, Body, Client1} = case Body0 of
        stream ->
            {Headers2, ReqType0, stream, Client0};
        stream_multipart ->
            handle_multipart_body(Headers2, ReqType0, Client0);
        {stream_multipart, Size} ->
            handle_multipart_body(Headers2, ReqType0, Size, Client0);
        {stream_multipart, Size, Boundary} ->
            handle_multipart_body(Headers2, ReqType0,
                                  Size, Boundary, Client0);
        %% an empty POST/PUT body still goes through handle_body so the
        %% framing headers (Content-Length/Type) are produced
        <<>> when Method =:= <<"POST">> orelse Method =:= <<"PUT">> ->
            handle_body(Headers2, ReqType0, Body0, Client0);
        [] when Method =:= <<"POST">> orelse Method =:= <<"PUT">> ->
            handle_body(Headers2, ReqType0, Body0, Client0);
        <<>> ->
            {Headers2, ReqType0, Body0, Client0};
        [] ->
            {Headers2, ReqType0, Body0, Client0};
        _ ->
            handle_body(Headers2, ReqType0, Body0, Client0)
    end,
    %% build final client record
    Client = case ReqType of
        normal ->
            Client1#client{send_fun=fun hackney_request:send/2, req_type=normal};
        chunked ->
            Client1#client{send_fun=fun hackney_request:send_chunk/2, req_type=chunked}
    end,
    %% request to send
    HeadersData = [
        << Method/binary, " ", Path/binary, " HTTP/1.1", "\r\n" >>,
        hackney_headers_new:to_iolist(FinalHeaders)
    ],
    PerformAll = proplists:get_value(perform_all, Options, true),
    ?report_verbose("perform request", [{header_data, HeadersData},
                                        {perform_all, PerformAll},
                                        {expect, Expect}]),
    case can_perform_all(Body, Expect, PerformAll) of
        true ->
            %% headers + fully materialized body in one write
            perform_all(Client, HeadersData, Body, Method, Path, Expect);
        _ ->
            case hackney_request:send(Client, HeadersData) of
                ok when Body =:= stream ->
                    %% the caller streams the body itself
                    {ok, Client#client{response_state=stream,
                                       method=Method,
                                       path=Path,
                                       expect=Expect}};
                ok ->
                    case stream_body(Body, Client#client{expect=Expect}) of
                        {error, _Reason}=E ->
                            E;
                        {stop, Client2} ->
                            %% server answered early (e.g. 100-continue refused)
                            FinalClient = Client2#client{method=Method,
                                                         path=Path},
                            hackney_response:start_response(FinalClient);
                        {ok, Client2} ->
                            case end_stream_body(Client2) of
                                {ok, Client3} ->
                                    FinalClient = Client3#client{method=Method,
                                                                 path=Path},
                                    hackney_response:start_response(FinalClient);
                                Error ->
                                    Error
                            end
                    end;
                Error ->
                    Error
            end
    end.
%% @doc Return the value of the Location header, or, when the response has
%% none, the absolute URL of the request itself.
location(#client{headers=Headers, transport=Transport,
                 netloc=Netloc, path=Path}) ->
    case hackney_headers_new:get_value(<<"location">>, Headers) of
        undefined ->
            %% rebuild the request URL from the client state
            Url = #hackney_url{scheme=hackney_url:transport_scheme(Transport),
                               netloc=Netloc,
                               path=Path},
            hackney_url:unparse_url(Url);
        Location ->
            Location
    end.
%% @doc Stream a piece of the request body to the socket.
%%
%% Handles, in order: the 100-continue handshake on the first write,
%% terminal values (`eof', <<>>), body-producing funs (stateless and
%% stateful), {file, ...} bodies, and finally plain iodata sent through the
%% client's send_fun. Returns {ok, Client}, {stop, Client} when the server
%% answered early, or {error, Reason}.
stream_body(Msg, #client{expect=true}=Client) ->
    %% wait for "100 Continue" before sending anything
    case hackney_response:expect_response(Client) of
        {continue, Client1} -> stream_body(Msg, Client1);
        {stop, Client1} -> {stop, Client1};
        Error -> Error
    end;
stream_body(eof, Client) ->
    {ok, Client};
stream_body(<<>>, Client) ->
    {ok, Client};
stream_body(Producer, Client) when is_function(Producer) ->
    %% stateless producer: call until it returns eof
    case Producer() of
        {ok, Data} ->
            case stream_body(Data, Client) of
                {ok, Client1} -> stream_body(Producer, Client1);
                Error -> Error
            end;
        eof ->
            stream_body(eof, Client);
        Error ->
            Error
    end;
stream_body({Producer, State}, Client) when is_function(Producer) ->
    %% stateful producer: thread its state through each call
    case Producer(State) of
        {ok, Data, NewState} ->
            case stream_body(Data, Client) of
                {ok, Client1} -> stream_body({Producer, NewState}, Client1);
                Error -> Error
            end;
        eof ->
            stream_body(eof, Client);
        Error ->
            Error
    end;
stream_body({file, FileName}, Client) ->
    stream_body({file, FileName, []}, Client);
stream_body({file, FileName, Opts}, Client) ->
    case sendfile(FileName, Opts, Client) of
        {ok, _BytesSent} -> {ok, Client};
        Error -> Error
    end;
stream_body(Data, #client{send_fun=Send}=Client) ->
    case Send(Client, Data) of
        ok -> {ok, Client};
        Error -> Error
    end.
%% @doc Stream one element of a multipart request body.
%%
%% Accepted messages: `eof' (close the multipart body and terminate the
%% request), {mp_mixed, Name, B} / {mp_mixed_eof, B} (open/close a nested
%% multipart/mixed part), {file, Path[, Name | Headers[, Headers]]} (send a
%% whole file part), {data, Name, Bin[, Headers]} (send a complete
%% form-data part), the {part, ...} variants (send only a part header;
%% {part, eof} closes a part) and {part_bin, Bin} (raw part bytes).
%%
%% NOTE: clause order matters — {file, Path, <<Name>>} must be tried
%% before {file, Path, ExtraHeaders}.
stream_multipart(eof, #client{response_state=waiting}=Client) ->
    %% body already terminated, nothing to do
    {ok, Client};
stream_multipart(eof, #client{mp_boundary=Boundary}=Client) ->
    case stream_body(hackney_multipart:mp_eof(Boundary), Client) of
        {ok, Client1} ->
            end_stream_body(Client1);
        Error ->
            Error
    end;
stream_multipart({mp_mixed, Name, MixedBoundary}, #client{mp_boundary=Boundary}=Client) ->
    {MpHeader, _} = hackney_multipart:mp_mixed_header({Name, MixedBoundary}, Boundary),
    stream_body(<< MpHeader/binary, "\r\n" >>, Client);
stream_multipart({mp_mixed_eof, MixedBoundary}, Client) ->
    Eof = hackney_multipart:mp_eof(MixedBoundary),
    stream_body(<< Eof/binary, "\r\n" >>, Client);
stream_multipart({file, Path}, Client) ->
    stream_multipart({file, Path, []}, Client);
stream_multipart({file, Path, <<Name/binary>>}, Client) ->
    stream_multipart({file, Path, Name, []}, Client);
stream_multipart({file, Path, ExtraHeaders}, Client) ->
    stream_multipart({file, Path, <<"file">>, ExtraHeaders}, Client);
stream_multipart({file, Path, _Name, _ExtraHeaders}=File,
                 #client{mp_boundary=Boundary}=Client) ->
    %% part header, then file content, then the part-closing CRLF
    {MpHeader, _} = hackney_multipart:mp_file_header(File, Boundary),
    case stream_body(MpHeader, Client) of
        {ok, Client1} ->
            case stream_body({file, Path}, Client1) of
                {ok, Client2} ->
                    stream_body(<<"\r\n">>, Client2);
                Error ->
                    Error
            end;
        Error ->
            Error
    end;
stream_multipart({data, Name, Bin}, Client) ->
    stream_multipart({data, Name, Bin, []}, Client);
stream_multipart({data, Name, Bin, ExtraHeaders},
                 #client{mp_boundary=Boundary}=Client) ->
    %% FIX: the part's declared length must be the payload size. The
    %% previous code used byte_size(Name) — the size of the *field name* —
    %% which produced a wrong length in the part header.
    Len = byte_size(Bin),
    {MpHeader, _} = hackney_multipart:mp_data_header({Name, Len, ExtraHeaders},
                                                     Boundary),
    Bin1 = << MpHeader/binary, Bin/binary, "\r\n" >>,
    stream_body(Bin1, Client);
stream_multipart({part, eof}, Client) ->
    stream_body(<<"\r\n">>, Client);
stream_multipart({part, Headers}, #client{mp_boundary=Boundary}=Client)
    when is_list(Headers) ->
    MpHeader = hackney_multipart:mp_header(Headers, Boundary),
    stream_body(MpHeader, Client);
stream_multipart({part, Name}, Client) when is_binary(Name) ->
    stream_multipart({part, Name, []}, Client);
stream_multipart({part, Name, ExtraHeaders},
                 #client{mp_boundary=Boundary}=Client)
    when is_list(ExtraHeaders) ->
    %% part without content-length
    CType = mimerl:filename(Name),
    Headers = [{<<"Content-Disposition">>,
                <<"form-data">>, [{<<"name">>, <<"\"", Name/binary, "\"">>}]},
               {<<"Content-Type">>, CType}],
    MpHeader = hackney_multipart:mp_header(Headers, Boundary),
    stream_body(MpHeader, Client);
stream_multipart({part, Name, Len}, Client) when is_integer(Len) ->
    stream_multipart({part, Name, Len, []}, Client);
stream_multipart({part, Name, Len, ExtraHeaders},
                 #client{mp_boundary=Boundary}=Client) ->
    {MpHeader, _} = hackney_multipart:mp_data_header({Name, Len, ExtraHeaders},
                                                     Boundary),
    stream_body(MpHeader, Client);
stream_multipart({part_bin, Bin}, Client) ->
    stream_body(Bin, Client).
%% @doc Write raw data on the client socket through its transport module.
send(#client{transport=Transport, socket=Socket}, Data) ->
    Transport:send(Socket, Data).
%% @doc Write one HTTP chunk: hex-encoded size line, payload, trailing CRLF.
send_chunk(Client, Data) ->
    Size = iolist_size(Data),
    SizeLine = io_lib:format("~.16b\r\n", [Size]),
    send(Client, [SizeLine, Data, <<"\r\n">>]).
%% @doc Stream a file as the request body.
%%
%% For a plain TCP connection sending a normal (non-chunked) request, use
%% the zero-copy file:sendfile/5. Any other transport (e.g. TLS) or a
%% chunked request falls back to a userland read/send loop
%% (sendfile_fallback/3). Options: `offset' (default 0), `bytes'
%% (0 = until end of file), `chunk_size'.
%%
%% FIX: the first clause previously matched the misspelled atom
%% 'hackney_tcp_tansport', which never matched the transport module used
%% elsewhere in this file (see is_default_port/1, which matches
%% 'hackney_tcp'), so the zero-copy path was unreachable and every file
%% upload took the fallback loop.
sendfile(FileName, Opts, #client{transport=hackney_tcp, socket=Skt,
                                 req_type=normal}) ->
    Offset = proplists:get_value(offset, Opts, 0),
    Bytes = proplists:get_value(bytes, Opts, 0),
    SendFileOpts = case proplists:get_value(chunk_size, Opts, ?CHUNK_SIZE) of
                       undefined -> Opts;
                       ChunkSize -> [{chunk_size, ChunkSize}]
                   end,
    file:sendfile(FileName, Skt, Offset, Bytes, SendFileOpts);
sendfile(FileName, Opts, Client) ->
    case file:open(FileName, [read, raw, binary]) of
        {error, Reason} ->
            {error, Reason};
        {ok, Fd} ->
            Res = sendfile_fallback(Fd, Opts, Client),
            ok = file:close(Fd),
            Res
    end.
%% @doc URL-encode a property list as an
%% application/x-www-form-urlencoded request body.
%% Returns {ContentLength, ContentType, EncodedBody}.
encode_form(KVs) ->
    Encoded = hackney_url:qs(KVs),
    ContentType = <<"application/x-www-form-urlencoded; charset=utf-8">>,
    {byte_size(Encoded), ContentType, Encoded}.
%% internal
%% @private Derive {ContentLength, ContentType, Body} from the body term —
%% {form, KVs}, {multipart, Parts}, {file, Name}, a body-producing fun
%% (with or without state), an iolist, or a binary — then rewrite the
%% framing headers accordingly: a known length gives a normal request with
%% Content-Length; an unknown length (fun with no content-length header)
%% forces chunked Transfer-Encoding. A {file, _} body with a chunked req
%% type is downgraded to a normal request since the file size is known.
handle_body(Headers, ReqType0, Body0, Client) ->
    {CLen, CType, Body} = case Body0 of
        {form, KVs} ->
            encode_form(KVs);
        {multipart, Parts} ->
            %% precompute the total multipart length so Content-Length
            %% can be sent while the parts are streamed lazily
            Boundary = hackney_multipart:boundary(),
            MpLen = hackney_multipart:len_mp_stream(Parts, Boundary),
            MpStream = make_multipart_stream(Parts, Boundary),
            CT = << "multipart/form-data; boundary=", Boundary/binary >>,
            {MpLen, CT, MpStream};
        {file, FileName} ->
            S = filelib:file_size(FileName),
            FileName1 = hackney_bstr:to_binary(FileName),
            %% guess the content type from the file name unless given
            CT = hackney_headers_new:get_value(
                <<"content-type">>, Headers, mimerl:filename(FileName1)
            ),
            {S, CT, Body0};
        Func when is_function(Func) ->
            CT = hackney_headers_new:get_value(
                <<"content-type">>, Headers, <<"application/octet-stream">>
            ),
            %% length is only known if the caller provided the header;
            %% otherwise S is undefined and chunked encoding is used below
            S = hackney_headers_new:get_value(<<"content-length">>, Headers),
            {S, CT, Body0};
        {Func, _} when is_function(Func) ->
            CT = hackney_headers_new:get_value(
                <<"content-type">>, Headers, <<"application/octet-stream">>
            ),
            S = hackney_headers_new:get_value(<<"content-length">>, Headers),
            {S, CT, Body0};
        _ when is_list(Body0) -> % iolist case
            Body1 = iolist_to_binary(Body0),
            S = erlang:byte_size(Body1),
            CT = hackney_headers_new:get_value(
                <<"content-type">>, Headers, <<"application/octet-stream">>
            ),
            {S, CT, Body1};
        _ when is_binary(Body0) ->
            S = erlang:byte_size(Body0),
            CT = hackney_headers_new:get_value(
                <<"content-type">>, Headers, <<"application/octet-stream">>
            ),
            {S, CT, Body0}
    end,
    {NewHeaders, ReqType} = case {ReqType0, Body} of
        {chunked, {file, _}} ->
            %% file size is known: prefer Content-Length over chunked
            Headers1 = hackney_headers_new:delete(
                <<"transfer-encoding">>,
                hackney_headers_new:store(
                    <<"Content-Type">>, CType,
                    hackney_headers_new:store(
                        <<"Content-Length">>, CLen, Headers))),
            {Headers1, normal};
        {chunked, _} ->
            Headers1 = hackney_headers_new:delete(
                <<"content-length">>,
                hackney_headers_new:store(
                    <<"Content-Type">>, CType, Headers)),
            {Headers1, chunked};
        {_, _} when CLen =:= undefined ->
            %% no way to announce a length: switch to chunked
            Headers1 = hackney_headers_new:delete(
                <<"content-length">>,
                hackney_headers_new:store(
                    [{<<"Content-Type">>, CType},
                     {<<"Transfer-Encoding">>, <<"chunked">>}],
                    Headers)),
            {Headers1, chunked};
        {_, _} ->
            Headers1 = hackney_headers_new:delete(
                <<"transfer-encoding">>,
                hackney_headers_new:store(
                    [{<<"Content-Type">>, CType},
                     {<<"Content-Length">>, CLen}],
                    Headers)),
            {Headers1, normal}
    end,
    {NewHeaders, ReqType, Body, Client}.
%% @private Prepare the client for a streamed multipart body.
%% Ensures a multipart Content-Type (generating a boundary when the caller
%% did not provide a usable one) and either a chunked Transfer-Encoding
%% (CLen =:= chunked) or an explicit Content-Length. Returns
%% {Headers, normal | chunked, stream, Client} with the boundary stored on
%% the client.
handle_multipart_body(Headers, ReqType, Client) ->
    handle_multipart_body(Headers, ReqType, chunked,
                          hackney_multipart:boundary(), Client).

handle_multipart_body(Headers, ReqType, CLen, Client) ->
    handle_multipart_body(Headers, ReqType, CLen,
                          hackney_multipart:boundary(), Client).

handle_multipart_body(Headers, _ReqType, CLen, Boundary, Client) ->
    DefaultCType = << "multipart/form-data; boundary=", Boundary/binary >>,
    %% keep a caller-supplied content type only if it is really multipart
    CType = case hackney_headers_new:get_value(<<"content-type">>, Headers) of
                undefined ->
                    DefaultCType;
                Value ->
                    case hackney_headers_new:parse_content_type(Value) of
                        {<<"multipart">>, _, _} -> Value;
                        _ -> DefaultCType
                    end
            end,
    {NewHeaders, ReqType1} =
        case CLen of
            chunked ->
                Headers1 = hackney_headers_new:delete(
                             <<"content-length">>,
                             hackney_headers_new:store(
                               [{<<"Content-Type">>, CType},
                                {<<"Transfer-Encoding">>, <<"chunked">>}],
                               Headers)),
                {Headers1, chunked};
            _ ->
                Headers1 = hackney_headers_new:delete(
                             <<"transfer-encoding">>,
                             hackney_headers_new:store(
                               [{<<"Content-Type">>, CType},
                                {<<"Content-Length">>, CLen}],
                               Headers)),
                {Headers1, normal}
        end,
    {NewHeaders, ReqType1, stream,
     Client#client{response_state=stream, mp_boundary=Boundary}}.
%% @private Decide how the request will be framed: `chunked' or `normal'
%% (Content-Length based). For a streamed body without an explicit
%% Content-Length, a chunked Transfer-Encoding header is added.
%% Returns {PossiblyUpdatedHeaders, normal | chunked}.
req_type(Headers, stream) ->
    case transfer_encoding(Headers) of
        <<"chunked">> ->
            {Headers, chunked};
        _ ->
            case hackney_headers_new:get_value(<<"content-length">>, Headers) of
                undefined ->
                    %% no length known for a streamed body: force chunked
                    Headers2 = hackney_headers_new:store(
                                 <<"Transfer-Encoding">>, <<"chunked">>, Headers
                               ),
                    {Headers2, chunked};
                _ ->
                    {Headers, normal}
            end
    end;
req_type(Headers, _Body) ->
    case transfer_encoding(Headers) of
        <<"chunked">> -> {Headers, chunked};
        _ -> {Headers, normal}
    end.

%% @private Lower-cased Transfer-Encoding header value (<<>> when absent).
transfer_encoding(Headers) ->
    hackney_bstr:to_lower(
      hackney_headers_new:get_value(<<"transfer-encoding">>, Headers, <<>>)
    ).
%% @private True when the request carries an "Expect: 100-continue" header
%% (compared case-insensitively).
expectation(Headers) ->
    Value = hackney_headers_new:get_value(<<"expect">>, Headers, <<>>),
    hackney_bstr:to_lower(Value) =:= <<"100-continue">>.
%% @private Terminate the request body. A chunked request gets the final
%% zero-length chunk; in every case the client moves to the `waiting'
%% response state.
end_stream_body(#client{req_type=chunked}=Client) ->
    case send_chunk(Client, <<>>) of
        ok -> {ok, Client#client{response_state=waiting}};
        Error -> Error
    end;
end_stream_body(Client) ->
    {ok, Client#client{response_state=waiting}}.
%% @private Decide whether headers and body may be written in a single
%% send. Only possible when no 100-continue handshake is expected and the
%% body is already materialized as an iolist or binary; the caller's
%% `perform_all' option is honoured in that case.
can_perform_all(Body, false, PerformAll) when is_binary(Body) ->
    PerformAll;
can_perform_all(Body, false, PerformAll) when is_list(Body) ->
    PerformAll;
can_perform_all(_Body, _Expect, _PerformAll) ->
    false.
%% @private Write the header block and the (fully materialized) body in a
%% single send, then start reading the response.
perform_all(Client, HeadersData, Body, Method, Path, Expect) ->
    Request = iolist_to_binary([HeadersData, Body]),
    case stream_body(Request, Client#client{expect=Expect}) of
        {error, _Reason}=Error ->
            Error;
        {stop, Client1} ->
            %% server answered before the body was fully sent
            hackney_response:start_response(
              Client1#client{method=Method, path=Path});
        {ok, Client1} ->
            case end_stream_body(Client1) of
                {ok, Client2} ->
                    hackney_response:start_response(
                      Client2#client{method=Method, path=Path});
                Error ->
                    Error
            end
    end.
%% @private Stream an open file over the connection by reading it chunk by
%% chunk; used whenever file:sendfile/5 cannot be (TLS transport, chunked
%% request, ...). The file position is restored before returning.
sendfile_fallback(Fd, Opts, Client) ->
    Offset = proplists:get_value(offset, Opts, 0),
    Bytes = proplists:get_value(bytes, Opts, 0),
    ChunkSize = proplists:get_value(chunk_size, Opts, ?CHUNK_SIZE),
    {ok, InitialPos} = file:position(Fd, {cur, 0}),
    {ok, _} = file:position(Fd, {bof, Offset}),
    Result = sendfile_fallback(Fd, Bytes, ChunkSize, Client, 0),
    {ok, _} = file:position(Fd, {bof, InitialPos}),
    Result.

%% @private Read/send loop. Bytes =:= 0 means "until end of file";
%% otherwise stop once `Sent' reaches `Bytes'. Returns {ok, BytesSent}.
sendfile_fallback(Fd, Bytes, ChunkSize, #client{send_fun=Send}=Client, Sent)
  when Bytes > Sent orelse Bytes =:= 0 ->
    ReadLen = case Bytes of
                  0 -> ChunkSize;
                  _ -> erlang:min(ChunkSize, Bytes - Sent)
              end,
    case file:read(Fd, ReadLen) of
        {ok, Data} ->
            case Send(Client, Data) of
                ok ->
                    sendfile_fallback(Fd, Bytes, ChunkSize, Client,
                                      Sent + iolist_size(Data));
                Error ->
                    Error
            end;
        eof ->
            {ok, Sent};
        Error ->
            Error
    end;
sendfile_fallback(_Fd, _Bytes, _ChunkSize, _Client, Sent) ->
    {ok, Sent}.
%% @private Turn a list of multipart part descriptions into a
%% {StreamFun, InitialState} pair usable as a stateful streaming body
%% (see stream_body/2): the fun pops one prebuilt element per call and
%% returns eof when the list is exhausted. Files are left as {file, Path}
%% markers so their content is streamed lazily; everything else is
%% rendered to binary up front. The final multipart boundary is appended
%% at the end of the stream.
-spec make_multipart_stream(list(), binary()) -> {fun(), list()}.
make_multipart_stream(Parts, Boundary) ->
    %% fold builds the stream in reverse; each clause mirrors one part
    %% shape accepted by hackney_multipart
    Stream = lists:foldl(fun
                ({file, Path}, Acc) ->
                    {MpHeader, _} = hackney_multipart:mp_file_header(
                        {file, Path}, Boundary),
                    [<<"\r\n">>, {file, Path}, MpHeader | Acc];
                ({file, Path, ExtraHeaders}, Acc) ->
                    {MpHeader, _} = hackney_multipart:mp_file_header(
                        {file, Path, ExtraHeaders}, Boundary),
                    [<<"\r\n">>, {file, Path}, MpHeader | Acc];
                ({file, Path, Disposition, ExtraHeaders}, Acc) ->
                    {MpHeader, _} = hackney_multipart:mp_file_header(
                        {file, Path, Disposition, ExtraHeaders}, Boundary),
                    [<<"\r\n">>, {file, Path}, MpHeader | Acc];
                ({mp_mixed, Name, MixedBoundary}, Acc) ->
                    {MpHeader, _} = hackney_multipart:mp_mixed_header(
                        Name, MixedBoundary),
                    [<< MpHeader/binary, "\r\n" >> | Acc];
                ({mp_mixed_eof, MixedBoundary}, Acc) ->
                    Eof = hackney_multipart:mp_eof(MixedBoundary),
                    [<< Eof/binary, "\r\n" >> | Acc];
                ({Name, Bin}, Acc) ->
                    Len = byte_size(Bin),
                    {MpHeader, _} = hackney_multipart:mp_data_header(
                        {Name, Len}, Boundary),
                    PartBin = << MpHeader/binary, Bin/binary, "\r\n" >>,
                    [PartBin | Acc];
                ({Name, Bin, ExtraHeaders}, Acc) ->
                    Len = byte_size(Bin),
                    {MpHeader, _} = hackney_multipart:mp_data_header(
                        {Name, Len, ExtraHeaders}, Boundary),
                    PartBin = << MpHeader/binary, Bin/binary, "\r\n" >>,
                    [PartBin | Acc];
                ({Name, Bin, Disposition, ExtraHeaders}, Acc) ->
                    Len = byte_size(Bin),
                    {MpHeader, _} = hackney_multipart:mp_data_header(
                        {Name, Len, Disposition, ExtraHeaders},
                        Boundary),
                    PartBin = << MpHeader/binary, Bin/binary, "\r\n" >>,
                    [PartBin | Acc]
            end, [], Parts),
    FinalStream = lists:reverse([hackney_multipart:mp_eof(Boundary) |
                                 Stream]),
    %% function used to stream
    StreamFun = fun
                    ([]) ->
                        eof;
                    ([Part | Rest]) ->
                        {ok, Part, Rest}
                end,
    {StreamFun, FinalStream}.
%% @private Append Cookie headers for the `cookie' option. Accepts a raw
%% cookie binary, a {Name, Value} or {Name, Value, Opts} tuple, or a list
%% of any of those. Headers are appended at the end so the caller's order
%% is preserved.
maybe_add_cookies([], Headers) ->
    Headers;
maybe_add_cookies(Cookie, Headers) when is_binary(Cookie) ->
    append_cookie(Cookie, Headers);
maybe_add_cookies({Name, Value}, Headers) ->
    append_cookie(hackney_cookie:setcookie(Name, Value, []), Headers);
maybe_add_cookies({Name, Value, Opts}, Headers) ->
    append_cookie(hackney_cookie:setcookie(Name, Value, Opts), Headers);
maybe_add_cookies([{Name, Value} | Rest], Headers) ->
    Headers1 = append_cookie(hackney_cookie:setcookie(Name, Value, []),
                             Headers),
    maybe_add_cookies(Rest, Headers1);
maybe_add_cookies([{Name, Value, Opts} | Rest], Headers) ->
    Headers1 = append_cookie(hackney_cookie:setcookie(Name, Value, Opts),
                             Headers),
    maybe_add_cookies(Rest, Headers1);
maybe_add_cookies([Cookie | Rest], Headers) ->
    maybe_add_cookies(Rest, append_cookie(Cookie, Headers)).

%% @private Append a single Cookie header at the end of a header list.
append_cookie(Cookie, Headers) ->
    Headers ++ [{<<"Cookie">>, Cookie}].
%% @private Default User-Agent value, e.g. <<"hackney/1.2.3">>. Any
%% pre-release suffix after "-" is dropped; when the application version
%% is unavailable the placeholder "0.0.0" is used.
default_ua() ->
    Version =
        case application:get_key(hackney, vsn) of
            {ok, FullVersion} ->
                [Vsn | _] = string:tokens(FullVersion, "-"),
                list_to_binary(Vsn);
            _ ->
                <<"0.0.0">>
        end,
    << "hackney/", Version/binary >>.
%% @private Set a Host header from the connection netloc unless the caller
%% already provided one (store_new keeps an existing value).
maybe_add_host(Headers, Netloc) ->
    {_Existing, Headers1} =
        hackney_headers_new:store_new(<<"Host">>, Netloc, Headers),
    Headers1.
%% @doc True when the client's port is the default for its scheme:
%% 80 for plain TCP, 443 for TLS.
is_default_port(#client{transport=Transport, port=Port}) ->
    case {Transport, Port} of
        {hackney_tcp, 80} -> true;
        {hackney_ssl, 443} -> true;
        {_, _} -> false
    end.
| null | https://raw.githubusercontent.com/CloudI/CloudI/3e45031c7ee3e974ead2612ea7dd06c9edf973c9/src/external/cloudi_x_hackney/src/hackney_request.erl | erlang | -*- erlang -*-
See the NOTICE for more information.
@doc module handling the request
basic & Cookies authorization handling
detect the request type: normal or chunked
add host eventually
get expect headers
build headers with the body.
build final client record
request to send
@doc stream multipart
part without content-length
@doc encode a list of properties in a form.
internal
iolist case
function used to stream | This file is part of hackney released under the Apache 2 license .
-module(hackney_request).
-include("hackney.hrl").
-include("hackney_lib.hrl").
-include_lib("hackney_internal.hrl").
-export([perform/2,
location/1,
send/2, send_chunk/2,
sendfile/3,
stream_body/2, end_stream_body/1,
stream_multipart/2,
encode_form/1,
default_ua/0]).
-export([is_default_port/1]).
-export([make_multipart_stream/2]).
64 MB is the default
perform(Client0, {Method0, Path0, Headers0, Body0}) ->
Method = hackney_bstr:to_upper(hackney_bstr:to_binary(Method0)),
Path = case Path0 of
<<"">> -> <<"/">>;
_ -> Path0
end,
#client{options=Options} = Client0,
Cookies = proplists:get_value(cookie, Options, []),
DefaultHeaders = case proplists:get_value(basic_auth, Options) of
undefined ->
maybe_add_cookies(Cookies, [{<<"User-Agent">>, default_ua()}]);
{User, Pwd} ->
User1 = hackney_bstr:to_binary(User),
Pwd1 = hackney_bstr:to_binary(Pwd),
Credentials = base64:encode(<< User1/binary, ":", Pwd1/binary >>),
maybe_add_cookies(
Cookies,
[{<<"User-Agent">>, default_ua()},
{<<"Authorization">>, <<"Basic ", Credentials/binary>>}]
)
end,
{Headers1, ReqType0} = req_type(
hackney_headers_new:merge(Headers0, hackney_headers_new:new(DefaultHeaders)),
Body0
),
Headers2 = maybe_add_host(Headers1, Client0#client.netloc),
Expect = expectation(Headers2),
{FinalHeaders, ReqType, Body, Client1} = case Body0 of
stream ->
{Headers2, ReqType0, stream, Client0};
stream_multipart ->
handle_multipart_body(Headers2, ReqType0, Client0);
{stream_multipart, Size} ->
handle_multipart_body(Headers2, ReqType0, Size, Client0);
{stream_multipart, Size, Boundary} ->
handle_multipart_body(Headers2, ReqType0,
Size, Boundary, Client0);
<<>> when Method =:= <<"POST">> orelse Method =:= <<"PUT">> ->
handle_body(Headers2, ReqType0, Body0, Client0);
[] when Method =:= <<"POST">> orelse Method =:= <<"PUT">> ->
handle_body(Headers2, ReqType0, Body0, Client0);
<<>> ->
{Headers2, ReqType0, Body0, Client0};
[] ->
{Headers2, ReqType0, Body0, Client0};
_ ->
handle_body(Headers2, ReqType0, Body0, Client0)
end,
Client = case ReqType of
normal ->
Client1#client{send_fun=fun hackney_request:send/2, req_type=normal};
chunked ->
Client1#client{send_fun=fun hackney_request:send_chunk/2, req_type=chunked}
end,
HeadersData = [
<< Method/binary, " ", Path/binary, " HTTP/1.1", "\r\n" >>,
hackney_headers_new:to_iolist(FinalHeaders)
],
PerformAll = proplists:get_value(perform_all, Options, true),
?report_verbose("perform request", [{header_data, HeadersData},
{perform_all, PerformAll},
{expect, Expect}]),
case can_perform_all(Body, Expect, PerformAll) of
true ->
perform_all(Client, HeadersData, Body, Method, Path, Expect);
_ ->
case hackney_request:send(Client, HeadersData) of
ok when Body =:= stream ->
{ok, Client#client{response_state=stream,
method=Method,
path=Path,
expect=Expect}};
ok ->
case stream_body(Body, Client#client{expect=Expect}) of
{error, _Reason}=E ->
E;
{stop, Client2} ->
FinalClient = Client2#client{method=Method,
path=Path},
hackney_response:start_response(FinalClient);
{ok, Client2} ->
case end_stream_body(Client2) of
{ok, Client3} ->
FinalClient = Client3#client{method=Method,
path=Path},
hackney_response:start_response(FinalClient);
Error ->
Error
end
end;
Error ->
Error
end
end.
location(Client) ->
#client{headers=Headers, transport=Transport, netloc=Netloc, path=Path} = Client,
case hackney_headers_new:get_value(<<"location">>, Headers) of
undefined ->
Scheme = hackney_url:transport_scheme(Transport),
Url = #hackney_url{scheme=Scheme, netloc=Netloc, path=Path},
hackney_url:unparse_url(Url);
Location ->
Location
end.
stream_body(Msg, #client{expect=true}=Client) ->
case hackney_response:expect_response(Client) of
{continue, Client2} ->
stream_body(Msg, Client2);
{stop, Client2} ->
{stop, Client2};
Error ->
Error
end;
stream_body(eof, Client) ->
{ok, Client};
stream_body(<<>>, Client) ->
{ok, Client};
stream_body(Func, Client) when is_function(Func) ->
case Func() of
{ok, Data} ->
case stream_body(Data, Client) of
{ok, Client1} ->
stream_body(Func, Client1);
Error ->
Error
end;
eof ->
stream_body(eof, Client);
Err ->
Err
end;
stream_body({Func, State}, Client) when is_function(Func) ->
case Func(State) of
{ok, Data, NewState} ->
case stream_body(Data, Client) of
{ok, Client1} ->
stream_body({Func, NewState}, Client1);
Error ->
Error
end;
eof ->
stream_body(eof, Client);
Err ->
Err
end;
stream_body({file, FileName}, Client) ->
stream_body({file, FileName, []}, Client);
stream_body({file, FileName, Opts}, Client) ->
case sendfile(FileName, Opts, Client) of
{ok, _BytesSent} ->
{ok, Client};
Error ->
Error
end;
stream_body(Body, #client{send_fun=Send}=Client) ->
case Send(Client, Body) of
ok ->
{ok, Client};
Error ->
Error
end.
stream_multipart(eof, #client{response_state=waiting}=Client) ->
{ok, Client};
stream_multipart(eof, #client{mp_boundary=Boundary}=Client) ->
case stream_body(hackney_multipart:mp_eof(Boundary), Client) of
{ok, Client1} ->
end_stream_body(Client1);
Error ->
Error
end;
stream_multipart({mp_mixed, Name, MixedBoundary}, #client{mp_boundary=Boundary}=Client) ->
{MpHeader, _} = hackney_multipart:mp_mixed_header({Name, MixedBoundary}, Boundary),
stream_body(<< MpHeader/binary, "\r\n" >>, Client);
stream_multipart({mp_mixed_eof, MixedBoundary}, Client) ->
Eof = hackney_multipart:mp_eof(MixedBoundary),
stream_body(<< Eof/binary, "\r\n" >>, Client);
stream_multipart({file, Path}, Client) ->
stream_multipart({file, Path, []}, Client);
stream_multipart({file, Path, <<Name/binary>>}, Client) ->
stream_multipart({file, Path, Name, []}, Client);
stream_multipart({file, Path, ExtraHeaders}, Client) ->
stream_multipart({file, Path, <<"file">>, ExtraHeaders}, Client);
stream_multipart({file, Path, _Name, _ExtraHeaders}=File,
#client{mp_boundary=Boundary}=Client) ->
{MpHeader, _} = hackney_multipart:mp_file_header(File, Boundary),
case stream_body(MpHeader, Client) of
{ok, Client1} ->
case stream_body({file, Path}, Client1) of
{ok, Client2} ->
stream_body(<<"\r\n">>, Client2);
Error ->
Error
end;
Error ->
Error
end;
stream_multipart({data, Name, Bin}, Client) ->
stream_multipart({data, Name, Bin, []}, Client);
stream_multipart({data, Name, Bin, ExtraHeaders},
#client{mp_boundary=Boundary}=Client) ->
Len = byte_size(Name),
{MpHeader, _} = hackney_multipart:mp_data_header({Name, Len, ExtraHeaders},
Boundary),
Bin1 = << MpHeader/binary, Bin/binary, "\r\n" >>,
stream_body(Bin1, Client);
stream_multipart({part, eof}, Client) ->
stream_body(<<"\r\n">>, Client);
stream_multipart({part, Headers}, #client{mp_boundary=Boundary}=Client)
when is_list(Headers) ->
MpHeader = hackney_multipart:mp_header(Headers, Boundary),
stream_body(MpHeader, Client);
stream_multipart({part, Name}, Client) when is_binary(Name) ->
stream_multipart({part, Name, []}, Client);
stream_multipart({part, Name, ExtraHeaders},
#client{mp_boundary=Boundary}=Client)
when is_list(ExtraHeaders) ->
CType = mimerl:filename(Name),
Headers = [{<<"Content-Disposition">>,
<<"form-data">>, [{<<"name">>, <<"\"", Name/binary, "\"">>}]},
{<<"Content-Type">>, CType}],
MpHeader = hackney_multipart:mp_header(Headers, Boundary),
stream_body(MpHeader, Client);
stream_multipart({part, Name, Len}, Client) when is_integer(Len)->
stream_multipart({part, Name, Len, []}, Client);
stream_multipart({part, Name, Len, ExtraHeaders},
#client{mp_boundary=Boundary}=Client) ->
{MpHeader, _} = hackney_multipart:mp_data_header({Name, Len, ExtraHeaders},
Boundary),
stream_body(MpHeader, Client);
stream_multipart({part_bin, Bin}, Client) ->
stream_body(Bin, Client).
%% Hand the payload to the client's transport module (tcp/ssl/...),
%% which exposes the gen_tcp/ssl-style send/2 callback.
send(#client{transport=Mod, socket=Socket}, Payload) ->
  Mod:send(Socket, Payload).
%% Frame Data as an HTTP/1.1 chunk — hexadecimal size line, payload,
%% trailing CRLF — and send it over the client's transport.
send_chunk(Client, Data) ->
  SizeLine = io_lib:format("~.16b\r\n", [iolist_size(Data)]),
  send(Client, [SizeLine, Data, <<"\r\n">>]).
%% Send a file over the socket. The kernel-assisted file:sendfile/5
%% fast path is only valid for a plain TCP transport with a non-chunked
%% (normal) request; every other combination falls back to a userland
%% read/send loop via sendfile_fallback/3.
%%
%% BUGFIX: the fast-path clause previously matched on the misspelled
%% atom `hackney_tcp_tansport', which matches no transport used in this
%% module (cf. is_default_port/1 below), so the clause was dead code.
sendfile(FileName, Opts, #client{transport=hackney_tcp, socket=Skt,
                                 req_type=normal}) ->
  Offset = proplists:get_value(offset, Opts, 0),
  Bytes = proplists:get_value(bytes, Opts, 0),
  SendFileOpts = case proplists:get_value(chunk_size, Opts, ?CHUNK_SIZE) of
                   undefined -> Opts;
                   ChunkSize -> [{chunk_size, ChunkSize}]
                 end,
  file:sendfile(FileName, Skt, Offset, Bytes, SendFileOpts);
sendfile(FileName, Opts, Client) ->
  case file:open(FileName, [read, raw, binary]) of
    {error, Reason} ->
      {error, Reason};
    {ok, Fd} ->
      Res = sendfile_fallback(Fd, Opts, Client),
      ok = file:close(Fd),
      Res
  end.
%% URL-encode a key/value proplist as an x-www-form-urlencoded body.
%% Returns {ByteSize, ContentType, EncodedBody}.
encode_form(KVs) ->
  Encoded = hackney_url:qs(KVs),
  ContentType = <<"application/x-www-form-urlencoded; charset=utf-8">>,
  {byte_size(Encoded), ContentType, Encoded}.
%% Normalise a request body specification into {ContentLength,
%% ContentType, Body} and patch the headers accordingly:
%%   {form, KVs}        -> urlencoded payload
%%   {multipart, Parts} -> multipart stream with a generated boundary
%%   {file, Name}       -> file on disk, content-type guessed from name
%%   fun / {fun, _}     -> caller-driven stream, length read from headers
%%   iolist / binary    -> in-memory payload
%% A body without a known length forces chunked transfer-encoding; a
%% known length forces a Content-Length header instead.
handle_body(Headers, ReqType0, Body0, Client) ->
  {CLen, CType, Body} = case Body0 of
    {form, KVs} ->
      encode_form(KVs);
    {multipart, Parts} ->
      Boundary = hackney_multipart:boundary(),
      MpLen = hackney_multipart:len_mp_stream(Parts, Boundary),
      MpStream = make_multipart_stream(Parts, Boundary),
      CT = << "multipart/form-data; boundary=", Boundary/binary >>,
      {MpLen, CT, MpStream};
    {file, FileName} ->
      S = filelib:file_size(FileName),
      FileName1 = hackney_bstr:to_binary(FileName),
      CT = hackney_headers_new:get_value(
             <<"content-type">>, Headers, mimerl:filename(FileName1)
           ),
      {S, CT, Body0};
    Func when is_function(Func) ->
      CT = hackney_headers_new:get_value(
             <<"content-type">>, Headers, <<"application/octet-stream">>
           ),
      S = hackney_headers_new:get_value(<<"content-length">>, Headers),
      {S, CT, Body0};
    {Func, _} when is_function(Func) ->
      CT = hackney_headers_new:get_value(
             <<"content-type">>, Headers, <<"application/octet-stream">>
           ),
      S = hackney_headers_new:get_value(<<"content-length">>, Headers),
      {S, CT, Body0};
    %% BUGFIX: this clause head was missing, leaving the iolist branch
    %% fused into the previous clause (a syntax error).
    _ when is_list(Body0) ->
      Body1 = iolist_to_binary(Body0),
      S = erlang:byte_size(Body1),
      CT = hackney_headers_new:get_value(
             <<"content-type">>, Headers, <<"application/octet-stream">>
           ),
      {S, CT, Body1};
    _ when is_binary(Body0) ->
      S = erlang:byte_size(Body0),
      CT = hackney_headers_new:get_value(
             <<"content-type">>, Headers, <<"application/octet-stream">>
           ),
      {S, CT, Body0}
  end,
  {NewHeaders, ReqType} = case {ReqType0, Body} of
    {chunked, {file, _}} ->
      %% files always have a known size: force Content-Length
      Headers1 = hackney_headers_new:delete(
                   <<"transfer-encoding">>,
                   hackney_headers_new:store(
                     <<"Content-Type">>, CType,
                     hackney_headers_new:store(
                       <<"Content-Length">>, CLen, Headers))),
      {Headers1, normal};
    {chunked, _} ->
      Headers1 = hackney_headers_new:delete(
                   <<"content-length">>,
                   hackney_headers_new:store(
                     <<"Content-Type">>, CType, Headers)),
      {Headers1, chunked};
    {_, _} when CLen =:= undefined ->
      %% unknown length: fall back to chunked transfer-encoding
      Headers1 = hackney_headers_new:delete(
                   <<"content-length">>,
                   hackney_headers_new:store(
                     [{<<"Content-Type">>, CType},
                      {<<"Transfer-Encoding">>, <<"chunked">>}],
                     Headers)),
      {Headers1, chunked};
    {_, _} ->
      Headers1 = hackney_headers_new:delete(
                   <<"transfer-encoding">>,
                   hackney_headers_new:store(
                     [{<<"Content-Type">>, CType},
                      {<<"Content-Length">>, CLen}],
                     Headers)),
      {Headers1, normal}
  end,
  {NewHeaders, ReqType, Body, Client}.
%% Convenience wrappers: default to a chunked transfer and/or to a
%% freshly generated multipart boundary.
handle_multipart_body(Headers, ReqType, Client) ->
  Boundary = hackney_multipart:boundary(),
  handle_multipart_body(Headers, ReqType, chunked, Boundary, Client).

handle_multipart_body(Headers, ReqType, CLen, Client) ->
  Boundary = hackney_multipart:boundary(),
  handle_multipart_body(Headers, ReqType, CLen, Boundary, Client).
%% Prepare headers for a caller-streamed multipart body. Any existing
%% non-multipart Content-Type is replaced with a multipart/form-data
%% one carrying Boundary; CLen =:= chunked selects chunked encoding,
%% any other value becomes the Content-Length. Marks the client as
%% streaming and records the boundary for stream_multipart/2.
handle_multipart_body(Headers, ReqType, CLen, Boundary, Client) ->
  CType = case hackney_headers_new:get_value(<<"content-type">>, Headers) of
    undefined ->
      << "multipart/form-data; boundary=", Boundary/binary >>;
    Value ->
      %% keep a caller-supplied content-type only if it is multipart
      case hackney_headers_new:parse_content_type(Value) of
        {<<"multipart">>, _, _} -> Value;
        _ ->
          << "multipart/form-data; boundary=", Boundary/binary >>
      end
  end,
  {NewHeaders, ReqType1} = case {CLen, ReqType} of
    {chunked, _} ->
      %% chunked: Transfer-Encoding in, Content-Length out
      Headers1 = hackney_headers_new:delete(
        <<"content-length">>,
        hackney_headers_new:store(
          [{<<"Content-Type">>, CType},
           {<<"Transfer-Encoding">>, <<"chunked">>}],
          Headers)),
      {Headers1, chunked};
    {_, _} ->
      %% known length: Content-Length in, Transfer-Encoding out
      Headers1 = hackney_headers_new:delete(
        <<"transfer-encoding">>,
        hackney_headers_new:store(
          [{<<"Content-Type">>, CType},
           {<<"Content-Length">>, CLen}],
          Headers)),
      {Headers1, normal}
  end,
  {NewHeaders, ReqType1, stream, Client#client{response_state=stream,
                                               mp_boundary=Boundary}}.
%% Decide between a chunked and a fixed-length ("normal") request.
%% A streamed body with neither a chunked Transfer-Encoding nor a
%% Content-Length is forced to chunked encoding (headers updated);
%% any other body is chunked only if the caller asked for it.
req_type(Headers, stream) ->
  TE = hackney_bstr:to_lower(
         hackney_headers_new:get_value(<<"transfer-encoding">>, Headers, <<>>)
       ),
  if
    TE =:= <<"chunked">> ->
      {Headers, chunked};
    true ->
      case hackney_headers_new:get_value(<<"content-length">>, Headers) of
        undefined ->
          Headers2 = hackney_headers_new:store(
                       <<"Transfer-Encoding">>, <<"chunked">>, Headers
                     ),
          {Headers2, chunked};
        _Len ->
          {Headers, normal}
      end
  end;
req_type(Headers, _Body) ->
  TE = hackney_bstr:to_lower(
         hackney_headers_new:get_value(<<"transfer-encoding">>, Headers, <<>>)
       ),
  case TE of
    <<"chunked">> -> {Headers, chunked};
    _Other -> {Headers, normal}
  end.
%% True when the caller asked for an HTTP 100-continue handshake.
expectation(Headers) ->
  Raw = hackney_headers_new:get_value(<<"expect">>, Headers, <<>>),
  case hackney_bstr:to_lower(Raw) of
    <<"100-continue">> -> true;
    _ -> false
  end.
%% Terminate a streamed request body. Chunked requests need the
%% closing zero-length chunk; fixed-length requests are already
%% complete. On success the client goes back to the `waiting' state.
end_stream_body(#client{req_type=chunked}=Client) ->
  case send_chunk(Client, <<>>) of
    ok -> {ok, Client#client{response_state=waiting}};
    Error -> Error
  end;
end_stream_body(Client) ->
  {ok, Client#client{response_state=waiting}}.
%% A request can be flushed in a single write only when no 100-continue
%% handshake is expected and the body is an in-memory iolist/binary
%% (not a stream, file or multipart spec); the caller's PerformAll
%% preference is then returned as-is.
can_perform_all(Body, false, PerformAll) when is_list(Body); is_binary(Body) ->
  PerformAll;
can_perform_all(_Body, _Expect, _PerformAll) ->
  false.
%% Send the headers and the whole body in one write, then start reading
%% the response. Only reached when can_perform_all/3 allowed it.
perform_all(Client, HeadersData, Body, Method, Path, Expect) ->
  case stream_body(iolist_to_binary([HeadersData, Body]),
                   Client#client{expect=Expect}) of
    {error, _Reason}=E ->
      E;
    {stop, Client2} ->
      %% stream_body signalled an early stop — presumably the peer
      %% already answered; go straight to response parsing. (TODO:
      %% confirm against stream_body/2, not visible in this chunk.)
      FinalClient = Client2#client{method=Method, path=Path},
      hackney_response:start_response(FinalClient);
    {ok, Client2} ->
      %% body fully written: finalize it, then read the response
      case end_stream_body(Client2) of
        {ok, Client3} ->
          FinalClient = Client3#client{method=Method,
                                       path=Path},
          hackney_response:start_response(FinalClient);
        Error ->
          Error
      end
  end.
%% Userland fallback for sendfile/3: stream the file in chunk_size
%% reads. The current file position is saved and restored around the
%% transfer, so the caller's Fd is left where it was.
sendfile_fallback(Fd, Opts, Client) ->
  Offset = proplists:get_value(offset, Opts, 0),
  %% bytes =:= 0 means "until end of file" (see the /5 loop below)
  Bytes = proplists:get_value(bytes, Opts, 0),
  ChunkSize = proplists:get_value(chunk_size, Opts, ?CHUNK_SIZE),
  {ok, CurrPos} = file:position(Fd, {cur, 0}),
  {ok, _NewPos} = file:position(Fd, {bof, Offset}),
  Res = sendfile_fallback(Fd, Bytes, ChunkSize, Client, 0),
  {ok, _} = file:position(Fd, {bof, CurrPos}),
  Res.
%% Read/send loop for the sendfile fallback. Stops after Bytes bytes
%% have been sent (or at eof when Bytes =:= 0) and returns {ok, Sent}
%% with the number of bytes transferred, or the send/read error.
sendfile_fallback(Fd, Bytes, ChunkSize, #client{send_fun=Send}=Client, Sent)
  when Bytes > Sent orelse Bytes =:= 0 ->
  %% never read past the requested byte count
  Length = if Bytes > 0 -> erlang:min(ChunkSize, Bytes - Sent);
              true -> ChunkSize
           end,
  case file:read(Fd, Length) of
    {ok, Data} ->
      Len = iolist_size(Data),
      case Send(Client, Data) of
        ok ->
          sendfile_fallback(Fd, Bytes, ChunkSize, Client,
                            Sent + Len);
        Error ->
          Error
      end;
    eof ->
      {ok, Sent};
    Error ->
      Error
  end;
sendfile_fallback(_, _, _, _, Sent) ->
  %% requested byte count reached
  {ok, Sent}.
-spec make_multipart_stream(list(), binary()) -> {fun(), list()}.
%% Pre-render every multipart part spec into a flat list of items —
%% binaries plus {file, Path} markers — terminated by the closing
%% boundary, and pair it with a pull-style stream fun that hands the
%% items out one by one and returns eof when exhausted.
make_multipart_stream(Parts, Boundary) ->
  Stream = lists:foldl(fun
    ({file, Path}, Acc) ->
      {MpHeader, _} = hackney_multipart:mp_file_header(
        {file, Path}, Boundary),
      [<<"\r\n">>, {file, Path}, MpHeader | Acc];
    ({file, Path, ExtraHeaders}, Acc) ->
      {MpHeader, _} = hackney_multipart:mp_file_header(
        {file, Path, ExtraHeaders}, Boundary),
      [<<"\r\n">>, {file, Path}, MpHeader | Acc];
    ({file, Path, Disposition, ExtraHeaders}, Acc) ->
      {MpHeader, _} = hackney_multipart:mp_file_header(
        {file, Path, Disposition, ExtraHeaders}, Boundary),
      [<<"\r\n">>, {file, Path}, MpHeader | Acc];
    ({mp_mixed, Name, MixedBoundary}, Acc) ->
      {MpHeader, _} = hackney_multipart:mp_mixed_header(
        Name, MixedBoundary),
      [<< MpHeader/binary, "\r\n" >> | Acc];
    ({mp_mixed_eof, MixedBoundary}, Acc) ->
      Eof = hackney_multipart:mp_eof(MixedBoundary),
      [<< Eof/binary, "\r\n" >> | Acc];
    ({Name, Bin}, Acc) ->
      Len = byte_size(Bin),
      {MpHeader, _} = hackney_multipart:mp_data_header(
        {Name, Len}, Boundary),
      PartBin = << MpHeader/binary, Bin/binary, "\r\n" >>,
      [PartBin | Acc];
    ({Name, Bin, ExtraHeaders}, Acc) ->
      Len = byte_size(Bin),
      {MpHeader, _} = hackney_multipart:mp_data_header(
        {Name, Len, ExtraHeaders}, Boundary),
      PartBin = << MpHeader/binary, Bin/binary, "\r\n" >>,
      [PartBin | Acc];
    ({Name, Bin, Disposition, ExtraHeaders}, Acc) ->
      Len = byte_size(Bin),
      {MpHeader, _} = hackney_multipart:mp_data_header(
        {Name, Len, Disposition, ExtraHeaders},
        Boundary),
      PartBin = << MpHeader/binary, Bin/binary, "\r\n" >>,
      [PartBin | Acc]
  end, [], Parts),
  %% the fold accumulated in reverse; prepend the final boundary
  %% before reversing so it ends up last
  FinalStream = lists:reverse([hackney_multipart:mp_eof(Boundary) |
                               Stream]),
  StreamFun = fun
    ([]) ->
      eof;
    ([Part | Rest]) ->
      {ok, Part, Rest}
  end,
  {StreamFun, FinalStream}.
%% Append Cookie headers for the given cookie spec. Accepts a single
%% binary, a {Name, Value} or {Name, Value, Opts} tuple, or a list of
%% any of those. NOTE: clause order matters — the bare 2/3-tuple
%% clauses must come before the list-iteration clauses so a single
%% tuple is treated as one cookie, not iterated over.
maybe_add_cookies([], Headers) ->
  Headers;
maybe_add_cookies(Cookie, Headers) when is_binary(Cookie) ->
  Headers ++ [{<<"Cookie">>, Cookie}];
maybe_add_cookies({Name, Value}, Headers) ->
  Cookie = hackney_cookie:setcookie(Name, Value, []),
  Headers ++ [{<<"Cookie">>, Cookie}];
maybe_add_cookies({Name, Value, Opts}, Headers) ->
  Cookie = hackney_cookie:setcookie(Name, Value, Opts),
  Headers ++ [{<<"Cookie">>, Cookie}];
maybe_add_cookies([{Name, Value} | Rest], Headers) ->
  Cookie = hackney_cookie:setcookie(Name, Value, []),
  Headers1 = Headers ++ [{<<"Cookie">>, Cookie}],
  maybe_add_cookies(Rest, Headers1);
maybe_add_cookies([{Name, Value, Opts} | Rest], Headers) ->
  Cookie = hackney_cookie:setcookie(Name, Value, Opts),
  Headers1 = Headers ++ [{<<"Cookie">>, Cookie}],
  maybe_add_cookies(Rest, Headers1);
maybe_add_cookies([Cookie | Rest], Headers) ->
  %% list element that is already a rendered cookie binary
  Headers1 = Headers ++ [{<<"Cookie">>, Cookie}],
  maybe_add_cookies(Rest, Headers1).
%% Build the default User-Agent value, e.g. <<"hackney/1.18.0">>.
%% Any "-pre"/"-rc" style suffix is stripped from the version; when the
%% application metadata is unavailable we fall back to 0.0.0.
default_ua() ->
  Vsn = case application:get_key(hackney, vsn) of
          {ok, Full} ->
            Base = hd(string:tokens(Full, "-")),
            list_to_binary(Base);
          _Undefined ->
            <<"0.0.0">>
        end,
  <<"hackney/", Vsn/binary>>.
%% Add a Host header for Netloc unless the caller already set one
%% (store_new only inserts when the key is absent).
maybe_add_host(Headers, Netloc) ->
  {_WasNew, Headers1} = hackney_headers_new:store_new(<<"Host">>, Netloc, Headers),
  Headers1.
%% True when the connection uses the scheme's default port (80 for
%% plain TCP, 443 for SSL), in which case the port can be omitted from
%% the Host header.
is_default_port(#client{transport=hackney_tcp, port=80}) -> true;
is_default_port(#client{transport=hackney_ssl, port=443}) -> true;
is_default_port(_Client) -> false.
|
2297b5a27fd0d84cfc33ecb5f755515713dead5755a30e006ecf00c5b19a7f22 | avsm/eeww | semantics_of_primitives.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, OCamlPro
and ,
(* *)
(* Copyright 2013--2016 OCamlPro SAS *)
Copyright 2014 - -2016 Jane Street Group LLC
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
[@@@ocaml.warning "+a-4-9-30-40-41-42"]
(* [No_effects] primitives can be deleted or reordered when their result
   is unused; [Only_generative_effects] ones merely allocate fresh
   (possibly mutable) blocks; [Arbitrary_effects] ones may do anything. *)
type effects = No_effects | Only_generative_effects | Arbitrary_effects

(* A primitive has coeffects when its result may depend on mutable
   state, so it cannot be moved across writes. *)
type coeffects = No_coeffects | Has_coeffects
(* Classify a Clambda primitive by its effects and coeffects.
   BUGFIX: the "May trigger a bounds check exception." comment below had
   lost its [(* *)] delimiters in this copy, which is a syntax error. *)
let for_primitive (prim : Clambda_primitives.primitive) =
  match prim with
  | Pmakeblock _
  | Pmakearray (_, Mutable) -> Only_generative_effects, No_coeffects
  | Pmakearray (_, Immutable) -> No_effects, No_coeffects
  | Pduparray (_, Immutable) ->
    No_effects, No_coeffects  (* Pduparray (_, Immutable) is allowed only on
                                 immutable arrays. *)
  | Pduparray (_, Mutable) | Pduprecord _ ->
    Only_generative_effects, Has_coeffects
  | Pccall { prim_name =
               ( "caml_format_float" | "caml_format_int" | "caml_int32_format"
               | "caml_nativeint_format" | "caml_int64_format" ) } ->
    No_effects, No_coeffects
  | Pccall _ -> Arbitrary_effects, Has_coeffects
  | Praise _ -> Arbitrary_effects, No_coeffects
  | Prunstack | Pperform | Presume | Preperform ->
    Arbitrary_effects, Has_coeffects
  | Pnot
  | Pnegint
  | Paddint
  | Psubint
  | Pmulint
  | Pandint
  | Porint
  | Pxorint
  | Plslint
  | Plsrint
  | Pasrint
  | Pintcomp _ -> No_effects, No_coeffects
  | Pcompare_ints | Pcompare_floats | Pcompare_bints _
    -> No_effects, No_coeffects
  | Pdivbint { is_safe = Unsafe }
  | Pmodbint { is_safe = Unsafe }
  | Pdivint Unsafe
  | Pmodint Unsafe ->
    No_effects, No_coeffects  (* Will not raise [Division_by_zero]. *)
  | Pdivbint { is_safe = Safe }
  | Pmodbint { is_safe = Safe }
  | Pdivint Safe
  | Pmodint Safe ->
    Arbitrary_effects, No_coeffects
  | Poffsetint _ -> No_effects, No_coeffects
  | Poffsetref _ -> Arbitrary_effects, Has_coeffects
  | Pintoffloat
  | Pfloatofint
  | Pnegfloat
  | Pabsfloat
  | Paddfloat
  | Psubfloat
  | Pmulfloat
  | Pdivfloat
  | Pfloatcomp _ -> No_effects, No_coeffects
  | Pstringlength | Pbyteslength
  | Parraylength _ -> No_effects, No_coeffects
  | Pisint
  | Pisout
  | Pbintofint _
  | Pintofbint _
  | Pcvtbint _
  | Pnegbint _
  | Paddbint _
  | Psubbint _
  | Pmulbint _
  | Pandbint _
  | Porbint _
  | Pxorbint _
  | Plslbint _
  | Plsrbint _
  | Pasrbint _
  | Pbintcomp _ -> No_effects, No_coeffects
  | Pbigarraydim _ ->
    No_effects, Has_coeffects  (* Some people resize bigarrays in place. *)
  | Pread_symbol _
  | Pfield _
  | Pfield_computed
  | Pfloatfield _
  | Parrayrefu _
  | Pstringrefu
  | Pbytesrefu
  | Pstring_load (_, Unsafe)
  | Pbytes_load (_, Unsafe)
  | Pbigarrayref (true, _, _, _)
  | Pbigstring_load (_, Unsafe) ->
    No_effects, Has_coeffects
  | Parrayrefs _
  | Pstringrefs
  | Pbytesrefs
  | Pstring_load (_, Safe)
  | Pbytes_load (_, Safe)
  | Pbigarrayref (false, _, _, _)
  | Pbigstring_load (_, Safe) ->
    (* May trigger a bounds check exception. *)
    Arbitrary_effects, Has_coeffects
  | Psetfield _
  | Psetfield_computed _
  | Psetfloatfield _
  | Patomic_load _
  | Patomic_exchange
  | Patomic_cas
  | Patomic_fetch_add
  | Parraysetu _
  | Parraysets _
  | Pbytessetu
  | Pbytessets
  | Pbytes_set _
  | Pbigarrayset _
  | Pbigstring_set _ ->
    (* Whether or not some of these are "unsafe" is irrelevant; they always
       have an effect. *)
    Arbitrary_effects, No_coeffects
  | Pbswap16
  | Pbbswap _ -> No_effects, No_coeffects
  | Pint_as_pointer -> No_effects, No_coeffects
  | Popaque -> Arbitrary_effects, Has_coeffects
  | Psequand
  | Psequor ->
    (* Removed by [Closure_conversion] in the flambda pipeline. *)
    No_effects, No_coeffects
  | Pdls_get ->
    (* only read *)
    No_effects, No_coeffects
(* Coarse classification of a primitive's result type; [Float] marks the
   primitives listed below as returning an unboxed-able float. *)
type return_type =
  | Float
  | Other

let return_type_of_primitive (prim:Clambda_primitives.primitive) =
  match prim with
  | Pfloatofint
  | Pnegfloat
  | Pabsfloat
  | Paddfloat
  | Psubfloat
  | Pmulfloat
  | Pdivfloat
  | Pfloatfield _
  | Parrayrefu Pfloatarray
  | Parrayrefs Pfloatarray ->
    Float
  | _ ->
    Other
| null | https://raw.githubusercontent.com/avsm/eeww/651d8da20a3cc9e88354ed0c8da270632b9c0c19/boot/ocaml/middle_end/semantics_of_primitives.ml | ocaml | ************************************************************************
OCaml
Copyright 2013--2016 OCamlPro SAS
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Pduparray (_, Immutable) is allowed only on
immutable arrays.
Will not raise [Division_by_zero].
Some people resize bigarrays in place.
Whether or not some of these are "unsafe" is irrelevant; they always
have an effect.
Removed by [Closure_conversion] in the flambda pipeline.
only read | , OCamlPro
and ,
Copyright 2014 - -2016 Jane Street Group LLC
the GNU Lesser General Public License version 2.1 , with the
[@@@ocaml.warning "+a-4-9-30-40-41-42"]
type effects = No_effects | Only_generative_effects | Arbitrary_effects
type coeffects = No_coeffects | Has_coeffects
let for_primitive (prim : Clambda_primitives.primitive) =
match prim with
| Pmakeblock _
| Pmakearray (_, Mutable) -> Only_generative_effects, No_coeffects
| Pmakearray (_, Immutable) -> No_effects, No_coeffects
| Pduparray (_, Immutable) ->
| Pduparray (_, Mutable) | Pduprecord _ ->
Only_generative_effects, Has_coeffects
| Pccall { prim_name =
( "caml_format_float" | "caml_format_int" | "caml_int32_format"
| "caml_nativeint_format" | "caml_int64_format" ) } ->
No_effects, No_coeffects
| Pccall _ -> Arbitrary_effects, Has_coeffects
| Praise _ -> Arbitrary_effects, No_coeffects
| Prunstack | Pperform | Presume | Preperform ->
Arbitrary_effects, Has_coeffects
| Pnot
| Pnegint
| Paddint
| Psubint
| Pmulint
| Pandint
| Porint
| Pxorint
| Plslint
| Plsrint
| Pasrint
| Pintcomp _ -> No_effects, No_coeffects
| Pcompare_ints | Pcompare_floats | Pcompare_bints _
-> No_effects, No_coeffects
| Pdivbint { is_safe = Unsafe }
| Pmodbint { is_safe = Unsafe }
| Pdivint Unsafe
| Pmodint Unsafe ->
| Pdivbint { is_safe = Safe }
| Pmodbint { is_safe = Safe }
| Pdivint Safe
| Pmodint Safe ->
Arbitrary_effects, No_coeffects
| Poffsetint _ -> No_effects, No_coeffects
| Poffsetref _ -> Arbitrary_effects, Has_coeffects
| Pintoffloat
| Pfloatofint
| Pnegfloat
| Pabsfloat
| Paddfloat
| Psubfloat
| Pmulfloat
| Pdivfloat
| Pfloatcomp _ -> No_effects, No_coeffects
| Pstringlength | Pbyteslength
| Parraylength _ -> No_effects, No_coeffects
| Pisint
| Pisout
| Pbintofint _
| Pintofbint _
| Pcvtbint _
| Pnegbint _
| Paddbint _
| Psubbint _
| Pmulbint _
| Pandbint _
| Porbint _
| Pxorbint _
| Plslbint _
| Plsrbint _
| Pasrbint _
| Pbintcomp _ -> No_effects, No_coeffects
| Pbigarraydim _ ->
| Pread_symbol _
| Pfield _
| Pfield_computed
| Pfloatfield _
| Parrayrefu _
| Pstringrefu
| Pbytesrefu
| Pstring_load (_, Unsafe)
| Pbytes_load (_, Unsafe)
| Pbigarrayref (true, _, _, _)
| Pbigstring_load (_, Unsafe) ->
No_effects, Has_coeffects
| Parrayrefs _
| Pstringrefs
| Pbytesrefs
| Pstring_load (_, Safe)
| Pbytes_load (_, Safe)
| Pbigarrayref (false, _, _, _)
| Pbigstring_load (_, Safe) ->
May trigger a bounds check exception .
Arbitrary_effects, Has_coeffects
| Psetfield _
| Psetfield_computed _
| Psetfloatfield _
| Patomic_load _
| Patomic_exchange
| Patomic_cas
| Patomic_fetch_add
| Parraysetu _
| Parraysets _
| Pbytessetu
| Pbytessets
| Pbytes_set _
| Pbigarrayset _
| Pbigstring_set _ ->
Arbitrary_effects, No_coeffects
| Pbswap16
| Pbbswap _ -> No_effects, No_coeffects
| Pint_as_pointer -> No_effects, No_coeffects
| Popaque -> Arbitrary_effects, Has_coeffects
| Psequand
| Psequor ->
No_effects, No_coeffects
| Pdls_get ->
No_effects, No_coeffects
type return_type =
| Float
| Other
let return_type_of_primitive (prim:Clambda_primitives.primitive) =
match prim with
| Pfloatofint
| Pnegfloat
| Pabsfloat
| Paddfloat
| Psubfloat
| Pmulfloat
| Pdivfloat
| Pfloatfield _
| Parrayrefu Pfloatarray
| Parrayrefs Pfloatarray ->
Float
| _ ->
Other
|
0728e4db6361d6846a8b08956ff13225b4205ecbec40254db345830abd53b2f8 | skanev/playground | 36.scm | SICP exercise 3.36
;
; Suppose we evaluate the following sequence of expressions in the global
; environment:
;
; (define a (make-connector))
; (define b (make-connector))
( set - value ! a 10 ' user )
;
; At some time during evaluation of the set-value!, the following expression
; from the connector's local procedure is evaluated:
;
; (for-each-except
; setter inform-about-value constraints)
;
; Draw an environment diagram showing the environment in which the above
; expression is evaluated
; globals:
; +-------------------------------------------------------------------------+
; | a: <procedure> |
; | b: <procedure> |
; | make-connector: <procedure> |
; | inform-about-value: <procedure> |
; | ... |
; +-------------------------------------------------------------------------+
; ^
; |
; +------------------------------+
; | set-my-value: <procedure> |
; | forget-my-value: <procedure> |
; | connect: <procedure> |
; | me: <procedure> |
; +------------------------------+
; ^
; |
; +------------------------------+
; | value: false |
; | informant: false |
; | constraints: () |
; +------------------------------+
; ^
; |
; +------------------------------+
| newval : 10 |
; | setter: 'user |
; +------------------------------+
| null | https://raw.githubusercontent.com/skanev/playground/d88e53a7f277b35041c2f709771a0b96f993b310/scheme/sicp/03/36.scm | scheme |
Suppose we evaluate the following sequence of expressions in the global
environment:
(define a (make-connector))
(define b (make-connector))
At some time during evaluation of the set-value!, the following expression
from the connector's local procedure is evaluated:
(for-each-except
setter inform-about-value constraints)
Draw an environment diagram showing the environment in which the above
expression is evaluated
globals:
+-------------------------------------------------------------------------+
| a: <procedure> |
| b: <procedure> |
| make-connector: <procedure> |
| inform-about-value: <procedure> |
| ... |
+-------------------------------------------------------------------------+
^
|
+------------------------------+
| set-my-value: <procedure> |
| forget-my-value: <procedure> |
| connect: <procedure> |
| me: <procedure> |
+------------------------------+
^
|
+------------------------------+
| value: false |
| informant: false |
| constraints: () |
+------------------------------+
^
|
+------------------------------+
| setter: 'user |
+------------------------------+ | SICP exercise 3.36
( set - value ! a 10 ' user )
| newval : 10 |
|
a319342704ac1b9e0bd3db11aac3768eb979225c857b33f42133f6f9cfe9659e | mbutterick/beautiful-racket | tokenizer.rkt | #lang br/quicklang
(require brag/support)
(define (make-tokenizer port)
(define (next-token)
(define jsonic-lexer
(lexer
[(from/to "//" "\n") (next-token)]
[(from/to "@$" "$@")
(token 'SEXP-TOK (trim-ends "@$" lexeme "$@"))]
[any-char (token 'CHAR-TOK lexeme)]))
(jsonic-lexer port))
next-token)
(provide make-tokenizer) | null | https://raw.githubusercontent.com/mbutterick/beautiful-racket/f0e2cb5b325733b3f9cbd554cc7d2bb236af9ee9/beautiful-racket-demo/jsonic-demo/tokenizer.rkt | racket | #lang br/quicklang
(require brag/support)
(define (make-tokenizer port)
(define (next-token)
(define jsonic-lexer
(lexer
[(from/to "//" "\n") (next-token)]
[(from/to "@$" "$@")
(token 'SEXP-TOK (trim-ends "@$" lexeme "$@"))]
[any-char (token 'CHAR-TOK lexeme)]))
(jsonic-lexer port))
next-token)
(provide make-tokenizer) | |
40d9845401387a84a5cdc4f486dfbaeb8ad0e28f406ffaaf3de96c234bb99d9b | jeromesimeon/Galax | typing_util.mli | (***********************************************************************)
(* *)
(* GALAX *)
(* XQuery Engine *)
(* *)
(*   Copyright 2001-2007.                                              *)
(* Distributed only by permission. *)
(* *)
(***********************************************************************)
(* $Id: typing_util.mli,v 1.9 2007/02/01 22:08:55 simeon Exp $ *)

(* Module: Typing_util
   Description:
     This module implements some basic operations used during static
     typing.
*)
open Xquery_type_core_ast
(***************************)
(* Sequence type factoring *)
(***************************)
(* Decompose a (core / algebra) sequence type into its item type and
   (minimum, maximum) occurrence bounds. *)
val factor_sequencetype : Xquery_core_ast.csequencetype -> Xquery_core_ast.citemtype * Occurrence.occurs * Occurrence.occurs
val factor_asequencetype : Xquery_algebra_ast.asequencetype -> Xquery_algebra_ast.aitemtype * Occurrence.occurs * Occurrence.occurs

(* True when the sequence type is of the form item()* — TODO confirm
   against the implementation, not visible here. *)
val is_itemstar_sequencetype : Xquery_core_ast.csequencetype -> bool

(* Functions that just examine a type's syntactic structure when
   full-blown subtyping is not necessary *)
val is_just_a_complex_type : cxtype -> bool
val least_common_promoted_type : cxschema -> cxtype list -> cxtype

(********************)
(* Typing judgments *)
(********************)

(* Returns the promoted type when the first type can be promoted to the
   second under the given schema, [None] otherwise. *)
val can_be_promoted_to_judge : cxschema -> cxtype -> cxtype -> cxtype option
val data_on_judge : cxschema -> cxtype -> cxtype
val validate_element_resolves_to : cxschema -> Xquery_common_ast.validation_mode -> cxtype -> cxtype
val expand_overloaded_arguments :
  cxschema ->
  Xquery_type_core_ast.cxtype list ->
  Xquery_type_core_ast.cxtype list list * Occurrence.occurs * Occurrence.occurs
val expand_first_overloaded_argument :
  cxschema ->
  Xquery_type_core_ast.cxtype list ->
  Xquery_type_core_ast.cxtype list * cxtype option * Occurrence.occurs * Occurrence.occurs
| null | https://raw.githubusercontent.com/jeromesimeon/Galax/bc565acf782c140291911d08c1c784c9ac09b432/typing/typing_util.mli | ocaml | *********************************************************************
GALAX
XQuery Engine
Distributed only by permission.
*********************************************************************
*************************
Sequence type factoring
*************************
Functions that just examine a type's syntactic structure when
full-blown subtyping is not necessary
******************
Typing judgments
****************** | Copyright 2001 - 2007 .
$ I d : , v 1.9 2007/02/01 22:08:55 simeon Exp $
Module : Typing_util
Description :
This modules implements some basic operations used during static
typing .
Description:
This modules implements some basic operations used during static
typing.
*)
open Xquery_type_core_ast
val factor_sequencetype : Xquery_core_ast.csequencetype -> Xquery_core_ast.citemtype * Occurrence.occurs * Occurrence.occurs
val factor_asequencetype : Xquery_algebra_ast.asequencetype -> Xquery_algebra_ast.aitemtype * Occurrence.occurs * Occurrence.occurs
val is_itemstar_sequencetype : Xquery_core_ast.csequencetype -> bool
val is_just_a_complex_type : cxtype -> bool
val least_common_promoted_type : cxschema -> cxtype list -> cxtype
val can_be_promoted_to_judge : cxschema -> cxtype -> cxtype -> cxtype option
val data_on_judge : cxschema -> cxtype -> cxtype
val validate_element_resolves_to : cxschema -> Xquery_common_ast.validation_mode -> cxtype -> cxtype
val expand_overloaded_arguments :
cxschema ->
Xquery_type_core_ast.cxtype list ->
Xquery_type_core_ast.cxtype list list * Occurrence.occurs * Occurrence.occurs
val expand_first_overloaded_argument :
cxschema ->
Xquery_type_core_ast.cxtype list ->
Xquery_type_core_ast.cxtype list * cxtype option * Occurrence.occurs * Occurrence.occurs
|
e8df86a0da1bb8c4abad780907619781d9e56de198a1062a9ae34eec37d7b974 | hatemogi/misaeng | user.clj | (ns user
(:use [미생.기본]
[미생.검사])
(:require [미생.기본-검사]
[미생.검사-검사]
[미생.예제.계승]
[미생.예제.총합]
[미생.예제.쿼드트리]
[미생.예제.피보나치]))
(defn T
  "Run the test namespaces. With no arguments, runs the full default
  suite; otherwise runs only the namespaces given."
  [& namespaces]
  (let [targets (or namespaces
                    '[미생.기본-검사
                      미생.검사-검사
                      미생.예제.계승
                      미생.예제.총합
                      미생.예제.쿼드트리
                      미생.예제.피보나치])]
    (적용 검사하기 targets)))
| null | https://raw.githubusercontent.com/hatemogi/misaeng/7b097d9d9ae497606cf54be586fe7a0f50bb7ffc/test/user.clj | clojure | (ns user
(:use [미생.기본]
[미생.검사])
(:require [미생.기본-검사]
[미생.검사-검사]
[미생.예제.계승]
[미생.예제.총합]
[미생.예제.쿼드트리]
[미생.예제.피보나치]))
(defn T [& namespaces]
(적용 검사하기 (or namespaces
['미생.기본-검사
'미생.검사-검사
'미생.예제.계승
'미생.예제.총합
'미생.예제.쿼드트리
'미생.예제.피보나치
])))
| |
488550f7ab37779969cc44271afd4553b504f37e91ee13f5025ed7cd6acc0dbb | WhatsApp/power_shell | power_shell_SUITE.erl | %%%-------------------------------------------------------------------
%%% @author
%%% @copyright (c) WhatsApp Inc. and its affiliates. All rights reserved.
%%% @private
%%%-------------------------------------------------------------------
-module(power_shell_SUITE).
-author("").
%%--------------------------------------------------------------------
%% IMPORTANT: DO NOT USE EXPORT_ALL!
%% This test relies on functions _not_ being exported from the module.
%% It is the whole point of test.
-export([all/0,
groups/0,
init_per_group/2,
end_per_group/2,
suite/0]).
-export([echo/0, echo/1,
self_echo/0, self_echo/1,
calling_local/0, calling_local/1,
second_clause/0, second_clause/1,
undef/0, undef/1,
undef_local/0, undef_local/1,
recursive/0, recursive/1,
undef_nested/0, undef_nested/1,
bad_match/0, bad_match/1,
function_clause/0, function_clause/1,
preloaded/0, preloaded/1,
throwing/0, throwing/1,
callback_local/0, callback_local/1,
callback_local_fun_obj/0, callback_local_fun_obj/1,
remote_callback/0, remote_callback/1,
callback_local_make_fun/0, callback_local_make_fun/1,
remote_callback_exported/0, remote_callback_exported/1,
record/0, record/1,
try_side_effect/0, try_side_effect/1,
rebind_var/0, rebind_var/1,
external_fun/0, external_fun/1,
catch_apply/0, catch_apply/1,
export/0, export/1,
export_partial/0, export_partial/1,
export_auto_revert/0, export_auto_revert/1,
export_no_link/0, export_no_link/1,
recursive_eval/0, recursive_eval/1
]).
-export([export_all/0, remote_cb_exported/1]).
%% Common Test headers
-include_lib("common_test/include/ct.hrl").
%% Include stdlib header to enable ?assert() for readable output
-include_lib("stdlib/include/assert.hrl").
%%--------------------------------------------------------------------
%% Function: suite() -> Info
%% Info = [tuple()]
%%--------------------------------------------------------------------
%% @doc Common Test suite metadata: cap every test case at 30 seconds.
suite() ->
    [{timetrap, {seconds, 30}}].

%% @doc Shared list of cases run by both the `direct' and `cached'
%% groups (see groups/0).
test_cases() ->
    [echo, self_echo, preloaded, second_clause, undef, undef_local, undef_nested, recursive,
        calling_local, throwing, bad_match, function_clause, remote_callback,
        callback_local, callback_local_fun_obj, callback_local_make_fun, recursive_eval,
        remote_callback_exported, record, try_side_effect, rebind_var, external_fun, catch_apply].
%%--------------------------------------------------------------------
%% Function: groups() -> [Group]
%% Group = {GroupName, Properties, GroupsAndTestCases}
%% GroupName = atom()
%% Properties = [parallel | sequence | Shuffle | {RepeatType, N}]
%% GroupsAndTestCases = [Group | {group, GroupName} | TestCase]
%% TestCase = atom()
%% Shuffle = shuffle | {shuffle, {integer(), integer(), integer()}}
%% RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail |
%% repeat_until_any_ok | repeat_until_any_fail
%% N = integer() | forever
%%--------------------------------------------------------------------
%% @doc Run the same cases both with and without the power_shell code
%% cache, plus a sequential group for the export tests.
groups() ->
    [
        {direct, [parallel], test_cases()},
        {cached, [parallel], test_cases()},
        {export, [], [export, export_partial, export_auto_revert, export_no_link]}
    ].
%%--------------------------------------------------------------------
%% Function: all() -> GroupsAndTestCases | {skip,Reason}
%% GroupsAndTestCases = [{group, GroupName} | TestCase]
%% GroupName = atom()
%% TestCase = atom()
%% Reason = term()
%%--------------------------------------------------------------------
%% @doc Top-level test specification: every group defined in groups/0.
all() ->
    [{group, direct}, {group, cached}, {group, export}].
%%--------------------------------------------------------------------
%% Function: init_per_group(GroupName, Config0) ->
%%               Config1 | {skip, Reason} | {skip_and_save, Reason, Config1}
%% GroupName = atom()
%% Config0 = Config1 = [tuple()]
%% Reason = term()
%%--------------------------------------------------------------------
%% @doc For the `cached' group, start power_shell with code caching
%% enabled. application:load/1 may legitimately report the app as
%% already loaded, hence the two accepted results.
%% BUGFIX: the "ensure ... started" line below had lost its `%%' comment
%% marker in this copy, which is a syntax error.
init_per_group(cached, Config) ->
    %% ensure power_shell cache started
    Loaded = application:load(power_shell),
    ?assert(Loaded =:= ok orelse Loaded =:= {error,{already_loaded,power_shell}}),
    ok = application:set_env(power_shell, cache_code, true),
    ok = application:start(power_shell),
    Config;
init_per_group(_GroupName, Config) ->
    Config.
%%--------------------------------------------------------------------
%% Function: end_per_group(GroupName, Config0) ->
%%               term() | {save_config, Config1}
%% GroupName = atom()
%% Config0 = Config1 = [tuple()]
%%--------------------------------------------------------------------
%% @doc Tear down the `cached' group: undo the cache setting and stop
%% the application started in init_per_group/2.
end_per_group(cached, _Config) ->
    ok = application:unset_env(power_shell, cache_code),
    ok = application:stop(power_shell),
    ok;
end_per_group(_GroupName, _Config) ->
    ok.
%%--------------------------------------------------------------------
%% IMPORTANT: THESE MUST NOT BE EXPORTED !!!
%% Identity function; evaluated via power_shell by echo/1.
%% (All helpers below must stay non-exported and keep their exact
%% shape — the suite evaluates them from source/AST and checks the
%% resulting behavior and stack traces.)
local_unexported(Echo) ->
    Echo.

%% Returns the calling process pid; exercises BIF calls (self_echo/1).
local_self() ->
    erlang:self().

%% Builds [1..N] with a tail-recursive accumulator.
local_unexported_recursive(N, Acc) when N =< 0 ->
    Acc;
local_unexported_recursive(N, Acc) ->
    local_unexported_recursive(N - 1, [N | Acc]).

%% Wrapper used to test evaluation of nested local calls.
local_unexported_nested(N) ->
    local_unexported_recursive(N, []).

%% Calls into a function that raises undef (undef_nested/1).
local_undef_nested(Atom) ->
    local_undef_nested_impl(Atom),
    true = rand:uniform(100). % to avoid tail recursion

%% Calls a non-existent module:function so undef is raised with this
%% frame still on the stack.
local_undef_nested_impl(Atom) ->
    not_a_module:not_a_function(1, Atom, 3),
    true = rand:uniform(100). % to avoid tail recursion

%% Multi-clause function used to test clause selection and
%% function_clause errors (second_clause/1, function_clause/1).
local_second_clause(Arg, Selector) when Selector =:= one ->
    Arg + 1;
local_second_clause(Arg, Selector) when Selector =:= two ->
    Arg + 2;
local_second_clause(Arg, Selector) when Selector =:= three ->
    Arg + 3;
local_second_clause(Arg, Selector) when is_atom(Selector) ->
    Arg + 10.
%% Always throws What (rand:uniform(100) < 200 is always true).
local_throw(What) ->
    rand:uniform(100) < 200 andalso throw(What).

%% Raises throw:ball with local_throw/1 on the stack.
local_throwing() ->
    local_throw(ball),
    % to make it non-tail-recursive and save call stack:
    ok.

%% Raises a badmatch error inside local_do_bad_match/1; the trailing
%% match only keeps the call non-tail-recursive.
local_bad_match() ->
    local_do_bad_match(one),
    true = rand:uniform(100).

%% 'one' =:= rand:uniform(100) can never be true, so this always fails
%% with {badmatch, false}.
local_do_bad_match(What) ->
    true = What =:= rand:uniform(100),
    ok.

%% Calls local_second_clause/2 with a selector that is expected to
%% match no clause, raising function_clause.
local_function_clause() ->
    local_second_clause(0, get(test_server_logopts)),
    true = rand:uniform(100).

%% Simple callback returning a tagged tuple.
local_cb_fun(Arg) ->
    {cb, Arg}.

%% Applies a fun object to Args — callback-dispatch helper.
local_cb_caller(Fun, Args) ->
    Fun(Args).

%% Local fun reference as callback (callback_local/1).
local_cb_init() ->
    local_cb_caller(fun local_cb_fun/1, [1]).

%% Anonymous fun as callback (callback_local_fun_obj/1).
local_cb_fun_obj() ->
    local_cb_caller(fun ([N]) -> N * 2 end, [2]).

%% Fun built with erlang:make_fun/3 (callback_local_make_fun/1).
local_cb_make_fun() ->
    F = erlang:make_fun(?MODULE, module_info, 1),
    local_cb_caller(F, md5).
%% Predicate callback: divisibility by 3 for negatives, evenness
%% otherwise.
remote_cb(N) when N <0 ->
    N rem 3 =:= 0;
remote_cb(N) ->
    N rem 2 =:= 0.

%% Uses a local fun both directly and through lists:filter, covering
%% several fun-reference shapes (remote_callback/1).
remote_cb_init(List) ->
    Cb = fun remote_cb/1,
    case List of
        [] ->
            ok;
        [E] ->
            Cb(E);
        [_ | _] ->
            lists:filter(fun remote_cb/1, List)
    end,
    %lists:filter(fun (K) -> K + 1, F = fun remote_cb/1, F(K) end, List).
    lists:filter(Cb, List).

%% Exported callback (see the export list above), referenced with an
%% explicit ?MODULE-qualified fun.
remote_cb_exported(N) ->
    N rem 3 =:= 0.

%% Filters via the exported callback (remote_callback_exported/1).
remote_cb_exported_init(List) ->
    lists:filter(fun ?MODULE:remote_cb_exported/1, List).
%% Kitchen sink to silence compiler in a good way (without suppressing warnings)
%% Exported only so the locals above count as "used"; several of these
%% calls raise, so the later ones are unreachable — that is fine, the
%% point is merely to reference the functions.
export_all() ->
    local_undef_nested(atom),
    local_cb_fun(1),
    local_throwing(),
    local_bad_match(),
    rebind([]),
    external_filter([]),
    throw_applied(),
    try_side({1, 1}).
%% Record with a defaulted, an unset, and an initialised field, used by
%% the record creation/modification tests.
-record(rec, {first= "1", second, third = initial}).
%% Builds a #rec{}; application:get_env/3 falls back to the default 3
%% (assumes the 'missing' key is not configured in kernel — TODO confirm).
create_record() ->
#rec{third = application:get_env(kernel, missing, 3), first = "1"}.
%% Updates two fields of an existing #rec{}.
modify_record(#rec{} = Rec) ->
Rec#rec{third = 10, second = "2"}.
%% Fixture returning a value from both the success and the exception path
%% of a try expression; only error:badarg from binary_to_integer/1 is
%% caught (any other class/reason propagates).
try_side(MaybeBinaryInteger) ->
    try binary_to_integer(MaybeBinaryInteger) of
        _Ignored ->
            true
    catch
        error:badarg ->
            false
    end.
%% Helper invoked via lists:filtermap/2; binds a variable named RebindVar
%% so evaluation must keep it separate from the caller's variable of the
%% same name.
internal_lambda(Atom) ->
RebindVar = atom_to_list(Atom),
is_list(RebindVar).
%% Fixture checking that variable bindings do not leak between a function
%% and the funs it uses (both intentionally use the name RebindVar).
rebind(Original) ->
RebindVar = Original,
lists:filtermap(fun internal_lambda/1, RebindVar).
%% Fixture passing an external `fun erlang:F/A` to lists:filter/2.
external_filter(List) ->
lists:filter(fun erlang:is_number/1, List).
%% Fixture: `catch apply(erlang, apply, ...)` where the inner application
%% fails; verifies that power_shell leaves the caught 'EXIT' tuple intact
%% rather than converting it to badarg. The matched {'EXIT', _} result is
%% turned into a thrown 'expected' that the test asserts on.
throw_applied() ->
Args = [[fun() -> exit(some_error) end, []], []], % this generates: {'EXIT',{{badfun,[#Fun<power_shell_SUITE.21.126501267>,[]]},
case catch apply(erlang, apply, Args) of
{'EXIT', _Reason} ->
throw(expected)
end.
%%--------------------------------------------------------------------
%% Test Cases
%% Test case info for echo/1. Fixed: the doc entry must be a {doc, String}
%% tuple for Common Test to pick it up; it was previously the malformed
%% [{doc}, "..."] (compare the correct form in undef/0).
echo() ->
    [{doc, "Evaluate non-exported function"}].
%% Evaluates a non-exported function via power_shell and compares the
%% result with a direct call.
echo(_Config) ->
    ?assertEqual(local_unexported(echo), power_shell:eval(?MODULE, local_unexported, [echo])),
    ok.
%% Test case info for self_echo/1. Fixed: the doc entry must be a
%% {doc, String} tuple; it was previously the malformed [{doc}, "..."].
self_echo() ->
    [{doc, "Evaluates function returning self() - to see if NIFs are working"}].
%% Compares self() obtained directly with self() obtained through
%% power_shell evaluation (both must run in this process).
self_echo(_Config) ->
    ?assertEqual(local_self(), power_shell:eval(?MODULE, local_self, [])),
    ok.
undef() ->
[{doc, "Ensure undefined function throws undef"}].
%% Evaluates a call into a nonexistent module and checks the resulting
%% undef stack trace matches the current process stack plus one frame.
undef(_Config) ->
%% next 2 statements must be on the same line, otherwise stack trace info is broken
{current_stacktrace, Trace} = process_info(self(), current_stacktrace), Actual = (catch power_shell:eval(not_a_module, not_a_function, [1, 2, 3])),
Expected = {'EXIT', {undef, [{not_a_module, not_a_function,[1,2,3], []}] ++ Trace}},
?assertEqual(Expected, Actual),
ok.
undef_local() ->
[{doc, "Ensure undefined function in this very module throws undef"}].
%% Same as undef/1 but targets a missing function in this module.
undef_local(_Config) ->
%% next 2 statements must be on the same line, otherwise stack trace info is broken
{current_stacktrace, Trace} = process_info(self(), current_stacktrace), Actual = (catch power_shell:eval(?MODULE, not_a_function, [1, 2, 3])),
Expected = {'EXIT', {undef, [{?MODULE,not_a_function,[1,2,3], []}] ++ Trace}},
?assertEqual(Expected, Actual),
ok.
undef_nested() ->
[{doc, "Ensure undefined function throws undef even when it's nested"}].
%% Compares the full exception (class/reason/stack) of the nested undef
%% fixture between direct and power_shell execution.
undef_nested(_Config) ->
exception_check(fun local_undef_nested/1, local_undef_nested, [atom]).
preloaded() ->
[{doc, "Ensure that functions from preloaded modules are just applied"}].
%% Preloaded modules have no loadable beam AST; power_shell should fall
%% back to plain apply for them.
preloaded(_Config) ->
?assertEqual([2, 1], power_shell:eval(prim_zip, reverse, [[1, 2]])),
?assertEqual(self(), power_shell:eval(erlang, self, [])).
throwing() ->
[{doc, "Unexported function throwing"}].
%% Checks throw propagation parity via exception_check/3.
throwing(_Config) ->
exception_check(fun local_throwing/0, local_throwing, []).
bad_match() ->
[{doc, "Unexported function throwing badmatch"}].
%% Checks badmatch propagation parity via exception_check/3.
bad_match(_Config) ->
exception_check(fun local_bad_match/0, local_bad_match, []).
function_clause() ->
[{doc, "Unexported function throwing function_clause"}].
%% Checks function_clause propagation parity via exception_check/3.
function_clause(_Config) ->
exception_check(fun local_function_clause/0, local_function_clause, []).
recursive() ->
[{doc, "Evaluate recursive function"}].
%% Result of evaluating a recursive fixture must match the direct call.
recursive(_Config) ->
?assertEqual(local_unexported_recursive(1, []), power_shell:eval(?MODULE, local_unexported_recursive, [1, []])),
ok.
calling_local() ->
[{doc, "Evaluate non-exported function calling another non-exported function"}].
%% Nested non-exported calls must evaluate to the same result as direct
%% execution.
calling_local(_Config) ->
?assertEqual(local_unexported_nested(0), power_shell:eval(?MODULE, local_unexported_nested, [0])),
ok.
second_clause() ->
[{doc, "Evaluate non-exported function with multiple clauses, ensure right one is selected"}].
%% Clause selection under evaluation must match compiled clause selection.
second_clause(_Config) ->
?assertEqual(local_second_clause(10, two), power_shell:eval(?MODULE, local_second_clause, [10, two])),
ok.
callback_local() ->
[{doc, "Non-exported function calls some function that calls a callback fun"}].
%% Local `fun Name/Arity` callback round-trip.
callback_local(_Config) ->
?assertEqual(local_cb_init(), power_shell:eval(?MODULE, local_cb_init, [])),
ok.
callback_local_fun_obj() ->
[{doc, "Non-exported function calls some function that calls a callback function object"}].
%% Anonymous fun object callback round-trip.
callback_local_fun_obj(_Config) ->
?assertEqual(local_cb_fun_obj(), power_shell:eval(?MODULE, local_cb_fun_obj, [])),
ok.
callback_local_make_fun() ->
[{doc, "Non-exported function and make_fun/3 fun"}].
%% erlang:make_fun/3 callback round-trip.
callback_local_make_fun(_Config) ->
?assertEqual(local_cb_make_fun(), power_shell:eval(?MODULE, local_cb_make_fun, [])),
ok.
remote_callback_exported() ->
[{doc, "Non-exported function calls remote function that calls exported callback"}].
%% Module-qualified exported fun used as a lists:filter/2 callback.
remote_callback_exported(_Config) ->
L = lists:seq(1, 20),
?assertEqual(remote_cb_exported_init(L), power_shell:eval(?MODULE, remote_cb_exported_init, [L])),
ok.
remote_callback() ->
[{doc, "Non-exported function calls some function that calls a callback fun"}].
%% Local fun passed to a remote (stdlib) function as callback.
remote_callback(_Config) ->
L = lists:seq(1, 20),
?assertEqual(remote_cb_init(L), power_shell:eval(?MODULE, remote_cb_init, [L])),
ok.
record() ->
[{doc, "Tests records - creation & modification"}].
%% Record expressions (creation and update syntax) must evaluate to the
%% same tuples as compiled code.
record(_Config) ->
Rec = create_record(),
?assertEqual(Rec, power_shell:eval(?MODULE, create_record, [])),
?assertEqual(modify_record(Rec), power_shell:eval(?MODULE, modify_record, [Rec])).
try_side_effect() ->
[{doc, "Tests try ... catch returning value from both flows"}].
%% The atom input drives try_side/1 into the catch branch (badarg).
try_side_effect(_Config) ->
?assertEqual(false, power_shell:eval(?MODULE, try_side, [atom])).
rebind_var() ->
[{doc, "Tests that variable is unbound when returned from function"}].
%% Evaluated scoping of same-named variables must match compiled scoping.
rebind_var(Config) when is_list(Config) ->
?assertEqual([atom, atom], power_shell:eval(?MODULE, rebind, [[atom, atom]])).
external_fun() ->
[{doc, "Tests external function passed to lists:filter"}].
%% erlang:is_number/1 keeps only the numeric elements.
external_fun(Config) when is_list(Config) ->
?assertEqual([1, 2], power_shell:eval(?MODULE, external_filter, [[1, atom, 2, atom]])).
catch_apply() ->
[{doc, "Tests that cast catch erlang:apply works and throws as expected, not converting it to badarg"}].
%% throw_applied/0 converts the caught 'EXIT' into throw(expected).
catch_apply(Config) when is_list(Config) ->
?assertThrow(expected, power_shell:eval(?MODULE, throw_applied, [])).
export() ->
[{doc, "Tests export_all (hot code load and revert)"}].
%% Exports all functions of power_shell_export via hot code load, checks
%% a previously-unexported function becomes callable and that original
%% compile options survive, then reverts and checks it is 'undef' again.
export(Config) when is_list(Config) ->
?assertException(error, undef, power_shell_export:local_unexported(success)),
%% capture original compiler options before the reload
CompileFlags = proplists:get_value(options, power_shell_export:module_info(compile)),
Sentinel = power_shell:export(power_shell_export),
?assertEqual(success, power_shell_export:local_unexported(success)),
%% verify compile flags - original flags should be honoured!
NewFlags = proplists:get_value(options, power_shell_export:module_info(compile)),
?assertEqual([], CompileFlags -- NewFlags),
%% unload now
power_shell:revert(Sentinel),
?assertException(error, undef, power_shell_export:local_unexported(success)).
export_partial() ->
[{doc, "Tests selective export"}].
%% Exports only {local_unexported, 1}; other non-exported functions must
%% remain 'undef' before, during, and after.
export_partial(Config) when is_list(Config) ->
?assertEqual(echo, power_shell_export:export_all(echo)), %% verify echo
?assertException(error, undef, power_shell_export:local_unexported(success)),
?assertException(error, undef, power_shell_export:local_never(success)),
Sentinel = power_shell:export(power_shell_export, [{local_unexported, 1}]),
?assertEqual(success, power_shell_export:local_unexported(success)),
%% local_never should not be exported, as it's not in the fun/arity list
?assertException(error, undef, power_shell_export:local_never(success)),
%% unload now
ok = power_shell:revert(Sentinel),
?assertException(error, undef, power_shell_export:local_unexported(success)).
export_auto_revert() ->
[{doc, "Tests auto-revert when sentinel process goes down"}].
%% The sentinel is created inside a short-lived process; once that process
%% exits normally, the export must be reverted automatically.
export_auto_revert(Config) when is_list(Config) ->
?assertException(error, undef, power_shell_export:local_unexported(success)),
{Pid, MRef} = spawn_monitor(fun () ->
power_shell:export(power_shell_export),
?assertEqual(success, power_shell_export:local_unexported(success))
end),
receive
{'DOWN', MRef, process, Pid, normal} ->
ct:sleep(1000), %% can't think of any better way to wait for code load event to complete
?assertException(error, undef, power_shell_export:local_unexported(success))
end.
export_no_link() ->
[{doc, "Tests no auto-revert when sentinel starts unlinked"}].
%% With link => false the export must survive its creating process and
%% only go away on an explicit revert/1.
export_no_link(Config) when is_list(Config) ->
?assertException(error, undef, power_shell_export:local_unexported(success)),
Self = self(),
spawn_link(fun () ->
Sentinel = power_shell:export(power_shell_export, all, #{link => false}),
Self ! {go, Sentinel}
end),
receive
{go, Sentinel} ->
?assertEqual(success, power_shell_export:local_unexported(success)),
ok = power_shell:revert(Sentinel),
?assertException(error, undef, power_shell_export:local_unexported(success))
end.
recursive_eval() ->
[{doc, "Tests on_load preserves stack"}].
%% Compiles a throwaway module into priv_dir whose bar/0 re-enters
%% power_shell:eval; the process dictionary must be unchanged afterwards,
%% proving recursive evaluation does not leak state.
recursive_eval(Config) ->
Source =
"-module(recursive). -export([foo/0, bar/0]). "
"foo() -> ok. bar() -> power_shell:eval(recursive, foo, []), true.",
PrivPath = ?config(priv_dir, Config),
true = code:add_path(PrivPath),
Filename = filename:join(PrivPath, "recursive.erl"),
ok = file:write_file(Filename, Source),
PrevDict = get(),
?assert(power_shell:eval(recursive, bar, [])),
?assertEqual(PrevDict, get()),
true = code:del_path(PrivPath),
ok.
%%--------------------------------------------------------------------
%% Exception testing helper
%%--------------------------------------------------------------------
%% Drops the per-frame debug/location element from a stack trace so that
%% traces can be compared across execution modes (line numbers and file
%% names differ between direct and power_shell-evaluated calls). Frames
%% that are not 4-tuples are dropped, mirroring the comprehension filter.
strip_dbg(Trace) ->
    lists:filtermap(
        fun ({M, F, A, _Dbg}) -> {true, {M, F, A}};
            (_Other) -> false
        end,
        Trace).
%% Runs the same fixture twice - directly and via power_shell:eval/3 -
%% and asserts both raise the identical exception class, reason and
%% (debug-stripped) stack trace. If either call returns a value instead
%% of raising, the test is broken and we throw {test_broken, Value}.
exception_check(Fun, FunAtomName, Args) ->
% again, next line cannot be split, otherwise line information would be broken
Expected = try erlang:apply(Fun, Args) of Val -> throw({test_broken, Val}) catch C:R:S -> {C, R, strip_dbg(S)} end, Actual = try power_shell:eval(?MODULE, FunAtomName, Args) of
Val1 ->
throw({test_broken, Val1})
catch
Class:Reason:Stack ->
{Class, Reason, strip_dbg(Stack)}
end,
% allow line numbers and file names to slip through
?assertEqual(Expected, Actual).
| null | https://raw.githubusercontent.com/WhatsApp/power_shell/12b25ed66963a930a25ff341a26cd8cbba9bff56/test/power_shell_SUITE.erl | erlang | -------------------------------------------------------------------
-------------------------------------------------------------------
--------------------------------------------------------------------
IMPORTANT: DO NOT USE EXPORT_ALL!
This test relies on functions _not_ being exported from the module.
It is the whole point of test.
Common Test headers
Include stdlib header to enable ?assert() for readable output
--------------------------------------------------------------------
Function: suite() -> Info
Info = [tuple()]
--------------------------------------------------------------------
--------------------------------------------------------------------
Function: groups() -> [Group]
GroupName = atom()
RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail |
repeat_until_any_ok | repeat_until_any_fail
N = integer() | forever
--------------------------------------------------------------------
--------------------------------------------------------------------
Function: all() -> GroupsAndTestCases | {skip,Reason}
GroupName = atom()
Reason = term()
--------------------------------------------------------------------
--------------------------------------------------------------------
GroupName = atom()
Config0 = Config1 = [tuple()]
Reason = term()
--------------------------------------------------------------------
--------------------------------------------------------------------
GroupName = atom()
Config0 = Config1 = [tuple()]
--------------------------------------------------------------------
--------------------------------------------------------------------
IMPORTANT: THESE MUST NOT BE EXPORTED !!!
to avoid tail recursion
to avoid tail recursion
to make it non-tail-recursive and save call stack:
lists:filter(fun (K) -> K + 1, F = fun remote_cb/1, F(K) end, List).
Kitchen sink to silence compiler in a good way (without suppressing warnings)
this generates: {'EXIT',{{badfun,[#Fun<power_shell_SUITE.21.126501267>,[]]},
--------------------------------------------------------------------
Test Cases
find some compiler options to keep
verify compile flags - original flags should be honoured!
unload now
verify echo
local_never should not be expected, as it's not in the fun/arity list
unload now
can't think of any better way to wait for code load event to complete
--------------------------------------------------------------------
Exception testing helper
--------------------------------------------------------------------
Compatibility: stacktrace
again, next line cannot be split, otherwise line information would be broken
allow line numbers and file names to slip through | @author < >
( c ) WhatsApp Inc. and its affiliates . All rights reserved .
@private
-module(power_shell_SUITE).
-author("").
-export([all/0,
groups/0,
init_per_group/2,
end_per_group/2,
suite/0]).
-export([echo/0, echo/1,
self_echo/0, self_echo/1,
calling_local/0, calling_local/1,
second_clause/0, second_clause/1,
undef/0, undef/1,
undef_local/0, undef_local/1,
recursive/0, recursive/1,
undef_nested/0, undef_nested/1,
bad_match/0, bad_match/1,
function_clause/0, function_clause/1,
preloaded/0, preloaded/1,
throwing/0, throwing/1,
callback_local/0, callback_local/1,
callback_local_fun_obj/0, callback_local_fun_obj/1,
remote_callback/0, remote_callback/1,
callback_local_make_fun/0, callback_local_make_fun/1,
remote_callback_exported/0, remote_callback_exported/1,
record/0, record/1,
try_side_effect/0, try_side_effect/1,
rebind_var/0, rebind_var/1,
external_fun/0, external_fun/1,
catch_apply/0, catch_apply/1,
export/0, export/1,
export_partial/0, export_partial/1,
export_auto_revert/0, export_auto_revert/1,
export_no_link/0, export_no_link/1,
recursive_eval/0, recursive_eval/1
]).
-export([export_all/0, remote_cb_exported/1]).
-include_lib("common_test/include/ct.hrl").
-include_lib("stdlib/include/assert.hrl").
suite() ->
[{timetrap, {seconds, 30}}].
test_cases() ->
[echo, self_echo, preloaded, second_clause, undef, undef_local, undef_nested, recursive,
calling_local, throwing, bad_match, function_clause, remote_callback,
callback_local, callback_local_fun_obj, callback_local_make_fun, recursive_eval,
remote_callback_exported, record, try_side_effect, rebind_var, external_fun, catch_apply].
Group = { GroupName , Properties , }
Properties = [ parallel | sequence | Shuffle | } ]
= [ Group | { group , GroupName } | TestCase ]
TestCase = atom ( )
Shuffle = shuffle | { shuffle,{integer(),integer(),integer ( ) } }
groups() ->
[
{direct, [parallel], test_cases()},
{cached, [parallel], test_cases()},
{export, [], [export, export_partial, export_auto_revert, export_no_link]}
].
= [ { group , GroupName } | TestCase ]
TestCase = atom ( )
all() ->
[{group, direct}, {group, cached}, {group, export}].
Function : init_per_group(GroupName , Config0 ) - >
Config1 | { skip , Reason } | { skip_and_save , Reason , Config1 }
init_per_group(cached, Config) ->
ensure power_shell cache started
Loaded = application:load(power_shell),
?assert(Loaded =:= ok orelse Loaded =:= {error,{already_loaded,power_shell}}),
ok = application:set_env(power_shell, cache_code, true),
ok = application:start(power_shell),
Config;
init_per_group(_GroupName, Config) ->
Config.
Function : end_per_group(GroupName , ) - >
term ( ) | { save_config , Config1 }
end_per_group(cached, _Config) ->
ok = application:unset_env(power_shell, cache_code),
ok = application:stop(power_shell),
ok;
end_per_group(_GroupName, _Config) ->
ok.
local_unexported(Echo) ->
Echo.
local_self() ->
erlang:self().
local_unexported_recursive(N, Acc) when N =< 0 ->
Acc;
local_unexported_recursive(N, Acc) ->
local_unexported_recursive(N - 1, [N | Acc]).
local_unexported_nested(N) ->
local_unexported_recursive(N, []).
local_undef_nested(Atom) ->
local_undef_nested_impl(Atom),
local_undef_nested_impl(Atom) ->
not_a_module:not_a_function(1, Atom, 3),
local_second_clause(Arg, Selector) when Selector =:= one ->
Arg + 1;
local_second_clause(Arg, Selector) when Selector =:= two ->
Arg + 2;
local_second_clause(Arg, Selector) when Selector =:= three ->
Arg + 3;
local_second_clause(Arg, Selector) when is_atom(Selector) ->
Arg + 10.
local_throw(What) ->
rand:uniform(100) < 200 andalso throw(What).
local_throwing() ->
local_throw(ball),
ok.
local_bad_match() ->
local_do_bad_match(one),
true = rand:uniform(100).
local_do_bad_match(What) ->
true = What =:= rand:uniform(100),
ok.
local_function_clause() ->
local_second_clause(0, get(test_server_logopts)),
true = rand:uniform(100).
local_cb_fun(Arg) ->
{cb, Arg}.
local_cb_caller(Fun, Args) ->
Fun(Args).
local_cb_init() ->
local_cb_caller(fun local_cb_fun/1, [1]).
local_cb_fun_obj() ->
local_cb_caller(fun ([N]) -> N * 2 end, [2]).
local_cb_make_fun() ->
F = erlang:make_fun(?MODULE, module_info, 1),
local_cb_caller(F, md5).
remote_cb(N) when N <0 ->
N rem 3 =:= 0;
remote_cb(N) ->
N rem 2 =:= 0.
remote_cb_init(List) ->
Cb = fun remote_cb/1,
case List of
[] ->
ok;
[E] ->
Cb(E);
[_ | _] ->
lists:filter(fun remote_cb/1, List)
end,
lists:filter(Cb, List).
remote_cb_exported(N) ->
N rem 3 =:= 0.
remote_cb_exported_init(List) ->
lists:filter(fun ?MODULE:remote_cb_exported/1, List).
export_all() ->
local_undef_nested(atom),
local_cb_fun(1),
local_throwing(),
local_bad_match(),
rebind([]),
external_filter([]),
throw_applied(),
try_side({1, 1}).
-record(rec, {first= "1", second, third = initial}).
create_record() ->
#rec{third = application:get_env(kernel, missing, 3), first = "1"}.
modify_record(#rec{} = Rec) ->
Rec#rec{third = 10, second = "2"}.
try_side(MaybeBinaryInteger) ->
try
_ = binary_to_integer(MaybeBinaryInteger),
true
catch
error:badarg ->
false
end.
internal_lambda(Atom) ->
RebindVar = atom_to_list(Atom),
is_list(RebindVar).
rebind(Original) ->
RebindVar = Original,
lists:filtermap(fun internal_lambda/1, RebindVar).
external_filter(List) ->
lists:filter(fun erlang:is_number/1, List).
throw_applied() ->
case catch apply(erlang, apply, Args) of
{'EXIT', _Reason} ->
throw(expected)
end.
echo() ->
[{doc}, "Evaluate non-exported function"].
echo(_Config) ->
?assertEqual(local_unexported(echo), power_shell:eval(?MODULE, local_unexported, [echo])),
ok.
self_echo() ->
[{doc}, "Evaluates function returning self() - to see if NIFs are working"].
self_echo(_Config) ->
?assertEqual(local_self(), power_shell:eval(?MODULE, local_self, [])),
ok.
undef() ->
[{doc, "Ensure undefined function throws undef"}].
undef(_Config) ->
next 2 statements must be on the same line , otherwise stack trace info is broken
{current_stacktrace, Trace} = process_info(self(), current_stacktrace), Actual = (catch power_shell:eval(not_a_module, not_a_function, [1, 2, 3])),
Expected = {'EXIT', {undef, [{not_a_module, not_a_function,[1,2,3], []}] ++ Trace}},
?assertEqual(Expected, Actual),
ok.
undef_local() ->
[{doc, "Ensure undefined function in this very module throws undef"}].
undef_local(_Config) ->
next 2 statments must be on the same line , otherwise stack trace info is broken
{current_stacktrace, Trace} = process_info(self(), current_stacktrace), Actual = (catch power_shell:eval(?MODULE, not_a_function, [1, 2, 3])),
Expected = {'EXIT', {undef, [{?MODULE,not_a_function,[1,2,3], []}] ++ Trace}},
?assertEqual(Expected, Actual),
ok.
undef_nested() ->
[{doc, "Ensure undefined function throws undef even when it's nested"}].
undef_nested(_Config) ->
exception_check(fun local_undef_nested/1, local_undef_nested, [atom]).
preloaded() ->
[{doc, "Ensure that functions from preloaded modules are just applied"}].
preloaded(_Config) ->
?assertEqual([2, 1], power_shell:eval(prim_zip, reverse, [[1, 2]])),
?assertEqual(self(), power_shell:eval(erlang, self, [])).
throwing() ->
[{doc, "Unexported function throwing"}].
throwing(_Config) ->
exception_check(fun local_throwing/0, local_throwing, []).
bad_match() ->
[{doc, "Unexported function throwing badmatch"}].
bad_match(_Config) ->
exception_check(fun local_bad_match/0, local_bad_match, []).
function_clause() ->
[{doc, "Unexported function throwing function_clause"}].
function_clause(_Config) ->
exception_check(fun local_function_clause/0, local_function_clause, []).
recursive() ->
[{doc, "Evaluate recursive function"}].
recursive(_Config) ->
?assertEqual(local_unexported_recursive(1, []), power_shell:eval(?MODULE, local_unexported_recursive, [1, []])),
ok.
calling_local() ->
[{doc, "Evaluate non-exported function calling another non-exported function"}].
calling_local(_Config) ->
?assertEqual(local_unexported_nested(0), power_shell:eval(?MODULE, local_unexported_nested, [0])),
ok.
second_clause() ->
[{doc, "Evaluate non-exported function with multiple clauses, ensure right one is selected"}].
second_clause(_Config) ->
?assertEqual(local_second_clause(10, two), power_shell:eval(?MODULE, local_second_clause, [10, two])),
ok.
callback_local() ->
[{doc, "Non-exported function calls some function that calls a callback fun"}].
callback_local(_Config) ->
?assertEqual(local_cb_init(), power_shell:eval(?MODULE, local_cb_init, [])),
ok.
callback_local_fun_obj() ->
[{doc, "Non-exported function calls some function that calls a callback function object"}].
callback_local_fun_obj(_Config) ->
?assertEqual(local_cb_fun_obj(), power_shell:eval(?MODULE, local_cb_fun_obj, [])),
ok.
callback_local_make_fun() ->
[{doc, "Non-exported function and make_fun/3 fun"}].
callback_local_make_fun(_Config) ->
?assertEqual(local_cb_make_fun(), power_shell:eval(?MODULE, local_cb_make_fun, [])),
ok.
remote_callback_exported() ->
[{doc, "Non-exported function calls remote function that calls exported callback"}].
remote_callback_exported(_Config) ->
L = lists:seq(1, 20),
?assertEqual(remote_cb_exported_init(L), power_shell:eval(?MODULE, remote_cb_exported_init, [L])),
ok.
remote_callback() ->
[{doc, "Non-exported function calls some function that calls a callback fun"}].
remote_callback(_Config) ->
L = lists:seq(1, 20),
?assertEqual(remote_cb_init(L), power_shell:eval(?MODULE, remote_cb_init, [L])),
ok.
record() ->
[{doc, "Tests records - creation & modification"}].
record(_Config) ->
Rec = create_record(),
?assertEqual(Rec, power_shell:eval(?MODULE, create_record, [])),
?assertEqual(modify_record(Rec), power_shell:eval(?MODULE, modify_record, [Rec])).
try_side_effect() ->
[{doc, "Tests try ... catch returning value from both flows"}].
try_side_effect(_Config) ->
?assertEqual(false, power_shell:eval(?MODULE, try_side, [atom])).
rebind_var() ->
[{doc, "Tests that variable is unbound when returned from function"}].
rebind_var(Config) when is_list(Config) ->
?assertEqual([atom, atom], power_shell:eval(?MODULE, rebind, [[atom, atom]])).
external_fun() ->
[{doc, "Tests external function passed to lists:filter"}].
external_fun(Config) when is_list(Config) ->
?assertEqual([1, 2], power_shell:eval(?MODULE, external_filter, [[1, atom, 2, atom]])).
catch_apply() ->
[{doc, "Tests that cast catch erlang:apply works and throws as expected, not converting it to badarg"}].
catch_apply(Config) when is_list(Config) ->
?assertThrow(expected, power_shell:eval(?MODULE, throw_applied, [])).
export() ->
[{doc, "Tests export_all (hot code load and revert)"}].
export(Config) when is_list(Config) ->
?assertException(error, undef, power_shell_export:local_unexported(success)),
CompileFlags = proplists:get_value(options, power_shell_export:module_info(compile)),
Sentinel = power_shell:export(power_shell_export),
?assertEqual(success, power_shell_export:local_unexported(success)),
NewFlags = proplists:get_value(options, power_shell_export:module_info(compile)),
?assertEqual([], CompileFlags -- NewFlags),
power_shell:revert(Sentinel),
?assertException(error, undef, power_shell_export:local_unexported(success)).
export_partial() ->
[{doc, "Tests selective export"}].
export_partial(Config) when is_list(Config) ->
?assertException(error, undef, power_shell_export:local_unexported(success)),
?assertException(error, undef, power_shell_export:local_never(success)),
Sentinel = power_shell:export(power_shell_export, [{local_unexported, 1}]),
?assertEqual(success, power_shell_export:local_unexported(success)),
?assertException(error, undef, power_shell_export:local_never(success)),
ok = power_shell:revert(Sentinel),
?assertException(error, undef, power_shell_export:local_unexported(success)).
export_auto_revert() ->
[{doc, "Tests auto-revert when sentinel process goes down"}].
export_auto_revert(Config) when is_list(Config) ->
?assertException(error, undef, power_shell_export:local_unexported(success)),
{Pid, MRef} = spawn_monitor(fun () ->
power_shell:export(power_shell_export),
?assertEqual(success, power_shell_export:local_unexported(success))
end),
receive
{'DOWN', MRef, process, Pid, normal} ->
?assertException(error, undef, power_shell_export:local_unexported(success))
end.
export_no_link() ->
[{doc, "Tests no auto-revert when sentinel starts unlinked"}].
export_no_link(Config) when is_list(Config) ->
?assertException(error, undef, power_shell_export:local_unexported(success)),
Self = self(),
spawn_link(fun () ->
Sentinel = power_shell:export(power_shell_export, all, #{link => false}),
Self ! {go, Sentinel}
end),
receive
{go, Sentinel} ->
?assertEqual(success, power_shell_export:local_unexported(success)),
ok = power_shell:revert(Sentinel),
?assertException(error, undef, power_shell_export:local_unexported(success))
end.
recursive_eval() ->
[{doc, "Tests on_load preserves stack"}].
recursive_eval(Config) ->
Source =
"-module(recursive). -export([foo/0, bar/0]). "
"foo() -> ok. bar() -> power_shell:eval(recursive, foo, []), true.",
PrivPath = ?config(priv_dir, Config),
true = code:add_path(PrivPath),
Filename = filename:join(PrivPath, "recursive.erl"),
ok = file:write_file(Filename, Source),
PrevDict = get(),
?assert(power_shell:eval(recursive, bar, [])),
?assertEqual(PrevDict, get()),
true = code:del_path(PrivPath),
ok.
strip_dbg(Trace) ->
[{M, F, A} || {M, F, A, _Dbg} <- Trace].
exception_check(Fun, FunAtomName, Args) ->
Expected = try erlang:apply(Fun, Args) of Val -> throw({test_broken, Val}) catch C:R:S -> {C, R, strip_dbg(S)} end, Actual = try power_shell:eval(?MODULE, FunAtomName, Args) of
Val1 ->
throw({test_broken, Val1})
catch
Class:Reason:Stack ->
{Class, Reason, strip_dbg(Stack)}
end,
?assertEqual(Expected, Actual).
|
9b861c9d26e79773c33e89f56f4d336f7dfde03df48fe64cb6f00974d3f903c7 | acieroid/scala-am | f-10.scm | (letrec ((f (lambda (x) (+ (* x x) (* x x)))) (g (lambda (x) (+ (* x x) (* x x))))) (let ((_tmp1 (f 1))) (let ((_tmp2 (f 2))) (let ((_tmp3 (f 3))) (let ((_tmp4 (f 4))) (let ((_tmp5 (f 5))) (let ((_tmp6 (f 6))) (let ((_tmp7 (f 7))) (let ((_tmp8 (f 8))) (let ((_tmp9 (f 9))) (g 10))))))))))) | null | https://raw.githubusercontent.com/acieroid/scala-am/13ef3befbfc664b77f31f56847c30d60f4ee7dfe/test/changesBenevolPaper/f1-tests/f-10.scm | scheme | (letrec ((f (lambda (x) (+ (* x x) (* x x)))) (g (lambda (x) (+ (* x x) (* x x))))) (let ((_tmp1 (f 1))) (let ((_tmp2 (f 2))) (let ((_tmp3 (f 3))) (let ((_tmp4 (f 4))) (let ((_tmp5 (f 5))) (let ((_tmp6 (f 6))) (let ((_tmp7 (f 7))) (let ((_tmp8 (f 8))) (let ((_tmp9 (f 9))) (g 10))))))))))) | |
5694a97f6bd6885bedf0ebb8aaf68725f7ed884761b451fc31061803d89bcc71 | aristidb/aws | ListIdentities.hs | module Aws.Ses.Commands.ListIdentities
( ListIdentities(..)
, ListIdentitiesResponse(..)
, IdentityType(..)
) where
import Data.Text (Text)
import qualified Data.ByteString.Char8 as BS
import Data.Maybe (catMaybes)
import Control.Applicative
import Data.Text.Encoding as T (encodeUtf8)
import Data.Typeable
import Text.XML.Cursor (($//), (&/), laxElement)
import Prelude
import Aws.Core
import Aws.Ses.Core
-- | List email addresses and/or domains
data ListIdentities =
ListIdentities
{ liIdentityType :: Maybe IdentityType
valid range is 1 .. 100
, liNextToken :: Maybe Text
}
deriving (Eq, Ord, Show, Typeable)
data IdentityType = EmailAddress | Domain
deriving (Eq, Ord, Show, Typeable)
-- | ServiceConfiguration: 'SesConfiguration'
instance SignQuery ListIdentities where
type ServiceConfiguration ListIdentities = SesConfiguration
signQuery ListIdentities {..} =
let it = case liIdentityType of
Just EmailAddress -> Just "EmailAddress"
Just Domain -> Just "Domain"
Nothing -> Nothing
in sesSignQuery $ ("Action", "ListIdentities")
: catMaybes
[ ("IdentityType",) <$> it
, ("MaxItems",) . BS.pack . show <$> liMaxItems
, ("NextToken",) . T.encodeUtf8 <$> liNextToken
]
| The response sent back by Amazon SES after a
' ListIdentities ' command .
data ListIdentitiesResponse = ListIdentitiesResponse [Text]
deriving (Eq, Ord, Show, Typeable)
instance ResponseConsumer ListIdentities ListIdentitiesResponse where
type ResponseMetadata ListIdentitiesResponse = SesMetadata
responseConsumer _ _ =
sesResponseConsumer $ \cursor -> do
let ids = cursor $// laxElement "Identities" &/ elContent "member"
return $ ListIdentitiesResponse ids
instance Transaction ListIdentities ListIdentitiesResponse where
instance AsMemoryResponse ListIdentitiesResponse where
type MemoryResponse ListIdentitiesResponse = ListIdentitiesResponse
loadToMemory = return
| null | https://raw.githubusercontent.com/aristidb/aws/9bdc4ee018d0d9047c0434eeb21e2383afaa9ccf/Aws/Ses/Commands/ListIdentities.hs | haskell | | List email addresses and/or domains
| ServiceConfiguration: 'SesConfiguration' | module Aws.Ses.Commands.ListIdentities
( ListIdentities(..)
, ListIdentitiesResponse(..)
, IdentityType(..)
) where
import Data.Text (Text)
import qualified Data.ByteString.Char8 as BS
import Data.Maybe (catMaybes)
import Control.Applicative
import Data.Text.Encoding as T (encodeUtf8)
import Data.Typeable
import Text.XML.Cursor (($//), (&/), laxElement)
import Prelude
import Aws.Core
import Aws.Ses.Core
data ListIdentities =
ListIdentities
{ liIdentityType :: Maybe IdentityType
valid range is 1 .. 100
, liNextToken :: Maybe Text
}
deriving (Eq, Ord, Show, Typeable)
data IdentityType = EmailAddress | Domain
deriving (Eq, Ord, Show, Typeable)
instance SignQuery ListIdentities where
type ServiceConfiguration ListIdentities = SesConfiguration
signQuery ListIdentities {..} =
let it = case liIdentityType of
Just EmailAddress -> Just "EmailAddress"
Just Domain -> Just "Domain"
Nothing -> Nothing
in sesSignQuery $ ("Action", "ListIdentities")
: catMaybes
[ ("IdentityType",) <$> it
, ("MaxItems",) . BS.pack . show <$> liMaxItems
, ("NextToken",) . T.encodeUtf8 <$> liNextToken
]
| The response sent back by Amazon SES after a
' ListIdentities ' command .
data ListIdentitiesResponse = ListIdentitiesResponse [Text]
deriving (Eq, Ord, Show, Typeable)
instance ResponseConsumer ListIdentities ListIdentitiesResponse where
type ResponseMetadata ListIdentitiesResponse = SesMetadata
responseConsumer _ _ =
sesResponseConsumer $ \cursor -> do
let ids = cursor $// laxElement "Identities" &/ elContent "member"
return $ ListIdentitiesResponse ids
instance Transaction ListIdentities ListIdentitiesResponse where
instance AsMemoryResponse ListIdentitiesResponse where
type MemoryResponse ListIdentitiesResponse = ListIdentitiesResponse
loadToMemory = return
|
a7a0144f3169f9bb03c3d26ac2defa34d2689ceedea07e60d3dd144640ee74b2 | typedclojure/typedclojure | fn_method_one.clj | Copyright ( c ) , contributors .
;; The use and distribution terms for this software are covered by the
;; Eclipse Public License 1.0 (-1.0.php)
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns typed.cljc.checker.check.fn-method-one
(:require [clojure.core.typed.ast-utils :as ast-u]
[clojure.core.typed.contract-utils :as con]
[clojure.core.typed.current-impl :as impl]
[clojure.core.typed.errors :as err]
[clojure.core.typed.util-vars :as vs]
[typed.cljc.analyzer :as ana2]
[typed.cljc.checker.check-below :as below]
[typed.clj.analyzer.passes.beta-reduce :as beta-reduce]
[typed.clj.checker.analyze-clj :as ana-clj]
[typed.clj.checker.parse-unparse :as prs]
[typed.clj.checker.subtype :as sub]
[typed.cljc.checker.check :refer [check-expr]]
[typed.cljc.analyzer :as ana]
[typed.cljc.checker.check.fn-method-utils :as fn-method-u]
[typed.cljc.checker.check.funapp :as funapp]
[typed.cljc.checker.check.isa :as isa]
[typed.cljc.checker.check.multi-utils :as multi-u]
[typed.cljc.checker.check.recur-utils :as recur-u]
[typed.cljc.checker.check.utils :as cu]
[typed.cljc.checker.filter-ops :as fo]
[typed.cljc.checker.filter-rep :as fl]
[typed.cljc.checker.lex-env :as lex]
[typed.cljc.checker.object-rep :as obj]
[typed.cljc.checker.open-result :as open-result]
[typed.cljc.checker.type-rep :as r]
[typed.cljc.checker.update :as update]
[typed.cljc.checker.utils :as u]
[typed.cljc.checker.var-env :as var-env]))
;check method is under a particular Function, and return inferred Function
;
check - fn - method1 exposes enough wiring to support the differences in deftype
; methods and normal methods via `fn`.
;
; # Differences in recur behaviour
;
deftype methods do * not * pass the first parameter ( usually ` this ` ) when calling ` recur ` .
;
; eg. (my-method [this a b c] (recur a b c))
;
The behaviour of generating a RecurTarget type for recurs is exposed via the : recur - target - fn
;
;
;[MethodExpr Function -> {:ftype Function :cmethod Expr}]
(defn check-fn-method1 [method {:keys [dom rest drest kws prest pdot] :as expected}
& {:keys [recur-target-fn] :as opt}]
{:pre [(r/Function? expected)]
:post [(r/Function? (:ftype %))
(-> % :cmethod :clojure.core.typed/ftype r/Function?)
(:cmethod %)]}
(impl/impl-case
:clojure (assert (#{:fn-method :method} (:op method))
(:op method))
; is there a better :op check here?
:cljs (assert method))
(let [check-rest-fn (or (:check-rest-fn opt) fn-method-u/check-rest-fn)
method (-> method
ana2/run-pre-passes
(ast-u/visit-method-params ana2/run-passes))
body ((ast-u/method-body-kw) method)
required-params (ast-u/method-required-params method)
rest-param (ast-u/method-rest-param method)
param-obj (comp #(obj/-path nil %)
:name)
; Difference from Typed Racket:
;
; Because types can contain abstracted names, we instantiate
; the expected type in the range before using it.
;
; eg. Checking against this function type:
; [Any Any
; -> (HVec [(U nil Class) (U nil Class)]
; :objects [{:path [Class], :id 0} {:path [Class], :id 1}])]
; means we need to instantiate the HVec type to the actual argument
; names with open-Result.
;
; If the actual function method is (fn [a b] ...) we check against:
;
; (HVec [(U nil Class) (U nil Class)]
; :objects [{:path [Class], :id a} {:path [Class], :id b}])
open-expected-rng (open-result/open-Result->TCResult
(:rng expected)
(map param-obj
(concat required-params
(some-> rest-param list))))
open-expected-filters (:fl open-expected-rng)
_ (assert (fl/FilterSet? open-expected-filters))
open-expected-rng-no-filters (assoc open-expected-rng :fl (fo/-infer-filter))
_ (assert (r/TCResult? open-expected-rng-no-filters))
;_ (prn "open-result open-expected-rng-no-filters" open-expected-rng-no-filters expected)
_ ( prn " open - result open - expected - rng filters " ( some- > > open - expected - rng - no - filters : fl ( ( : then : else ) ) ( map fl / infer - top ? ) ) )
;ensure Function fits method
_ (when-not (or ((if (or rest drest kws prest pdot) <= =) (count required-params) (count dom))
rest-param)
(err/int-error (str "Checking method with incorrect number of expected parameters"
", expected " (count dom) " required parameter(s) with"
(if rest " a " " no ") "rest parameter, found " (count required-params)
" required parameter(s) and" (if rest-param " a " " no ")
"rest parameter.")))
props (:props (lex/lexical-env))
crequired-params (map (fn [p t] (assoc p u/expr-type (r/ret t)))
required-params
(concat dom
(repeat (or rest (:pre-type drest) prest (:pre-type pdot)))))
_ (assert (every? (comp r/TCResult? u/expr-type) crequired-params))
fixed-entry (map (juxt :name (comp r/ret-t u/expr-type)) crequired-params)
;_ (prn "checking function:" (prs/unparse-type expected))
crest-param (some-> rest-param
(assoc u/expr-type (r/ret (check-rest-fn
(drop (count crequired-params) dom)
(select-keys expected [:rest :drest :kws :prest :pdot])))))
rest-entry (when crest-param
[[(:name crest-param) (r/ret-t (u/expr-type crest-param))]])
;_ (prn "rest entry" rest-entry)
_ (when (some? fixed-entry)
(assert ((con/hash-c? symbol? r/Type?)
(into {} fixed-entry))
(into {} fixed-entry)))
_ (when (some? rest-entry)
(assert ((con/hash-c? symbol? r/Type?)
(into {} rest-entry))
(into {} rest-entry)))
; if this fn method is a multimethod dispatch method, then infer
; a new filter that results from being dispatched "here"
mm-filter (when-let [{:keys [dispatch-fn-type dispatch-val-ret]} multi-u/*current-mm*]
(assert (and dispatch-fn-type dispatch-val-ret))
(assert (not (or drest rest rest-param)))
(let [disp-app-ret (funapp/check-funapp nil nil
(r/ret dispatch-fn-type)
(map r/ret dom (repeat (fo/-FS fl/-top fl/-top))
(map param-obj required-params))
nil)
;_ (prn "disp-app-ret" disp-app-ret)
;_ (prn "disp-fn-type" (prs/unparse-type dispatch-fn-type))
;_ (prn "dom" dom)
isa-ret (isa/tc-isa? disp-app-ret dispatch-val-ret nil)
then-filter (-> isa-ret r/ret-f :then)
_ (assert then-filter)]
then-filter))
;_ (prn "^^^ mm-filter" multi-u/*current-mm*)
_ ( prn " funapp1 : inferred mm - filter " mm - filter )
env (let [env (-> (lex/lexical-env)
;add mm-filter
(assoc :props (cond-> (set props) mm-filter (conj mm-filter)))
;add parameters to scope
IF UNHYGIENIC order important , ( fn [ a a & a ] ) prefers rightmost name
(update :l merge (into {} fixed-entry) (into {} rest-entry)))
flag (atom true :validator boolean?)
env (cond-> env
mm-filter (update/env+ [mm-filter] flag))]
(when-not @flag
(err/int-error "Unreachable method: Local inferred to be bottom when applying multimethod filter"))
env)
; rng with inferred filters, and before manually inferring new filters
crng-nopass
(binding [multi-u/*current-mm* nil]
(var-env/with-lexical-env env
(let [rec (or ; if there's a custom recur behaviour, use the provided
keyword argument to generate the RecurTarget .
(when recur-target-fn
(recur-target-fn expected))
; Otherwise, assume we are checking a regular `fn` method
(recur-u/RecurTarget-maker dom rest drest nil))
_ (assert (recur-u/RecurTarget? rec))]
(recur-u/with-recur-target rec
(let [body (if (and vs/*custom-expansions*
rest-param
(not-any? identity [rest drest kws prest pdot]))
;; substitute away the rest argument to try and trigger
;; any beta reductions
(with-bindings (ana-clj/thread-bindings {:env (:env method)})
(-> body
(beta-reduce/subst-locals
{(:name rest-param) (beta-reduce/fake-seq-invoke
(mapv (fn [t]
(beta-reduce/make-invoke-expr
(beta-reduce/make-var-expr
#'cu/special-typed-expression
(:env method))
[(ana/parse-quote
(binding [vs/*verbose-types* true]
(list 'quote (prs/unparse-type t)))
(:env method))]
(:env method)))
dom)
(:env method))})
ana/run-passes))
body)]
(check-expr body open-expected-rng-no-filters))))))
; Apply the filters of computed rng to the environment and express
; changes to the lexical env as new filters, and conjoin with existing filters.
then-env (let [{:keys [then]} (-> crng-nopass u/expr-type r/ret-f)]
(cond-> env
(not (fl/NoFilter? then))
(update/env+ [then] (atom true))))
new-then-props (reduce-kv (fn [fs sym t]
{:pre [((con/set-c? fl/Filter?) fs)]}
(cond-> fs
(not= t (get-in env [:l sym]))
;new type, add positive proposition
;(otherwise, type hasn't changed, no new propositions)
(conj (fo/-filter-at t (lex/lookup-alias sym :env env)))))
#{}
(:l then-env))
crng+inferred-filters (update-in crng-nopass [u/expr-type :fl :then]
(fn [f]
(apply fo/-and f new-then-props)))
;_ (prn "open-expected-filters" open-expected-filters)
crng (if (= open-expected-filters (fo/-infer-filter))
;; infer mode
(do ;(prn "infer mode" multi-u/*current-mm*)
crng+inferred-filters)
;; check actual filters and fill in expected filters
(let [;_ (prn "check mode" multi-u/*current-mm*)
{actual-filters :fl :as actual-ret} (u/expr-type crng+inferred-filters)
_ (when-not (below/filter-better? actual-filters open-expected-filters)
(below/bad-filter-delayed-error
actual-ret
(assoc open-expected-rng-no-filters :fl open-expected-filters)))]
(assoc-in crng+inferred-filters [u/expr-type :fl] open-expected-filters)))
;_ (prn "crng" (u/expr-type crng))
rest-param-name (some-> rest-param :name)
ftype (fn-method-u/FnResult->Function
(fn-method-u/FnResult-maker
fixed-entry
(when (and kws rest-param)
[rest-param-name kws])
(when (and rest rest-param)
[rest-param-name rest])
(when (and drest rest-param)
[rest-param-name drest])
(when (and prest rest-param)
[rest-param-name prest])
(when (and pdot rest-param)
[rest-param-name pdot])
(u/expr-type crng)))
_ (assert (r/Function? ftype))
cmethod (-> method
(assoc (ast-u/method-body-kw) crng
:clojure.core.typed/ftype ftype)
(ast-u/reconstruct-arglist crequired-params crest-param))
_ (assert (vector? (:params cmethod)))
_ (assert (every? (comp r/TCResult? u/expr-type) (:params cmethod)))]
{:ftype ftype
:cmethod cmethod}))
| null | https://raw.githubusercontent.com/typedclojure/typedclojure/668ee1ef3953eb37f2c748209a6ad25bbfb6165c/typed/clj.checker/src/typed/cljc/checker/check/fn_method_one.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
check method is under a particular Function, and return inferred Function
methods and normal methods via `fn`.
# Differences in recur behaviour
eg. (my-method [this a b c] (recur a b c))
[MethodExpr Function -> {:ftype Function :cmethod Expr}]
is there a better :op check here?
Difference from Typed Racket:
Because types can contain abstracted names, we instantiate
the expected type in the range before using it.
eg. Checking against this function type:
[Any Any
-> (HVec [(U nil Class) (U nil Class)]
:objects [{:path [Class], :id 0} {:path [Class], :id 1}])]
means we need to instantiate the HVec type to the actual argument
names with open-Result.
If the actual function method is (fn [a b] ...) we check against:
(HVec [(U nil Class) (U nil Class)]
:objects [{:path [Class], :id a} {:path [Class], :id b}])
_ (prn "open-result open-expected-rng-no-filters" open-expected-rng-no-filters expected)
ensure Function fits method
_ (prn "checking function:" (prs/unparse-type expected))
_ (prn "rest entry" rest-entry)
if this fn method is a multimethod dispatch method, then infer
a new filter that results from being dispatched "here"
_ (prn "disp-app-ret" disp-app-ret)
_ (prn "disp-fn-type" (prs/unparse-type dispatch-fn-type))
_ (prn "dom" dom)
_ (prn "^^^ mm-filter" multi-u/*current-mm*)
add mm-filter
add parameters to scope
rng with inferred filters, and before manually inferring new filters
if there's a custom recur behaviour, use the provided
Otherwise, assume we are checking a regular `fn` method
substitute away the rest argument to try and trigger
any beta reductions
Apply the filters of computed rng to the environment and express
changes to the lexical env as new filters, and conjoin with existing filters.
new type, add positive proposition
(otherwise, type hasn't changed, no new propositions)
_ (prn "open-expected-filters" open-expected-filters)
infer mode
(prn "infer mode" multi-u/*current-mm*)
check actual filters and fill in expected filters
_ (prn "check mode" multi-u/*current-mm*)
_ (prn "crng" (u/expr-type crng)) | Copyright ( c ) , contributors .
(ns typed.cljc.checker.check.fn-method-one
(:require [clojure.core.typed.ast-utils :as ast-u]
[clojure.core.typed.contract-utils :as con]
[clojure.core.typed.current-impl :as impl]
[clojure.core.typed.errors :as err]
[clojure.core.typed.util-vars :as vs]
[typed.cljc.analyzer :as ana2]
[typed.cljc.checker.check-below :as below]
[typed.clj.analyzer.passes.beta-reduce :as beta-reduce]
[typed.clj.checker.analyze-clj :as ana-clj]
[typed.clj.checker.parse-unparse :as prs]
[typed.clj.checker.subtype :as sub]
[typed.cljc.checker.check :refer [check-expr]]
[typed.cljc.analyzer :as ana]
[typed.cljc.checker.check.fn-method-utils :as fn-method-u]
[typed.cljc.checker.check.funapp :as funapp]
[typed.cljc.checker.check.isa :as isa]
[typed.cljc.checker.check.multi-utils :as multi-u]
[typed.cljc.checker.check.recur-utils :as recur-u]
[typed.cljc.checker.check.utils :as cu]
[typed.cljc.checker.filter-ops :as fo]
[typed.cljc.checker.filter-rep :as fl]
[typed.cljc.checker.lex-env :as lex]
[typed.cljc.checker.object-rep :as obj]
[typed.cljc.checker.open-result :as open-result]
[typed.cljc.checker.type-rep :as r]
[typed.cljc.checker.update :as update]
[typed.cljc.checker.utils :as u]
[typed.cljc.checker.var-env :as var-env]))
check - fn - method1 exposes enough wiring to support the differences in deftype
deftype methods do * not * pass the first parameter ( usually ` this ` ) when calling ` recur ` .
The behaviour of generating a RecurTarget type for recurs is exposed via the : recur - target - fn
(defn check-fn-method1 [method {:keys [dom rest drest kws prest pdot] :as expected}
& {:keys [recur-target-fn] :as opt}]
{:pre [(r/Function? expected)]
:post [(r/Function? (:ftype %))
(-> % :cmethod :clojure.core.typed/ftype r/Function?)
(:cmethod %)]}
(impl/impl-case
:clojure (assert (#{:fn-method :method} (:op method))
(:op method))
:cljs (assert method))
(let [check-rest-fn (or (:check-rest-fn opt) fn-method-u/check-rest-fn)
method (-> method
ana2/run-pre-passes
(ast-u/visit-method-params ana2/run-passes))
body ((ast-u/method-body-kw) method)
required-params (ast-u/method-required-params method)
rest-param (ast-u/method-rest-param method)
param-obj (comp #(obj/-path nil %)
:name)
open-expected-rng (open-result/open-Result->TCResult
(:rng expected)
(map param-obj
(concat required-params
(some-> rest-param list))))
open-expected-filters (:fl open-expected-rng)
_ (assert (fl/FilterSet? open-expected-filters))
open-expected-rng-no-filters (assoc open-expected-rng :fl (fo/-infer-filter))
_ (assert (r/TCResult? open-expected-rng-no-filters))
_ ( prn " open - result open - expected - rng filters " ( some- > > open - expected - rng - no - filters : fl ( ( : then : else ) ) ( map fl / infer - top ? ) ) )
_ (when-not (or ((if (or rest drest kws prest pdot) <= =) (count required-params) (count dom))
rest-param)
(err/int-error (str "Checking method with incorrect number of expected parameters"
", expected " (count dom) " required parameter(s) with"
(if rest " a " " no ") "rest parameter, found " (count required-params)
" required parameter(s) and" (if rest-param " a " " no ")
"rest parameter.")))
props (:props (lex/lexical-env))
crequired-params (map (fn [p t] (assoc p u/expr-type (r/ret t)))
required-params
(concat dom
(repeat (or rest (:pre-type drest) prest (:pre-type pdot)))))
_ (assert (every? (comp r/TCResult? u/expr-type) crequired-params))
fixed-entry (map (juxt :name (comp r/ret-t u/expr-type)) crequired-params)
crest-param (some-> rest-param
(assoc u/expr-type (r/ret (check-rest-fn
(drop (count crequired-params) dom)
(select-keys expected [:rest :drest :kws :prest :pdot])))))
rest-entry (when crest-param
[[(:name crest-param) (r/ret-t (u/expr-type crest-param))]])
_ (when (some? fixed-entry)
(assert ((con/hash-c? symbol? r/Type?)
(into {} fixed-entry))
(into {} fixed-entry)))
_ (when (some? rest-entry)
(assert ((con/hash-c? symbol? r/Type?)
(into {} rest-entry))
(into {} rest-entry)))
mm-filter (when-let [{:keys [dispatch-fn-type dispatch-val-ret]} multi-u/*current-mm*]
(assert (and dispatch-fn-type dispatch-val-ret))
(assert (not (or drest rest rest-param)))
(let [disp-app-ret (funapp/check-funapp nil nil
(r/ret dispatch-fn-type)
(map r/ret dom (repeat (fo/-FS fl/-top fl/-top))
(map param-obj required-params))
nil)
isa-ret (isa/tc-isa? disp-app-ret dispatch-val-ret nil)
then-filter (-> isa-ret r/ret-f :then)
_ (assert then-filter)]
then-filter))
_ ( prn " funapp1 : inferred mm - filter " mm - filter )
env (let [env (-> (lex/lexical-env)
(assoc :props (cond-> (set props) mm-filter (conj mm-filter)))
IF UNHYGIENIC order important , ( fn [ a a & a ] ) prefers rightmost name
(update :l merge (into {} fixed-entry) (into {} rest-entry)))
flag (atom true :validator boolean?)
env (cond-> env
mm-filter (update/env+ [mm-filter] flag))]
(when-not @flag
(err/int-error "Unreachable method: Local inferred to be bottom when applying multimethod filter"))
env)
crng-nopass
(binding [multi-u/*current-mm* nil]
(var-env/with-lexical-env env
keyword argument to generate the RecurTarget .
(when recur-target-fn
(recur-target-fn expected))
(recur-u/RecurTarget-maker dom rest drest nil))
_ (assert (recur-u/RecurTarget? rec))]
(recur-u/with-recur-target rec
(let [body (if (and vs/*custom-expansions*
rest-param
(not-any? identity [rest drest kws prest pdot]))
(with-bindings (ana-clj/thread-bindings {:env (:env method)})
(-> body
(beta-reduce/subst-locals
{(:name rest-param) (beta-reduce/fake-seq-invoke
(mapv (fn [t]
(beta-reduce/make-invoke-expr
(beta-reduce/make-var-expr
#'cu/special-typed-expression
(:env method))
[(ana/parse-quote
(binding [vs/*verbose-types* true]
(list 'quote (prs/unparse-type t)))
(:env method))]
(:env method)))
dom)
(:env method))})
ana/run-passes))
body)]
(check-expr body open-expected-rng-no-filters))))))
then-env (let [{:keys [then]} (-> crng-nopass u/expr-type r/ret-f)]
(cond-> env
(not (fl/NoFilter? then))
(update/env+ [then] (atom true))))
new-then-props (reduce-kv (fn [fs sym t]
{:pre [((con/set-c? fl/Filter?) fs)]}
(cond-> fs
(not= t (get-in env [:l sym]))
(conj (fo/-filter-at t (lex/lookup-alias sym :env env)))))
#{}
(:l then-env))
crng+inferred-filters (update-in crng-nopass [u/expr-type :fl :then]
(fn [f]
(apply fo/-and f new-then-props)))
crng (if (= open-expected-filters (fo/-infer-filter))
crng+inferred-filters)
{actual-filters :fl :as actual-ret} (u/expr-type crng+inferred-filters)
_ (when-not (below/filter-better? actual-filters open-expected-filters)
(below/bad-filter-delayed-error
actual-ret
(assoc open-expected-rng-no-filters :fl open-expected-filters)))]
(assoc-in crng+inferred-filters [u/expr-type :fl] open-expected-filters)))
rest-param-name (some-> rest-param :name)
ftype (fn-method-u/FnResult->Function
(fn-method-u/FnResult-maker
fixed-entry
(when (and kws rest-param)
[rest-param-name kws])
(when (and rest rest-param)
[rest-param-name rest])
(when (and drest rest-param)
[rest-param-name drest])
(when (and prest rest-param)
[rest-param-name prest])
(when (and pdot rest-param)
[rest-param-name pdot])
(u/expr-type crng)))
_ (assert (r/Function? ftype))
cmethod (-> method
(assoc (ast-u/method-body-kw) crng
:clojure.core.typed/ftype ftype)
(ast-u/reconstruct-arglist crequired-params crest-param))
_ (assert (vector? (:params cmethod)))
_ (assert (every? (comp r/TCResult? u/expr-type) (:params cmethod)))]
{:ftype ftype
:cmethod cmethod}))
|
b57953ef1060f7e106e37a9961cb14234052d9b16aafa5db415d165fe2f08f81 | profmaad/bitcaml | bitcoin_blockchain_db.ml | open! Core.Std
open Bitcoin_protocol;;
module Sqlexpr = Sqlexpr_sqlite.Make(Sqlexpr_concurrency.Id);;
module S = Sqlexpr;;
type t = S.db;;
let difficulty_1_target = {
bits_base = 0x00ffff;
bits_exponent = 0x1d;
}
let float_log_difficulty_1_base = log (float_of_int difficulty_1_target.bits_base);;
let float_log_difficulty_scaland = log 256.0;;
let log_difficulty_of_difficulty_bits bits =
let float_log_base = log (float_of_int bits.bits_base) in
let float_exponent_difference = float_of_int (difficulty_1_target.bits_exponent - bits.bits_exponent) in
float_log_difficulty_1_base -. float_log_base +. float_log_difficulty_scaland *. float_exponent_difference
;;
let difficulty_of_difficulty_bits bits = exp (log_difficulty_of_difficulty_bits bits);;
let init_db db =
S.execute db
[%sqlinit "CREATE TABLE IF NOT EXISTS blockchain(
id INTEGER PRIMARY KEY,
hash TEXT COLLATE BINARY NOT NULL,
height INTEGER NOT NULL,
cumulative_log_difficulty REAL NOT NULL,
previous_block INTEGER NOT NULL,
is_main BOOLEAN NOT NULL,
block_version INTEGER NOT NULL,
merkle_root TEXT COLLATE BINARY NOT NULL,
timestamp INTEGER NOT NULL,
difficulty_bits INTEGER NOT NULL,
nonce INTEGER NOT NULL
);"];
S.execute db
[%sqlinit "CREATE TABLE IF NOT EXISTS orphans(
id INTEGER PRIMARY KEY,
hash TEXT COLLATE BINARY NOT NULL,
previous_block_hash TEXT COLLATE BINARY NOT NULL,
log_difficulty REAL NOT NULL,
block_version INTEGER NOT NULL,
merkle_root TEXT COLLATE BINARY NOT NULL,
timestamp INTEGER NOT NULL,
difficulty_bits INTEGER NOT NULL,
nonce INTEGER NOT NULL
);"];
S.execute db
[%sqlinit "CREATE INDEX IF NOT EXISTS hash_index ON blockchain (hash);"];
S.execute db
[%sqlinit "CREATE INDEX IF NOT EXISTS mainchain_hash_index ON blockchain (hash, is_main);"];
S.execute db
[%sqlinit "CREATE INDEX IF NOT EXISTS previous_block_index ON blockchain (previous_block);"];
S.execute db
[%sqlinit "CREATE INDEX IF NOT EXISTS orphans_hash_index ON orphans (hash);"];
S.execute db
[%sqlinit "CREATE INDEX IF NOT EXISTS orphans_previous_block_hash_index ON orphans (previous_block_hash);"];
S.execute db
[%sqlinit "CREATE TABLE IF NOT EXISTS memory_pool(
id INTEGER PRIMARY KEY,
hash TEXT COLLATE BINARY NOT NULL,
output_count INTEGER NOT NULL,
is_orphan BOOLEAN NOT NULL,
data BLOB NOT NULL
);"];
S.execute db
[%sqlinit "CREATE INDEX IF NOT EXISTS memory_pool_hash_index ON memory_pool (hash);"];
S.execute db
[%sqlinit "CREATE INDEX IF NOT EXISTS memory_pool_orphan_index ON memory_pool (is_orphan);"];
S.execute db
[%sqlinit "CREATE TABLE IF NOT EXISTS transactions(
id INTEGER PRIMARY KEY,
hash TEXT COLLATE BINARY NOT NULL,
block INTEGER NOT NULL,
tx_index INTEGER NOT NULL
);"];
S.execute db
[%sqlinit "CREATE TABLE IF NOT EXISTS unspent_transaction_outputs(
id INTEGER PRIMARY KEY,
hash TEXT COLLATE BINARY NOT NULL,
output_index INTEGER NOT NULL,
block INTEGER NOT NULL,
value INTEGER NOT NULL,
script TEXT COLLATE BINARY NOT NULL,
is_coinbase BOOLEAN NOT NULL
);"];
S.execute db
[%sqlinit "CREATE INDEX IF NOT EXISTS transactions_hash_index ON transactions (hash);"];
(* S.execute db *)
(* [%sqlinit "CREATE INDEX IF NOT EXISTS transactions_block_index ON transactions (block);"]; *)
S.execute db
[%sqlinit "CREATE INDEX IF NOT EXISTS utxo_hash_index ON unspent_transaction_outputs (hash, output_index);"];
;;
type insertion_result =
| InsertedIntoBlockchain of int64
| InsertedAsOrphan of int64
| InsertionFailed
| NotInsertedExisted
;;
let run_in_transaction db f =
S.transaction db f
;;
module Block = struct
type db_block = {
id : int64;
hash : string;
height: int64;
cumulative_log_difficulty : float;
previous_block_id : int64;
is_main : bool;
block_header : block_header;
};;
type t = db_block;;
let from_result (id, hash, height, previous_block, cld, is_main, block_version, merkle_root, timestamp, difficulty_bits, nonce, previous_block_hash) =
{
id = id;
hash = hash;
height = height;
cumulative_log_difficulty = cld;
previous_block_id = previous_block;
is_main = is_main;
block_header = {
block_version = block_version;
previous_block_hash = previous_block_hash;
merkle_root = merkle_root;
block_timestamp = Utils.unix_tm_of_int64 timestamp;
block_difficulty_target = difficulty_bits_of_int32 difficulty_bits;
block_nonce = nonce;
};
}
;;
let retrieve db id =
let result = S.select_one_maybe db
[%sqlc "SELECT @L{id}, @s{hash}, @L{height}, @L{previous_block}, @f{cumulative_log_difficulty}, @b{is_main}, @d{block_version}, @s{merkle_root}, @L{timestamp}, @l{difficulty_bits}, @l{nonce}, @s{IFNULL((SELECT hash FROM blockchain WHERE id = previous_block LIMIT 1), X'0000000000000000000000000000000000000000000000000000000000000000')} FROM blockchain WHERE id = %L"]
id
in
Option.map ~f:from_result result
;;
let retrieve_by_hash db hash =
let result = S.select_one_maybe db
[%sqlc "SELECT @L{id}, @s{hash}, @L{height}, @L{previous_block}, @f{cumulative_log_difficulty}, @b{is_main}, @d{block_version}, @s{merkle_root}, @L{timestamp}, @l{difficulty_bits}, @l{nonce}, @s{IFNULL((SELECT hash FROM blockchain WHERE id = previous_block LIMIT 1), X'0000000000000000000000000000000000000000000000000000000000000000')} FROM blockchain WHERE hash = %s"]
hash
in
Option.map ~f:from_result result
;;
let retrieve_mainchain_tip db =
let result = S.select_one_maybe db
[%sqlc "SELECT @L{id}, @s{hash}, @L{height}, @L{previous_block}, @f{cumulative_log_difficulty}, @b{is_main}, @d{block_version}, @s{merkle_root}, @L{timestamp}, @l{difficulty_bits}, @l{nonce}, @s{IFNULL((SELECT hash FROM blockchain WHERE id = previous_block LIMIT 1), X'0000000000000000000000000000000000000000000000000000000000000000')} FROM blockchain WHERE is_main = 1 ORDER BY cumulative_log_difficulty DESC, height DESC"]
in
Option.map ~f:from_result result
;;
let retrieve_mainchain_block_at_height db height =
let result = S.select_one_maybe db
[%sqlc "SELECT @L{id}, @s{hash}, @L{height}, @L{previous_block}, @f{cumulative_log_difficulty}, @b{is_main}, @d{block_version}, @s{merkle_root}, @L{timestamp}, @l{difficulty_bits}, @l{nonce}, @s{IFNULL((SELECT hash FROM blockchain WHERE id = previous_block LIMIT 1), X'0000000000000000000000000000000000000000000000000000000000000000')} FROM blockchain WHERE height = %L AND is_main = 1 ORDER BY cumulative_log_difficulty DESC"]
height
in
Option.map ~f:from_result result
;;
let hash_exists db hash =
S.select_one db [%sql "SELECT @b{count(1)} FROM blockchain WHERE hash = %s LIMIT 1"] hash
;;
let insert db db_block =
S.insert db
[%sql "INSERT INTO blockchain(hash, height, previous_block, cumulative_log_difficulty, is_main, block_version, merkle_root, timestamp, difficulty_bits, nonce) VALUES(%s, %L, %L, %f, %b, %d, %s, %L, %l, %l)"]
db_block.hash
db_block.height
db_block.previous_block_id
db_block.cumulative_log_difficulty
db_block.is_main
db_block.block_header.block_version
db_block.block_header.merkle_root
(Utils.int64_of_unix_tm db_block.block_header.block_timestamp)
(int32_of_difficulty_bits db_block.block_header.block_difficulty_target)
db_block.block_header.block_nonce
;;
end
module Orphan = struct
type db_orphan = {
id : int64;
hash : string;
previous_block_hash : string;
log_difficulty : float;
block_header : block_header;
};;
type t = db_orphan;;
let from_result (id, hash, previous_block_hash, log_difficulty, block_version, merkle_root, timestamp, difficulty_bits, nonce) =
{
id = id;
hash = hash;
previous_block_hash = previous_block_hash;
log_difficulty = log_difficulty;
block_header = {
block_version = block_version;
previous_block_hash = previous_block_hash;
merkle_root = merkle_root;
block_timestamp = Utils.unix_tm_of_int64 timestamp;
block_difficulty_target = difficulty_bits_of_int32 difficulty_bits;
block_nonce = nonce;
};
}
;;
let retrieve db id =
let result = S.select_one_maybe db
[%sqlc "SELECT @L{id}, @s{hash}, @s{previous_block_hash}, @f{log_difficulty}, @d{block_version}, @s{merkle_root}, @L{timestamp}, @l{difficulty_bits}, @l{nonce} FROM orphans WHERE id = %L"]
id
in
Option.map ~f:from_result result
;;
let retrieve_by_hash db hash =
let result = S.select_one_maybe db
[%sqlc "SELECT @L{id}, @s{hash}, @s{previous_block_hash}, @f{log_difficulty}, @d{block_version}, @s{merkle_root}, @L{timestamp}, @l{difficulty_bits}, @l{nonce} FROM orphans WHERE hash = %s"]
hash
in
Option.map ~f:from_result result
;;
let retrieve_by_previous_block_hash db previous_block_hash =
let results = S.select db
[%sqlc "SELECT @L{id}, @s{hash}, @s{previous_block_hash}, @f{log_difficulty} @d{block_version}, @s{merkle_root}, @L{timestamp}, @l{difficulty_bits}, @l{nonce} FROM orphans WHERE previous_block_hash = %s"]
previous_block_hash
in
List.map ~f:from_result results
;;
let hash_exists db hash =
S.select_one db [%sql "SELECT @b{count(1)} FROM orphans WHERE hash = %s LIMIT 1"] hash
;;
let insert db db_orphan =
S.insert db
[%sql "INSERT INTO orphans(hash, previous_block_hash, log_difficulty, block_version, merkle_root, timestamp, difficulty_bits, nonce) VALUES(%s, %s, %f, %d, %s, %L, %l, %l)"]
db_orphan.hash
db_orphan.previous_block_hash
db_orphan.log_difficulty
db_orphan.block_header.block_version
db_orphan.block_header.merkle_root
(Utils.int64_of_unix_tm db_orphan.block_header.block_timestamp)
(int32_of_difficulty_bits db_orphan.block_header.block_difficulty_target)
db_orphan.block_header.block_nonce
;;
let delete db id =
S.execute db [%sql "DELETE FROM orphans WHERE id = %L"] id
;;
end
module UnspentTransactionOutput = struct
type db_unspent_transaction_output = {
id : int64;
hash : string;
output_index : int32;
block_id : int64;
value : int64;
script : string;
is_coinbase : bool;
};;
type t = db_unspent_transaction_output;;
let from_result (id, hash, output_index, block, value, script, is_coinbase) =
{
id = id;
hash = hash;
output_index = output_index;
block_id = block;
value = value;
script = script;
is_coinbase = is_coinbase;
}
;;
let retrieve_by_hash_and_index db hash output_index =
let result = S.select_one_maybe db
[%sqlc "SELECT @L{id}, @s{hash}, @l{output_index}, @L{block}, @L{value}, @s{script}, @b{is_coinbase} FROM unspent_transaction_outputs WHERE hash = %s AND output_index = %l"]
hash
output_index
in
Option.map ~f:from_result result
;;
let delete_by_hash db hash =
S.execute db
[%sqlc "DELETE FROM unspent_transaction_outputs WHERE hash = %s"]
hash
;;
let delete_by_hash_and_index db hash index =
S.execute db
[%sqlc "DELETE FROM unspent_transaction_outputs WHERE hash = %s AND output_index = %l"]
hash
index
;;
let insert db utxo =
S.insert db
[%sqlc "INSERT INTO unspent_transaction_outputs(hash, output_index, block, value, script, is_coinbase) VALUES(%s, %l, %L, %L, %s, %b)"]
utxo.hash
utxo.output_index
utxo.block_id
utxo.value
utxo.script
utxo.is_coinbase
;;
end
module UTxO = UnspentTransactionOutput;;
module MemoryPool = struct
type db_transaction = {
id : int64;
hash : string;
output_count : int32;
is_orphan : bool;
data : string;
};;
type t = db_transaction;;
let from_result (id, hash, output_count, is_orphan, data) =
{
id = id;
hash = hash;
output_count = output_count;
is_orphan = is_orphan;
data = data;
}
;;
let retrieve_by_hash db hash =
let result = S.select_one_maybe db
[%sqlc "SELECT @L{id}, @s{hash}, @l{output_count}, @b{is_orphan}, @S{data} FROM memory_pool WHERE hash = %s"]
hash
in
Option.map ~f:from_result result
;;
let delete_by_hash db hash =
S.execute db [%sql "DELETE FROM memory_pool WHERE hash = %s"] hash
;;
let hash_exists db hash =
S.select_one db [%sql "SELECT @b{count(1)} FROM memory_pool WHERE hash = %s LIMIT 1"] hash
;;
end
let mainchain_block_id_and_index_for_transaction_hash db tx_hash =
S.select_one_maybe db
[%sqlc "SELECT @L{transactions.block}, @d{transactions.tx_index} FROM transactions WHERE transaction.hash = %s AND blockchain.is_main = 1 INNER JOIN blockchain ON transactions.block = blockchain.id"]
tx_hash
;;
let mainchain_block_hash_and_index_for_transaction_hash db tx_hash =
S.select_one_maybe db
[%sqlc "SELECT @s{blockchain.hash where}, @d{transactions.tx_index} FROM transactions WHERE hash = %s AND blockchain.is_main = 1 INNER JOIN blockchain ON transactions.block = blockchain.id"]
tx_hash
;;
let mainchain_block_id_hash_and_index_for_transaction_hash db tx_hash =
S.select_one_maybe db
[%sqlc "SELECT @L{transactions.block}, @s{blockchain.hash}, @d{transactions.tx_index} FROM transactions WHERE hash = %s AND blockchain.is_main = 1 INNER JOIN blockchain ON transactions.block = blockchain.id"]
tx_hash
;;
let mainchain_transaction_hash_exists db hash =
S.select_one db [%sql "SELECT @b{count(1)} FROM transactions WHERE transactions.hash = %s AND blockchain.is_main = 1 INNER JOIN blockchain ON transactions.block = blockchain.id"] hash
;;
let rec nth_predecessor_by_id db id n =
match Block.retrieve db id with
| None -> None
| Some block ->
if n = 0L then Some block
else
if block.Block.is_main then
Block.retrieve_mainchain_block_at_height db (max 0L (Int64.(-) block.Block.height n))
else
nth_predecessor_by_id db block.Block.previous_block_id (Int64.(-) n 1L)
;;
let nth_predecessor db hash n =
match Block.retrieve_by_hash db hash with
| None -> None
| Some block -> nth_predecessor_by_id db block.Block.id n
;;
let retrieve_n_predecessors db hash n =
let rec retrieve_n_predecessors_by_id_acc acc id n =
if n = 0 then acc
else
match Block.retrieve db id with
| None -> acc
| Some block ->
retrieve_n_predecessors_by_id_acc (block :: acc) block.Block.previous_block_id (n - 1)
in
match Block.retrieve_by_hash db hash with
| None -> []
| Some block ->
List.rev (retrieve_n_predecessors_by_id_acc [] block.Block.id n)
;;
let retrieve_sidechain_with_leaf db sidechain_hash =
let rec retrieve_sidechain_acc db acc sidechain_id =
match Block.retrieve db sidechain_id with
| None -> None
| Some block ->
if block.Block.is_main then Some (acc, block)
else retrieve_sidechain_acc db (block :: acc) block.Block.previous_block_id
in
match Block.retrieve_by_hash db sidechain_hash with
| None -> None
| Some block -> retrieve_sidechain_acc db [] block.Block.id
;;
let retrieve_between_hashes db leaf_hash base_hash =
let rec retrieve_between_hashes_acc db acc leaf_id base_hash =
match Block.retrieve db leaf_id with
| None -> None
| Some block ->
if block.Block.hash = base_hash then Some acc
else retrieve_between_hashes_acc db (block :: acc) block.Block.previous_block_id base_hash
in
if base_hash = leaf_hash then Some []
else (
match Block.retrieve_by_hash db leaf_hash with
| None -> None
| Some block -> retrieve_between_hashes_acc db [block] block.Block.previous_block_id base_hash
)
;;
let rollback_mainchain_to_height db height =
S.execute db
[%sqlc "UPDATE blockchain SET is_main = 0 WHERE is_main = 1 AND height > %L"]
height
;;
let block_exists_anywhere db hash =
(Block.hash_exists db hash) || (Orphan.hash_exists db hash)
;;
let delete_block_transactions_from_mempool db block =
List.iter ~f:(MemoryPool.delete_by_hash db) (List.map ~f:Bitcoin_protocol_generator.transaction_hash block.block_transactions)
;;
let update_utxo_with_transaction db block_id tx_index tx =
let hash = Bitcoin_protocol_generator.transaction_hash tx in
let outpoint_of_txout txout_index txout =
{
UTxO.id = 0L;
hash = hash;
output_index = Int32.of_int_exn txout_index;
block_id = block_id;
value = txout.transaction_output_value;
script = txout.output_script;
is_coinbase = (tx_index = 0);
}
in
let spent_outpoints = List.map ~f:(fun txin -> (txin.previous_transaction_output.referenced_transaction_hash, txin.previous_transaction_output.transaction_output_index)) tx.transaction_inputs in
let created_outpoints = List.mapi ~f:outpoint_of_txout tx.transaction_outputs in
List.iter ~f:(fun (hash, index) -> UTxO.delete_by_hash_and_index db hash index) spent_outpoints;
List.iter ~f:(fun utxo -> ignore (UTxO.insert db utxo)) created_outpoints
;;
(* since a transaction can spend an output that only appeard in the same block, we have to handle transactions in order *)
let update_utxo_with_block db block hash =
Printf.printf "[DB] starting UTxO update for block %s\n%!" (Utils.hex_string_of_hash_string hash);
match Block.retrieve_by_hash db hash with
| None -> failwith "tried to update UTxO for non-existant block"
| Some db_block ->
List.iteri ~f:(S.transaction db (fun db -> update_utxo_with_transaction db db_block.Block.id)) block.block_transactions;
Printf.printf "[DB] finished UTxO update for block %s\n%!" (Utils.hex_string_of_hash_string hash);
;;
let register_transactions_for_block db block block_id =
let register_tx tx_index tx =
let hash = Bitcoin_protocol_generator.transaction_hash tx in
S.execute db [%sqlc "INSERT INTO transactions (hash, block, tx_index) VALUES (%s, %L, %d)"]
hash
block_id
tx_index
in
List.iteri ~f:register_tx block.block_transactions
;;
let insert_block_into_blockchain hash previous_block_hash log_difficulty header db =
match Block.hash_exists db hash with
| true -> NotInsertedExisted
| false ->
match Block.retrieve_by_hash db previous_block_hash with
| None -> InsertionFailed
| Some previous_block ->
let record_id = Block.insert db {
Block.id = 0L;
hash = hash;
height = (Int64.succ previous_block.Block.height);
previous_block_id = previous_block.Block.id;
cumulative_log_difficulty = (previous_block.Block.cumulative_log_difficulty +. log_difficulty);
is_main = previous_block.Block.is_main;
block_header = header;
} in
InsertedIntoBlockchain record_id
;;
let insert_block_as_orphan hash previous_block_hash log_difficulty header db =
match Orphan.hash_exists db hash with
| true -> NotInsertedExisted
| false ->
let record_id = Orphan.insert db {
Orphan.id = 0L;
hash = hash;
previous_block_hash = previous_block_hash;
log_difficulty = log_difficulty;
block_header = header;
} in
InsertedAsOrphan record_id
;;
(* we need a special implementation for this, since no previous block exists for the genesis block *)
let insert_genesis_block db =
let hash = Bitcaml_config.testnet3_genesis_block_hash in
let header = Bitcaml_config.testnet3_genesis_block_header in
let log_difficulty = log_difficulty_of_difficulty_bits header.block_difficulty_target in
if not (Block.hash_exists db hash) then
let record_id = Block.insert db {
Block.id = 0L;
hash = hash;
height = 0L;
previous_block_id = 0L;
cumulative_log_difficulty = log_difficulty;
is_main = true;
block_header = header;
} in
Some record_id
else
None
;;
let open_db path =
let db = S.open_db path in
init_db db;
ignore (insert_genesis_block db);
db
;;
| null | https://raw.githubusercontent.com/profmaad/bitcaml/18cfbca46c989f43dfb1bcfd50ee2ff500f9ab8d/src/bitcoin_blockchain_db.ml | ocaml | S.execute db
[%sqlinit "CREATE INDEX IF NOT EXISTS transactions_block_index ON transactions (block);"];
since a transaction can spend an output that only appeard in the same block, we have to handle transactions in order
we need a special implementation for this, since no previous block exists for the genesis block | open! Core.Std
open Bitcoin_protocol;;
module Sqlexpr = Sqlexpr_sqlite.Make(Sqlexpr_concurrency.Id);;
module S = Sqlexpr;;
type t = S.db;;
let difficulty_1_target = {
bits_base = 0x00ffff;
bits_exponent = 0x1d;
}
let float_log_difficulty_1_base = log (float_of_int difficulty_1_target.bits_base);;
let float_log_difficulty_scaland = log 256.0;;
let log_difficulty_of_difficulty_bits bits =
let float_log_base = log (float_of_int bits.bits_base) in
let float_exponent_difference = float_of_int (difficulty_1_target.bits_exponent - bits.bits_exponent) in
float_log_difficulty_1_base -. float_log_base +. float_log_difficulty_scaland *. float_exponent_difference
;;
let difficulty_of_difficulty_bits bits = exp (log_difficulty_of_difficulty_bits bits);;
let init_db db =
S.execute db
[%sqlinit "CREATE TABLE IF NOT EXISTS blockchain(
id INTEGER PRIMARY KEY,
hash TEXT COLLATE BINARY NOT NULL,
height INTEGER NOT NULL,
cumulative_log_difficulty REAL NOT NULL,
previous_block INTEGER NOT NULL,
is_main BOOLEAN NOT NULL,
block_version INTEGER NOT NULL,
merkle_root TEXT COLLATE BINARY NOT NULL,
timestamp INTEGER NOT NULL,
difficulty_bits INTEGER NOT NULL,
nonce INTEGER NOT NULL
);"];
S.execute db
[%sqlinit "CREATE TABLE IF NOT EXISTS orphans(
id INTEGER PRIMARY KEY,
hash TEXT COLLATE BINARY NOT NULL,
previous_block_hash TEXT COLLATE BINARY NOT NULL,
log_difficulty REAL NOT NULL,
block_version INTEGER NOT NULL,
merkle_root TEXT COLLATE BINARY NOT NULL,
timestamp INTEGER NOT NULL,
difficulty_bits INTEGER NOT NULL,
nonce INTEGER NOT NULL
);"];
S.execute db
[%sqlinit "CREATE INDEX IF NOT EXISTS hash_index ON blockchain (hash);"];
S.execute db
[%sqlinit "CREATE INDEX IF NOT EXISTS mainchain_hash_index ON blockchain (hash, is_main);"];
S.execute db
[%sqlinit "CREATE INDEX IF NOT EXISTS previous_block_index ON blockchain (previous_block);"];
S.execute db
[%sqlinit "CREATE INDEX IF NOT EXISTS orphans_hash_index ON orphans (hash);"];
S.execute db
[%sqlinit "CREATE INDEX IF NOT EXISTS orphans_previous_block_hash_index ON orphans (previous_block_hash);"];
S.execute db
[%sqlinit "CREATE TABLE IF NOT EXISTS memory_pool(
id INTEGER PRIMARY KEY,
hash TEXT COLLATE BINARY NOT NULL,
output_count INTEGER NOT NULL,
is_orphan BOOLEAN NOT NULL,
data BLOB NOT NULL
);"];
S.execute db
[%sqlinit "CREATE INDEX IF NOT EXISTS memory_pool_hash_index ON memory_pool (hash);"];
S.execute db
[%sqlinit "CREATE INDEX IF NOT EXISTS memory_pool_orphan_index ON memory_pool (is_orphan);"];
S.execute db
[%sqlinit "CREATE TABLE IF NOT EXISTS transactions(
id INTEGER PRIMARY KEY,
hash TEXT COLLATE BINARY NOT NULL,
block INTEGER NOT NULL,
tx_index INTEGER NOT NULL
);"];
S.execute db
[%sqlinit "CREATE TABLE IF NOT EXISTS unspent_transaction_outputs(
id INTEGER PRIMARY KEY,
hash TEXT COLLATE BINARY NOT NULL,
output_index INTEGER NOT NULL,
block INTEGER NOT NULL,
value INTEGER NOT NULL,
script TEXT COLLATE BINARY NOT NULL,
is_coinbase BOOLEAN NOT NULL
);"];
S.execute db
[%sqlinit "CREATE INDEX IF NOT EXISTS transactions_hash_index ON transactions (hash);"];
S.execute db
[%sqlinit "CREATE INDEX IF NOT EXISTS utxo_hash_index ON unspent_transaction_outputs (hash, output_index);"];
;;
type insertion_result =
| InsertedIntoBlockchain of int64
| InsertedAsOrphan of int64
| InsertionFailed
| NotInsertedExisted
;;
let run_in_transaction db f =
S.transaction db f
;;
module Block = struct
type db_block = {
id : int64;
hash : string;
height: int64;
cumulative_log_difficulty : float;
previous_block_id : int64;
is_main : bool;
block_header : block_header;
};;
type t = db_block;;
let from_result (id, hash, height, previous_block, cld, is_main, block_version, merkle_root, timestamp, difficulty_bits, nonce, previous_block_hash) =
{
id = id;
hash = hash;
height = height;
cumulative_log_difficulty = cld;
previous_block_id = previous_block;
is_main = is_main;
block_header = {
block_version = block_version;
previous_block_hash = previous_block_hash;
merkle_root = merkle_root;
block_timestamp = Utils.unix_tm_of_int64 timestamp;
block_difficulty_target = difficulty_bits_of_int32 difficulty_bits;
block_nonce = nonce;
};
}
;;
let retrieve db id =
let result = S.select_one_maybe db
[%sqlc "SELECT @L{id}, @s{hash}, @L{height}, @L{previous_block}, @f{cumulative_log_difficulty}, @b{is_main}, @d{block_version}, @s{merkle_root}, @L{timestamp}, @l{difficulty_bits}, @l{nonce}, @s{IFNULL((SELECT hash FROM blockchain WHERE id = previous_block LIMIT 1), X'0000000000000000000000000000000000000000000000000000000000000000')} FROM blockchain WHERE id = %L"]
id
in
Option.map ~f:from_result result
;;
let retrieve_by_hash db hash =
let result = S.select_one_maybe db
[%sqlc "SELECT @L{id}, @s{hash}, @L{height}, @L{previous_block}, @f{cumulative_log_difficulty}, @b{is_main}, @d{block_version}, @s{merkle_root}, @L{timestamp}, @l{difficulty_bits}, @l{nonce}, @s{IFNULL((SELECT hash FROM blockchain WHERE id = previous_block LIMIT 1), X'0000000000000000000000000000000000000000000000000000000000000000')} FROM blockchain WHERE hash = %s"]
hash
in
Option.map ~f:from_result result
;;
let retrieve_mainchain_tip db =
let result = S.select_one_maybe db
[%sqlc "SELECT @L{id}, @s{hash}, @L{height}, @L{previous_block}, @f{cumulative_log_difficulty}, @b{is_main}, @d{block_version}, @s{merkle_root}, @L{timestamp}, @l{difficulty_bits}, @l{nonce}, @s{IFNULL((SELECT hash FROM blockchain WHERE id = previous_block LIMIT 1), X'0000000000000000000000000000000000000000000000000000000000000000')} FROM blockchain WHERE is_main = 1 ORDER BY cumulative_log_difficulty DESC, height DESC"]
in
Option.map ~f:from_result result
;;
let retrieve_mainchain_block_at_height db height =
let result = S.select_one_maybe db
[%sqlc "SELECT @L{id}, @s{hash}, @L{height}, @L{previous_block}, @f{cumulative_log_difficulty}, @b{is_main}, @d{block_version}, @s{merkle_root}, @L{timestamp}, @l{difficulty_bits}, @l{nonce}, @s{IFNULL((SELECT hash FROM blockchain WHERE id = previous_block LIMIT 1), X'0000000000000000000000000000000000000000000000000000000000000000')} FROM blockchain WHERE height = %L AND is_main = 1 ORDER BY cumulative_log_difficulty DESC"]
height
in
Option.map ~f:from_result result
;;
let hash_exists db hash =
S.select_one db [%sql "SELECT @b{count(1)} FROM blockchain WHERE hash = %s LIMIT 1"] hash
;;
let insert db db_block =
S.insert db
[%sql "INSERT INTO blockchain(hash, height, previous_block, cumulative_log_difficulty, is_main, block_version, merkle_root, timestamp, difficulty_bits, nonce) VALUES(%s, %L, %L, %f, %b, %d, %s, %L, %l, %l)"]
db_block.hash
db_block.height
db_block.previous_block_id
db_block.cumulative_log_difficulty
db_block.is_main
db_block.block_header.block_version
db_block.block_header.merkle_root
(Utils.int64_of_unix_tm db_block.block_header.block_timestamp)
(int32_of_difficulty_bits db_block.block_header.block_difficulty_target)
db_block.block_header.block_nonce
;;
end
module Orphan = struct
type db_orphan = {
id : int64;
hash : string;
previous_block_hash : string;
log_difficulty : float;
block_header : block_header;
};;
type t = db_orphan;;
let from_result (id, hash, previous_block_hash, log_difficulty, block_version, merkle_root, timestamp, difficulty_bits, nonce) =
{
id = id;
hash = hash;
previous_block_hash = previous_block_hash;
log_difficulty = log_difficulty;
block_header = {
block_version = block_version;
previous_block_hash = previous_block_hash;
merkle_root = merkle_root;
block_timestamp = Utils.unix_tm_of_int64 timestamp;
block_difficulty_target = difficulty_bits_of_int32 difficulty_bits;
block_nonce = nonce;
};
}
;;
let retrieve db id =
let result = S.select_one_maybe db
[%sqlc "SELECT @L{id}, @s{hash}, @s{previous_block_hash}, @f{log_difficulty}, @d{block_version}, @s{merkle_root}, @L{timestamp}, @l{difficulty_bits}, @l{nonce} FROM orphans WHERE id = %L"]
id
in
Option.map ~f:from_result result
;;
let retrieve_by_hash db hash =
let result = S.select_one_maybe db
[%sqlc "SELECT @L{id}, @s{hash}, @s{previous_block_hash}, @f{log_difficulty}, @d{block_version}, @s{merkle_root}, @L{timestamp}, @l{difficulty_bits}, @l{nonce} FROM orphans WHERE hash = %s"]
hash
in
Option.map ~f:from_result result
;;
let retrieve_by_previous_block_hash db previous_block_hash =
let results = S.select db
[%sqlc "SELECT @L{id}, @s{hash}, @s{previous_block_hash}, @f{log_difficulty} @d{block_version}, @s{merkle_root}, @L{timestamp}, @l{difficulty_bits}, @l{nonce} FROM orphans WHERE previous_block_hash = %s"]
previous_block_hash
in
List.map ~f:from_result results
;;
let hash_exists db hash =
S.select_one db [%sql "SELECT @b{count(1)} FROM orphans WHERE hash = %s LIMIT 1"] hash
;;
let insert db db_orphan =
S.insert db
[%sql "INSERT INTO orphans(hash, previous_block_hash, log_difficulty, block_version, merkle_root, timestamp, difficulty_bits, nonce) VALUES(%s, %s, %f, %d, %s, %L, %l, %l)"]
db_orphan.hash
db_orphan.previous_block_hash
db_orphan.log_difficulty
db_orphan.block_header.block_version
db_orphan.block_header.merkle_root
(Utils.int64_of_unix_tm db_orphan.block_header.block_timestamp)
(int32_of_difficulty_bits db_orphan.block_header.block_difficulty_target)
db_orphan.block_header.block_nonce
;;
let delete db id =
S.execute db [%sql "DELETE FROM orphans WHERE id = %L"] id
;;
end
module UnspentTransactionOutput = struct
type db_unspent_transaction_output = {
id : int64;
hash : string;
output_index : int32;
block_id : int64;
value : int64;
script : string;
is_coinbase : bool;
};;
type t = db_unspent_transaction_output;;
let from_result (id, hash, output_index, block, value, script, is_coinbase) =
{
id = id;
hash = hash;
output_index = output_index;
block_id = block;
value = value;
script = script;
is_coinbase = is_coinbase;
}
;;
let retrieve_by_hash_and_index db hash output_index =
let result = S.select_one_maybe db
[%sqlc "SELECT @L{id}, @s{hash}, @l{output_index}, @L{block}, @L{value}, @s{script}, @b{is_coinbase} FROM unspent_transaction_outputs WHERE hash = %s AND output_index = %l"]
hash
output_index
in
Option.map ~f:from_result result
;;
let delete_by_hash db hash =
S.execute db
[%sqlc "DELETE FROM unspent_transaction_outputs WHERE hash = %s"]
hash
;;
let delete_by_hash_and_index db hash index =
S.execute db
[%sqlc "DELETE FROM unspent_transaction_outputs WHERE hash = %s AND output_index = %l"]
hash
index
;;
let insert db utxo =
S.insert db
[%sqlc "INSERT INTO unspent_transaction_outputs(hash, output_index, block, value, script, is_coinbase) VALUES(%s, %l, %L, %L, %s, %b)"]
utxo.hash
utxo.output_index
utxo.block_id
utxo.value
utxo.script
utxo.is_coinbase
;;
end
module UTxO = UnspentTransactionOutput;;
module MemoryPool = struct
type db_transaction = {
id : int64;
hash : string;
output_count : int32;
is_orphan : bool;
data : string;
};;
type t = db_transaction;;
let from_result (id, hash, output_count, is_orphan, data) =
{
id = id;
hash = hash;
output_count = output_count;
is_orphan = is_orphan;
data = data;
}
;;
let retrieve_by_hash db hash =
let result = S.select_one_maybe db
[%sqlc "SELECT @L{id}, @s{hash}, @l{output_count}, @b{is_orphan}, @S{data} FROM memory_pool WHERE hash = %s"]
hash
in
Option.map ~f:from_result result
;;
let delete_by_hash db hash =
S.execute db [%sql "DELETE FROM memory_pool WHERE hash = %s"] hash
;;
let hash_exists db hash =
S.select_one db [%sql "SELECT @b{count(1)} FROM memory_pool WHERE hash = %s LIMIT 1"] hash
;;
end
let mainchain_block_id_and_index_for_transaction_hash db tx_hash =
S.select_one_maybe db
[%sqlc "SELECT @L{transactions.block}, @d{transactions.tx_index} FROM transactions WHERE transaction.hash = %s AND blockchain.is_main = 1 INNER JOIN blockchain ON transactions.block = blockchain.id"]
tx_hash
;;
let mainchain_block_hash_and_index_for_transaction_hash db tx_hash =
S.select_one_maybe db
[%sqlc "SELECT @s{blockchain.hash where}, @d{transactions.tx_index} FROM transactions WHERE hash = %s AND blockchain.is_main = 1 INNER JOIN blockchain ON transactions.block = blockchain.id"]
tx_hash
;;
let mainchain_block_id_hash_and_index_for_transaction_hash db tx_hash =
S.select_one_maybe db
[%sqlc "SELECT @L{transactions.block}, @s{blockchain.hash}, @d{transactions.tx_index} FROM transactions WHERE hash = %s AND blockchain.is_main = 1 INNER JOIN blockchain ON transactions.block = blockchain.id"]
tx_hash
;;
let mainchain_transaction_hash_exists db hash =
S.select_one db [%sql "SELECT @b{count(1)} FROM transactions WHERE transactions.hash = %s AND blockchain.is_main = 1 INNER JOIN blockchain ON transactions.block = blockchain.id"] hash
;;
let rec nth_predecessor_by_id db id n =
match Block.retrieve db id with
| None -> None
| Some block ->
if n = 0L then Some block
else
if block.Block.is_main then
Block.retrieve_mainchain_block_at_height db (max 0L (Int64.(-) block.Block.height n))
else
nth_predecessor_by_id db block.Block.previous_block_id (Int64.(-) n 1L)
;;
let nth_predecessor db hash n =
match Block.retrieve_by_hash db hash with
| None -> None
| Some block -> nth_predecessor_by_id db block.Block.id n
;;
let retrieve_n_predecessors db hash n =
let rec retrieve_n_predecessors_by_id_acc acc id n =
if n = 0 then acc
else
match Block.retrieve db id with
| None -> acc
| Some block ->
retrieve_n_predecessors_by_id_acc (block :: acc) block.Block.previous_block_id (n - 1)
in
match Block.retrieve_by_hash db hash with
| None -> []
| Some block ->
List.rev (retrieve_n_predecessors_by_id_acc [] block.Block.id n)
;;
let retrieve_sidechain_with_leaf db sidechain_hash =
let rec retrieve_sidechain_acc db acc sidechain_id =
match Block.retrieve db sidechain_id with
| None -> None
| Some block ->
if block.Block.is_main then Some (acc, block)
else retrieve_sidechain_acc db (block :: acc) block.Block.previous_block_id
in
match Block.retrieve_by_hash db sidechain_hash with
| None -> None
| Some block -> retrieve_sidechain_acc db [] block.Block.id
;;
let retrieve_between_hashes db leaf_hash base_hash =
let rec retrieve_between_hashes_acc db acc leaf_id base_hash =
match Block.retrieve db leaf_id with
| None -> None
| Some block ->
if block.Block.hash = base_hash then Some acc
else retrieve_between_hashes_acc db (block :: acc) block.Block.previous_block_id base_hash
in
if base_hash = leaf_hash then Some []
else (
match Block.retrieve_by_hash db leaf_hash with
| None -> None
| Some block -> retrieve_between_hashes_acc db [block] block.Block.previous_block_id base_hash
)
;;
let rollback_mainchain_to_height db height =
S.execute db
[%sqlc "UPDATE blockchain SET is_main = 0 WHERE is_main = 1 AND height > %L"]
height
;;
let block_exists_anywhere db hash =
(Block.hash_exists db hash) || (Orphan.hash_exists db hash)
;;
let delete_block_transactions_from_mempool db block =
List.iter ~f:(MemoryPool.delete_by_hash db) (List.map ~f:Bitcoin_protocol_generator.transaction_hash block.block_transactions)
;;
let update_utxo_with_transaction db block_id tx_index tx =
let hash = Bitcoin_protocol_generator.transaction_hash tx in
let outpoint_of_txout txout_index txout =
{
UTxO.id = 0L;
hash = hash;
output_index = Int32.of_int_exn txout_index;
block_id = block_id;
value = txout.transaction_output_value;
script = txout.output_script;
is_coinbase = (tx_index = 0);
}
in
let spent_outpoints = List.map ~f:(fun txin -> (txin.previous_transaction_output.referenced_transaction_hash, txin.previous_transaction_output.transaction_output_index)) tx.transaction_inputs in
let created_outpoints = List.mapi ~f:outpoint_of_txout tx.transaction_outputs in
List.iter ~f:(fun (hash, index) -> UTxO.delete_by_hash_and_index db hash index) spent_outpoints;
List.iter ~f:(fun utxo -> ignore (UTxO.insert db utxo)) created_outpoints
;;
let update_utxo_with_block db block hash =
Printf.printf "[DB] starting UTxO update for block %s\n%!" (Utils.hex_string_of_hash_string hash);
match Block.retrieve_by_hash db hash with
| None -> failwith "tried to update UTxO for non-existant block"
| Some db_block ->
List.iteri ~f:(S.transaction db (fun db -> update_utxo_with_transaction db db_block.Block.id)) block.block_transactions;
Printf.printf "[DB] finished UTxO update for block %s\n%!" (Utils.hex_string_of_hash_string hash);
;;
let register_transactions_for_block db block block_id =
let register_tx tx_index tx =
let hash = Bitcoin_protocol_generator.transaction_hash tx in
S.execute db [%sqlc "INSERT INTO transactions (hash, block, tx_index) VALUES (%s, %L, %d)"]
hash
block_id
tx_index
in
List.iteri ~f:register_tx block.block_transactions
;;
let insert_block_into_blockchain hash previous_block_hash log_difficulty header db =
match Block.hash_exists db hash with
| true -> NotInsertedExisted
| false ->
match Block.retrieve_by_hash db previous_block_hash with
| None -> InsertionFailed
| Some previous_block ->
let record_id = Block.insert db {
Block.id = 0L;
hash = hash;
height = (Int64.succ previous_block.Block.height);
previous_block_id = previous_block.Block.id;
cumulative_log_difficulty = (previous_block.Block.cumulative_log_difficulty +. log_difficulty);
is_main = previous_block.Block.is_main;
block_header = header;
} in
InsertedIntoBlockchain record_id
;;
let insert_block_as_orphan hash previous_block_hash log_difficulty header db =
match Orphan.hash_exists db hash with
| true -> NotInsertedExisted
| false ->
let record_id = Orphan.insert db {
Orphan.id = 0L;
hash = hash;
previous_block_hash = previous_block_hash;
log_difficulty = log_difficulty;
block_header = header;
} in
InsertedAsOrphan record_id
;;
let insert_genesis_block db =
let hash = Bitcaml_config.testnet3_genesis_block_hash in
let header = Bitcaml_config.testnet3_genesis_block_header in
let log_difficulty = log_difficulty_of_difficulty_bits header.block_difficulty_target in
if not (Block.hash_exists db hash) then
let record_id = Block.insert db {
Block.id = 0L;
hash = hash;
height = 0L;
previous_block_id = 0L;
cumulative_log_difficulty = log_difficulty;
is_main = true;
block_header = header;
} in
Some record_id
else
None
;;
let open_db path =
let db = S.open_db path in
init_db db;
ignore (insert_genesis_block db);
db
;;
|
33549e03afbfa4e865f04e92497ae6f1eea043524b2a158265ae03b8155d3170 | exercism/erlang | crypto_square.erl | -module(crypto_square).
-export([ciphertext/1]).
ciphertext(_PlainText) -> undefined.
| null | https://raw.githubusercontent.com/exercism/erlang/57ac2707dae643682950715e74eb271f732e2100/exercises/practice/crypto-square/src/crypto_square.erl | erlang | -module(crypto_square).
-export([ciphertext/1]).
ciphertext(_PlainText) -> undefined.
| |
b5cc37563ebd8fffb7d63bf1dcd723192e1ec6754af62189c07f12c6f960c81b | rd--/hsc3 | rd-20061017.hs | crotale ( rd , 2006 - 10 - 17 ) ; i d
let crotale_data =
([35.45
,128.59
,346.97
,483.55
,1049.24
,1564.02
,1756.33
,3391.66
,3451.80
,3497.26
,3596.89
,3696.73
,3835.23
,3845.95
,4254.85
,4407.53
,4415.26
,4552.86
,5538.07
,5637.73
,5690.29
,5728.00
,5764.27
,5824.41
,6377.60
,6544.35
,6807.14
,6994.97
,7026.84
,7144.58
,7269.61
,7393.67
,7897.25
,8040.45
,8157.77
,8225.01
,9126.15
,9488.52
,9916.40
,10155.59
,11715.95
,12111.83
,12339.99
,12417.66
,12459.28
,12618.33
,13116.49
,13201.12
,13297.83
,13533.75]
,[0.001282
,0.000804
,0.017361
,0.004835
,0.004413
,0.004110
,0.000333
,0.003614
,0.006919
,0.000322
,0.000603
,0.066864
,0.000605
,0.003602
,0.000283
,0.015243
,0.020536
,0.016677
,0.000924
,0.202050
,0.001254
,0.012705
,0.000252
,0.000486
,0.000642
,0.000776
,0.208116
,0.002491
,0.001934
,0.005231
,0.006924
,0.001203
,0.205002
,0.040604
,0.003834
,0.002189
,0.180560
,0.002192
,0.006516
,0.009982
,0.004745
,0.046154
,0.000510
,0.001890
,0.001978
,0.006729
,0.002342
,0.002400
,0.035155
,0.001408]
,[5.203680
,1.703434
,40.165168
,27.282501
,0.895052
,42.847427
,2.660366
,15.767886
,6.848367
,3.232500
,1.734338
,2.020241
,4.727905
,9.400103
,0.710251
,37.494625
,36.248794
,29.172658
,3.891019
,4.757885
,3.851426
,20.907810
,3.732874
,2.383410
,10.443285
,8.795611
,20.985643
,18.011800
,25.297883
,14.819819
,42.391899
,2.948513
,11.043763
,49.551651
,29.882694
,10.527188
,23.557245
,26.555616
,45.099605
,22.550390
,36.461261
,11.826201
,16.818185
,14.903121
,32.811138
,43.138904
,12.289558
,11.498942
,10.465788
,24.931695])
(cf,ca,cd) = crotale_data
ps = mce [-12,-5,0,2,4,5,7,12]
t = dust kr 3
fs = select (tiRand 0 7 t) ps
s = decay2 t 0.06 0.01 * pinkNoise ar * tRand 0 1 t
ks = klankSpec_k cf ca (map recip cd)
k = dynKlank s (midiRatio fs) (tRand 0 1 t) (tRand 2 7 t) ks
in pan2 k (tRand (-1) 1 t) 1
crotale ( rd , 2006 - 10 - 17 ) ; i d
let crotale_data =
([35.45
,128.59
,346.97
,483.55
,1049.24
,1564.02
,1756.33
,3391.66
,3451.80
,3497.26
,3596.89
,3696.73
,3835.23
,3845.95
,4254.85
,4407.53
,4415.26
,4552.86
,5538.07
,5637.73
,5690.29
,5728.00
,5764.27
,5824.41
,6377.60
,6544.35
,6807.14
,6994.97
,7026.84
,7144.58
,7269.61
,7393.67
,7897.25
,8040.45
,8157.77
,8225.01
,9126.15
,9488.52
,9916.40
,10155.59
,11715.95
,12111.83
,12339.99
,12417.66
,12459.28
,12618.33
,13116.49
,13201.12
,13297.83
,13533.75]
,[0.001282
,0.000804
,0.017361
,0.004835
,0.004413
,0.004110
,0.000333
,0.003614
,0.006919
,0.000322
,0.000603
,0.066864
,0.000605
,0.003602
,0.000283
,0.015243
,0.020536
,0.016677
,0.000924
,0.202050
,0.001254
,0.012705
,0.000252
,0.000486
,0.000642
,0.000776
,0.208116
,0.002491
,0.001934
,0.005231
,0.006924
,0.001203
,0.205002
,0.040604
,0.003834
,0.002189
,0.180560
,0.002192
,0.006516
,0.009982
,0.004745
,0.046154
,0.000510
,0.001890
,0.001978
,0.006729
,0.002342
,0.002400
,0.035155
,0.001408]
,[5.203680
,1.703434
,40.165168
,27.282501
,0.895052
,42.847427
,2.660366
,15.767886
,6.848367
,3.232500
,1.734338
,2.020241
,4.727905
,9.400103
,0.710251
,37.494625
,36.248794
,29.172658
,3.891019
,4.757885
,3.851426
,20.907810
,3.732874
,2.383410
,10.443285
,8.795611
,20.985643
,18.011800
,25.297883
,14.819819
,42.391899
,2.948513
,11.043763
,49.551651
,29.882694
,10.527188
,23.557245
,26.555616
,45.099605
,22.550390
,36.461261
,11.826201
,16.818185
,14.903121
,32.811138
,43.138904
,12.289558
,11.498942
,10.465788
,24.931695])
(cf,ca,cd) = crotale_data
ps = mce [-12,-5,0,2,4,5,7,12]
n = pinkNoiseId 'α' ar
t = dustId 'β' kr 3
fs = select (tiRandId 'γ' 0 7 t) ps
g = tRandId 'δ' 0 1 t
fo = tRandId 'ε' 0 1 t
ds = tRandId 'ζ' 2 7 t
p = tRandId 'η' (-1) 1 t
s = decay2 t 0.06 0.01 * n * g
ks = klankSpec_k cf ca (map recip cd)
k = dynKlank s (midiRatio fs) fo ds ks
in pan2 k p 1
| null | https://raw.githubusercontent.com/rd--/hsc3/fb2ae8fb1923515938b69481a8971cc6b6d7a258/Help/Graph/rd-20061017.hs | haskell | crotale ( rd , 2006 - 10 - 17 ) ; i d
let crotale_data =
([35.45
,128.59
,346.97
,483.55
,1049.24
,1564.02
,1756.33
,3391.66
,3451.80
,3497.26
,3596.89
,3696.73
,3835.23
,3845.95
,4254.85
,4407.53
,4415.26
,4552.86
,5538.07
,5637.73
,5690.29
,5728.00
,5764.27
,5824.41
,6377.60
,6544.35
,6807.14
,6994.97
,7026.84
,7144.58
,7269.61
,7393.67
,7897.25
,8040.45
,8157.77
,8225.01
,9126.15
,9488.52
,9916.40
,10155.59
,11715.95
,12111.83
,12339.99
,12417.66
,12459.28
,12618.33
,13116.49
,13201.12
,13297.83
,13533.75]
,[0.001282
,0.000804
,0.017361
,0.004835
,0.004413
,0.004110
,0.000333
,0.003614
,0.006919
,0.000322
,0.000603
,0.066864
,0.000605
,0.003602
,0.000283
,0.015243
,0.020536
,0.016677
,0.000924
,0.202050
,0.001254
,0.012705
,0.000252
,0.000486
,0.000642
,0.000776
,0.208116
,0.002491
,0.001934
,0.005231
,0.006924
,0.001203
,0.205002
,0.040604
,0.003834
,0.002189
,0.180560
,0.002192
,0.006516
,0.009982
,0.004745
,0.046154
,0.000510
,0.001890
,0.001978
,0.006729
,0.002342
,0.002400
,0.035155
,0.001408]
,[5.203680
,1.703434
,40.165168
,27.282501
,0.895052
,42.847427
,2.660366
,15.767886
,6.848367
,3.232500
,1.734338
,2.020241
,4.727905
,9.400103
,0.710251
,37.494625
,36.248794
,29.172658
,3.891019
,4.757885
,3.851426
,20.907810
,3.732874
,2.383410
,10.443285
,8.795611
,20.985643
,18.011800
,25.297883
,14.819819
,42.391899
,2.948513
,11.043763
,49.551651
,29.882694
,10.527188
,23.557245
,26.555616
,45.099605
,22.550390
,36.461261
,11.826201
,16.818185
,14.903121
,32.811138
,43.138904
,12.289558
,11.498942
,10.465788
,24.931695])
(cf,ca,cd) = crotale_data
ps = mce [-12,-5,0,2,4,5,7,12]
t = dust kr 3
fs = select (tiRand 0 7 t) ps
s = decay2 t 0.06 0.01 * pinkNoise ar * tRand 0 1 t
ks = klankSpec_k cf ca (map recip cd)
k = dynKlank s (midiRatio fs) (tRand 0 1 t) (tRand 2 7 t) ks
in pan2 k (tRand (-1) 1 t) 1
crotale ( rd , 2006 - 10 - 17 ) ; i d
let crotale_data =
([35.45
,128.59
,346.97
,483.55
,1049.24
,1564.02
,1756.33
,3391.66
,3451.80
,3497.26
,3596.89
,3696.73
,3835.23
,3845.95
,4254.85
,4407.53
,4415.26
,4552.86
,5538.07
,5637.73
,5690.29
,5728.00
,5764.27
,5824.41
,6377.60
,6544.35
,6807.14
,6994.97
,7026.84
,7144.58
,7269.61
,7393.67
,7897.25
,8040.45
,8157.77
,8225.01
,9126.15
,9488.52
,9916.40
,10155.59
,11715.95
,12111.83
,12339.99
,12417.66
,12459.28
,12618.33
,13116.49
,13201.12
,13297.83
,13533.75]
,[0.001282
,0.000804
,0.017361
,0.004835
,0.004413
,0.004110
,0.000333
,0.003614
,0.006919
,0.000322
,0.000603
,0.066864
,0.000605
,0.003602
,0.000283
,0.015243
,0.020536
,0.016677
,0.000924
,0.202050
,0.001254
,0.012705
,0.000252
,0.000486
,0.000642
,0.000776
,0.208116
,0.002491
,0.001934
,0.005231
,0.006924
,0.001203
,0.205002
,0.040604
,0.003834
,0.002189
,0.180560
,0.002192
,0.006516
,0.009982
,0.004745
,0.046154
,0.000510
,0.001890
,0.001978
,0.006729
,0.002342
,0.002400
,0.035155
,0.001408]
,[5.203680
,1.703434
,40.165168
,27.282501
,0.895052
,42.847427
,2.660366
,15.767886
,6.848367
,3.232500
,1.734338
,2.020241
,4.727905
,9.400103
,0.710251
,37.494625
,36.248794
,29.172658
,3.891019
,4.757885
,3.851426
,20.907810
,3.732874
,2.383410
,10.443285
,8.795611
,20.985643
,18.011800
,25.297883
,14.819819
,42.391899
,2.948513
,11.043763
,49.551651
,29.882694
,10.527188
,23.557245
,26.555616
,45.099605
,22.550390
,36.461261
,11.826201
,16.818185
,14.903121
,32.811138
,43.138904
,12.289558
,11.498942
,10.465788
,24.931695])
(cf,ca,cd) = crotale_data
ps = mce [-12,-5,0,2,4,5,7,12]
n = pinkNoiseId 'α' ar
t = dustId 'β' kr 3
fs = select (tiRandId 'γ' 0 7 t) ps
g = tRandId 'δ' 0 1 t
fo = tRandId 'ε' 0 1 t
ds = tRandId 'ζ' 2 7 t
p = tRandId 'η' (-1) 1 t
s = decay2 t 0.06 0.01 * n * g
ks = klankSpec_k cf ca (map recip cd)
k = dynKlank s (midiRatio fs) fo ds ks
in pan2 k p 1
| |
6f9d06734534fcdaa09c6eda0e38ca01bb3aa03f3afa59b7179b4759c51b2871 | sbcl/sbcl | c-call.lisp | This software is part of the SBCL system . See the README file for
;;;; more information.
;;;;
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
;;;; public domain. The software is in the public domain and is
;;;; provided with absolutely no warranty. See the COPYING and CREDITS
;;;; files for more information.
(in-package "SB-ALIEN")
;;;; C string support.
(defun load-alien-c-string-type (element-type external-format not-null)
(make-alien-c-string-type
:to (parse-alien-type 'char (sb-kernel:make-null-lexenv))
:element-type element-type
:external-format external-format
:not-null not-null))
(define-alien-type-translator c-string
(&key (external-format :default)
(element-type 'character)
(not-null nil))
(load-alien-c-string-type element-type external-format not-null))
(defun c-string-external-format (type)
(let ((external-format (alien-c-string-type-external-format type)))
(if (eq external-format :default)
#+sb-xc-host (bug "No default c-string-external-format")
#-sb-xc-host (default-c-string-external-format)
external-format)))
(define-alien-type-method (c-string :unparse) (type state)
(let* ((external-format (alien-c-string-type-external-format type))
(element-type (alien-c-string-type-element-type type))
(not-null (alien-c-string-type-not-null type))
(tail
(append (unless (eq :default external-format)
(list :external-format external-format))
(unless (eq 'character element-type)
(list :element-type element-type))
(when not-null
(list :not-null t)))))
(if tail
(cons 'c-string tail)
'c-string)))
(define-alien-type-method (c-string :lisp-rep) (type)
(let ((possibilities '(simple-string (alien (* char)) (simple-array (unsigned-byte 8)))))
(if (alien-c-string-type-not-null type)
`(or ,@possibilities)
`(or null ,@possibilities))))
(define-alien-type-method (c-string :deport-pin-p) (type)
(declare (ignore type))
t)
(defun c-string-needs-conversion-p (type)
#+sb-xc-host
(declare (ignore type))
#+sb-xc-host
t
#-sb-xc-host
(let ((external-format (sb-impl::get-external-format
;; Can't use C-STRING-EXTERNAL-FORMAT here,
since the meaning of : DEFAULT can change
when * - FORMAT *
;; changes.
(alien-c-string-type-external-format type))))
(not (and external-format
(or (eq (first (sb-impl::ef-names external-format)) :ascii)
On all latin-1 codepoints will fit
into a base - char , on SB - UNICODE they wo n't .
#-sb-unicode
(eq (first (sb-impl::ef-names external-format)) :latin-1))))))
(declaim (ftype (sfunction (t) nil) null-error))
(defun null-error (type)
#-sb-xc-host(declare (optimize sb-kernel:allow-non-returning-tail-call))
(aver (alien-c-string-type-not-null type))
(error 'type-error
:expected-type `(alien ,(unparse-alien-type type))
:datum nil))
(define-alien-type-method (c-string :naturalize-gen) (type alien)
`(if (zerop (sap-int ,alien))
,(if (alien-c-string-type-not-null type)
`(null-error ',type)
nil)
;; Check whether we need to do a full external-format
;; conversion, or whether we can just do a cheap byte-by-byte
;; copy of the c-string data.
;;
;; On SB-UNICODE we can never do the cheap copy, even if the
;; external format and element-type are suitable, since
simple - base - strings may not contain ISO-8859 - 1 characters .
;; If we need to check for non-ascii data in the input, we
;; might as well go through the usual external-format machinery
;; instead of rewriting another version of it.
,(if #+sb-unicode t
#-sb-unicode (c-string-needs-conversion-p type)
`(c-string-to-string ,alien
(c-string-external-format ,type)
(alien-c-string-type-element-type
,type))
`(%naturalize-c-string ,alien))))
(define-alien-type-method (c-string :deport-gen) (type value)
This SAP taking is safe as DEPORT callers pin the VALUE when
;; necessary.
`(etypecase ,value
(null
,(if (alien-c-string-type-not-null type)
`(null-error ',type)
`(int-sap 0)))
((alien (* char)) (alien-sap ,value))
(vector (vector-sap ,value))))
(define-alien-type-method (c-string :deport-alloc-gen) (type value)
`(etypecase ,value
(null
,(if (alien-c-string-type-not-null type)
`(null-error ',type)
nil))
((alien (* char)) ,value)
;; If the alien type is not ascii-compatible (+SB-UNICODE)
;; or latin-1-compatible (-SB-UNICODE), we need to do
;; external format conversion.
,@(if (c-string-needs-conversion-p type)
`((t
(string-to-c-string ,value
(c-string-external-format ,type))))
`((simple-base-string
,value)
(simple-string
(string-to-c-string ,value
(c-string-external-format ,type)))))))
| null | https://raw.githubusercontent.com/sbcl/sbcl/eb76e6340e390a9238973e7bc6c26f61c94f509b/src/code/c-call.lisp | lisp | more information.
public domain. The software is in the public domain and is
provided with absolutely no warranty. See the COPYING and CREDITS
files for more information.
C string support.
Can't use C-STRING-EXTERNAL-FORMAT here,
changes.
Check whether we need to do a full external-format
conversion, or whether we can just do a cheap byte-by-byte
copy of the c-string data.
On SB-UNICODE we can never do the cheap copy, even if the
external format and element-type are suitable, since
If we need to check for non-ascii data in the input, we
might as well go through the usual external-format machinery
instead of rewriting another version of it.
necessary.
If the alien type is not ascii-compatible (+SB-UNICODE)
or latin-1-compatible (-SB-UNICODE), we need to do
external format conversion. | This software is part of the SBCL system . See the README file for
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
(in-package "SB-ALIEN")
(defun load-alien-c-string-type (element-type external-format not-null)
(make-alien-c-string-type
:to (parse-alien-type 'char (sb-kernel:make-null-lexenv))
:element-type element-type
:external-format external-format
:not-null not-null))
(define-alien-type-translator c-string
(&key (external-format :default)
(element-type 'character)
(not-null nil))
(load-alien-c-string-type element-type external-format not-null))
(defun c-string-external-format (type)
(let ((external-format (alien-c-string-type-external-format type)))
(if (eq external-format :default)
#+sb-xc-host (bug "No default c-string-external-format")
#-sb-xc-host (default-c-string-external-format)
external-format)))
(define-alien-type-method (c-string :unparse) (type state)
(let* ((external-format (alien-c-string-type-external-format type))
(element-type (alien-c-string-type-element-type type))
(not-null (alien-c-string-type-not-null type))
(tail
(append (unless (eq :default external-format)
(list :external-format external-format))
(unless (eq 'character element-type)
(list :element-type element-type))
(when not-null
(list :not-null t)))))
(if tail
(cons 'c-string tail)
'c-string)))
(define-alien-type-method (c-string :lisp-rep) (type)
(let ((possibilities '(simple-string (alien (* char)) (simple-array (unsigned-byte 8)))))
(if (alien-c-string-type-not-null type)
`(or ,@possibilities)
`(or null ,@possibilities))))
(define-alien-type-method (c-string :deport-pin-p) (type)
(declare (ignore type))
t)
(defun c-string-needs-conversion-p (type)
#+sb-xc-host
(declare (ignore type))
#+sb-xc-host
t
#-sb-xc-host
(let ((external-format (sb-impl::get-external-format
since the meaning of : DEFAULT can change
when * - FORMAT *
(alien-c-string-type-external-format type))))
(not (and external-format
(or (eq (first (sb-impl::ef-names external-format)) :ascii)
On all latin-1 codepoints will fit
into a base - char , on SB - UNICODE they wo n't .
#-sb-unicode
(eq (first (sb-impl::ef-names external-format)) :latin-1))))))
(declaim (ftype (sfunction (t) nil) null-error))
(defun null-error (type)
#-sb-xc-host(declare (optimize sb-kernel:allow-non-returning-tail-call))
(aver (alien-c-string-type-not-null type))
(error 'type-error
:expected-type `(alien ,(unparse-alien-type type))
:datum nil))
(define-alien-type-method (c-string :naturalize-gen) (type alien)
`(if (zerop (sap-int ,alien))
,(if (alien-c-string-type-not-null type)
`(null-error ',type)
nil)
simple - base - strings may not contain ISO-8859 - 1 characters .
,(if #+sb-unicode t
#-sb-unicode (c-string-needs-conversion-p type)
`(c-string-to-string ,alien
(c-string-external-format ,type)
(alien-c-string-type-element-type
,type))
`(%naturalize-c-string ,alien))))
(define-alien-type-method (c-string :deport-gen) (type value)
This SAP taking is safe as DEPORT callers pin the VALUE when
`(etypecase ,value
(null
,(if (alien-c-string-type-not-null type)
`(null-error ',type)
`(int-sap 0)))
((alien (* char)) (alien-sap ,value))
(vector (vector-sap ,value))))
(define-alien-type-method (c-string :deport-alloc-gen) (type value)
`(etypecase ,value
(null
,(if (alien-c-string-type-not-null type)
`(null-error ',type)
nil))
((alien (* char)) ,value)
,@(if (c-string-needs-conversion-p type)
`((t
(string-to-c-string ,value
(c-string-external-format ,type))))
`((simple-base-string
,value)
(simple-string
(string-to-c-string ,value
(c-string-external-format ,type)))))))
|
7a3e5a4fbc394ca5b97720c8f82cdec3fafe26e65bb058220535532fcdc45d9e | realworldocaml/mdx | cram.ml |
* Copyright ( c ) 2018 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2018 Thomas Gazagnaire <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
let src = Logs.Src.create "ocaml-mdx"
module Log = (val Logs.src_log src : Logs.LOG)
open Astring
type t = { command : string list; output : Output.t list; exit_code : int }
type cram_tests = {
start_pad : int;
hpad : int;
tests : t list;
end_pad : string option;
}
let dump_line ppf = function
| #Output.t as o -> Output.dump ppf o
| `Exit i -> Fmt.pf ppf "`Exit %d" i
| `Command c -> Fmt.pf ppf "`Command %S" c
| `Command_first c -> Fmt.pf ppf "`Command_first %S" c
| `Command_cont c -> Fmt.pf ppf "`Command_cont %S" c
| `Command_last c -> Fmt.pf ppf "`Command_last %S" c
let dump ppf { command; output; exit_code } =
Fmt.pf ppf "{@[command: %a;@ output: %a;@ exit_code: %d]}"
Fmt.Dump.(list string)
command
(Fmt.Dump.list Output.dump)
output exit_code
let rec pp_vertical_pad ppf = function
| 0 -> ()
| n ->
Fmt.pf ppf "\n";
pp_vertical_pad ppf (Int.pred n)
let pp_command ?(pad = 0) ppf (t : t) =
match t.command with
| [] -> ()
| l ->
let sep ppf () = Fmt.pf ppf "\\\n%a> " Pp.pp_pad pad in
Fmt.pf ppf "%a$ %a" Pp.pp_pad pad Fmt.(list ~sep string) l
let pp_exit_code ?(pad = 0) ppf = function
| 0 -> ()
| n -> Fmt.pf ppf "\n%a[%d]" Pp.pp_pad pad n
let pp ?pad ppf (t : t) =
pp_command ?pad ppf t;
Fmt.string ppf "\n";
Pp.pp_lines (Output.pp ?pad) ppf t.output;
pp_exit_code ?pad ppf t.exit_code
let hpad_of_lines = function
| [] -> 0
| h :: _ ->
let i = ref 0 in
while !i < String.length h && h.[!i] = ' ' do
incr i
done;
!i
let unpad_line ~hpad line =
match Util.String.all_blank line with
| true -> String.with_index_range line ~first:hpad
| false -> (
match String.length line < hpad with
| true -> Fmt.failwith "invalid padding: %S" line
| false -> String.with_index_range line ~first:hpad)
let unpad hpad = List.map (unpad_line ~hpad)
let dump_cram_tests ppf { start_pad; hpad; tests; end_pad } =
Fmt.pf ppf "{@[start_pad: %d;@ hpad: %d;@ tests: %a;@ end_pad: %a]}" start_pad
hpad
Fmt.Dump.(list dump)
tests
Fmt.Dump.(option string)
end_pad
determine the amount of empty lines before the first non - empty line
let start_pad lines =
let pad_lines, code_lines =
Util.List.partition_until (String.equal "") lines
in
make sure there * are * non - empty lines in the first place
match List.length code_lines with
| 0 -> (0, lines)
| _ -> (List.length pad_lines, code_lines)
let rec end_pad = function
| [] -> (None, [])
| [ x; last ] when Util.String.all_blank last -> (Some last, [ x ])
| x :: xs ->
let pad, xs = end_pad xs in
(pad, x :: xs)
type cram_input = {
start_pad : int;
tests : string list;
end_pad : string option;
}
let determine_padding lines =
match List.length lines with
| 0 -> failwith "unable to determine padding, no lines in block"
one line , it does n't have any paddings
| 1 -> { start_pad = 0; tests = lines; end_pad = None }
| _ ->
let start_pad, lines = start_pad lines in
let end_pad, lines = end_pad lines in
let lines =
match List.for_all Util.String.all_blank lines with
| true -> []
| false -> lines
in
{ start_pad; tests = lines; end_pad }
let of_lines t =
let { start_pad; tests; end_pad } = determine_padding t in
let hpad = hpad_of_lines tests in
let lines = unpad hpad tests in
let lexer_input =
lines |> List.map ((Fun.flip String.append) "\n") |> String.concat
in
let lines = Lexer_cram.token (Lexing.from_string lexer_input) in
Log.debug (fun l ->
l "Cram.of_lines (pad=%d) %a" hpad Fmt.(Dump.list dump_line) lines);
let mk command output ~exit:exit_code =
{ command; output = List.rev output; exit_code }
in
let rec command_cont acc = function
| `Command_cont c :: t -> command_cont (c :: acc) t
| `Command_last c :: t -> (List.rev (c :: acc), t)
| _ -> Fmt.failwith "invalid multi-line command"
in
let rec aux command output acc = function
| [] when command = [] -> List.rev acc
| [] -> List.rev (mk command output ~exit:0 :: acc)
| `Exit exit :: t -> aux [] [] (mk command output ~exit :: acc) t
| (`Ellipsis as o) :: t -> aux command (o :: output) acc t
| `Command cmd :: t ->
if command = [] then aux [ cmd ] [] acc t
else aux [ cmd ] [] (mk command output ~exit:0 :: acc) t
| `Command_first cmd :: t ->
let cmd, t = command_cont [ cmd ] t in
aux cmd [] (mk command output ~exit:0 :: acc) t
| (`Output _ as o) :: t -> aux command (o :: output) acc t
| (`Command_last s | `Command_cont s) :: t ->
aux command output acc (`Output s :: t)
in
let hpad, tests =
match lines with
| `Command_first cmd :: t ->
let cmd, t = command_cont [ cmd ] t in
(hpad, aux cmd [] [] t)
| `Command cmd :: t -> (hpad, aux [ cmd ] [] [] t)
| [] -> (0, [])
| `Output line :: _ ->
if String.length line > 0 && line.[0] = '$' then
failwith
"Blocks must start with a command or similar, not with an output \
line. To indicate a line as a command, start it with a dollar \
followed by a space."
else
failwith
"Blocks must start with a command or similar, not with an output \
line. Please, make sure that there's no spare empty line, \
particularly between the output and its input."
| _ -> Fmt.failwith "invalid cram block: %a" Fmt.(Dump.list dump_line) lines
in
{ start_pad; hpad; tests; end_pad }
let exit_code t = t.exit_code
(* -docs.html *)
let use_heredoc (t : t) = String.cut (List.hd t.command) ~sep:"<<" <> None
let command_line t =
if not (use_heredoc t) then String.concat ~sep:" " t.command
else String.concat ~sep:"\n" t.command
| null | https://raw.githubusercontent.com/realworldocaml/mdx/aa5551a8eb0669fa4561aeee55d6f1528661a510/lib/cram.ml | ocaml | -docs.html |
* Copyright ( c ) 2018 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2018 Thomas Gazagnaire <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
let src = Logs.Src.create "ocaml-mdx"
module Log = (val Logs.src_log src : Logs.LOG)
open Astring
type t = { command : string list; output : Output.t list; exit_code : int }
type cram_tests = {
start_pad : int;
hpad : int;
tests : t list;
end_pad : string option;
}
let dump_line ppf = function
| #Output.t as o -> Output.dump ppf o
| `Exit i -> Fmt.pf ppf "`Exit %d" i
| `Command c -> Fmt.pf ppf "`Command %S" c
| `Command_first c -> Fmt.pf ppf "`Command_first %S" c
| `Command_cont c -> Fmt.pf ppf "`Command_cont %S" c
| `Command_last c -> Fmt.pf ppf "`Command_last %S" c
let dump ppf { command; output; exit_code } =
Fmt.pf ppf "{@[command: %a;@ output: %a;@ exit_code: %d]}"
Fmt.Dump.(list string)
command
(Fmt.Dump.list Output.dump)
output exit_code
let rec pp_vertical_pad ppf = function
| 0 -> ()
| n ->
Fmt.pf ppf "\n";
pp_vertical_pad ppf (Int.pred n)
let pp_command ?(pad = 0) ppf (t : t) =
match t.command with
| [] -> ()
| l ->
let sep ppf () = Fmt.pf ppf "\\\n%a> " Pp.pp_pad pad in
Fmt.pf ppf "%a$ %a" Pp.pp_pad pad Fmt.(list ~sep string) l
let pp_exit_code ?(pad = 0) ppf = function
| 0 -> ()
| n -> Fmt.pf ppf "\n%a[%d]" Pp.pp_pad pad n
let pp ?pad ppf (t : t) =
pp_command ?pad ppf t;
Fmt.string ppf "\n";
Pp.pp_lines (Output.pp ?pad) ppf t.output;
pp_exit_code ?pad ppf t.exit_code
let hpad_of_lines = function
| [] -> 0
| h :: _ ->
let i = ref 0 in
while !i < String.length h && h.[!i] = ' ' do
incr i
done;
!i
let unpad_line ~hpad line =
match Util.String.all_blank line with
| true -> String.with_index_range line ~first:hpad
| false -> (
match String.length line < hpad with
| true -> Fmt.failwith "invalid padding: %S" line
| false -> String.with_index_range line ~first:hpad)
let unpad hpad = List.map (unpad_line ~hpad)
let dump_cram_tests ppf { start_pad; hpad; tests; end_pad } =
Fmt.pf ppf "{@[start_pad: %d;@ hpad: %d;@ tests: %a;@ end_pad: %a]}" start_pad
hpad
Fmt.Dump.(list dump)
tests
Fmt.Dump.(option string)
end_pad
determine the amount of empty lines before the first non - empty line
let start_pad lines =
let pad_lines, code_lines =
Util.List.partition_until (String.equal "") lines
in
make sure there * are * non - empty lines in the first place
match List.length code_lines with
| 0 -> (0, lines)
| _ -> (List.length pad_lines, code_lines)
let rec end_pad = function
| [] -> (None, [])
| [ x; last ] when Util.String.all_blank last -> (Some last, [ x ])
| x :: xs ->
let pad, xs = end_pad xs in
(pad, x :: xs)
type cram_input = {
start_pad : int;
tests : string list;
end_pad : string option;
}
let determine_padding lines =
match List.length lines with
| 0 -> failwith "unable to determine padding, no lines in block"
one line , it does n't have any paddings
| 1 -> { start_pad = 0; tests = lines; end_pad = None }
| _ ->
let start_pad, lines = start_pad lines in
let end_pad, lines = end_pad lines in
let lines =
match List.for_all Util.String.all_blank lines with
| true -> []
| false -> lines
in
{ start_pad; tests = lines; end_pad }
let of_lines t =
let { start_pad; tests; end_pad } = determine_padding t in
let hpad = hpad_of_lines tests in
let lines = unpad hpad tests in
let lexer_input =
lines |> List.map ((Fun.flip String.append) "\n") |> String.concat
in
let lines = Lexer_cram.token (Lexing.from_string lexer_input) in
Log.debug (fun l ->
l "Cram.of_lines (pad=%d) %a" hpad Fmt.(Dump.list dump_line) lines);
let mk command output ~exit:exit_code =
{ command; output = List.rev output; exit_code }
in
let rec command_cont acc = function
| `Command_cont c :: t -> command_cont (c :: acc) t
| `Command_last c :: t -> (List.rev (c :: acc), t)
| _ -> Fmt.failwith "invalid multi-line command"
in
let rec aux command output acc = function
| [] when command = [] -> List.rev acc
| [] -> List.rev (mk command output ~exit:0 :: acc)
| `Exit exit :: t -> aux [] [] (mk command output ~exit :: acc) t
| (`Ellipsis as o) :: t -> aux command (o :: output) acc t
| `Command cmd :: t ->
if command = [] then aux [ cmd ] [] acc t
else aux [ cmd ] [] (mk command output ~exit:0 :: acc) t
| `Command_first cmd :: t ->
let cmd, t = command_cont [ cmd ] t in
aux cmd [] (mk command output ~exit:0 :: acc) t
| (`Output _ as o) :: t -> aux command (o :: output) acc t
| (`Command_last s | `Command_cont s) :: t ->
aux command output acc (`Output s :: t)
in
let hpad, tests =
match lines with
| `Command_first cmd :: t ->
let cmd, t = command_cont [ cmd ] t in
(hpad, aux cmd [] [] t)
| `Command cmd :: t -> (hpad, aux [ cmd ] [] [] t)
| [] -> (0, [])
| `Output line :: _ ->
if String.length line > 0 && line.[0] = '$' then
failwith
"Blocks must start with a command or similar, not with an output \
line. To indicate a line as a command, start it with a dollar \
followed by a space."
else
failwith
"Blocks must start with a command or similar, not with an output \
line. Please, make sure that there's no spare empty line, \
particularly between the output and its input."
| _ -> Fmt.failwith "invalid cram block: %a" Fmt.(Dump.list dump_line) lines
in
{ start_pad; hpad; tests; end_pad }
let exit_code t = t.exit_code
let use_heredoc (t : t) = String.cut (List.hd t.command) ~sep:"<<" <> None
let command_line t =
if not (use_heredoc t) then String.concat ~sep:" " t.command
else String.concat ~sep:"\n" t.command
|
6a70f39d887d01b8da888fecd5204e0f656a5a99c82725535cc9dbf2d6bd2b73 | tqtezos/vesting-contract | Vesting.hs | { - # LANGUAGE DuplicateRecordFields # - }
{ - # LANGUAGE FunctionalDependencies # - }
# LANGUAGE RebindableSyntax #
{ - # LANGUAGE UndecidableSuperClasses # - }
{-# OPTIONS -Wno-missing-export-lists #-}
{-# OPTIONS -Wno-orphans #-}
{-# OPTIONS -Wno-unused-do-bind #-}
module Lorentz.Contracts.Vesting where
import Data.Bool
import Data.Functor
import Control.Monad (Monad((>>=)))
import Data.Eq
import Data.Function
import Data.Maybe
import GHC.Enum
import System.IO
import Text.Show
import Lorentz
import Lorentz.Entrypoints ()
import Tezos.Core
import qualified Data.Text.Lazy.IO as TL
import qualified Data.Text.Lazy.IO.Utf8 as Utf8
type TokensPerTick = Natural
type SecondsPerTick = Natural
type VestedTicks = Natural
secondsPerTick must be non - zero
data VestingSchedule = VestingSchedule
{ epoch :: Timestamp
, secondsPerTick :: SecondsPerTick
, tokensPerTick :: TokensPerTick
}
deriving stock Generic
deriving stock Show
deriving anyclass IsoValue
mkVestingSchedule :: SecondsPerTick -> TokensPerTick -> IO VestingSchedule
mkVestingSchedule secondsPerTick' tokensPerTick' = do
(\epoch' -> VestingSchedule epoch' secondsPerTick' tokensPerTick') <$> getCurrentTime
secondsPerTick must be non - zero
assertValidSchedule :: VestingSchedule -> VestingSchedule
assertValidSchedule xs@(VestingSchedule{..}) =
bool
(error "secondsPerTick must be non-zero")
xs
(secondsPerTick /= 0)
secondsPerTick must be non - zero
unVestingSchedule :: VestingSchedule & s :-> (Timestamp, (SecondsPerTick, TokensPerTick)) & s
unVestingSchedule = forcedCoerce_
secondsSinceEpoch :: Timestamp & s :-> Integer & s
secondsSinceEpoch = do
now
sub
ticksSinceEpoch :: forall s t. KnownValue t
=> VestingSchedule & t & s :-> Integer & t & s
ticksSinceEpoch = do
unVestingSchedule
dup
car
secondsSinceEpoch
dip $ do
cdr
car
ediv
if IsSome
then car
else failWith
-- | The number of `ticksSinceEpoch` that have not been vested
unvestedTicks :: VestingSchedule & VestedTicks & s :-> Maybe Natural & s
unvestedTicks = do
ticksSinceEpoch
sub
isNat
-- | Given a number of ticks to vest, assert that it's at most
` unvestedTicks ` and return the updated number of ` VestedTicks `
assertVestTicks :: Natural & VestingSchedule & VestedTicks & s :-> VestedTicks & s
assertVestTicks = do
dip $ do
dip dup
unvestedTicks
swap
ifNone
failWith
(do
-- unvested & to_vest
dip dup
unvested & to_vest & to_vest
swap
-- to_vest & unvested & to_vest
if IsLe -- to_vest <= unvested
then add
else failWith
)
vestTokens :: Natural & VestingSchedule & s :-> Natural & s
vestTokens = do
dip $ do
unVestingSchedule
cdr
cdr
mul
data Storage st = Storage
{ wrapped :: st
, vested :: VestedTicks
, schedule :: VestingSchedule
}
deriving stock Generic
deriving stock instance (Show st) => Show (Storage st)
deriving anyclass instance (IsoValue st) => IsoValue (Storage st)
unStorage :: Storage st & s :-> (st, (VestedTicks, VestingSchedule)) & s
unStorage = forcedCoerce_
toStorage :: (st, (VestedTicks, VestingSchedule)) & s :-> Storage st & s
toStorage = forcedCoerce_
KnownValue st
=> (forall s. Natural & st & s :-> [Operation] & s)
-> ContractCode Natural (Storage st)
vestingContract vest = do
dup
car
dip $ do
cdr
unStorage
dup
dip $ do
car
cdr
dup
dip cdr
dup
cdr
dip car
dup
dip $ do
assertVestTicks
dip dup
swap
dip $ do
pair
swap
dup
vestTokens
vest
dip $ do
pair
toStorage
pair
data TezParameter
= SetDelegate (Maybe KeyHash)
| Vest Natural
deriving stock Generic
deriving stock Show
deriving anyclass IsoValue
data TezStorage = TezStorage
{ target :: Address
, delegateAdmin :: Address
}
deriving stock Generic
deriving stock Show
deriving anyclass IsoValue
unTezStorage :: TezStorage & s :-> (Address, Address) & s
unTezStorage = forcedCoerce_
vestingTezContract :: ContractCode TezParameter (Storage TezStorage)
vestingTezContract = do
unpair
caseT @TezParameter
( #cSetDelegate /-> do
swap
dup
dip $ do
unStorage
car
unTezStorage
cdr -- delegateAdmin
sender
if IsEq
then do
dip nil
setDelegate
cons
else failWith
swap
pair
, #cVest /-> do
pair
vestingContract $ do
swap
unTezStorage
car -- target
contract @()
if IsNone
then failWith
else do
swap
push $ toEnum @Mutez 1
mul
unit
transferTokens
dip nil
cons
)
instance HasAnnotation VestingSchedule where
instance HasAnnotation TezStorage where
instance HasAnnotation (Storage TezStorage) where
instance ParameterHasEntrypoints TezParameter where
type ParameterEntrypointsDerivation TezParameter = EpdPlain
-- | Print `permitAdmin42Contract`
--
-- @
-- printVestingTezContract (Just "contracts/vesting_tez.tz") False
-- @
printVestingTezContract :: Maybe FilePath -> Bool -> IO ()
printVestingTezContract mOutput forceOneLine' =
maybe TL.putStrLn Utf8.writeFile mOutput $
printLorentzContract forceOneLine' $
(defaultContract vestingTezContract)
{ cDisableInitialCast = True }
initialVestedTicks :: VestedTicks
initialVestedTicks = 0
-- Tezos.Crypto.Orphans> A.printInitPermitAdmin42 (read "tz1bDCu64RmcpWahdn9bWrDMi6cu7mXZynHm")
-- Pair { } (Pair 0 "tz1bDCu64RmcpWahdn9bWrDMi6cu7mXZynHm")
printInitVestingTezContract :: Address -> Address -> SecondsPerTick -> TokensPerTick -> IO ()
printInitVestingTezContract target adminAddr secondsPerTick' tokensPerTick' =
mkVestingSchedule secondsPerTick' tokensPerTick' >>= (\schedule' ->
TL.putStrLn $
printLorentzValue @(Storage TezStorage) forceOneLine $
Storage
(TezStorage target adminAddr)
initialVestedTicks
(assertValidSchedule schedule')
)
where
forceOneLine = True
| null | https://raw.githubusercontent.com/tqtezos/vesting-contract/bc72106631fa82bef71723dd4d0bb4cf1147cdc4/src/Lorentz/Contracts/Vesting.hs | haskell | # OPTIONS -Wno-missing-export-lists #
# OPTIONS -Wno-orphans #
# OPTIONS -Wno-unused-do-bind #
| The number of `ticksSinceEpoch` that have not been vested
| Given a number of ticks to vest, assert that it's at most
unvested & to_vest
to_vest & unvested & to_vest
to_vest <= unvested
delegateAdmin
target
| Print `permitAdmin42Contract`
@
printVestingTezContract (Just "contracts/vesting_tez.tz") False
@
Tezos.Crypto.Orphans> A.printInitPermitAdmin42 (read "tz1bDCu64RmcpWahdn9bWrDMi6cu7mXZynHm")
Pair { } (Pair 0 "tz1bDCu64RmcpWahdn9bWrDMi6cu7mXZynHm") | { - # LANGUAGE DuplicateRecordFields # - }
{ - # LANGUAGE FunctionalDependencies # - }
# LANGUAGE RebindableSyntax #
{ - # LANGUAGE UndecidableSuperClasses # - }
module Lorentz.Contracts.Vesting where
import Data.Bool
import Data.Functor
import Control.Monad (Monad((>>=)))
import Data.Eq
import Data.Function
import Data.Maybe
import GHC.Enum
import System.IO
import Text.Show
import Lorentz
import Lorentz.Entrypoints ()
import Tezos.Core
import qualified Data.Text.Lazy.IO as TL
import qualified Data.Text.Lazy.IO.Utf8 as Utf8
type TokensPerTick = Natural
type SecondsPerTick = Natural
type VestedTicks = Natural
secondsPerTick must be non - zero
data VestingSchedule = VestingSchedule
{ epoch :: Timestamp
, secondsPerTick :: SecondsPerTick
, tokensPerTick :: TokensPerTick
}
deriving stock Generic
deriving stock Show
deriving anyclass IsoValue
mkVestingSchedule :: SecondsPerTick -> TokensPerTick -> IO VestingSchedule
mkVestingSchedule secondsPerTick' tokensPerTick' = do
(\epoch' -> VestingSchedule epoch' secondsPerTick' tokensPerTick') <$> getCurrentTime
secondsPerTick must be non - zero
assertValidSchedule :: VestingSchedule -> VestingSchedule
assertValidSchedule xs@(VestingSchedule{..}) =
bool
(error "secondsPerTick must be non-zero")
xs
(secondsPerTick /= 0)
secondsPerTick must be non - zero
unVestingSchedule :: VestingSchedule & s :-> (Timestamp, (SecondsPerTick, TokensPerTick)) & s
unVestingSchedule = forcedCoerce_
secondsSinceEpoch :: Timestamp & s :-> Integer & s
secondsSinceEpoch = do
now
sub
ticksSinceEpoch :: forall s t. KnownValue t
=> VestingSchedule & t & s :-> Integer & t & s
ticksSinceEpoch = do
unVestingSchedule
dup
car
secondsSinceEpoch
dip $ do
cdr
car
ediv
if IsSome
then car
else failWith
unvestedTicks :: VestingSchedule & VestedTicks & s :-> Maybe Natural & s
unvestedTicks = do
ticksSinceEpoch
sub
isNat
` unvestedTicks ` and return the updated number of ` VestedTicks `
assertVestTicks :: Natural & VestingSchedule & VestedTicks & s :-> VestedTicks & s
assertVestTicks = do
dip $ do
dip dup
unvestedTicks
swap
ifNone
failWith
(do
dip dup
unvested & to_vest & to_vest
swap
then add
else failWith
)
vestTokens :: Natural & VestingSchedule & s :-> Natural & s
vestTokens = do
dip $ do
unVestingSchedule
cdr
cdr
mul
data Storage st = Storage
{ wrapped :: st
, vested :: VestedTicks
, schedule :: VestingSchedule
}
deriving stock Generic
deriving stock instance (Show st) => Show (Storage st)
deriving anyclass instance (IsoValue st) => IsoValue (Storage st)
unStorage :: Storage st & s :-> (st, (VestedTicks, VestingSchedule)) & s
unStorage = forcedCoerce_
toStorage :: (st, (VestedTicks, VestingSchedule)) & s :-> Storage st & s
toStorage = forcedCoerce_
KnownValue st
=> (forall s. Natural & st & s :-> [Operation] & s)
-> ContractCode Natural (Storage st)
vestingContract vest = do
dup
car
dip $ do
cdr
unStorage
dup
dip $ do
car
cdr
dup
dip cdr
dup
cdr
dip car
dup
dip $ do
assertVestTicks
dip dup
swap
dip $ do
pair
swap
dup
vestTokens
vest
dip $ do
pair
toStorage
pair
data TezParameter
= SetDelegate (Maybe KeyHash)
| Vest Natural
deriving stock Generic
deriving stock Show
deriving anyclass IsoValue
data TezStorage = TezStorage
{ target :: Address
, delegateAdmin :: Address
}
deriving stock Generic
deriving stock Show
deriving anyclass IsoValue
unTezStorage :: TezStorage & s :-> (Address, Address) & s
unTezStorage = forcedCoerce_
vestingTezContract :: ContractCode TezParameter (Storage TezStorage)
vestingTezContract = do
unpair
caseT @TezParameter
( #cSetDelegate /-> do
swap
dup
dip $ do
unStorage
car
unTezStorage
sender
if IsEq
then do
dip nil
setDelegate
cons
else failWith
swap
pair
, #cVest /-> do
pair
vestingContract $ do
swap
unTezStorage
contract @()
if IsNone
then failWith
else do
swap
push $ toEnum @Mutez 1
mul
unit
transferTokens
dip nil
cons
)
instance HasAnnotation VestingSchedule where
instance HasAnnotation TezStorage where
instance HasAnnotation (Storage TezStorage) where
instance ParameterHasEntrypoints TezParameter where
type ParameterEntrypointsDerivation TezParameter = EpdPlain
printVestingTezContract :: Maybe FilePath -> Bool -> IO ()
printVestingTezContract mOutput forceOneLine' =
maybe TL.putStrLn Utf8.writeFile mOutput $
printLorentzContract forceOneLine' $
(defaultContract vestingTezContract)
{ cDisableInitialCast = True }
initialVestedTicks :: VestedTicks
initialVestedTicks = 0
printInitVestingTezContract :: Address -> Address -> SecondsPerTick -> TokensPerTick -> IO ()
printInitVestingTezContract target adminAddr secondsPerTick' tokensPerTick' =
mkVestingSchedule secondsPerTick' tokensPerTick' >>= (\schedule' ->
TL.putStrLn $
printLorentzValue @(Storage TezStorage) forceOneLine $
Storage
(TezStorage target adminAddr)
initialVestedTicks
(assertValidSchedule schedule')
)
where
forceOneLine = True
|
bbfa48a9c13d8e4c74d218b8a9a78255e813462df1ea216f4a30d08ba654f276 | coccinelle/coccinelle | nativeint.mli | val zero : nativeint
val one : nativeint
val minus_one : nativeint
external neg : nativeint -> nativeint = "%nativeint_neg"
external add : nativeint -> nativeint -> nativeint = "%nativeint_add"
external sub : nativeint -> nativeint -> nativeint = "%nativeint_sub"
external mul : nativeint -> nativeint -> nativeint = "%nativeint_mul"
external div : nativeint -> nativeint -> nativeint = "%nativeint_div"
val unsigned_div : nativeint -> nativeint -> nativeint
external rem : nativeint -> nativeint -> nativeint = "%nativeint_mod"
val unsigned_rem : nativeint -> nativeint -> nativeint
val succ : nativeint -> nativeint
val pred : nativeint -> nativeint
val abs : nativeint -> nativeint
val size : int
val max_int : nativeint
val min_int : nativeint
external logand : nativeint -> nativeint -> nativeint = "%nativeint_and"
external logor : nativeint -> nativeint -> nativeint = "%nativeint_or"
external logxor : nativeint -> nativeint -> nativeint = "%nativeint_xor"
val lognot : nativeint -> nativeint
external shift_left : nativeint -> int -> nativeint = "%nativeint_lsl"
external shift_right : nativeint -> int -> nativeint = "%nativeint_asr"
external shift_right_logical :
nativeint -> int -> nativeint = "%nativeint_lsr"
external of_int : int -> nativeint = "%nativeint_of_int"
external to_int : nativeint -> int = "%nativeint_to_int"
val unsigned_to_int : nativeint -> int option
external of_float :
float -> nativeint = "caml_nativeint_of_float"
"caml_nativeint_of_float_unboxed"[@@unboxed ][@@noalloc ]
external to_float :
nativeint -> float = "caml_nativeint_to_float"
"caml_nativeint_to_float_unboxed"[@@unboxed ][@@noalloc ]
external of_int32 : int32 -> nativeint = "%nativeint_of_int32"
external to_int32 : nativeint -> int32 = "%nativeint_to_int32"
external of_string : string -> nativeint = "caml_nativeint_of_string"
val of_string_opt : string -> nativeint option
val to_string : nativeint -> string
type t = nativeint
val compare : t -> t -> int
val unsigned_compare : t -> t -> int
val equal : t -> t -> bool
val min : t -> t -> t
val max : t -> t -> t
external format : string -> nativeint -> string = "caml_nativeint_format"
| null | https://raw.githubusercontent.com/coccinelle/coccinelle/5448bb2bd03491ffec356bf7bd6ddcdbf4d36bc9/bundles/stdcompat/stdcompat-current/interfaces/4.13/nativeint.mli | ocaml | val zero : nativeint
val one : nativeint
val minus_one : nativeint
external neg : nativeint -> nativeint = "%nativeint_neg"
external add : nativeint -> nativeint -> nativeint = "%nativeint_add"
external sub : nativeint -> nativeint -> nativeint = "%nativeint_sub"
external mul : nativeint -> nativeint -> nativeint = "%nativeint_mul"
external div : nativeint -> nativeint -> nativeint = "%nativeint_div"
val unsigned_div : nativeint -> nativeint -> nativeint
external rem : nativeint -> nativeint -> nativeint = "%nativeint_mod"
val unsigned_rem : nativeint -> nativeint -> nativeint
val succ : nativeint -> nativeint
val pred : nativeint -> nativeint
val abs : nativeint -> nativeint
val size : int
val max_int : nativeint
val min_int : nativeint
external logand : nativeint -> nativeint -> nativeint = "%nativeint_and"
external logor : nativeint -> nativeint -> nativeint = "%nativeint_or"
external logxor : nativeint -> nativeint -> nativeint = "%nativeint_xor"
val lognot : nativeint -> nativeint
external shift_left : nativeint -> int -> nativeint = "%nativeint_lsl"
external shift_right : nativeint -> int -> nativeint = "%nativeint_asr"
external shift_right_logical :
nativeint -> int -> nativeint = "%nativeint_lsr"
external of_int : int -> nativeint = "%nativeint_of_int"
external to_int : nativeint -> int = "%nativeint_to_int"
val unsigned_to_int : nativeint -> int option
external of_float :
float -> nativeint = "caml_nativeint_of_float"
"caml_nativeint_of_float_unboxed"[@@unboxed ][@@noalloc ]
external to_float :
nativeint -> float = "caml_nativeint_to_float"
"caml_nativeint_to_float_unboxed"[@@unboxed ][@@noalloc ]
external of_int32 : int32 -> nativeint = "%nativeint_of_int32"
external to_int32 : nativeint -> int32 = "%nativeint_to_int32"
external of_string : string -> nativeint = "caml_nativeint_of_string"
val of_string_opt : string -> nativeint option
val to_string : nativeint -> string
type t = nativeint
val compare : t -> t -> int
val unsigned_compare : t -> t -> int
val equal : t -> t -> bool
val min : t -> t -> t
val max : t -> t -> t
external format : string -> nativeint -> string = "caml_nativeint_format"
| |
5f3ba25f13a7dcab155c18190ee9dc6a64a3e471cf33becf404d6c0fd084c8c6 | vult-dsp/vult | passes.ml |
The MIT License ( MIT )
Copyright ( c ) 2014 ,
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE .
The MIT License (MIT)
Copyright (c) 2014 Leonardo Laguna Ruiz, Carl Jönsson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*)
(** Transformations and optimizations of the syntax tree *)
open Prog
open Env
open Maps
open PassCommon
module CreateTupleTypes = struct
type 'a dependencies = ('a * 'a list) list
let getSubTuples (t : Typ.t) : Typ.t list = Typ.getSubTypes t |> List.filter Typ.isTuple
let makeTypeDeclaration (t : Typ.t) : stmt =
match !t with
| Typ.TComposed ([ "tuple" ], types, _) ->
let elems = List.mapi (fun i a -> [ "field_" ^ string_of_int i ], a, emptyAttr) types in
StmtType (t, elems, emptyAttr)
| _ -> failwith "CreateTupleTypes.makeTypeDeclaration: there should be only tuples here"
;;
let rec getDeclarations dependencies visited remaining : Typ.t dependencies =
match remaining with
| [] -> Hashtbl.fold (fun a b acc -> (a, b) :: acc) dependencies []
| h :: t when TypeSet.mem h visited -> getDeclarations dependencies visited t
| h :: t ->
let sub = getSubTuples h in
let visited' = TypeSet.add h visited in
let () = Hashtbl.add dependencies h sub in
getDeclarations dependencies visited' (sub @ t)
;;
let rec checkCircularDepedencies components =
match components with
| [] -> ()
| [ _ ] :: t -> checkCircularDepedencies t
| types :: _ ->
let types_str = List.map PrintProg.typeStr types |> String.concat ", " in
let msg = "The following tuple types have circular dependencies: " ^ types_str in
Error.raiseErrorMsg msg
;;
let run state =
let data = Env.get state in
let tuples = TypeSet.elements (PassData.getTuples data) |> List.map Typ.unlink in
let dependencies = getDeclarations (Hashtbl.create 8) TypeSet.empty tuples in
let components = Components.components dependencies in
let sorted = List.map List.hd components in
let decl = List.map makeTypeDeclaration sorted in
decl
;;
end
(* Basic transformations *)
let inferPass (name : Id.t) (state, stmts) =
let state' = Env.enter Scope.Module state name emptyAttr in
let stmts, state', _ = Inference.inferStmtList state' Inference.NoType stmts in
let state' = Env.exit state' in
state', stmts
;;
let foldPassAll pass state (results : parser_results list) =
let state, rev =
List.fold_left
(fun (state, acc) result ->
let name = [ moduleName result.file ] in
let state, presult = pass name (state, result.presult) in
state, { result with presult } :: acc)
(state, [])
results
in
state, List.rev rev
;;
let interPass (name : Id.t) (state, stmts) =
let data = Env.get state in
Interpreter.Env.addModule data.PassData.interp_env name;
let env' = Interpreter.Env.enterModule data.PassData.interp_env name in
Interpreter.loadStmts env' stmts;
state, stmts
;;
let rec applyPassLog apply pass pass_name (state, stmts) =
if Mapper.log then print_endline ("Running " ^ pass_name);
if apply then Mapper.map_stmt_list pass state stmts else state, stmts
;;
let applyPass name apply pass pass_name (state, stmts) =
let state' = Env.enter Scope.Module state name emptyAttr in
let state', stmts' = applyPassLog apply pass pass_name (state', stmts) in
let state' = Env.exit state' in
state', stmts'
;;
let inferPassAll = foldPassAll inferPass
let pass1All options =
foldPassAll (fun name (state, stmts) -> applyPass name options.pass1 Pass1.run "pass 1" (state, stmts))
;;
let pass2All options =
foldPassAll (fun name (state, stmts) -> applyPass name options.pass2 Pass2.run "pass 2" (state, stmts))
;;
let pass3All options =
foldPassAll (fun name (state, stmts) -> applyPass name options.pass3 Pass3.run "pass 3" (state, stmts))
;;
let pass4All options =
foldPassAll (fun name (state, stmts) -> applyPass name options.pass4 Pass4.run "pass 4" (state, stmts))
;;
let pass5All options =
foldPassAll (fun name (state, stmts) -> applyPass name options.pass5 Pass5.run "pass 5" (state, stmts))
;;
let interAll = foldPassAll (fun name (state, stmts) -> interPass name (state, stmts))
let rec exhaustPass pass (env, results) =
let env, results = pass env results in
if shouldReapply env then exhaustPass pass (reset env, results) else env, results
;;
let getExtensions (args : Args.args) =
match args with
| { code = LuaCode; template = "vcv-prototype" } -> Some `VCVPrototype
| _ -> None
;;
let passesAll args ?(options = default_options) results =
let extensions = getExtensions args in
let env = Env.empty ~extensions (PassData.empty args) in
(env, results)
|> exhaustPass inferPassAll
|> exhaustPass (pass1All options)
|> exhaustPass interAll
|> exhaustPass (pass2All options)
|> exhaustPass (pass3All options)
|> exhaustPass (pass4All options)
|> exhaustPass (pass5All options)
;;
let applyTransformations args ?(options = default_options) (results : parser_results list) =
let env, stmts_list = passesAll args ~options results in
let data = Env.get env in
let used = data.used_code in
if options.tuples
then (
let tuples = { presult = CreateTupleTypes.run env; file = "" } in
tuples :: stmts_list, used)
else stmts_list, used
;;
let applyTransformationsSingle args ?(options = default_options) (results : parser_results) =
let env, stmts' = passesAll args ~options [ results ] in
let stmts' = List.map (fun a -> a.presult) stmts' |> List.flatten in
let tuples = CreateTupleTypes.run env in
{ results with presult = tuples @ stmts' }
;;
| null | https://raw.githubusercontent.com/vult-dsp/vult/860b2b7a8e891aa729578175a931ce2a9345428b/src/passes/passes.ml | ocaml | * Transformations and optimizations of the syntax tree
Basic transformations |
The MIT License ( MIT )
Copyright ( c ) 2014 ,
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE .
The MIT License (MIT)
Copyright (c) 2014 Leonardo Laguna Ruiz, Carl Jönsson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*)
open Prog
open Env
open Maps
open PassCommon
module CreateTupleTypes = struct
type 'a dependencies = ('a * 'a list) list
let getSubTuples (t : Typ.t) : Typ.t list = Typ.getSubTypes t |> List.filter Typ.isTuple
let makeTypeDeclaration (t : Typ.t) : stmt =
match !t with
| Typ.TComposed ([ "tuple" ], types, _) ->
let elems = List.mapi (fun i a -> [ "field_" ^ string_of_int i ], a, emptyAttr) types in
StmtType (t, elems, emptyAttr)
| _ -> failwith "CreateTupleTypes.makeTypeDeclaration: there should be only tuples here"
;;
let rec getDeclarations dependencies visited remaining : Typ.t dependencies =
match remaining with
| [] -> Hashtbl.fold (fun a b acc -> (a, b) :: acc) dependencies []
| h :: t when TypeSet.mem h visited -> getDeclarations dependencies visited t
| h :: t ->
let sub = getSubTuples h in
let visited' = TypeSet.add h visited in
let () = Hashtbl.add dependencies h sub in
getDeclarations dependencies visited' (sub @ t)
;;
let rec checkCircularDepedencies components =
match components with
| [] -> ()
| [ _ ] :: t -> checkCircularDepedencies t
| types :: _ ->
let types_str = List.map PrintProg.typeStr types |> String.concat ", " in
let msg = "The following tuple types have circular dependencies: " ^ types_str in
Error.raiseErrorMsg msg
;;
let run state =
let data = Env.get state in
let tuples = TypeSet.elements (PassData.getTuples data) |> List.map Typ.unlink in
let dependencies = getDeclarations (Hashtbl.create 8) TypeSet.empty tuples in
let components = Components.components dependencies in
let sorted = List.map List.hd components in
let decl = List.map makeTypeDeclaration sorted in
decl
;;
end
let inferPass (name : Id.t) (state, stmts) =
let state' = Env.enter Scope.Module state name emptyAttr in
let stmts, state', _ = Inference.inferStmtList state' Inference.NoType stmts in
let state' = Env.exit state' in
state', stmts
;;
let foldPassAll pass state (results : parser_results list) =
let state, rev =
List.fold_left
(fun (state, acc) result ->
let name = [ moduleName result.file ] in
let state, presult = pass name (state, result.presult) in
state, { result with presult } :: acc)
(state, [])
results
in
state, List.rev rev
;;
let interPass (name : Id.t) (state, stmts) =
let data = Env.get state in
Interpreter.Env.addModule data.PassData.interp_env name;
let env' = Interpreter.Env.enterModule data.PassData.interp_env name in
Interpreter.loadStmts env' stmts;
state, stmts
;;
let rec applyPassLog apply pass pass_name (state, stmts) =
if Mapper.log then print_endline ("Running " ^ pass_name);
if apply then Mapper.map_stmt_list pass state stmts else state, stmts
;;
let applyPass name apply pass pass_name (state, stmts) =
let state' = Env.enter Scope.Module state name emptyAttr in
let state', stmts' = applyPassLog apply pass pass_name (state', stmts) in
let state' = Env.exit state' in
state', stmts'
;;
let inferPassAll = foldPassAll inferPass
let pass1All options =
foldPassAll (fun name (state, stmts) -> applyPass name options.pass1 Pass1.run "pass 1" (state, stmts))
;;
let pass2All options =
foldPassAll (fun name (state, stmts) -> applyPass name options.pass2 Pass2.run "pass 2" (state, stmts))
;;
let pass3All options =
foldPassAll (fun name (state, stmts) -> applyPass name options.pass3 Pass3.run "pass 3" (state, stmts))
;;
let pass4All options =
foldPassAll (fun name (state, stmts) -> applyPass name options.pass4 Pass4.run "pass 4" (state, stmts))
;;
let pass5All options =
foldPassAll (fun name (state, stmts) -> applyPass name options.pass5 Pass5.run "pass 5" (state, stmts))
;;
let interAll = foldPassAll (fun name (state, stmts) -> interPass name (state, stmts))
let rec exhaustPass pass (env, results) =
let env, results = pass env results in
if shouldReapply env then exhaustPass pass (reset env, results) else env, results
;;
let getExtensions (args : Args.args) =
match args with
| { code = LuaCode; template = "vcv-prototype" } -> Some `VCVPrototype
| _ -> None
;;
let passesAll args ?(options = default_options) results =
let extensions = getExtensions args in
let env = Env.empty ~extensions (PassData.empty args) in
(env, results)
|> exhaustPass inferPassAll
|> exhaustPass (pass1All options)
|> exhaustPass interAll
|> exhaustPass (pass2All options)
|> exhaustPass (pass3All options)
|> exhaustPass (pass4All options)
|> exhaustPass (pass5All options)
;;
let applyTransformations args ?(options = default_options) (results : parser_results list) =
let env, stmts_list = passesAll args ~options results in
let data = Env.get env in
let used = data.used_code in
if options.tuples
then (
let tuples = { presult = CreateTupleTypes.run env; file = "" } in
tuples :: stmts_list, used)
else stmts_list, used
;;
let applyTransformationsSingle args ?(options = default_options) (results : parser_results) =
let env, stmts' = passesAll args ~options [ results ] in
let stmts' = List.map (fun a -> a.presult) stmts' |> List.flatten in
let tuples = CreateTupleTypes.run env in
{ results with presult = tuples @ stmts' }
;;
|
775519c578f4240850b7eb0f13092bfc3d415c28ab14ef093502fe6b2edeca04 | openeuler-mirror/secGear | Genheader.ml |
* Copyright ( c ) Huawei Technologies Co. , Ltd. 2020 . All rights reserved .
* secGear is licensed under the Mulan PSL v2 .
* You can use this software according to the terms and conditions of the Mulan PSL v2 .
* You may obtain a copy of Mulan PSL v2 at :
*
* THIS SOFTWARE IS PROVIDED ON AN " AS IS " BASIS , WITHOUT WARRANTIES OF ANY KIND , EITHER EXPRESS OR
* IMPLIED , INCLUDING BUT NOT LIMITED TO NON - INFRINGEMENT , MERCHANTABILITY OR FIT FOR A PARTICULAR
* PURPOSE .
* See the Mulan PSL v2 for more details .
* Copyright (c) Huawei Technologies Co., Ltd. 2020. All rights reserved.
* secGear is licensed under the Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
* PURPOSE.
* See the Mulan PSL v2 for more details.
*)
open Intel.Ast
open Intel.CodeGen
open Printf
open Commonfunc
let generate_header_start (name: string) (tag: string) =
let guard =
sprintf "CODEGENER_%s_%s_H" (String.uppercase_ascii name) tag
in
"#ifndef " ^ guard ^ "\n" ^ "#define " ^ guard ^ "\n\n"
let generate_args_include (ufs: untrusted_func list) =
let error_include =
if List.exists (fun f -> f.uf_propagate_errno) ufs then "#include <errno.h>"
else "/* #include <errno.h> - Errno propagation not enabled so not included. */"
in
"#include <stdint.h>\n" ^
"#include <stdlib.h>\n\n" ^
"#include \"enclave.h\"\n" ^
error_include ^ "\n"
let generate_function_id_ex (tf: trusted_func) =
let f = tf.tf_fdecl in
let f_name = f.fname in
if tf.tf_is_switchless then
"fid_sl_async_" ^ f_name
else
"fid_" ^ f_name
let generate_function_id (f: func_decl) =
let f_name = f.fname in
"fid_" ^ f_name
let generate_unrproxy_prototype (fd: func_decl) =
let func_name = fd.fname in
let func_args =
let func_args_list =
List.map (fun f -> gen_parm_str f) fd.plist
in
if List.length fd.plist > 0 then
let func_args_pre = String.concat ",\n " func_args_list in
"(\n " ^ ( match fd.rtype with Void -> "" ^ func_args_pre
| _ -> (get_tystr fd.rtype ^ "* retval,\n " ^ func_args_pre))
else
"(\n " ^ ( match fd.rtype with Void -> ""
| _ -> (get_tystr fd.rtype ^ "* retval"))
in
[
"cc_enclave_result_t " ^ func_name ^ func_args ^")";
]
let generate_rproxy_prototype (fd: func_decl) =
let func_name = fd.fname in
let enclave_decl =
"(\n " ^ (match fd.rtype with Void -> "cc_enclave_t *enclave" | _ -> "cc_enclave_t *enclave,\n " ^ (get_tystr fd.rtype ^ "* retval"))
in
let func_args =
let func_args_list =
List.map (fun f -> gen_parm_str f) fd.plist
in
if List.length fd.plist > 0 then
let func_args_pre = String.concat ",\n " func_args_list in
",\n " ^ func_args_pre
else ""
in
[
"cc_enclave_result_t " ^ func_name ^ enclave_decl ^ func_args ^")";
]
let generate_rproxy_prototype_sl_async (tf: trusted_func) =
if not tf.tf_is_switchless then
[""]
else
let fd = tf.tf_fdecl in
let func_name = fd.fname ^ "_async" in
let enclave_decl =
"(\n " ^ (match fd.rtype with Void -> "cc_enclave_t *enclave,\n int *task_id" | _ -> "cc_enclave_t *enclave,\n int *task_id,\n " ^ (get_tystr fd.rtype ^ " *retval")) in
let func_args =
let func_args_list =
List.map (fun f -> gen_parm_str f) fd.plist
in
if List.length fd.plist > 0 then
let func_args_pre = String.concat ",\n " func_args_list in
",\n " ^ func_args_pre
else
""
in
[
"cc_enclave_result_t " ^ func_name ^ enclave_decl ^ func_args ^")";
]
let generate_parm_str (p: pdecl) =
let (_, declr) = p in
declr.identifier
let get_struct_ele_str (p: pdecl) =
let (pt, decl) = p in
let stype = get_param_atype pt in
get_typed_declr_str stype decl
let get_union_ele_str (m: mdecl) =
let (stype, decl) = m in
get_typed_declr_str stype decl
let generate_struct_def (s: struct_def) =
let struct_name = s.sname in
let struct_body_pre = s.smlist in
let struct_body =
if List.length struct_body_pre > 0 then
let struct_body_list =
List.map (fun f -> sprintf " %s;" (get_struct_ele_str f)) struct_body_pre
in
String.concat "\n" struct_body_list
else ""
in
"typedef struct " ^ struct_name ^ "\n{\n" ^ struct_body ^ "\n} " ^ struct_name ^ ";\n"
let generate_union_def (u: union_def) =
let union_name = u.uname in
let union_body_pre = u.umlist in
let union_body =
if List.length union_body_pre > 0 then
let union_body_list =
List.map (fun f -> sprintf " %s;" (get_union_ele_str f)) union_body_pre
in
String.concat "\n" union_body_list
else ""
in
"typedef union " ^ union_name ^ "\n{\n" ^ union_body ^ "\n} " ^ union_name ^ ";\n"
let generate_enum_def (e: enum_def) =
let get_enum_ele_str (ele: enum_ele) =
let (estr, eval) = ele in
match eval with
EnumValNone -> estr
| EnumVal eeval -> estr ^ "=" ^ (attr_value_to_string eeval)
in
let enum_name = e.enname in
let enum_body_pre = e.enbody in
let enum_body =
if List.length enum_body_pre > 0 then
let enum_body_list =
List.map (fun f -> sprintf "%s" (get_enum_ele_str f)) enum_body_pre
in
String.concat ",\n " enum_body_list
else ""
in
if enum_name = "" then
"enum \n{\n " ^ enum_body ^ "\n};\n"
else "typedef enum " ^ enum_name ^ "\n{\n " ^ enum_body ^ "\n} " ^ enum_name ^ ";\n"
let generate_comp_def (ct: composite_type) =
match ct with
StructDef s -> generate_struct_def s
| UnionDef u -> generate_union_def u
| EnumDef e -> generate_enum_def e
let generate_trust_marshal_struct (tf: trusted_func) =
let fd = tf.tf_fdecl in
let s_name =
sprintf "%s_size_t" fd.fname
in
let struct_start = "typedef struct _" ^ s_name ^ "\n{\n size_t retval_size;\n" in
let struct_body =
let struct_body_list =
List.map (fun f -> " size_t " ^ generate_parm_str f ^"_size;") fd.plist
in
let struct_body_para = String.concat "\n" struct_body_list in
let deep_copy = List.filter is_deep_copy fd.plist in
let pre (_: parameter_type) = "\n " in
let post = "" in
let generator (_ : parameter_type) (_ : parameter_type) (decl : declarator) (mem_decl : declarator) =
sprintf "size_t %s_%s_size;" decl.identifier mem_decl.identifier in
let deep_copy_para =
String.concat "\n "
(List.map (deep_copy_func pre generator post) deep_copy);
in
struct_body_para ^ deep_copy_para;
in
let struct_end =
sprintf "} %s;\n" s_name
in
if struct_body = "" then
struct_start ^ struct_end
else struct_start ^ struct_body ^ "\n" ^ struct_end
let generate_untrust_marshal_struct (uf: untrusted_func) =
let fd = uf.uf_fdecl in
let s_name =
sprintf "%s_size_t" fd.fname
in
let struct_start = "typedef struct _" ^ s_name ^ "\n{\n size_t retval_size;\n" in
let struct_body =
let struct_body_list =
List.map (fun f -> " size_t " ^ generate_parm_str f ^"_size;") fd.plist
in
let struct_body_para = String.concat "\n" struct_body_list in
let deep_copy = List.filter is_deep_copy fd.plist in
let pre (_: parameter_type) = "\n " in
let post = "" in
let generator (_ : parameter_type) (_ : parameter_type) (decl : declarator) (mem_decl : declarator) =
sprintf "size_t %s_%s_size;" decl.identifier mem_decl.identifier in
let deep_copy_para =
String.concat "\n "
(List.map (deep_copy_func pre generator post) deep_copy);
in
struct_body_para ^ deep_copy_para;
in
let struct_end =
sprintf "} %s;\n" s_name
in
if struct_body = "" then
struct_start ^ struct_end
else struct_start ^ struct_body ^ "\n" ^ struct_end
let c_start = "#ifdef __cplusplus\n" ^ "extern \"C\" {\n" ^ "#endif\n"
let c_end = "\n#ifdef __cplusplus\n" ^ "}\n" ^ "#endif\n"
(* Emit the pieces of the generated "<shortnm>_args.h" header as a list of
   sections: guard + includes, user includes, user EDL types, marshalling
   structs, and the three function-ID enums.
   Fix: dropped the dead binding
     let tfunc_decls = List.filter is_not_switchless_function ec.tfunc_decls
   -- the trusted-ID enum below deliberately iterates the FULL
   ec.tfunc_decls (generate_function_id_ex already distinguishes
   switchless entries), so the filtered list was never used. *)
let generate_args_header (ec: enclave_content) =
  let hfile_start =
    generate_header_start ec.file_shortnm "ARGS"
  in
  let hfile_end = "#endif\n" in
  let hfile_include =
    generate_args_include ec.ufunc_decls
  in
  let def_include_com = "/**** User includes. ****/\n" in
  let def_include_list = ec.include_list in
  (* User #include lines are copied through verbatim. *)
  let def_include =
    if List.length def_include_list > 0 then
      let def_include_pre =
        List.map (fun f -> "#include \"" ^ f ^ "\"") def_include_list
      in
      String.concat "\n" def_include_pre
    else "/* There were no user defined types. */"
  in
  let def_types_com = "/**** User defined types in EDL. ****/\n" in
  let def_types_list = ec.comp_defs in
  let def_types =
    if List.length def_types_list > 0 then
      let def_types_pre =
        List.map generate_comp_def def_types_list
      in
      String.concat "\n" def_types_pre
    else "/* There were no user defined types. */\n"
  in
  let trust_fstruct_com = "/**** Trusted function marshalling structs. ****/\n" in
  let untrust_fstruct_com = "/**** Untrusted function marshalling structs. ****/\n" in
  let trust_fstruct =
    let trust_fstruct_pre =
      List.map generate_trust_marshal_struct ec.tfunc_decls
    in
    String.concat "\n" trust_fstruct_pre
  in
  let untrust_fstruct =
    let untrust_fstruct_pre =
      List.map generate_untrust_marshal_struct ec.ufunc_decls
    in
    String.concat "\n" untrust_fstruct_pre
  in
  let sl_tfunc_decls = List.filter is_switchless_function ec.tfunc_decls in
  let trust_fid_com = "/**** Trusted function IDs ****/\n" in
  let sl_trust_fid_com = "\n/**** Trusted switchless function IDs ****/\n" in
  let untrust_fid_com = "/**** Untrusted function IDs ****/\n" in
  (* Trusted IDs start at 2 -- presumably the first two values are
     reserved by the runtime; confirm before renumbering. *)
  let trust_fid_body =
    let trust_fid_pre =
      List.mapi
        (fun i f -> sprintf " %s = %d," (generate_function_id_ex f) (i + 2)) ec.tfunc_decls
    in
    String.concat "\n" trust_fid_pre
  in
  let sl_trust_fid_body =
    let sl_trust_fid_pre =
      List.mapi
        (fun i f -> sprintf " %s = %d," (generate_function_id f.tf_fdecl) i) sl_tfunc_decls
    in
    String.concat "\n" sl_trust_fid_pre
  in
  let sl_trust_fid_max = " fid_trusted_switchless_call_id_max = SECGEAR_ENUM_MAX\n" in
  let untrust_fid_body =
    let untrust_fid_pre =
      List.mapi
        (fun i f -> sprintf " %s = %d," (generate_function_id f.uf_fdecl) i) ec.ufunc_decls
    in
    String.concat "\n" untrust_fid_pre
  in
  let untrust_fid_max =
    " fid_untrusted_call_id_max = SECGEAR_ENUM_MAX\n"
  in
  let trust_fid_max =
    " fid_trusted_call_id_max = SECGEAR_ENUM_MAX\n"
  in
  let trust_fid = "enum\n{\n" ^ trust_fid_body ^ "\n" ^ trust_fid_max ^ "};" in
  let sl_trust_fid = "enum\n{\n" ^ sl_trust_fid_body ^ "\n" ^ sl_trust_fid_max ^ "};" in
  let untrust_fid = "enum\n{\n" ^ untrust_fid_body ^ "\n" ^ untrust_fid_max ^ "};" in
  [
    hfile_start ^ hfile_include;
    def_include_com ^ def_include;
    c_start;
    def_types_com ^ def_types;
    trust_fstruct_com ^ trust_fstruct;
    untrust_fstruct_com ^ untrust_fstruct;
    trust_fid_com ^ trust_fid;
    sl_trust_fid_com ^ sl_trust_fid;
    untrust_fid_com ^ untrust_fid;
    c_end;
    hfile_end;
  ]
(* Emit the contents of "<shortnm>_t.h" (trusted-side header) as a list of
   sections: guard + includes, ECALL prototypes the enclave developer must
   implement, OCALL proxy prototypes, and the closing guard. *)
let generate_trusted_header (ec: enclave_content) =
  let includes =
    sprintf "#include \"enclave.h\"\n\n#include \"%s_args.h\"\n#include \"status.h\"\n#include \"gp.h\"\n#include \"gp_ocall.h\"\n"
      ec.file_shortnm
  in
  (* Prototypes of the trusted (ECALL) implementations. *)
  let trusted_protos =
    String.concat ";\n\n"
      (List.map gen_func_proto (tf_list_to_fd_list ec.tfunc_decls))
  in
  (* Proxies for calling back out of the enclave, one per OCALL. *)
  let ocall_proxies =
    String.concat ";\n\n"
      (List.flatten
         (List.map (fun f -> generate_unrproxy_prototype f.uf_fdecl) ec.ufunc_decls))
  in
  let untrusted_section =
    if ec.ufunc_decls = [] then "/**** There is no untrusted function ****/"
    else "/**** Untrusted function prototypes. ****/\n" ^ ocall_proxies ^ ";"
  in
  [
    generate_header_start ec.file_shortnm "T" ^ includes;
    c_start;
    "/**** Trusted function prototypes. ****/\n" ^ trusted_protos ^ ";";
    untrusted_section;
    c_end;
    "#endif\n";
  ]
(* Emit the contents of "<shortnm>_u.h" (untrusted-side header): guard +
   includes, the TEE agent-ID macro, ECALL proxy prototypes (regular and
   switchless-async), and prototypes of the host's OCALL implementations. *)
let generate_untrusted_header (ec: enclave_content) =
let hfile_start =
generate_header_start ec.file_shortnm "U"
in
let hfile_end = "#endif\n" in
let hfile_include =
sprintf "#include \"%s_args.h\"\n#include \"secGear/enclave_internal.h\"\n" ec.file_shortnm
in
(* Default agent id, guarded so a build flag can override it. *)
let agent_id = "#ifndef TEE_SECE_AGENT_ID\n#define TEE_SECE_AGENT_ID 0x53656345\n#endif\n"
in
let trust_fproto_com = "/**** Trusted function prototypes. ****/\n" in
let untrust_fproto_com = "/**** Untrusted function prototypes. ****/\n" in
(* OCALL implementations the host application must provide. *)
let untrust_func_proto =
List.map gen_func_proto (uf_list_to_fd_list ec.ufunc_decls)
in
let untrust_func =
String.concat ";\n\n" untrust_func_proto
in
(* Host-side proxies for calling into the enclave, one per ECALL. *)
let r_proxy_proto =
List.map (fun f -> generate_rproxy_prototype f.tf_fdecl) ec.tfunc_decls
in
(* generate_rproxy_prototype_sl_async yields [""] for non-switchless
   functions, so this list usually contains empty strings. *)
let r_proxy_proto_sl_async =
List.map (fun f -> generate_rproxy_prototype_sl_async f) ec.tfunc_decls
in
let r_proxy =
String.concat ";\n\n" (List.flatten r_proxy_proto)
in
let r_proxy_sl_async =
String.concat ";\n\n" (List.flatten r_proxy_proto_sl_async)
in
[
hfile_start ^ hfile_include;
c_start;
agent_id;
(* NOTE(review): r_proxy and r_proxy_sl_async are glued with no ";"
   between the two groups, and the empty entries in the async list turn
   into stray ";\n\n" runs in the output -- harmless to most C
   compilers, but confirm this is the intended header layout. *)
trust_fproto_com ^ r_proxy ^ r_proxy_sl_async ^ ";";
if (List.length ec.ufunc_decls <> 0) then untrust_fproto_com ^ untrust_func ^ ";"
else "/**** There is no untrusted function ****/";
c_end;
hfile_end;
]
| null | https://raw.githubusercontent.com/openeuler-mirror/secGear/a2dd0e8ac1f8117c78ef9e3abfa8ff48f3c4420d/tools/codegener/Genheader.ml | ocaml |
* Copyright ( c ) Huawei Technologies Co. , Ltd. 2020 . All rights reserved .
* secGear is licensed under the Mulan PSL v2 .
* You can use this software according to the terms and conditions of the Mulan PSL v2 .
* You may obtain a copy of Mulan PSL v2 at :
*
* THIS SOFTWARE IS PROVIDED ON AN " AS IS " BASIS , WITHOUT WARRANTIES OF ANY KIND , EITHER EXPRESS OR
* IMPLIED , INCLUDING BUT NOT LIMITED TO NON - INFRINGEMENT , MERCHANTABILITY OR FIT FOR A PARTICULAR
* PURPOSE .
* See the Mulan PSL v2 for more details .
* Copyright (c) Huawei Technologies Co., Ltd. 2020. All rights reserved.
* secGear is licensed under the Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
* PURPOSE.
* See the Mulan PSL v2 for more details.
*)
open Intel.Ast
open Intel.CodeGen
open Printf
open Commonfunc
(* Opening include-guard lines ("#ifndef"/"#define") for a generated
   header; the guard macro is CODEGENER_<NAME>_<TAG>_H. *)
let generate_header_start (name: string) (tag: string) =
  let macro = sprintf "CODEGENER_%s_%s_H" (String.uppercase_ascii name) tag in
  sprintf "#ifndef %s\n#define %s\n\n" macro macro
(* Standard includes for the generated _args.h. <errno.h> is pulled in
   only when at least one OCALL propagates errno back into the enclave. *)
let generate_args_include (ufs: untrusted_func list) =
  let needs_errno = List.exists (fun f -> f.uf_propagate_errno) ufs in
  let errno_line =
    if needs_errno then "#include <errno.h>"
    else "/* #include <errno.h> - Errno propagation not enabled so not included. */"
  in
  String.concat ""
    [ "#include <stdint.h>\n";
      "#include <stdlib.h>\n\n";
      "#include \"enclave.h\"\n";
      errno_line;
      "\n" ]
(* Enum identifier for a trusted function; switchless ECALLs get the
   distinct "fid_sl_async_" prefix so their IDs never collide. *)
let generate_function_id_ex (tf: trusted_func) =
  let prefix = if tf.tf_is_switchless then "fid_sl_async_" else "fid_" in
  prefix ^ tf.tf_fdecl.fname
(* Plain enum identifier ("fid_<name>") for a function declaration. *)
let generate_function_id (f: func_decl) = "fid_" ^ f.fname
(* Prototype for the enclave-side proxy of an OCALL: returns
   cc_enclave_result_t, with the real return value (if any) delivered
   through a leading "retval" out-parameter followed by the declared
   parameters. Returned as a singleton list for uniform flattening. *)
let generate_unrproxy_prototype (fd: func_decl) =
let func_name = fd.fname in
let func_args =
let func_args_list =
List.map (fun f -> gen_parm_str f) fd.plist
in
(* Four combinations: with/without parameters x void/non-void return. *)
if List.length fd.plist > 0 then
let func_args_pre = String.concat ",\n " func_args_list in
"(\n " ^ ( match fd.rtype with Void -> "" ^ func_args_pre
| _ -> (get_tystr fd.rtype ^ "* retval,\n " ^ func_args_pre))
else
"(\n " ^ ( match fd.rtype with Void -> ""
| _ -> (get_tystr fd.rtype ^ "* retval"))
in
[
"cc_enclave_result_t " ^ func_name ^ func_args ^")";
]
(* Prototype for the host-side proxy of an ECALL: first parameter is the
   enclave handle, followed by a "retval" out-parameter for non-void
   functions, then the declared parameters. Singleton list result. *)
let generate_rproxy_prototype (fd: func_decl) =
let func_name = fd.fname in
(* Leading "cc_enclave_t *enclave" (plus retval for non-void returns). *)
let enclave_decl =
"(\n " ^ (match fd.rtype with Void -> "cc_enclave_t *enclave" | _ -> "cc_enclave_t *enclave,\n " ^ (get_tystr fd.rtype ^ "* retval"))
in
let func_args =
let func_args_list =
List.map (fun f -> gen_parm_str f) fd.plist
in
if List.length fd.plist > 0 then
let func_args_pre = String.concat ",\n " func_args_list in
",\n " ^ func_args_pre
else ""
in
[
"cc_enclave_result_t " ^ func_name ^ enclave_decl ^ func_args ^")";
]
(* Prototype for the asynchronous host-side proxy of a SWITCHLESS ECALL
   ("<name>_async", with an extra "int *task_id" out-parameter).
   Returns [""] for non-switchless functions -- callers concatenate the
   flattened results, so those empty entries become bare separators in
   the generated text. *)
let generate_rproxy_prototype_sl_async (tf: trusted_func) =
if not tf.tf_is_switchless then
[""]
else
let fd = tf.tf_fdecl in
let func_name = fd.fname ^ "_async" in
(* enclave handle + task id (+ retval for non-void returns). *)
let enclave_decl =
"(\n " ^ (match fd.rtype with Void -> "cc_enclave_t *enclave,\n int *task_id" | _ -> "cc_enclave_t *enclave,\n int *task_id,\n " ^ (get_tystr fd.rtype ^ " *retval")) in
let func_args =
let func_args_list =
List.map (fun f -> gen_parm_str f) fd.plist
in
if List.length fd.plist > 0 then
let func_args_pre = String.concat ",\n " func_args_list in
",\n " ^ func_args_pre
else
""
in
[
"cc_enclave_result_t " ^ func_name ^ enclave_decl ^ func_args ^")";
]
(* Bare identifier of a parameter declaration (drops the type half of
   the (parameter_type, declarator) pair). *)
let generate_parm_str (p: pdecl) = (snd p).identifier
(* "type name" C declaration text for one struct member. *)
let get_struct_ele_str (p: pdecl) =
  let (ptype, decl) = p in
  get_typed_declr_str (get_param_atype ptype) decl
(* "type name" C declaration text for one union member. *)
let get_union_ele_str (m: mdecl) =
  let (mtype, decl) = m in
  get_typed_declr_str mtype decl
(* C text "typedef struct <name> { <members>; } <name>;" for an EDL
   struct definition. An empty member list yields an empty body. *)
let generate_struct_def (s: struct_def) =
  let members =
    match s.smlist with
    | [] -> ""
    | ms ->
      String.concat "\n"
        (List.map (fun m -> sprintf " %s;" (get_struct_ele_str m)) ms)
  in
  "typedef struct " ^ s.sname ^ "\n{\n" ^ members ^ "\n} " ^ s.sname ^ ";\n"
(* C text "typedef union <name> { <members>; } <name>;" for an EDL
   union definition. An empty member list yields an empty body. *)
let generate_union_def (u: union_def) =
  let members =
    match u.umlist with
    | [] -> ""
    | ms ->
      String.concat "\n"
        (List.map (fun m -> sprintf " %s;" (get_union_ele_str m)) ms)
  in
  "typedef union " ^ u.uname ^ "\n{\n" ^ members ^ "\n} " ^ u.uname ^ ";\n"
(* C text for an EDL enum: anonymous "enum {...};" when the name is
   empty, otherwise "typedef enum <name> {...} <name>;". *)
let generate_enum_def (e: enum_def) =
(* One enumerator, with "=value" appended when an explicit value exists. *)
let get_enum_ele_str (ele: enum_ele) =
let (estr, eval) = ele in
match eval with
EnumValNone -> estr
| EnumVal eeval -> estr ^ "=" ^ (attr_value_to_string eeval)
in
let enum_name = e.enname in
let enum_body_pre = e.enbody in
let enum_body =
if List.length enum_body_pre > 0 then
let enum_body_list =
(* NOTE(review): sprintf "%s" is an identity copy here. *)
List.map (fun f -> sprintf "%s" (get_enum_ele_str f)) enum_body_pre
in
String.concat ",\n " enum_body_list
else ""
in
if enum_name = "" then
"enum \n{\n " ^ enum_body ^ "\n};\n"
else "typedef enum " ^ enum_name ^ "\n{\n " ^ enum_body ^ "\n} " ^ enum_name ^ ";\n"
(* Dispatch a user-defined EDL composite type to its C typedef printer. *)
let generate_comp_def = function
  | StructDef s -> generate_struct_def s
  | UnionDef u -> generate_union_def u
  | EnumDef e -> generate_enum_def e
(* "typedef struct _<fname>_size_t { ... } <fname>_size_t;" carrying the
   byte sizes used to marshal one trusted function's arguments: the
   return value, each parameter, and each deep-copied member.
   Mirrors generate_untrust_marshal_struct below. *)
let generate_trust_marshal_struct (tf: trusted_func) =
let fd = tf.tf_fdecl in
let s_name =
sprintf "%s_size_t" fd.fname
in
let struct_start = "typedef struct _" ^ s_name ^ "\n{\n size_t retval_size;\n" in
let struct_body =
(* One "size_t <param>_size;" field per declared parameter. *)
let struct_body_list =
List.map (fun f -> " size_t " ^ generate_parm_str f ^"_size;") fd.plist
in
let struct_body_para = String.concat "\n" struct_body_list in
(* Extra size fields for members reached through deep-copy pointers. *)
let deep_copy = List.filter is_deep_copy fd.plist in
let pre (_: parameter_type) = "\n " in
let post = "" in
let generator (_ : parameter_type) (_ : parameter_type) (decl : declarator) (mem_decl : declarator) =
sprintf "size_t %s_%s_size;" decl.identifier mem_decl.identifier in
(* The trailing ";" before "in" is a legal OCaml trailing semicolon in
   a sequence expression -- it does not discard the value here. *)
let deep_copy_para =
String.concat "\n "
(List.map (deep_copy_func pre generator post) deep_copy);
in
struct_body_para ^ deep_copy_para;
in
let struct_end =
sprintf "} %s;\n" s_name
in
if struct_body = "" then
struct_start ^ struct_end
else struct_start ^ struct_body ^ "\n" ^ struct_end
(* Marshalling-size struct for one untrusted function (OCALL).
   NOTE(review): byte-for-byte twin of generate_trust_marshal_struct
   except for the record field (uf_fdecl vs tf_fdecl) -- a shared helper
   over func_decl would remove the duplication. *)
let generate_untrust_marshal_struct (uf: untrusted_func) =
let fd = uf.uf_fdecl in
let s_name =
sprintf "%s_size_t" fd.fname
in
let struct_start = "typedef struct _" ^ s_name ^ "\n{\n size_t retval_size;\n" in
let struct_body =
(* One "size_t <param>_size;" field per declared parameter. *)
let struct_body_list =
List.map (fun f -> " size_t " ^ generate_parm_str f ^"_size;") fd.plist
in
let struct_body_para = String.concat "\n" struct_body_list in
(* Extra size fields for members reached through deep-copy pointers. *)
let deep_copy = List.filter is_deep_copy fd.plist in
let pre (_: parameter_type) = "\n " in
let post = "" in
let generator (_ : parameter_type) (_ : parameter_type) (decl : declarator) (mem_decl : declarator) =
sprintf "size_t %s_%s_size;" decl.identifier mem_decl.identifier in
let deep_copy_para =
String.concat "\n "
(List.map (deep_copy_func pre generator post) deep_copy);
in
struct_body_para ^ deep_copy_para;
in
let struct_end =
sprintf "} %s;\n" s_name
in
if struct_body = "" then
struct_start ^ struct_end
else struct_start ^ struct_body ^ "\n" ^ struct_end
(* Opening / closing extern "C" guard blocks wrapped around every
   generated C declaration section. *)
let c_start = "#ifdef __cplusplus\nextern \"C\" {\n#endif\n"
let c_end = "\n#ifdef __cplusplus\n}\n#endif\n"
(* Emit the pieces of the generated "<shortnm>_args.h" header as a list of
   sections: guard + includes, user includes, user EDL types, marshalling
   structs, and the three function-ID enums.
   Fix: dropped the dead binding
     let tfunc_decls = List.filter is_not_switchless_function ec.tfunc_decls
   -- the trusted-ID enum below deliberately iterates the FULL
   ec.tfunc_decls (generate_function_id_ex already distinguishes
   switchless entries), so the filtered list was never used. *)
let generate_args_header (ec: enclave_content) =
  let hfile_start =
    generate_header_start ec.file_shortnm "ARGS"
  in
  let hfile_end = "#endif\n" in
  let hfile_include =
    generate_args_include ec.ufunc_decls
  in
  let def_include_com = "/**** User includes. ****/\n" in
  let def_include_list = ec.include_list in
  (* User #include lines are copied through verbatim. *)
  let def_include =
    if List.length def_include_list > 0 then
      let def_include_pre =
        List.map (fun f -> "#include \"" ^ f ^ "\"") def_include_list
      in
      String.concat "\n" def_include_pre
    else "/* There were no user defined types. */"
  in
  let def_types_com = "/**** User defined types in EDL. ****/\n" in
  let def_types_list = ec.comp_defs in
  let def_types =
    if List.length def_types_list > 0 then
      let def_types_pre =
        List.map generate_comp_def def_types_list
      in
      String.concat "\n" def_types_pre
    else "/* There were no user defined types. */\n"
  in
  let trust_fstruct_com = "/**** Trusted function marshalling structs. ****/\n" in
  let untrust_fstruct_com = "/**** Untrusted function marshalling structs. ****/\n" in
  let trust_fstruct =
    let trust_fstruct_pre =
      List.map generate_trust_marshal_struct ec.tfunc_decls
    in
    String.concat "\n" trust_fstruct_pre
  in
  let untrust_fstruct =
    let untrust_fstruct_pre =
      List.map generate_untrust_marshal_struct ec.ufunc_decls
    in
    String.concat "\n" untrust_fstruct_pre
  in
  let sl_tfunc_decls = List.filter is_switchless_function ec.tfunc_decls in
  let trust_fid_com = "/**** Trusted function IDs ****/\n" in
  let sl_trust_fid_com = "\n/**** Trusted switchless function IDs ****/\n" in
  let untrust_fid_com = "/**** Untrusted function IDs ****/\n" in
  (* Trusted IDs start at 2 -- presumably the first two values are
     reserved by the runtime; confirm before renumbering. *)
  let trust_fid_body =
    let trust_fid_pre =
      List.mapi
        (fun i f -> sprintf " %s = %d," (generate_function_id_ex f) (i + 2)) ec.tfunc_decls
    in
    String.concat "\n" trust_fid_pre
  in
  let sl_trust_fid_body =
    let sl_trust_fid_pre =
      List.mapi
        (fun i f -> sprintf " %s = %d," (generate_function_id f.tf_fdecl) i) sl_tfunc_decls
    in
    String.concat "\n" sl_trust_fid_pre
  in
  let sl_trust_fid_max = " fid_trusted_switchless_call_id_max = SECGEAR_ENUM_MAX\n" in
  let untrust_fid_body =
    let untrust_fid_pre =
      List.mapi
        (fun i f -> sprintf " %s = %d," (generate_function_id f.uf_fdecl) i) ec.ufunc_decls
    in
    String.concat "\n" untrust_fid_pre
  in
  let untrust_fid_max =
    " fid_untrusted_call_id_max = SECGEAR_ENUM_MAX\n"
  in
  let trust_fid_max =
    " fid_trusted_call_id_max = SECGEAR_ENUM_MAX\n"
  in
  let trust_fid = "enum\n{\n" ^ trust_fid_body ^ "\n" ^ trust_fid_max ^ "};" in
  let sl_trust_fid = "enum\n{\n" ^ sl_trust_fid_body ^ "\n" ^ sl_trust_fid_max ^ "};" in
  let untrust_fid = "enum\n{\n" ^ untrust_fid_body ^ "\n" ^ untrust_fid_max ^ "};" in
  [
    hfile_start ^ hfile_include;
    def_include_com ^ def_include;
    c_start;
    def_types_com ^ def_types;
    trust_fstruct_com ^ trust_fstruct;
    untrust_fstruct_com ^ untrust_fstruct;
    trust_fid_com ^ trust_fid;
    sl_trust_fid_com ^ sl_trust_fid;
    untrust_fid_com ^ untrust_fid;
    c_end;
    hfile_end;
  ]
(* Emit the contents of "<shortnm>_t.h" (trusted-side header) as a list of
   sections: guard + includes, ECALL prototypes the enclave developer must
   implement, OCALL proxy prototypes, and the closing guard. *)
let generate_trusted_header (ec: enclave_content) =
  let includes =
    sprintf "#include \"enclave.h\"\n\n#include \"%s_args.h\"\n#include \"status.h\"\n#include \"gp.h\"\n#include \"gp_ocall.h\"\n"
      ec.file_shortnm
  in
  (* Prototypes of the trusted (ECALL) implementations. *)
  let trusted_protos =
    String.concat ";\n\n"
      (List.map gen_func_proto (tf_list_to_fd_list ec.tfunc_decls))
  in
  (* Proxies for calling back out of the enclave, one per OCALL. *)
  let ocall_proxies =
    String.concat ";\n\n"
      (List.flatten
         (List.map (fun f -> generate_unrproxy_prototype f.uf_fdecl) ec.ufunc_decls))
  in
  let untrusted_section =
    if ec.ufunc_decls = [] then "/**** There is no untrusted function ****/"
    else "/**** Untrusted function prototypes. ****/\n" ^ ocall_proxies ^ ";"
  in
  [
    generate_header_start ec.file_shortnm "T" ^ includes;
    c_start;
    "/**** Trusted function prototypes. ****/\n" ^ trusted_protos ^ ";";
    untrusted_section;
    c_end;
    "#endif\n";
  ]
(* Emit the contents of "<shortnm>_u.h" (untrusted-side header): guard +
   includes, the TEE agent-ID macro, ECALL proxy prototypes (regular and
   switchless-async), and prototypes of the host's OCALL implementations. *)
let generate_untrusted_header (ec: enclave_content) =
let hfile_start =
generate_header_start ec.file_shortnm "U"
in
let hfile_end = "#endif\n" in
let hfile_include =
sprintf "#include \"%s_args.h\"\n#include \"secGear/enclave_internal.h\"\n" ec.file_shortnm
in
(* Default agent id, guarded so a build flag can override it. *)
let agent_id = "#ifndef TEE_SECE_AGENT_ID\n#define TEE_SECE_AGENT_ID 0x53656345\n#endif\n"
in
let trust_fproto_com = "/**** Trusted function prototypes. ****/\n" in
let untrust_fproto_com = "/**** Untrusted function prototypes. ****/\n" in
(* OCALL implementations the host application must provide. *)
let untrust_func_proto =
List.map gen_func_proto (uf_list_to_fd_list ec.ufunc_decls)
in
let untrust_func =
String.concat ";\n\n" untrust_func_proto
in
(* Host-side proxies for calling into the enclave, one per ECALL. *)
let r_proxy_proto =
List.map (fun f -> generate_rproxy_prototype f.tf_fdecl) ec.tfunc_decls
in
(* generate_rproxy_prototype_sl_async yields [""] for non-switchless
   functions, so this list usually contains empty strings. *)
let r_proxy_proto_sl_async =
List.map (fun f -> generate_rproxy_prototype_sl_async f) ec.tfunc_decls
in
let r_proxy =
String.concat ";\n\n" (List.flatten r_proxy_proto)
in
let r_proxy_sl_async =
String.concat ";\n\n" (List.flatten r_proxy_proto_sl_async)
in
[
hfile_start ^ hfile_include;
c_start;
agent_id;
(* NOTE(review): r_proxy and r_proxy_sl_async are glued with no ";"
   between the two groups, and the empty entries in the async list turn
   into stray ";\n\n" runs in the output -- harmless to most C
   compilers, but confirm this is the intended header layout. *)
trust_fproto_com ^ r_proxy ^ r_proxy_sl_async ^ ";";
if (List.length ec.ufunc_decls <> 0) then untrust_fproto_com ^ untrust_func ^ ";"
else "/**** There is no untrusted function ****/";
c_end;
hfile_end;
]
| |
d57d08f4ff4e53d33c82ec421e5795e1cabd4fe134fb5fdd2734d6e6030227b9 | redbadger/karma-tracker | repositories.cljs | (ns karma-tracker-ui.views.repositories
(:require [re-frame.core :as re-frame]
[karma-tracker-ui.views.contribution-totals :refer [map-totals]]
[karma-tracker-ui.views.bar-chart :as bar-chart]))
(defn repository-bar-segment
  "Hiccup <span> for one coloured segment of a repository's bar.
  `segment-type` picks the BEM modifier class; the width comes from
  bar-chart/bar-style applied to the segment's :count and the scale."
  [segment-type total scale]
  (let [cnt (:count total)
        modifier (str "repository__bar--" (name segment-type))]
    [:span.bar-chart__bar.repository__bar
     {:class modifier
      :style (bar-chart/bar-style cnt scale)}]))
;; One row of the bar chart: a label linking to the repository (owner +
;; name + total count) followed by the stacked bar segments.
;; Note: the `repository` parameter shadows this fn's own name inside the
;; body -- intentional-looking, but easy to trip over when editing.
(defn repository [repository bar-scale]
[:div.repository
[:div.bar-chart__label
[:a.repository__link {:href (:link repository) :target "_blank"}
[:span.repository__owner (:owner repository)]
[:span.repository__name (:name repository)]]
[:span.bar-chart__value (-> repository :total :count)]]
;; map-totals (from contribution-totals) presumably applies
;; repository-bar-segment with bar-scale appended for each contribution
;; type -- confirm its calling convention for the vector form.
(into [:div.bar-chart__bar-container]
(map-totals [repository-bar-segment bar-scale] repository))])
;; "Repositories" section: top-10 repositories by contribution count,
;; rendered as horizontal bars. Returns a render fn closing over the
;; re-frame subscription (reagent form-2 style).
(defn repositories []
(let [repository-totals (re-frame/subscribe [:repository-totals 10])]
(fn []
;; Scale so the largest repository (first in the list) spans 100%.
;; NOTE(review): if the subscription yields an empty seq this becomes
;; (/ 100 nil) and throws -- confirm the sub always returns data.
(let [bar-scale (/ 100 (-> @repository-totals first :total :count))]
[:section.repositories
[:h2 "Repositories"]
[:p.explanation "Number of contributions to each repository"]
(into [:div.bar-chart]
(map #(repository % bar-scale) @repository-totals))]))))
| null | https://raw.githubusercontent.com/redbadger/karma-tracker/c5375f32f4cd0386f6bb1560d979b79bceea19e2/ui/src/cljs/karma_tracker_ui/views/repositories.cljs | clojure | (ns karma-tracker-ui.views.repositories
(:require [re-frame.core :as re-frame]
[karma-tracker-ui.views.contribution-totals :refer [map-totals]]
[karma-tracker-ui.views.bar-chart :as bar-chart]))
(defn repository-bar-segment
  "Hiccup <span> for one coloured segment of a repository's bar.
  `segment-type` picks the BEM modifier class; the width comes from
  bar-chart/bar-style applied to the segment's :count and the scale."
  [segment-type total scale]
  (let [cnt (:count total)
        modifier (str "repository__bar--" (name segment-type))]
    [:span.bar-chart__bar.repository__bar
     {:class modifier
      :style (bar-chart/bar-style cnt scale)}]))
;; One row of the bar chart: a label linking to the repository (owner +
;; name + total count) followed by the stacked bar segments.
;; Note: the `repository` parameter shadows this fn's own name inside the
;; body -- intentional-looking, but easy to trip over when editing.
(defn repository [repository bar-scale]
[:div.repository
[:div.bar-chart__label
[:a.repository__link {:href (:link repository) :target "_blank"}
[:span.repository__owner (:owner repository)]
[:span.repository__name (:name repository)]]
[:span.bar-chart__value (-> repository :total :count)]]
;; map-totals (from contribution-totals) presumably applies
;; repository-bar-segment with bar-scale appended for each contribution
;; type -- confirm its calling convention for the vector form.
(into [:div.bar-chart__bar-container]
(map-totals [repository-bar-segment bar-scale] repository))])
;; "Repositories" section: top-10 repositories by contribution count,
;; rendered as horizontal bars. Returns a render fn closing over the
;; re-frame subscription (reagent form-2 style).
(defn repositories []
(let [repository-totals (re-frame/subscribe [:repository-totals 10])]
(fn []
;; Scale so the largest repository (first in the list) spans 100%.
;; NOTE(review): if the subscription yields an empty seq this becomes
;; (/ 100 nil) and throws -- confirm the sub always returns data.
(let [bar-scale (/ 100 (-> @repository-totals first :total :count))]
[:section.repositories
[:h2 "Repositories"]
[:p.explanation "Number of contributions to each repository"]
(into [:div.bar-chart]
(map #(repository % bar-scale) @repository-totals))]))))
| |
987372f7f032d96d68b3a66a11f596d5044e0fdac5c134f8fd059d767b4b6aef | Bike/mother | core.lisp | ;;;; core.lisp
;;;; miscellaneous core combiners
(in-package #:mother)
(defun $set! (in-env ptree val)
  ;; ($define! ptree form) is ($set! (get-current-environment) ptree form)
  ;; this primitive however does not take a form, it takes an already-evaluated thing!
  ;; Destructures VAL against the parameter tree PTREE, binding each symbol
  ;; in IN-ENV via FLAT-LOOKUP; a %IGNORE node discards its value.
  ;; Bug fix: the NIL-mismatch error previously referenced the unbound
  ;; variable ARGS (a runtime error on that path); it now reports the
  ;; mismatched PTREE/VAL pair like the CONS branch does.
  (labels ((assign (ptree val)
             (etypecase ptree
               ;; NIL may only match the empty list.
               (null (unless (null val)
                       (error "could not match ~:a with ~a" ptree val)))
               (symbol (setf (flat-lookup in-env ptree) val))
               (%ignore)
               (cons
                (unless (consp val)
                  (error "could not match ~a with ~a" ptree val))
                (assign (car ptree) (car val))
                (assign (cdr ptree) (cdr val))))))
    (assign ptree val)))
| null | https://raw.githubusercontent.com/Bike/mother/be571bcc8ed2a9f6c65a5d73e5e711509499420b/core.lisp | lisp | core.lisp
miscellaneous core combiners
($define! ptree form) is ($set! (get-current-environment) ptree form)
this primitive however does not take a form, it takes an already-evaluated thing! |
(in-package #:mother)
(defun $set! (in-env ptree val)
  ;; Destructures the already-evaluated VAL against the parameter tree
  ;; PTREE, binding each symbol in IN-ENV via FLAT-LOOKUP; a %IGNORE node
  ;; discards its value.
  ;; Bug fix: the NIL-mismatch error previously referenced the unbound
  ;; variable ARGS (a runtime error on that path); it now reports the
  ;; mismatched PTREE/VAL pair like the CONS branch does.
  (labels ((assign (ptree val)
             (etypecase ptree
               ;; NIL may only match the empty list.
               (null (unless (null val)
                       (error "could not match ~:a with ~a" ptree val)))
               (symbol (setf (flat-lookup in-env ptree) val))
               (%ignore)
               (cons
                (unless (consp val)
                  (error "could not match ~a with ~a" ptree val))
                (assign (car ptree) (car val))
                (assign (cdr ptree) (cdr val))))))
    (assign ptree val)))
|
237e44f5555a9655ec418263105e8176f8b501c31e00819d0b95b104f42ea29d | janestreet/learn-ocaml-workshop | frogger.mli | open Scaffold
(** Game state for the Frogger exercise; representation hidden. *)
module World : sig
type t
end
(** Build the starting world -- presumably called once by the scaffold's
    driver at startup (confirm against Scaffold). *)
val create : unit -> World.t
(** Advance the world by one clock tick. *)
val tick : World.t -> World.t
(** React to a pressed key. *)
val handle_input : World.t -> Key.t -> World.t
(** React to a scaffold event (see Scaffold's Event type). *)
val handle_event : World.t -> Event.t -> World.t
(** Render the current world as a display list for the scaffold. *)
val draw : World.t -> Display_list.t
(** Whether the game is over. *)
val finished : World.t -> bool
| null | https://raw.githubusercontent.com/janestreet/learn-ocaml-workshop/1ba9576b48b48a892644eb20c201c2c4aa643c32/03-frogger/frogger.mli | ocaml | open Scaffold
(** Game state for the Frogger exercise; representation hidden. *)
module World : sig
type t
end
(** Build the starting world -- presumably called once by the scaffold's
    driver at startup (confirm against Scaffold). *)
val create : unit -> World.t
(** Advance the world by one clock tick. *)
val tick : World.t -> World.t
(** React to a pressed key. *)
val handle_input : World.t -> Key.t -> World.t
(** React to a scaffold event (see Scaffold's Event type). *)
val handle_event : World.t -> Event.t -> World.t
(** Render the current world as a display list for the scaffold. *)
val draw : World.t -> Display_list.t
(** Whether the game is over. *)
val finished : World.t -> bool
| |
607ceceb341876c919dd3acfc5c47edcbfb1d3294344de7cd0ae80bcd17d7703 | clojurewerkz/ogre | where_test.clj | (ns clojurewerkz.ogre.suite.where-test
(:refer-clojure :exclude [and count drop filter group-by key key identity iterate loop map max min next not or range repeat reverse shuffle sort])
(:require [clojurewerkz.ogre.core :refer :all]
[clojurewerkz.ogre.util :as util])
(:import (org.apache.tinkerpop.gremlin.structure T Vertex)
(org.apache.tinkerpop.gremlin.process.traversal P)))
(defn get_g_V_hasXageX_asXaX_out_in_hasXageX_asXbX_selectXa_bX_whereXa_eqXbXX
"g.V().has('age').as('a').out().in().has('age').as('b').select('a','b').where('a', eq('b'))"
[g]
;; Builds (does not execute) the Gremlin traversal in the docstring:
;; pairs of 'age'-carrying vertices a/b joined by an out-then-in hop,
;; filtered by where('a', eq('b')) to the pairs where a IS b.
(traverse g (V)
(has :age) (as :a)
(out)
(in)
(has :age) (as :b)
(select :a :b)
(where :a (P/eq "b"))))
(defn get_g_V_hasXageX_asXaX_out_in_hasXageX_asXbX_selectXa_bX_whereXa_neqXbXX
"g.V().has('age').as('a').out().in().has('age').as('b').select('a','b').where('a', neq('b'))"
[g]
;; Same shape as the eq variant above it in the suite, but keeps only
;; the pairs where a and b are DIFFERENT vertices (P/neq).
(traverse g (V)
(has :age) (as :a)
(out)
(in)
(has :age) (as :b)
(select :a :b)
(where :a (P/neq "b"))))
(defn get_g_V_hasXageX_asXaX_out_in_hasXageX_asXbX_selectXa_bX_whereXb_hasXname_markoXX
"g.V().has('age').as('a').out().in().has('age').as('b').select('a','b').where(as('b').has('name', 'marko'))"
[g]
(traverse g (V)
(has :age) (as :a)
(out)
(in)
(has :age) (as :b)
(select :a :b)
(where (__ (as :b) (has :name "marko")))))
(defn get_g_V_hasXageX_asXaX_out_in_hasXageX_asXbX_selectXa_bX_whereXa_outXknowsX_bX
"g.V().has('age').as('a').out().in().has('age').as('b').select('a','b').where(as('a').out('knows').as('b'))"
[g]
(traverse g (V)
(has :age) (as :a)
(out)
(in)
(has :age) (as :b)
(select :a :b)
(where (__ (as :a) (out :knows) (as :b)))))
(defn get_g_VX1X_asXaX_outXcreatedX_inXcreatedX_asXbX_whereXa_neqXbXX_name
"g.V(v1Id).as('a').out('created').in('created').as('b').where('a', neq('b')).values('name')"
[g v1Id]
(traverse g (V v1Id) (as :a)
(out :created)
(in :created) (as :b)
(where :a (P/neq "b"))
(values :name)))
(defn get_g_VX1X_asXaX_outXcreatedX_inXcreatedX_asXbX_whereXasXbX_outXcreatedX_hasXname_rippleXX_valuesXage_nameX
"g.V(v1Id).as('a').out('created').in('created').as('b').where(as('b').out('created').has('name', 'ripple')).values('age', 'name')"
[g v1Id]
(traverse g (V v1Id) (as :a)
(out :created)
(in :created) (as :b)
(where (__ (as :b) (out :created) (has :name "ripple")))
(values :age :name)))
(defn get_g_VX1X_asXaX_outXcreatedX_inXcreatedX_whereXeqXaXX_name
"g.V(v1Id).as('a').out('created').in('created').where(eq('a')).values('name')"
[g v1Id]
(traverse g (V v1Id) (as :a)
(out :created)
(in :created)
(where (P/eq "a"))
(values :name)))
(defn get_g_VX1X_asXaX_outXcreatedX_inXcreatedX_whereXneqXaXX_name
"g.V(v1Id).as('a').out('created').in('created').where(neq('a')).values('name')"
[g v1Id]
(traverse g (V v1Id) (as :a)
(out :created)
(in :created)
(where (P/neq "a"))
(values :name)))
(defn get_g_VX1X_out_aggregateXxX_out_whereXnotXwithinXaXXX
"g.V(v1Id).out().aggregate('x').out().where(not(within('x')))"
[g v1Id]
;; Two-hop neighbours of v1 that are NOT among its one-hop neighbours
;; (the one-hop set is collected under side-effect key "x").
;; NOTE(review): P/within receives a Clojure vector here, while
;; get_g_withSideEffectXa_josh_peterX... wraps its key in
;; util/str-array -- confirm the vector hits the intended overload.
(traverse g (V v1Id)
(out)
(aggregate :x)
(out)
(where (P/not (P/within ["x"])))))
(defn get_g_withSideEffectXa_graph_verticesX2XX_VX1X_out_whereXneqXaXX
"g.withSideEffect('a', g.V(v2Id).next()).V(v1Id).out().where(neq('a'))"
[g v1Id v2Id]
(traverse g (with-side-effect :a ^Vertex (traverse g (V v2Id) (next!)))
(V v1Id)
(out)
(where (P/neq "a"))))
(defn get_g_VX1X_repeatXbothEXcreatedX_whereXwithoutXeXX_aggregateXeX_otherVX_emit_path
"g.V(v1Id).repeat(bothE('created').where(without('e')).aggregate('e').otherV()).emit().path()"
[g v1Id]
(traverse g (V v1Id)
(repeat (__ (bothE :created) (where (P/without ["e"])) (aggregate :e) (otherV)))
(emit)
(path)))
(defn get_g_V_whereXnotXoutXcreatedXXX_name
"g.V().where(not(out('created'))).values('name')"
[g]
(traverse g (V)
(where (__ (not (__ (out :created)))))
(values :name)))
(defn get_g_V_asXaX_out_asXbX_whereXandXasXaX_outXknowsX_asXbX__orXasXbX_outXcreatedX_hasXname_rippleX__asXbX_inXknowsX_count_isXnotXeqX0XXXXX_selectXa_bX
"g.V().as('a').out().as('b').where(and(as('a').out('knows').as('b'),
or(as('b').out('created').has('name', 'ripple'),
as('b').in('knows').count().is(not(eq(0)))))).select('a','b')"
[g]
(traverse g (V) (as :a)
(out) (as :b)
(where (__ (and (__ (as :a) (out :knows) (as :b))
(__ (or (__ (as :b) (out :created) (has :name "ripple"))
(__ (as :b) (in :knows) (count) (is (P/not (P/eq 0)))))))))
(select :a :b)))
(defn get_g_V_whereXoutXcreatedX_and_outXknowsX_or_inXknowsXX_valuesXnameX
"g.V().where(out('created').and().out('knows').or().in('knows')).values('name')"
[g]
(traverse g (V)
(where (__ (out :created)
(and)
(out :knows)
(or)
(in :knows)))
(values :name)))
(defn get_g_V_asXaX_outXcreatedX_asXbX_whereXandXasXbX_in__notXasXaX_outXcreatedX_hasXname_rippleXXX_selectXa_bX
"g.V().as('a').out('created').as('b').where(and(as('b').in(), not(as('a').out('created').has('name', 'ripple')))).select('a','b')"
[g]
(traverse g (V) (as :a)
(out :created) (as :b)
(where (__ (and (__ (as :b) (in))
(__ (not (__ (as :a) (out :created) (has :name "ripple")))))))
(select :a :b)))
(defn get_g_V_asXaX_outXcreatedX_asXbX_inXcreatedX_asXcX_bothXknowsX_bothXknowsX_asXdX_whereXc__notXeqXaX_orXeqXdXXXX_selectXa_b_c_dX
"g.V().as('a').out('created').as('b').in('created').as('c').both('knows').both('knows').as('d').
where('c', P.not(P.eq('a').or(P.eq('d')))).select('a','b','c','d')"
[g]
(traverse g (V) (as :a)
(out :created) (as :b)
(in :created) (as :c)
(both :knows)
(both :knows) (as :d)
(where :c (P/not (.or (P/eq "a") (P/eq "d"))))
(select :a :b :c :d)))
(defn get_g_V_asXaX_out_asXbX_whereXin_count_isXeqX3XX_or_whereXoutXcreatedX_and_hasXlabel_personXXX_selectXa_bX
"g.V().as('a').out().as('b').where(as('b').in().count().is(eq(3)).or().where(as('b').out('created').and().as('b').has(T.label, 'person'))).select('a','b')"
[g]
(traverse g (V) (as :a)
(out) (as :b)
(where (__ (as :b) (in) (count) (is (P/eq 3))
(or)
(where (__ (as :b) (out :created) (and) (as :b) (has (T/label) "person")))))
(select :a :b)))
(defn get_g_V_asXaX_outEXcreatedX_asXbX_inV_asXcX_inXcreatedX_asXdX_whereXa_ltXbX_orXgtXcXX_andXneqXdXXX_byXageX_byXweightX_byXinXcreatedX_valuesXageX_minX_selectXa_c_dX
"g.V().as('a').outE('created').as('b').
inV().as('c').
in('created').as('d').
where('a', lt('b').or(gt('c')).and(neq('d'))).
by('age').
by('weight').
by(in('created').values('age').min()).
select('a', 'c', 'd').by('name')"
[g]
(traverse g (V) (as :a)
(outE :created) (as :b)
(inV) (as :c)
(in :created) (as :d)
(where :a (.and (.or (P/lt "b") (P/gt "c")) (P/neq "d")))
(by :age)
(by :weight)
(by (__ (in :created) (values :age) (min)))
(select :a :c :d)
(by :name)))
(defn get_g_V_asXaX_outEXcreatedX_asXbX_inV_asXcX_whereXa_gtXbX_orXeqXbXXX_byXageX_byXweightX_byXweightX_selectXa_cX_byXnameX
"g.V().as('a').outE('created').as('b').inV().as('c').where('a', gt('b').or(eq('b'))).by('age').by('weight').by('weight').<String>select('a', 'c').by('name')"
[g]
(traverse g (V) (as :a)
(outE :created) (as :b)
(inV) (as :c)
(where :a (.or (P/gt "b") (P/eq "b")))
(by :age)
(by :weight)
(by :weight)
(select :a :c)
(by :name)))
(defn get_g_V_asXaX_outXcreatedX_inXcreatedX_asXbX_whereXa_gtXbXX_byXageX_selectXa_bX_byXnameX
"g.V().as('a').out('created').in('created').as('b').where('a', gt('b')).by('age').<String>select('a', 'b').by('name')"
[g]
(traverse g (V) (as :a)
(out :created)
(in :created) (as :b)
(where :a (P/gt "b"))
(by :age)
(select :a :b)
(by :name)))
(defn get_g_VX1X_asXaX_out_hasXageX_whereXgtXaXX_byXageX_name
"g.V(v1Id).as('a').out().has('age').where(gt('a')).by('age').values('name')"
[g v1Id]
(traverse g (V v1Id) (as :a)
(out)
(has :age)
(where (P/gt "a"))
(by :age)
(values :name)))
(defn get_g_withSideEffectXa_josh_peterX_VX1X_outXcreatedX_inXcreatedX_name_whereXwithinXaXX
"g.withSideEffect('a', Arrays.asList('josh', 'peter')).V(v1Id).out('created').in('created').values('name').where(P.within('a'))"
[g v1Id]
;; Seeds side-effect "a" with a mutable java.util.ArrayList (matching
;; the Arrays.asList of the reference query), then keeps co-developer
;; names of v1 contained in that list. util/str-array presumably forces
;; the String[] overload of P.within so "a" is read as the side-effect
;; key -- confirm against TinkerPop's where(P) semantics.
(traverse g (with-side-effect :a (java.util.ArrayList. ["josh", "peter"]))
(V v1Id)
(out :created)
(in :created)
(values :name)
(where (P/within (util/str-array ["a"])))))
(defn get_g_V_asXaX_outXcreatedX_whereXasXaX_name_isXjoshXX_inXcreatedX_name
"g.V().as('a').out('created').where(as('a').values('name').is('josh')).in('created').values('name')"
[g]
(traverse g (V) (as :a)
(out :created)
(where (__ (as :a) (values :name) (is "josh")))
(in :created)
(values :name)))
| null | https://raw.githubusercontent.com/clojurewerkz/ogre/cfc5648881d509a55f8a951e01d7b2a166e71d17/test/clojure/clojurewerkz/ogre/suite/where_test.clj | clojure | (ns clojurewerkz.ogre.suite.where-test
(:refer-clojure :exclude [and count drop filter group-by key key identity iterate loop map max min next not or range repeat reverse shuffle sort])
(:require [clojurewerkz.ogre.core :refer :all]
[clojurewerkz.ogre.util :as util])
(:import (org.apache.tinkerpop.gremlin.structure T Vertex)
(org.apache.tinkerpop.gremlin.process.traversal P)))
(defn get_g_V_hasXageX_asXaX_out_in_hasXageX_asXbX_selectXa_bX_whereXa_eqXbXX
"g.V().has('age').as('a').out().in().has('age').as('b').select('a','b').where('a', eq('b'))"
[g]
(traverse g (V)
(has :age) (as :a)
(out)
(in)
(has :age) (as :b)
(select :a :b)
(where :a (P/eq "b"))))
(defn get_g_V_hasXageX_asXaX_out_in_hasXageX_asXbX_selectXa_bX_whereXa_neqXbXX
"g.V().has('age').as('a').out().in().has('age').as('b').select('a','b').where('a', neq('b'))"
[g]
(traverse g (V)
(has :age) (as :a)
(out)
(in)
(has :age) (as :b)
(select :a :b)
(where :a (P/neq "b"))))
(defn get_g_V_hasXageX_asXaX_out_in_hasXageX_asXbX_selectXa_bX_whereXb_hasXname_markoXX
"g.V().has('age').as('a').out().in().has('age').as('b').select('a','b').where(as('b').has('name', 'marko'))"
[g]
(traverse g (V)
(has :age) (as :a)
(out)
(in)
(has :age) (as :b)
(select :a :b)
(where (__ (as :b) (has :name "marko")))))
(defn get_g_V_hasXageX_asXaX_out_in_hasXageX_asXbX_selectXa_bX_whereXa_outXknowsX_bX
"g.V().has('age').as('a').out().in().has('age').as('b').select('a','b').where(as('a').out('knows').as('b'))"
[g]
(traverse g (V)
(has :age) (as :a)
(out)
(in)
(has :age) (as :b)
(select :a :b)
(where (__ (as :a) (out :knows) (as :b)))))
(defn get_g_VX1X_asXaX_outXcreatedX_inXcreatedX_asXbX_whereXa_neqXbXX_name
"g.V(v1Id).as('a').out('created').in('created').as('b').where('a', neq('b')).values('name')"
[g v1Id]
(traverse g (V v1Id) (as :a)
(out :created)
(in :created) (as :b)
(where :a (P/neq "b"))
(values :name)))
(defn get_g_VX1X_asXaX_outXcreatedX_inXcreatedX_asXbX_whereXasXbX_outXcreatedX_hasXname_rippleXX_valuesXage_nameX
"g.V(v1Id).as('a').out('created').in('created').as('b').where(as('b').out('created').has('name', 'ripple')).values('age', 'name')"
[g v1Id]
(traverse g (V v1Id) (as :a)
(out :created)
(in :created) (as :b)
(where (__ (as :b) (out :created) (has :name "ripple")))
(values :age :name)))
(defn get_g_VX1X_asXaX_outXcreatedX_inXcreatedX_whereXeqXaXX_name
"g.V(v1Id).as('a').out('created').in('created').where(eq('a')).values('name')"
[g v1Id]
(traverse g (V v1Id) (as :a)
(out :created)
(in :created)
(where (P/eq "a"))
(values :name)))
(defn get_g_VX1X_asXaX_outXcreatedX_inXcreatedX_whereXneqXaXX_name
"g.V(v1Id).as('a').out('created').in('created').where(neq('a')).values('name')"
[g v1Id]
(traverse g (V v1Id) (as :a)
(out :created)
(in :created)
(where (P/neq "a"))
(values :name)))
(defn get_g_VX1X_out_aggregateXxX_out_whereXnotXwithinXaXXX
"g.V(v1Id).out().aggregate('x').out().where(not(within('x')))"
[g v1Id]
(traverse g (V v1Id)
(out)
(aggregate :x)
(out)
(where (P/not (P/within ["x"])))))
(defn get_g_withSideEffectXa_graph_verticesX2XX_VX1X_out_whereXneqXaXX
"g.withSideEffect('a', g.V(v2Id).next()).V(v1Id).out().where(neq('a'))"
[g v1Id v2Id]
(traverse g (with-side-effect :a ^Vertex (traverse g (V v2Id) (next!)))
(V v1Id)
(out)
(where (P/neq "a"))))
(defn get_g_VX1X_repeatXbothEXcreatedX_whereXwithoutXeXX_aggregateXeX_otherVX_emit_path
"g.V(v1Id).repeat(bothE('created').where(without('e')).aggregate('e').otherV()).emit().path()"
[g v1Id]
(traverse g (V v1Id)
(repeat (__ (bothE :created) (where (P/without ["e"])) (aggregate :e) (otherV)))
(emit)
(path)))
(defn get_g_V_whereXnotXoutXcreatedXXX_name
"g.V().where(not(out('created'))).values('name')"
[g]
(traverse g (V)
(where (__ (not (__ (out :created)))))
(values :name)))
(defn get_g_V_asXaX_out_asXbX_whereXandXasXaX_outXknowsX_asXbX__orXasXbX_outXcreatedX_hasXname_rippleX__asXbX_inXknowsX_count_isXnotXeqX0XXXXX_selectXa_bX
"g.V().as('a').out().as('b').where(and(as('a').out('knows').as('b'),
or(as('b').out('created').has('name', 'ripple'),
as('b').in('knows').count().is(not(eq(0)))))).select('a','b')"
[g]
(traverse g (V) (as :a)
(out) (as :b)
(where (__ (and (__ (as :a) (out :knows) (as :b))
(__ (or (__ (as :b) (out :created) (has :name "ripple"))
(__ (as :b) (in :knows) (count) (is (P/not (P/eq 0)))))))))
(select :a :b)))
(defn get_g_V_whereXoutXcreatedX_and_outXknowsX_or_inXknowsXX_valuesXnameX
"g.V().where(out('created').and().out('knows').or().in('knows')).values('name')"
[g]
(traverse g (V)
(where (__ (out :created)
(and)
(out :knows)
(or)
(in :knows)))
(values :name)))
(defn get_g_V_asXaX_outXcreatedX_asXbX_whereXandXasXbX_in__notXasXaX_outXcreatedX_hasXname_rippleXXX_selectXa_bX
"g.V().as('a').out('created').as('b').where(and(as('b').in(), not(as('a').out('created').has('name', 'ripple')))).select('a','b')"
[g]
(traverse g (V) (as :a)
(out :created) (as :b)
(where (__ (and (__ (as :b) (in))
(__ (not (__ (as :a) (out :created) (has :name "ripple")))))))
(select :a :b)))
(defn get_g_V_asXaX_outXcreatedX_asXbX_inXcreatedX_asXcX_bothXknowsX_bothXknowsX_asXdX_whereXc__notXeqXaX_orXeqXdXXXX_selectXa_b_c_dX
"g.V().as('a').out('created').as('b').in('created').as('c').both('knows').both('knows').as('d').
where('c', P.not(P.eq('a').or(P.eq('d')))).select('a','b','c','d')"
[g]
(traverse g (V) (as :a)
(out :created) (as :b)
(in :created) (as :c)
(both :knows)
(both :knows) (as :d)
(where :c (P/not (.or (P/eq "a") (P/eq "d"))))
(select :a :b :c :d)))
(defn get_g_V_asXaX_out_asXbX_whereXin_count_isXeqX3XX_or_whereXoutXcreatedX_and_hasXlabel_personXXX_selectXa_bX
"g.V().as('a').out().as('b').where(as('b').in().count().is(eq(3)).or().where(as('b').out('created').and().as('b').has(T.label, 'person'))).select('a','b')"
[g]
(traverse g (V) (as :a)
(out) (as :b)
(where (__ (as :b) (in) (count) (is (P/eq 3))
(or)
(where (__ (as :b) (out :created) (and) (as :b) (has (T/label) "person")))))
(select :a :b)))
(defn get_g_V_asXaX_outEXcreatedX_asXbX_inV_asXcX_inXcreatedX_asXdX_whereXa_ltXbX_orXgtXcXX_andXneqXdXXX_byXageX_byXweightX_byXinXcreatedX_valuesXageX_minX_selectXa_c_dX
"g.V().as('a').outE('created').as('b').
inV().as('c').
in('created').as('d').
where('a', lt('b').or(gt('c')).and(neq('d'))).
by('age').
by('weight').
by(in('created').values('age').min()).
select('a', 'c', 'd').by('name')"
[g]
(traverse g (V) (as :a)
(outE :created) (as :b)
(inV) (as :c)
(in :created) (as :d)
(where :a (.and (.or (P/lt "b") (P/gt "c")) (P/neq "d")))
(by :age)
(by :weight)
(by (__ (in :created) (values :age) (min)))
(select :a :c :d)
(by :name)))
(defn get_g_V_asXaX_outEXcreatedX_asXbX_inV_asXcX_whereXa_gtXbX_orXeqXbXXX_byXageX_byXweightX_byXweightX_selectXa_cX_byXnameX
"g.V().as('a').outE('created').as('b').inV().as('c').where('a', gt('b').or(eq('b'))).by('age').by('weight').by('weight').<String>select('a', 'c').by('name')"
[g]
(traverse g (V) (as :a)
(outE :created) (as :b)
(inV) (as :c)
(where :a (.or (P/gt "b") (P/eq "b")))
(by :age)
(by :weight)
(by :weight)
(select :a :c)
(by :name)))
(defn get_g_V_asXaX_outXcreatedX_inXcreatedX_asXbX_whereXa_gtXbXX_byXageX_selectXa_bX_byXnameX
"g.V().as('a').out('created').in('created').as('b').where('a', gt('b')).by('age').<String>select('a', 'b').by('name')"
[g]
(traverse g (V) (as :a)
(out :created)
(in :created) (as :b)
(where :a (P/gt "b"))
(by :age)
(select :a :b)
(by :name)))
(defn get_g_VX1X_asXaX_out_hasXageX_whereXgtXaXX_byXageX_name
"g.V(v1Id).as('a').out().has('age').where(gt('a')).by('age').values('name')"
[g v1Id]
(traverse g (V v1Id) (as :a)
(out)
(has :age)
(where (P/gt "a"))
(by :age)
(values :name)))
(defn get_g_withSideEffectXa_josh_peterX_VX1X_outXcreatedX_inXcreatedX_name_whereXwithinXaXX
"g.withSideEffect('a', Arrays.asList('josh', 'peter')).V(v1Id).out('created').in('created').values('name').where(P.within('a'))"
[g v1Id]
(traverse g (with-side-effect :a (java.util.ArrayList. ["josh", "peter"]))
(V v1Id)
(out :created)
(in :created)
(values :name)
(where (P/within (util/str-array ["a"])))))
(defn get_g_V_asXaX_outXcreatedX_whereXasXaX_name_isXjoshXX_inXcreatedX_name
"g.V().as('a').out('created').where(as('a').values('name').is('josh')).in('created').values('name')"
[g]
(traverse g (V) (as :a)
(out :created)
(where (__ (as :a) (values :name) (is "josh")))
(in :created)
(values :name)))
| |
7ab5436a99b52b7c0bc8e85bc15c3863084d457d22bf139f840c291209f068b9 | input-output-hk/cardano-ledger | Translation.hs | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE TypeApplications #
module Test.Cardano.Ledger.Mary.Translation (
maryTranslationTests,
maryEncodeDecodeTests,
)
where
import Cardano.Ledger.Allegra (Allegra)
import Cardano.Ledger.Binary
import Cardano.Ledger.Core
import Cardano.Ledger.Mary (Mary)
import Cardano.Ledger.Mary.Translation ()
import Cardano.Ledger.Shelley (Shelley)
import qualified Cardano.Ledger.Shelley.API as S
import Test.Cardano.Ledger.AllegraEraGen ()
import Test.Cardano.Ledger.Binary.RoundTrip
import Test.Cardano.Ledger.Shelley.Generator.ShelleyEraGen ()
import Test.Cardano.Ledger.Shelley.Serialisation.EraIndepGenerators ()
import Test.Cardano.Ledger.Shelley.Serialisation.Generators ()
import Test.Cardano.Ledger.ShelleyMA.Serialisation.Generators ()
import Test.Cardano.Ledger.TranslationTools (translateEraEncCBOR, translateEraEncoding)
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.HUnit (Assertion)
import Test.Tasty.QuickCheck (testProperty)
maryEncodeDecodeTests :: TestTree
maryEncodeDecodeTests =
testGroup
"encoded allegra types can be decoded as mary types"
[ testProperty
"decoding metadata"
( embedTripAnnExpectation @(TxAuxData Allegra) @(TxAuxData Mary)
(eraProtVerLow @Shelley)
(eraProtVerLow @Allegra)
(\_ _ -> pure ())
)
]
maryTranslationTests :: TestTree
maryTranslationTests =
testGroup
"Mary translation binary compatibiliby tests"
[ testProperty "Tx compatibility" $
translateEraEncoding @Mary @S.ShelleyTx () toCBOR toCBOR
, testProperty "ProposedPPUpdates compatibility" (test @S.ProposedPPUpdates)
, testProperty "ShelleyPPUPState compatibility" $
translateEraEncoding @Mary @S.ShelleyPPUPState () toCBOR toCBOR
, testProperty "TxOut compatibility" (test @S.ShelleyTxOut)
, testProperty "UTxO compatibility" $
translateEraEncoding @Mary @S.UTxO () toCBOR toCBOR
, testProperty "UTxOState compatibility" $
translateEraEncoding @Mary @S.UTxOState () toCBOR toCBOR
, testProperty "LedgerState compatibility" $
translateEraEncoding @Mary @S.LedgerState () toCBOR toCBOR
, testProperty "EpochState compatibility" $
translateEraEncoding @Mary @S.EpochState () toCBOR toCBOR
, testProperty "ShelleyTxWits compatibility" $
translateEraEncoding @Mary @S.ShelleyTxWits () toCBOR toCBOR
, testProperty "Update compatibility" (test @S.Update)
]
test ::
forall f.
( EncCBOR (f Allegra)
, EncCBOR (f Mary)
, TranslateEra Mary f
, Show (TranslationError Mary f)
) =>
f Allegra ->
Assertion
test = translateEraEncCBOR ([] :: [Mary]) ()
| null | https://raw.githubusercontent.com/input-output-hk/cardano-ledger/b9aa1ad1728c0ceeca62657ec94d6d099896c052/eras/shelley-ma/test-suite/test/Test/Cardano/Ledger/Mary/Translation.hs | haskell | # LANGUAGE RankNTypes # | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE TypeApplications #
module Test.Cardano.Ledger.Mary.Translation (
maryTranslationTests,
maryEncodeDecodeTests,
)
where
import Cardano.Ledger.Allegra (Allegra)
import Cardano.Ledger.Binary
import Cardano.Ledger.Core
import Cardano.Ledger.Mary (Mary)
import Cardano.Ledger.Mary.Translation ()
import Cardano.Ledger.Shelley (Shelley)
import qualified Cardano.Ledger.Shelley.API as S
import Test.Cardano.Ledger.AllegraEraGen ()
import Test.Cardano.Ledger.Binary.RoundTrip
import Test.Cardano.Ledger.Shelley.Generator.ShelleyEraGen ()
import Test.Cardano.Ledger.Shelley.Serialisation.EraIndepGenerators ()
import Test.Cardano.Ledger.Shelley.Serialisation.Generators ()
import Test.Cardano.Ledger.ShelleyMA.Serialisation.Generators ()
import Test.Cardano.Ledger.TranslationTools (translateEraEncCBOR, translateEraEncoding)
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.HUnit (Assertion)
import Test.Tasty.QuickCheck (testProperty)
maryEncodeDecodeTests :: TestTree
maryEncodeDecodeTests =
testGroup
"encoded allegra types can be decoded as mary types"
[ testProperty
"decoding metadata"
( embedTripAnnExpectation @(TxAuxData Allegra) @(TxAuxData Mary)
(eraProtVerLow @Shelley)
(eraProtVerLow @Allegra)
(\_ _ -> pure ())
)
]
maryTranslationTests :: TestTree
maryTranslationTests =
testGroup
"Mary translation binary compatibiliby tests"
[ testProperty "Tx compatibility" $
translateEraEncoding @Mary @S.ShelleyTx () toCBOR toCBOR
, testProperty "ProposedPPUpdates compatibility" (test @S.ProposedPPUpdates)
, testProperty "ShelleyPPUPState compatibility" $
translateEraEncoding @Mary @S.ShelleyPPUPState () toCBOR toCBOR
, testProperty "TxOut compatibility" (test @S.ShelleyTxOut)
, testProperty "UTxO compatibility" $
translateEraEncoding @Mary @S.UTxO () toCBOR toCBOR
, testProperty "UTxOState compatibility" $
translateEraEncoding @Mary @S.UTxOState () toCBOR toCBOR
, testProperty "LedgerState compatibility" $
translateEraEncoding @Mary @S.LedgerState () toCBOR toCBOR
, testProperty "EpochState compatibility" $
translateEraEncoding @Mary @S.EpochState () toCBOR toCBOR
, testProperty "ShelleyTxWits compatibility" $
translateEraEncoding @Mary @S.ShelleyTxWits () toCBOR toCBOR
, testProperty "Update compatibility" (test @S.Update)
]
test ::
forall f.
( EncCBOR (f Allegra)
, EncCBOR (f Mary)
, TranslateEra Mary f
, Show (TranslationError Mary f)
) =>
f Allegra ->
Assertion
test = translateEraEncCBOR ([] :: [Mary]) ()
|
9e056538b25e5f4eac2b18fded64bc6c485f5640962e94f51debcad7ec99b2ec | coq/bot | Utils.ml | open Base
open Bot_info
open Cohttp
open Cohttp_lwt_unix
open Lwt
let f = Printf.sprintf
let string_match ~regexp string =
try
let _ = Str.search_forward (Str.regexp regexp) string 0 in
true
with Stdlib.Not_found -> false
let headers ~bot_info header_list =
Header.init ()
|> (fun headers -> Header.add_list headers header_list)
|> fun headers -> Header.add headers "User-Agent" bot_info.name
let print_response (resp, body) =
let code = resp |> Response.status |> Code.code_of_status in
Lwt_io.printf "Response code: %d.\n" code
>>= fun () ->
if code < 200 && code > 299 then
resp |> Response.headers |> Header.to_string
|> Lwt_io.printf "Headers: %s\n"
>>= fun () ->
body |> Cohttp_lwt.Body.to_string >>= Lwt_io.printf "Body:\n%s\n"
else Lwt.return_unit
let send_request ~bot_info ~body ~uri header_list =
let headers = headers header_list ~bot_info in
Client.post ~body ~headers uri >>= print_response
let handle_json action body =
try
let json = Yojson.Basic.from_string body in
(* print_endline "JSON decoded."; *)
Ok (action json)
with
| Yojson.Json_error err ->
Error (f "Json error: %s\n" err)
| Yojson.Basic.Util.Type_error (err, _) ->
Error (f "Json type error: %s\n" err)
GitHub specific
let project_api_preview_header =
[("Accept", "application/vnd.github.inertia-preview+json")]
let app_api_preview_header =
[("Accept", "application/vnd.github.machine-man-preview+json")]
let github_header bot_info =
[("Authorization", "bearer " ^ github_token bot_info)]
let generic_get ~bot_info relative_uri ?(header_list = []) json_handler =
let uri = "/" ^ relative_uri |> Uri.of_string in
let headers = headers (header_list @ github_header bot_info) ~bot_info in
Client.get ~headers uri
>>= (fun (_response, body) -> Cohttp_lwt.Body.to_string body)
>|= handle_json json_handler
| null | https://raw.githubusercontent.com/coq/bot/307bb9eea5bf8253926c1c2a4c4d3836f3679a09/bot-components/Utils.ml | ocaml | print_endline "JSON decoded."; | open Base
open Bot_info
open Cohttp
open Cohttp_lwt_unix
open Lwt
let f = Printf.sprintf
let string_match ~regexp string =
try
let _ = Str.search_forward (Str.regexp regexp) string 0 in
true
with Stdlib.Not_found -> false
let headers ~bot_info header_list =
Header.init ()
|> (fun headers -> Header.add_list headers header_list)
|> fun headers -> Header.add headers "User-Agent" bot_info.name
let print_response (resp, body) =
let code = resp |> Response.status |> Code.code_of_status in
Lwt_io.printf "Response code: %d.\n" code
>>= fun () ->
if code < 200 && code > 299 then
resp |> Response.headers |> Header.to_string
|> Lwt_io.printf "Headers: %s\n"
>>= fun () ->
body |> Cohttp_lwt.Body.to_string >>= Lwt_io.printf "Body:\n%s\n"
else Lwt.return_unit
let send_request ~bot_info ~body ~uri header_list =
let headers = headers header_list ~bot_info in
Client.post ~body ~headers uri >>= print_response
let handle_json action body =
try
let json = Yojson.Basic.from_string body in
Ok (action json)
with
| Yojson.Json_error err ->
Error (f "Json error: %s\n" err)
| Yojson.Basic.Util.Type_error (err, _) ->
Error (f "Json type error: %s\n" err)
GitHub specific
let project_api_preview_header =
[("Accept", "application/vnd.github.inertia-preview+json")]
let app_api_preview_header =
[("Accept", "application/vnd.github.machine-man-preview+json")]
let github_header bot_info =
[("Authorization", "bearer " ^ github_token bot_info)]
let generic_get ~bot_info relative_uri ?(header_list = []) json_handler =
let uri = "/" ^ relative_uri |> Uri.of_string in
let headers = headers (header_list @ github_header bot_info) ~bot_info in
Client.get ~headers uri
>>= (fun (_response, body) -> Cohttp_lwt.Body.to_string body)
>|= handle_json json_handler
|
00dc0c273e8982eddb6a88e891f0c22134feb1c9dff705547ec5ee957189c74f | haskell-tools/haskell-tools | ExtractedFormatting_res.hs | module Refactor.ExtractBinding.ExtractedFormatting where
stms
= extracted
where extracted = id
. id
| null | https://raw.githubusercontent.com/haskell-tools/haskell-tools/b1189ab4f63b29bbf1aa14af4557850064931e32/src/builtin-refactorings/examples/Refactor/ExtractBinding/ExtractedFormatting_res.hs | haskell | module Refactor.ExtractBinding.ExtractedFormatting where
stms
= extracted
where extracted = id
. id
| |
46b8d459d908f3f3fc532f226ceeda28ddf25e64476bff85fd095e4e315ebbac | aiya000/haskell-examples | Proxy.hs | {-# LANGUAGE DeriveDataTypeable #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE StandaloneDeriving #
import Data.Proxy (Proxy(..))
import Data.Typeable
data Saya = Saya
deriving (Typeable)
-- Values of `a` cannot be getten
data Void1 a
deriving instance Typeable a => Typeable (Void1 a)
getTypeNameWithoutValue :: Typeable a => Void1 a -> String
getTypeNameWithoutValue (_ :: Void1 a) = show $ typeRep (Proxy :: Proxy a)
main :: IO ()
main = print $ getTypeNameWithoutValue (undefined :: Void1 Saya)
| null | https://raw.githubusercontent.com/aiya000/haskell-examples/a337ba0e86be8bb1333e7eea852ba5fa1d177d8a/Data/Typeable/Proxy.hs | haskell | # LANGUAGE DeriveDataTypeable #
Values of `a` cannot be getten | # LANGUAGE ScopedTypeVariables #
# LANGUAGE StandaloneDeriving #
import Data.Proxy (Proxy(..))
import Data.Typeable
data Saya = Saya
deriving (Typeable)
data Void1 a
deriving instance Typeable a => Typeable (Void1 a)
getTypeNameWithoutValue :: Typeable a => Void1 a -> String
getTypeNameWithoutValue (_ :: Void1 a) = show $ typeRep (Proxy :: Proxy a)
main :: IO ()
main = print $ getTypeNameWithoutValue (undefined :: Void1 Saya)
|
ee59eee03f82fcd71b44b6e1d6f8aef5c736d80d5914229afbf377eee13ec3d8 | awakesecurity/grpc-mqtt | Response.hs | # LANGUAGE RecordWildCards #
-- |
-- Module : Network.GRPC.MQTT.Message.Response
Copyright : ( c ) Arista Networks , 2022 - 2023
License : Apache License 2.0 , see COPYING
--
-- Stability : stable
Portability : non - portable ( GHC extensions )
--
-- @since 1.0.0
module Network.GRPC.MQTT.Message.Response
( -- * Wire Encoding
-- * Wire Decoding
unwrapResponse,
unwrapUnaryResponse,
unwrapClientStreamResponse,
unwrapServerStreamResponse,
unwrapBiDiStreamResponse,
-- * Response Handlers
makeResponseSender,
makeErrorResponseSender,
makeNormalResponseReader,
makeClientResponseReader,
makeServerResponseReader,
makeBiDiResponseReader,
)
where
--------------------------------------------------------------------------------
import Control.Concurrent.STM.TQueue (TQueue)
import Control.Monad.Except (MonadError, throwError)
import Data.Traversable (for)
import Network.GRPC.HighLevel as HL
( MetadataMap,
StatusCode,
StatusDetails (..),
)
import Network.GRPC.HighLevel.Client
( ClientResult
( ClientBiDiResponse,
ClientNormalResponse,
ClientReaderResponse,
ClientWriterResponse
),
GRPCMethodType (BiDiStreaming, ClientStreaming, Normal, ServerStreaming),
)
import Network.MQTT.Client (MQTTClient, QoS (QoS1), publishq)
import Network.MQTT.Topic (Topic)
import Proto3.Suite.Class (Message)
import Relude
import UnliftIO (MonadUnliftIO)
--------------------------------------------------------------------------------
import Network.GRPC.MQTT.Message qualified as Message
import Network.GRPC.MQTT.Message.Packet qualified as Packet
import Network.GRPC.MQTT.Serial (WireDecodeOptions, WireEncodeOptions)
import Network.GRPC.MQTT.Types (MQTTResult (GRPCResult, MQTTError), RemoteResult (..))
import Network.GRPC.MQTT.Wrapping qualified as Wrapping
import Network.GRPC.MQTT.Wrapping (parseErrorToRCE)
import Network.GRPC.LowLevel (NormalRequestResult (..))
import Proto.Mqtt
( MQTTResponse (..),
RemoteError,
ResponseBody (ResponseBody, responseBodyValue),
WrappedResponse (WrappedResponse),
WrappedResponseOrError
( WrappedResponseOrErrorError,
WrappedResponseOrErrorResponse
),
)
Response - Wire Encoding ----------------------------------------------------
wireEncodeResponse ::
WireEncodeOptions ->
RemoteResult s ->
ByteString
wireEncodeResponse options result = Message.toWireEncoded options (wrapResponse result)
wireEncodeErrorResponse ::
WireEncodeOptions ->
RemoteError ->
ByteString
wireEncodeErrorResponse options err =
let response :: WrappedResponse
response = WrappedResponse (Just (WrappedResponseOrErrorError err))
in Message.toWireEncoded options response
wrapResponse :: RemoteResult s -> WrappedResponse
wrapResponse res =
WrappedResponse . Just $
case res of
RemoteNormalResult NormalRequestResult{rspBody, initMD, trailMD, rspCode, details} ->
WrappedResponseOrErrorResponse $
MQTTResponse
(Just $ ResponseBody rspBody)
(Just $ Wrapping.fromMetadataMap initMD)
(Just $ Wrapping.fromMetadataMap trailMD)
(Wrapping.fromStatusCode rspCode)
(Wrapping.fromStatusDetails details)
RemoteWriterResult (rspBody, initMD, trailMD, rspCode, details) ->
WrappedResponseOrErrorResponse $
MQTTResponse
(ResponseBody <$> rspBody)
(Just $ Wrapping.fromMetadataMap initMD)
(Just $ Wrapping.fromMetadataMap trailMD)
(Wrapping.fromStatusCode rspCode)
(Wrapping.fromStatusDetails details)
RemoteReaderResult (rspMetadata, statusCode, details) ->
WrappedResponseOrErrorResponse $
MQTTResponse
Nothing
Nothing
(Just $ Wrapping.fromMetadataMap rspMetadata)
(Wrapping.fromStatusCode statusCode)
(Wrapping.fromStatusDetails details)
RemoteBiDiResult (rspMetadata, statusCode, details) ->
WrappedResponseOrErrorResponse $
MQTTResponse
Nothing
Nothing
(Just $ Wrapping.fromMetadataMap rspMetadata)
(Wrapping.fromStatusCode statusCode)
(Wrapping.fromStatusDetails details)
RemoteErrorResult err ->
WrappedResponseOrErrorError $ Wrapping.toRemoteError err
Response - Wire Decoding ----------------------------------------------------
data ParsedMQTTResponse response = ParsedMQTTResponse
{ responseBody :: Maybe response
, initMetadata :: MetadataMap
, trailMetadata :: MetadataMap
, statusCode :: StatusCode
, statusDetails :: StatusDetails
}
deriving stock (Functor, Foldable, Traversable)
unwrapResponse ::
MonadError RemoteError m =>
WireDecodeOptions ->
ByteString ->
m (ParsedMQTTResponse ByteString)
unwrapResponse options bytes = do
MQTTResponse{..} <-
case Message.fromWireEncoded @_ @WrappedResponse options bytes of
Left err -> throwError (Message.toRemoteError err)
Right (WrappedResponse Nothing) -> throwError (Wrapping.remoteError "Empty response")
Right (WrappedResponse (Just (WrappedResponseOrErrorError err))) -> throwError err
Right (WrappedResponse (Just (WrappedResponseOrErrorResponse rsp))) -> pure rsp
statusCode <-
case Wrapping.toStatusCode mqttresponseResponseCode of
Nothing -> throwError (Wrapping.remoteError ("Invalid reponse code: " <> Relude.show mqttresponseResponseCode))
Just sc -> pure sc
return $
ParsedMQTTResponse
(responseBodyValue <$> mqttresponseBody)
(maybe mempty Wrapping.toMetadataMap mqttresponseInitMetamap)
(maybe mempty Wrapping.toMetadataMap mqttresponseTrailMetamap)
statusCode
(Wrapping.toStatusDetails mqttresponseDetails)
decodeResponse ::
(MonadError RemoteError m, Message a) =>
WireDecodeOptions ->
ByteString ->
m (ParsedMQTTResponse a)
decodeResponse options bytes = do
response <- unwrapResponse options bytes
for response \body -> do
case Message.fromWireEncoded options body of
Left err -> throwError (Message.toRemoteError err)
Right rx -> pure rx
unwrapUnaryResponse ::
forall m rsp.
(MonadError RemoteError m, Message rsp) =>
WireDecodeOptions ->
ByteString ->
m (MQTTResult 'Normal rsp)
unwrapUnaryResponse options =
fmap toNormalResult . decodeResponse options
where
toNormalResult :: ParsedMQTTResponse rsp -> MQTTResult 'Normal rsp
toNormalResult ParsedMQTTResponse{..} =
case responseBody of
Nothing -> MQTTError "Empty response body"
(Just body) -> GRPCResult $ ClientNormalResponse body initMetadata trailMetadata statusCode statusDetails
unwrapClientStreamResponse ::
forall m rsp.
(MonadError RemoteError m, Message rsp) =>
WireDecodeOptions ->
ByteString ->
m (MQTTResult 'ClientStreaming rsp)
unwrapClientStreamResponse options =
fmap toClientStreamResult . decodeResponse options
where
toClientStreamResult :: ParsedMQTTResponse rsp -> MQTTResult 'ClientStreaming rsp
toClientStreamResult ParsedMQTTResponse{..} =
GRPCResult $ ClientWriterResponse responseBody initMetadata trailMetadata statusCode statusDetails
unwrapServerStreamResponse ::
forall m rsp.
(MonadError RemoteError m, Message rsp) =>
WireDecodeOptions ->
ByteString ->
m (MQTTResult 'ServerStreaming rsp)
unwrapServerStreamResponse options =
fmap toServerStreamResult . decodeResponse options
where
toServerStreamResult :: ParsedMQTTResponse rsp -> MQTTResult 'ServerStreaming rsp
toServerStreamResult ParsedMQTTResponse{..} =
GRPCResult $ ClientReaderResponse trailMetadata statusCode statusDetails
unwrapBiDiStreamResponse ::
forall m rsp.
(MonadError RemoteError m, Message rsp) =>
WireDecodeOptions ->
ByteString ->
m (MQTTResult 'BiDiStreaming rsp)
unwrapBiDiStreamResponse options =
fmap toBiDiStreamResult . decodeResponse options
where
toBiDiStreamResult :: ParsedMQTTResponse rsp -> MQTTResult 'BiDiStreaming rsp
toBiDiStreamResult ParsedMQTTResponse{..} =
GRPCResult $ ClientBiDiResponse trailMetadata statusCode statusDetails
-- Response Handlers -----------------------------------------------------------
makeResponseSender ::
MonadUnliftIO m =>
MQTTClient ->
Topic ->
Word32 ->
Maybe Natural ->
WireEncodeOptions ->
RemoteResult s ->
m ()
makeResponseSender client topic packetSizeLimit rateLimit options response =
let message :: ByteString
message = wireEncodeResponse options response
in Packet.makePacketSender packetSizeLimit rateLimit (liftIO . publish) message
where
publish :: ByteString -> IO ()
publish bytes = publishq client topic (fromStrict bytes) False QoS1 []
makeErrorResponseSender ::
MonadUnliftIO m =>
MQTTClient ->
Topic ->
Word32 ->
Maybe Natural ->
WireEncodeOptions ->
RemoteError ->
m ()
makeErrorResponseSender client topic packetSizeLimit rateLimit options err = do
let message :: ByteString
message = wireEncodeErrorResponse options err
in Packet.makePacketSender packetSizeLimit rateLimit (liftIO . publish) message
where
publish :: ByteString -> IO ()
publish bytes = publishq client topic (fromStrict bytes) False QoS1 []
makeNormalResponseReader ::
(MonadIO m, MonadError RemoteError m, Message a) =>
TQueue ByteString ->
WireDecodeOptions ->
m (MQTTResult 'Normal a)
makeNormalResponseReader channel options = do
runExceptT (Packet.makePacketReader channel) >>= \case
Left err -> throwError (parseErrorToRCE err)
Right bs -> unwrapUnaryResponse options bs
makeClientResponseReader ::
(MonadIO m, MonadError RemoteError m, Message a) =>
TQueue ByteString ->
WireDecodeOptions ->
m (MQTTResult 'ClientStreaming a)
makeClientResponseReader channel options = do
runExceptT (Packet.makePacketReader channel) >>= \case
Left err -> throwError (parseErrorToRCE err)
Right bs -> unwrapClientStreamResponse options bs
makeServerResponseReader ::
(MonadIO m, MonadError RemoteError m, Message a) =>
TQueue ByteString ->
WireDecodeOptions ->
m (MQTTResult 'ServerStreaming a)
makeServerResponseReader channel options = do
runExceptT (Packet.makePacketReader channel) >>= \case
Left err -> throwError (parseErrorToRCE err)
Right bs -> unwrapServerStreamResponse options bs
makeBiDiResponseReader ::
(MonadIO m, MonadError RemoteError m, Message a) =>
TQueue ByteString ->
WireDecodeOptions ->
m (MQTTResult 'BiDiStreaming a)
makeBiDiResponseReader channel options =
runExceptT (Packet.makePacketReader channel) >>= \case
Left err -> throwError (parseErrorToRCE err)
Right bs -> unwrapBiDiStreamResponse options bs
| null | https://raw.githubusercontent.com/awakesecurity/grpc-mqtt/fbde6f3fe90e82260469bab922c5ebb9f0b40e95/src/Network/GRPC/MQTT/Message/Response.hs | haskell | |
Module : Network.GRPC.MQTT.Message.Response
Stability : stable
@since 1.0.0
* Wire Encoding
* Wire Decoding
* Response Handlers
------------------------------------------------------------------------------
------------------------------------------------------------------------------
--------------------------------------------------
--------------------------------------------------
Response Handlers ----------------------------------------------------------- | # LANGUAGE RecordWildCards #
Copyright : ( c ) Arista Networks , 2022 - 2023
License : Apache License 2.0 , see COPYING
Portability : non - portable ( GHC extensions )
module Network.GRPC.MQTT.Message.Response
unwrapResponse,
unwrapUnaryResponse,
unwrapClientStreamResponse,
unwrapServerStreamResponse,
unwrapBiDiStreamResponse,
makeResponseSender,
makeErrorResponseSender,
makeNormalResponseReader,
makeClientResponseReader,
makeServerResponseReader,
makeBiDiResponseReader,
)
where
import Control.Concurrent.STM.TQueue (TQueue)
import Control.Monad.Except (MonadError, throwError)
import Data.Traversable (for)
import Network.GRPC.HighLevel as HL
( MetadataMap,
StatusCode,
StatusDetails (..),
)
import Network.GRPC.HighLevel.Client
( ClientResult
( ClientBiDiResponse,
ClientNormalResponse,
ClientReaderResponse,
ClientWriterResponse
),
GRPCMethodType (BiDiStreaming, ClientStreaming, Normal, ServerStreaming),
)
import Network.MQTT.Client (MQTTClient, QoS (QoS1), publishq)
import Network.MQTT.Topic (Topic)
import Proto3.Suite.Class (Message)
import Relude
import UnliftIO (MonadUnliftIO)
import Network.GRPC.MQTT.Message qualified as Message
import Network.GRPC.MQTT.Message.Packet qualified as Packet
import Network.GRPC.MQTT.Serial (WireDecodeOptions, WireEncodeOptions)
import Network.GRPC.MQTT.Types (MQTTResult (GRPCResult, MQTTError), RemoteResult (..))
import Network.GRPC.MQTT.Wrapping qualified as Wrapping
import Network.GRPC.MQTT.Wrapping (parseErrorToRCE)
import Network.GRPC.LowLevel (NormalRequestResult (..))
import Proto.Mqtt
( MQTTResponse (..),
RemoteError,
ResponseBody (ResponseBody, responseBodyValue),
WrappedResponse (WrappedResponse),
WrappedResponseOrError
( WrappedResponseOrErrorError,
WrappedResponseOrErrorResponse
),
)
-- | Serialize a 'RemoteResult' into its MQTT wire representation: the result
-- is first lifted into a 'WrappedResponse' and then encoded according to the
-- supplied encoding options.
wireEncodeResponse ::
  WireEncodeOptions ->
  RemoteResult s ->
  ByteString
wireEncodeResponse options = Message.toWireEncoded options . wrapResponse
-- | Serialize a 'RemoteError' into the MQTT wire representation by embedding
-- it in the error alternative of 'WrappedResponse' and encoding the wrapper
-- with the supplied options.
wireEncodeErrorResponse ::
  WireEncodeOptions ->
  RemoteError ->
  ByteString
wireEncodeErrorResponse options =
  Message.toWireEncoded options
    . WrappedResponse
    . Just
    . WrappedResponseOrErrorError
-- | Wrap a 'RemoteResult' in the 'WrappedResponse' protobuf envelope.
--
-- Each call variant carries a different subset of body/metadata fields, so
-- the mapping into 'MQTTResponse' differs per constructor; errors go into the
-- dedicated error branch of the envelope.
wrapResponse :: RemoteResult s -> WrappedResponse
wrapResponse res =
  WrappedResponse . Just $
    case res of
      -- Unary call: body is always present, along with both metadata maps.
      RemoteNormalResult NormalRequestResult{rspBody, initMD, trailMD, rspCode, details} ->
        WrappedResponseOrErrorResponse $
          MQTTResponse
            (Just $ ResponseBody rspBody)
            (Just $ Wrapping.fromMetadataMap initMD)
            (Just $ Wrapping.fromMetadataMap trailMD)
            (Wrapping.fromStatusCode rspCode)
            (Wrapping.fromStatusDetails details)
      -- Client-streaming call: the body is already optional.
      RemoteWriterResult (rspBody, initMD, trailMD, rspCode, details) ->
        WrappedResponseOrErrorResponse $
          MQTTResponse
            (ResponseBody <$> rspBody)
            (Just $ Wrapping.fromMetadataMap initMD)
            (Just $ Wrapping.fromMetadataMap trailMD)
            (Wrapping.fromStatusCode rspCode)
            (Wrapping.fromStatusDetails details)
      -- Server-streaming call: no body; only one metadata map is available.
      RemoteReaderResult (rspMetadata, statusCode, details) ->
        WrappedResponseOrErrorResponse $
          MQTTResponse
            Nothing
            Nothing
            (Just $ Wrapping.fromMetadataMap rspMetadata)
            (Wrapping.fromStatusCode statusCode)
            (Wrapping.fromStatusDetails details)
      -- Bidirectional streaming mirrors the server-streaming shape.
      RemoteBiDiResult (rspMetadata, statusCode, details) ->
        WrappedResponseOrErrorResponse $
          MQTTResponse
            Nothing
            Nothing
            (Just $ Wrapping.fromMetadataMap rspMetadata)
            (Wrapping.fromStatusCode statusCode)
            (Wrapping.fromStatusDetails details)
      RemoteErrorResult err ->
        WrappedResponseOrErrorError $ Wrapping.toRemoteError err
-- | Intermediate, decoded form of a wrapped MQTT response, before it is
-- specialized into one of the gRPC 'ClientResult' shapes.
data ParsedMQTTResponse response = ParsedMQTTResponse
  { responseBody :: Maybe response
  -- ^ Decoded response payload, if the response carried one.
  , initMetadata :: MetadataMap
  -- ^ Initial gRPC metadata (empty when absent on the wire).
  , trailMetadata :: MetadataMap
  -- ^ Trailing gRPC metadata (empty when absent on the wire).
  , statusCode :: StatusCode
  -- ^ gRPC status code reported for the call.
  , statusDetails :: StatusDetails
  -- ^ Free-form status detail string.
  }
  deriving stock (Functor, Foldable, Traversable)
-- | Decode a wire-encoded 'WrappedResponse' into its parsed components.
--
-- Fails with a 'RemoteError' when the bytes do not decode, when the envelope
-- is empty, when the peer reported an error, or when the response code is not
-- a recognized gRPC status code. The payload is left as raw bytes; see
-- 'decodeResponse' for protobuf decoding of the payload.
--
-- Fix: corrected the "Invalid reponse code" typo in the error message.
unwrapResponse ::
  MonadError RemoteError m =>
  WireDecodeOptions ->
  ByteString ->
  m (ParsedMQTTResponse ByteString)
unwrapResponse options bytes = do
  MQTTResponse{..} <-
    case Message.fromWireEncoded @_ @WrappedResponse options bytes of
      Left err -> throwError (Message.toRemoteError err)
      Right (WrappedResponse Nothing) -> throwError (Wrapping.remoteError "Empty response")
      Right (WrappedResponse (Just (WrappedResponseOrErrorError err))) -> throwError err
      Right (WrappedResponse (Just (WrappedResponseOrErrorResponse rsp))) -> pure rsp
  statusCode <-
    case Wrapping.toStatusCode mqttresponseResponseCode of
      Nothing -> throwError (Wrapping.remoteError ("Invalid response code: " <> Relude.show mqttresponseResponseCode))
      Just sc -> pure sc
  return $
    ParsedMQTTResponse
      (responseBodyValue <$> mqttresponseBody)
      (maybe mempty Wrapping.toMetadataMap mqttresponseInitMetamap)
      (maybe mempty Wrapping.toMetadataMap mqttresponseTrailMetamap)
      statusCode
      (Wrapping.toStatusDetails mqttresponseDetails)
-- | Decode a wire-encoded response and then decode its payload (when one is
-- present) as a protobuf message of type @a@.
decodeResponse ::
  (MonadError RemoteError m, Message a) =>
  WireDecodeOptions ->
  ByteString ->
  m (ParsedMQTTResponse a)
decodeResponse options bytes =
  unwrapResponse options bytes >>= traverse decodeBody
  where
    -- Decode one raw payload, lifting parse failures into RemoteError.
    decodeBody raw =
      case Message.fromWireEncoded options raw of
        Left err -> throwError (Message.toRemoteError err)
        Right msg -> pure msg
-- | Decode a unary call response into an 'MQTTResult'.
unwrapUnaryResponse ::
  forall m rsp.
  (MonadError RemoteError m, Message rsp) =>
  WireDecodeOptions ->
  ByteString ->
  m (MQTTResult 'Normal rsp)
unwrapUnaryResponse options bytes = toNormalResult <$> decodeResponse options bytes
  where
    -- A unary response must carry a body; its absence is reported as an
    -- MQTT-level error rather than a gRPC result.
    toNormalResult :: ParsedMQTTResponse rsp -> MQTTResult 'Normal rsp
    toNormalResult ParsedMQTTResponse{..} =
      maybe
        (MQTTError "Empty response body")
        (\body -> GRPCResult $ ClientNormalResponse body initMetadata trailMetadata statusCode statusDetails)
        responseBody
-- | Decode a client-streaming call response into an 'MQTTResult'.
unwrapClientStreamResponse ::
  forall m rsp.
  (MonadError RemoteError m, Message rsp) =>
  WireDecodeOptions ->
  ByteString ->
  m (MQTTResult 'ClientStreaming rsp)
unwrapClientStreamResponse options bytes = toWriterResult <$> decodeResponse options bytes
  where
    toWriterResult :: ParsedMQTTResponse rsp -> MQTTResult 'ClientStreaming rsp
    toWriterResult (ParsedMQTTResponse body initMeta trailMeta code details) =
      GRPCResult $ ClientWriterResponse body initMeta trailMeta code details
-- | Decode a server-streaming call response into an 'MQTTResult'.
unwrapServerStreamResponse ::
  forall m rsp.
  (MonadError RemoteError m, Message rsp) =>
  WireDecodeOptions ->
  ByteString ->
  m (MQTTResult 'ServerStreaming rsp)
unwrapServerStreamResponse options bytes = toReaderResult <$> decodeResponse options bytes
  where
    -- Server-streaming results carry no body or initial metadata.
    toReaderResult :: ParsedMQTTResponse rsp -> MQTTResult 'ServerStreaming rsp
    toReaderResult (ParsedMQTTResponse _ _ trailMeta code details) =
      GRPCResult $ ClientReaderResponse trailMeta code details
-- | Decode a bidirectional-streaming call response into an 'MQTTResult'.
unwrapBiDiStreamResponse ::
  forall m rsp.
  (MonadError RemoteError m, Message rsp) =>
  WireDecodeOptions ->
  ByteString ->
  m (MQTTResult 'BiDiStreaming rsp)
unwrapBiDiStreamResponse options bytes = toBiDiResult <$> decodeResponse options bytes
  where
    -- BiDi results carry no body or initial metadata.
    toBiDiResult :: ParsedMQTTResponse rsp -> MQTTResult 'BiDiStreaming rsp
    toBiDiResult (ParsedMQTTResponse _ _ trailMeta code details) =
      GRPCResult $ ClientBiDiResponse trailMeta code details
-- | Encode a 'RemoteResult' and publish it to the given MQTT topic, split
-- into packets no larger than the configured size limit.
makeResponseSender ::
  MonadUnliftIO m =>
  MQTTClient ->
  Topic ->
  Word32 ->
  Maybe Natural ->
  WireEncodeOptions ->
  RemoteResult s ->
  m ()
makeResponseSender client topic packetSizeLimit rateLimit options response =
  Packet.makePacketSender packetSizeLimit rateLimit (liftIO . publishChunk) encoded
  where
    encoded :: ByteString
    encoded = wireEncodeResponse options response

    -- Publish one packet chunk at QoS1 without retain.
    publishChunk :: ByteString -> IO ()
    publishChunk bytes = publishq client topic (fromStrict bytes) False QoS1 []
-- | Encode a 'RemoteError' and publish it to the given MQTT topic, split
-- into packets no larger than the configured size limit.
--
-- Mirrors 'makeResponseSender', but for the error envelope. The redundant
-- @do@ wrapper around a single expression was removed so the two senders
-- read identically.
makeErrorResponseSender ::
  MonadUnliftIO m =>
  MQTTClient ->
  Topic ->
  Word32 ->
  Maybe Natural ->
  WireEncodeOptions ->
  RemoteError ->
  m ()
makeErrorResponseSender client topic packetSizeLimit rateLimit options err =
  Packet.makePacketSender packetSizeLimit rateLimit (liftIO . publish) message
  where
    message :: ByteString
    message = wireEncodeErrorResponse options err

    -- Publish one packet chunk at QoS1 without retain.
    publish :: ByteString -> IO ()
    publish bytes = publishq client topic (fromStrict bytes) False QoS1 []
-- | Read a complete packet stream from the channel and decode it as a unary
-- call response.
makeNormalResponseReader ::
  (MonadIO m, MonadError RemoteError m, Message a) =>
  TQueue ByteString ->
  WireDecodeOptions ->
  m (MQTTResult 'Normal a)
makeNormalResponseReader channel options =
  runExceptT (Packet.makePacketReader channel)
    >>= either (throwError . parseErrorToRCE) (unwrapUnaryResponse options)
-- | Read a complete packet stream from the channel and decode it as a
-- client-streaming call response.
makeClientResponseReader ::
  (MonadIO m, MonadError RemoteError m, Message a) =>
  TQueue ByteString ->
  WireDecodeOptions ->
  m (MQTTResult 'ClientStreaming a)
makeClientResponseReader channel options =
  runExceptT (Packet.makePacketReader channel)
    >>= either (throwError . parseErrorToRCE) (unwrapClientStreamResponse options)
-- | Read a complete packet stream from the channel and decode it as a
-- server-streaming call response.
makeServerResponseReader ::
  (MonadIO m, MonadError RemoteError m, Message a) =>
  TQueue ByteString ->
  WireDecodeOptions ->
  m (MQTTResult 'ServerStreaming a)
makeServerResponseReader channel options =
  runExceptT (Packet.makePacketReader channel)
    >>= either (throwError . parseErrorToRCE) (unwrapServerStreamResponse options)
-- | Read a complete packet stream from the channel and decode it as a
-- bidirectional-streaming call response.
makeBiDiResponseReader ::
  (MonadIO m, MonadError RemoteError m, Message a) =>
  TQueue ByteString ->
  WireDecodeOptions ->
  m (MQTTResult 'BiDiStreaming a)
makeBiDiResponseReader channel options =
  runExceptT (Packet.makePacketReader channel)
    >>= either (throwError . parseErrorToRCE) (unwrapBiDiStreamResponse options)
|
46815fff9117b6a7a1c777673b33f28a76a145a0fcd56450ef2ed475cc791d9f | rametta/retros | Sessions.hs | module Web.Controller.Sessions where
import Web.Controller.Prelude
import Web.View.Sessions.New ()
import qualified IHP.AuthSupport.Controller.Sessions as Sessions
-- | Wire the generic IHP session actions (login form, login, logout) to the
-- 'User' record via the reusable handlers in "IHP.AuthSupport.Controller.Sessions".
instance Controller SessionsController where
    action NewSessionAction = Sessions.newSessionAction @User
    action CreateSessionAction = Sessions.createSessionAction @User
    action DeleteSessionAction = Sessions.deleteSessionAction @User
instance Sessions.SessionsControllerConfig User
| null | https://raw.githubusercontent.com/rametta/retros/624290889df98cd2eb3c37848eeae15281eeda54/Web/Controller/Sessions.hs | haskell | module Web.Controller.Sessions where
import Web.Controller.Prelude
import Web.View.Sessions.New ()
import qualified IHP.AuthSupport.Controller.Sessions as Sessions
instance Controller SessionsController where
action NewSessionAction = Sessions.newSessionAction @User
action CreateSessionAction = Sessions.createSessionAction @User
action DeleteSessionAction = Sessions.deleteSessionAction @User
instance Sessions.SessionsControllerConfig User
| |
6b35eae84cb72408d8b3c3557774ba193f942d188e82864d9ec2bc1b630cabe7 | gwkkwg/cl-mathstats | class-defs.lisp | (in-package #:cl-mathstats)
;;; Root condition for problems with the data handed to a statistic.
(defcondition data-error (error)
  ())
;;; ---------------------------------------------------------------------------
;;; Signaled when too few data points are available for a computation.
(defcondition insufficient-data (data-error)
  ())
;;; ---------------------------------------------------------------------------
;;; Signaled when no data at all was supplied (a special case of
;;; insufficient-data).
(defcondition no-data (insufficient-data)
  ())
;;; ---------------------------------------------------------------------------
;;; Signaled when a computation encounters a zero standard deviation.
(defcondition zero-standard-deviation (data-error)
  ())
;;; ---------------------------------------------------------------------------
;;; Signaled when a computation encounters a zero variance.
(defcondition zero-variance (data-error)
  ())
;;; ---------------------------------------------------------------------------
;;; Signaled when paired sequences do not match up (presumably differing
;;; lengths -- confirm against signaling sites).
(defcondition unmatched-sequences (data-error)
  ())
;;; ---------------------------------------------------------------------------
;;; Signaled when inputs expected to be binary variables are not.
(defcondition not-binary-variables (data-error)
  ())
;;; ---------------------------------------------------------------------------
;;; Condition for very large contingency tables. Note: unlike the others it
;;; has no parent condition class.
(defcondition enormous-contingency-table ()
  ())
| null | https://raw.githubusercontent.com/gwkkwg/cl-mathstats/39af1b5e1ebbad6f75dab2c4214ce48852031bbf/dev/class-defs.lisp | lisp | ---------------------------------------------------------------------------
---------------------------------------------------------------------------
---------------------------------------------------------------------------
---------------------------------------------------------------------------
---------------------------------------------------------------------------
---------------------------------------------------------------------------
--------------------------------------------------------------------------- | (in-package #:cl-mathstats)
(defcondition data-error (error)
())
(defcondition insufficient-data (data-error)
())
(defcondition no-data (insufficient-data)
())
(defcondition zero-standard-deviation (data-error)
())
(defcondition zero-variance (data-error)
())
(defcondition unmatched-sequences (data-error)
())
(defcondition not-binary-variables (data-error)
())
(defcondition enormous-contingency-table ()
())
|
3a8fb77a15db0c09acfde8d761b861424b2b72fe06f7bed450316394723c11f6 | fossas/fossa-cli | Scala.hs | | The scala strategy leverages the machinery from maven - pom .
--
Sbt has a command to export pom files , with one caveat -- in multi - project
-- setups, parent/child relationships are not present in the generated poms.
--
-- The only non-trivial logic that exists in this strategy is adding edges
-- between poms in the maven "global closure", before building the individual
-- multi-project closures.
module Strategy.Scala (
discover,
findProjects,
ScalaProject (..),
) where
import App.Fossa.Analyze.Types (AnalyzeProject (analyzeProject'), analyzeProject)
import Control.Effect.Diagnostics (
Diagnostics,
errCtx,
fatalText,
fromMaybeText,
recover,
warnOnErr,
(<||>),
)
import Control.Effect.Reader (Reader)
import Control.Effect.Stack (context)
import Data.Aeson (KeyValue ((.=)), ToJSON (toJSON), object)
import Data.ByteString.Lazy (ByteString)
import Data.Maybe (fromMaybe, listToMaybe, mapMaybe)
import Data.String.Conversion (ConvertUtf8 (decodeUtf8), toString, toText)
import Data.Text (Text)
import Data.Text qualified as Text
import Data.Text.Lazy qualified as TL
import Diag.Common (MissingDeepDeps (MissingDeepDeps))
import Discovery.Filters (AllFilters)
import Discovery.Simple (simpleDiscover)
import Discovery.Walk (
WalkStep (WalkContinue, WalkSkipAll),
findFileNamed,
walkWithFilters',
)
import Effect.Exec (
AllowErr (Never),
Command (..),
Exec,
Has,
execThrow,
)
import Effect.Logger (Logger, logDebug, viaShow)
import Effect.ReadFS (ReadFS, readContentsXML)
import GHC.Generics (Generic)
import Path (
Abs,
Dir,
File,
Path,
parent,
parseAbsFile,
toFilePath,
)
import Strategy.Maven.Pom qualified as Pom
import Strategy.Maven.Pom.Closure (MavenProjectClosure, buildProjectClosures, closurePath)
import Strategy.Maven.Pom.PomFile (RawPom (rawPomArtifact, rawPomGroup, rawPomVersion))
import Strategy.Maven.Pom.Resolver (buildGlobalClosure)
import Strategy.Scala.Errors (FailedToListProjects (FailedToListProjects), MaybeWithoutDependencyTreeTask (MaybeWithoutDependencyTreeTask), MissingFullDependencyPlugin (MissingFullDependencyPlugin))
import Strategy.Scala.Plugin (genTreeJson, hasDependencyPlugins)
import Strategy.Scala.SbtDependencyTree (SbtArtifact (SbtArtifact), analyze, sbtDepTreeCmd)
import Strategy.Scala.SbtDependencyTreeJson qualified as TreeJson
import Types (
DependencyResults (..),
DiscoveredProject (..),
DiscoveredProjectType (ScalaProjectType),
GraphBreadth (Complete, Partial),
)
-- | Discover sbt projects: generate poms for each directory containing a
-- @build.sbt@ ('findProjects') and pair every maven project closure with its
-- sbt dependency-tree output, when one is available.
--
-- Fix: restored the @--@ marker on the last comment line inside
-- 'findRelevantDependencyTreeJson', which had been stripped and left the
-- file unparseable.
discover ::
  ( Has Exec sig m
  , Has ReadFS sig m
  , Has Diagnostics sig m
  , Has (Reader AllFilters) sig m
  ) =>
  Path Abs Dir ->
  m [DiscoveredProject ScalaProject]
discover = simpleDiscover findProjects' mkProject ScalaProjectType
  where
    findProjects' dir = concatMap toScalaProjects <$> (findProjects dir)

    -- Expand one discovery result into one ScalaProject per pom closure.
    toScalaProjects :: SbtTargets -> [ScalaProject]
    toScalaProjects (SbtTargets maybeSbtDepTree treeJsonPaths closure) =
      map
        (mkScalaProject (SbtTargets maybeSbtDepTree treeJsonPaths closure))
        closure

    mkScalaProject :: SbtTargets -> MavenProjectClosure -> ScalaProject
    mkScalaProject (SbtTargets maybeSbtDepTree treeJsonPaths _) cls =
      ScalaProject maybeSbtDepTree (findRelevantDependencyTreeJson cls treeJsonPaths) cls

    findRelevantDependencyTreeJson :: MavenProjectClosure -> [Path Abs File] -> Maybe (Path Abs File)
    findRelevantDependencyTreeJson closure paths = do
      let clsPath = parent $ closurePath closure
      -- treeJson are written in module/target/
      -- where as makePom may write in module/target/scala-version/ or module/target/
      --
      -- match closure to treeJson based common parent path module/target/
      -- module can only have one pom closure, and one or none tree json file.
      let matchingPaths = filter (\p -> parent p == clsPath || parent p == parent clsPath) paths
      listToMaybe matchingPaths
-- | A discovered sbt project: the maven project closure generated via
-- @sbt makePom@, plus optional sbt dependency-tree outputs that enable a more
-- complete dependency graph during analysis.
data ScalaProject = ScalaProject
  { rawSbtDepTree :: Maybe ByteString
  -- ^ Raw stdout captured from @sbt dependencyTree@, when available.
  , unScalaProjectDepTreeJson :: Maybe (Path Abs File)
  -- ^ Tree-json file produced by the sbt dependency plugin, when available.
  , unScalaProject :: MavenProjectClosure
  -- ^ Project closure built from the generated pom files.
  }
  deriving (Eq, Ord, Show, Generic)
-- | Only the pom closure is serialized; the dependency-tree fields are
-- runtime capture artifacts and are omitted from the JSON form.
instance ToJSON ScalaProject where
  toJSON scalaProject =
    object
      [ "unScalaProject" .= unScalaProject scalaProject
      ]
-- | Scala analysis needs to run sbt, so the static (non-executing) analysis
-- entry point is refused outright.
instance AnalyzeProject ScalaProject where
  analyzeProject _ = getDeps
  analyzeProject' _ = const $ fatalText "Cannot analyze scala project statically"
-- | Package a 'ScalaProject' as a 'DiscoveredProject' rooted at the directory
-- containing its generated pom.
mkProject :: ScalaProject -> DiscoveredProject ScalaProject
mkProject project@(ScalaProject _ _ closure) =
  DiscoveredProject
    { projectType = ScalaProjectType
    , projectPath = parent (closurePath closure)
    , projectBuildTargets = mempty
    , projectData = project
    }
-- | Analyze dependencies, preferring the richer tree-json output, then the
-- @sbt dependencyTree@ output, and finally falling back to the generated
-- poms (which yield only a partial graph).
getDeps :: (Has Exec sig m, Has ReadFS sig m, Has Diagnostics sig m, Has Logger sig m) => ScalaProject -> m DependencyResults
getDeps project =
  warnOnErr MissingDeepDeps (analyzeWithDepTreeJson project <||> analyzeWithSbtDepTree project)
    <||> analyzeWithPoms project
-- | Render a typed path as 'Text'.
pathToText :: Path ar fd -> Text
pathToText path = toText (toFilePath path)
data SbtTargets = SbtTargets (Maybe ByteString) [Path Abs File] [MavenProjectClosure]
-- | Walk the filesystem for directories containing a @build.sbt@, generate
-- poms for each, and capture whichever dependency-tree output the project's
-- sbt plugins can provide (full plugin preferred over the built-in
-- miniature plugin).
findProjects ::
  ( Has Exec sig m
  , Has ReadFS sig m
  , Has Diagnostics sig m
  , Has (Reader AllFilters) sig m
  ) =>
  Path Abs Dir ->
  m [SbtTargets]
findProjects = walkWithFilters' $ \dir _ files -> do
  case findFileNamed "build.sbt" files of
    Nothing -> pure ([], WalkContinue)
    Just _ -> do
      projectsRes <-
        recover
          . warnOnErr (FailedToListProjects dir)
          . context ("Listing sbt projects at " <> pathToText dir)
          $ genPoms dir
      (miniDepPlugin, depPlugin) <- hasDependencyPlugins dir
      case (projectsRes, miniDepPlugin, depPlugin) of
        (Nothing, _, _) -> pure ([], WalkSkipAll)
        -- No dependency plugin at all: poms are the only source available.
        (Just projects, False, False) -> pure ([SbtTargets Nothing [] projects], WalkSkipAll)
        (Just projects, _, True) -> do
          -- project is explicitly configured to use dependency-tree-plugin
          treeJSONs <- recover $ genTreeJson dir
          pure ([SbtTargets Nothing (fromMaybe [] treeJSONs) projects], WalkSkipAll)
        (Just projects, True, _) -> do
          -- project is using miniature dependency tree plugin,
          -- which is included by default with sbt 1.4+
          depTreeStdOut <-
            recover $
              context ("inferring dependencies") $
                errCtx (MaybeWithoutDependencyTreeTask) $
                  execThrow dir sbtDepTreeCmd
          case (length projects > 1, depTreeStdOut) of
            -- not emitting warning or error, to avoid duplication from
            -- those in `analyze` step. further analysis warn/errors are
            -- included in analysis scan summary.
            (True, _) -> pure ([SbtTargets Nothing [] projects], WalkSkipAll)
            (_, Just _) -> pure ([SbtTargets depTreeStdOut [] projects], WalkSkipAll)
            (_, _) -> pure ([], WalkSkipAll)
-- | Fall back to the generated pom closure; this yields only a 'Partial'
-- dependency graph.
analyzeWithPoms :: (Has Diagnostics sig m) => ScalaProject -> m DependencyResults
analyzeWithPoms (ScalaProject _ _ closure) =
  context "Analyzing sbt dependencies with generated pom" $
    pure
      DependencyResults
        { dependencyGraph = Pom.analyze' closure
        , dependencyGraphBreadth = Partial
        , dependencyManifestFiles = [closurePath closure]
        }
-- | Analyze using the tree-json file written by the sbt dependency plugin;
-- yields a 'Complete' graph. Fails when no tree-json file was captured.
analyzeWithDepTreeJson :: (Has ReadFS sig m, Has Diagnostics sig m) => ScalaProject -> m DependencyResults
analyzeWithDepTreeJson (ScalaProject _ treeJson closure) = context "Analyzing sbt dependencies using dependencyBrowseTreeHTML" $ do
  treeJson' <- errCtx MissingFullDependencyPlugin $ fromMaybeText "Could not retrieve output from sbt dependencyBrowseTreeHTML" treeJson
  projectGraph <- TreeJson.analyze treeJson'
  pure $
    DependencyResults
      { dependencyGraph = projectGraph
      , dependencyGraphBreadth = Complete
      , dependencyManifestFiles = [closurePath closure]
      }
-- | Analyze using the captured @sbt dependencyTree@ stdout; yields a
-- 'Complete' graph. The project's own (group, artifact, version) triple is
-- read back from the generated pom so that only this project's subtree of
-- the dependency tree is graphed.
analyzeWithSbtDepTree :: (Has Exec sig m, Has ReadFS sig m, Has Logger sig m, Has Diagnostics sig m) => ScalaProject -> m DependencyResults
analyzeWithSbtDepTree (ScalaProject maybeDepTree _ closure) = context "Analyzing sbt dependencies using dependencyTree" $ do
  projectArtifact <- pomSbtArtifact
  logDebug $ "identified artifact whose descendent to include when graphing: " <> viaShow projectArtifact
  projectGraph <- analyze maybeDepTree projectArtifact
  pure $
    DependencyResults
      { dependencyGraph = projectGraph
      , dependencyGraphBreadth = Complete
      , dependencyManifestFiles = [closurePath closure]
      }
  where
    -- Recover this project's own maven coordinates from its generated pom.
    pomSbtArtifact :: (Has ReadFS sig m, Has Diagnostics sig m) => m SbtArtifact
    pomSbtArtifact = do
      let pomPath = closurePath closure
      maybeRawPom <- recover (readContentsXML @RawPom pomPath)
      groupId <- fromMaybeText ("Could not retrieve project group from generated pom file:" <> toText pomPath) (rawPomGroup =<< maybeRawPom)
      artifactId <- fromMaybeText ("Could not retrieve project artifact from generated pom file:" <> toText pomPath) (rawPomArtifact <$> maybeRawPom)
      version <- fromMaybeText ("Could not retrieve project version from generated pom file:" <> toText pomPath) (rawPomVersion =<< maybeRawPom)
      pure $ SbtArtifact groupId artifactId version
-- | @sbt --no-colors --batch makePom@: generate a pom.xml for each project.
makePomCmd :: Command
makePomCmd =
  Command
    { cmdName = "sbt"
    , -- --no-colors to disable ANSI escape codes
      -- --batch to disable interactivity. normally, if an `sbt` command fails, it'll drop into repl mode: --batch will disable the repl.
      cmdArgs = ["--no-colors", "--batch", "makePom"]
    , cmdAllowErr = Never
    }
-- | Run @sbt makePom@ in the project directory, scrape the written pom paths
-- out of sbt's log output, and build maven project closures from them.
genPoms :: (Has Exec sig m, Has ReadFS sig m, Has Diagnostics sig m) => Path Abs Dir -> m [MavenProjectClosure]
genPoms projectDir = do
  stdoutBL <- context "Generating poms" $ execThrow projectDir makePomCmd
  -- stdout for "sbt makePom" looks something like:
  --
  -- > ...
  -- > [info] Wrote /absolute/path/to/pom.xml
  -- > [info] Wrote /absolute/path/to/other/pom.xml
  -- > ...
  let stdoutLText = decodeUtf8 stdoutBL
      stdout = TL.toStrict stdoutLText
      --
      stdoutLines :: [Text]
      stdoutLines = Text.lines stdout
      --
      pomLines :: [Text]
      pomLines = mapMaybe (Text.stripPrefix "[info] Wrote ") stdoutLines
      --
      pomLocations :: Maybe [Path Abs File]
      pomLocations = traverse (parseAbsFile . toString) pomLines
  case pomLocations of
    Nothing -> fatalText ("Could not parse pom paths from:\n" <> Text.unlines pomLines)
    Just [] -> fatalText "No sbt projects found"
    Just paths -> do
      globalClosure <- buildGlobalClosure paths
      pure $ buildProjectClosures projectDir globalClosure
| null | https://raw.githubusercontent.com/fossas/fossa-cli/ad4ab0b369995c7fc6d6056d0038141c492ad8cb/src/Strategy/Scala.hs | haskell |
in multi - project
setups, parent/child relationships are not present in the generated poms.
The only non-trivial logic that exists in this strategy is adding edges
between poms in the maven "global closure", before building the individual
multi-project closures.
treeJson are written in /module/target/
where as makePom may write in /module/target/scala-version/ or /module/target/
match closure to treeJson based common parent path /module/target/
project is explicitly configured to use dependency-tree-plugin
project is using miniature dependency tree plugin,
which is included by default with sbt 1.4+
not emitting warning or error, to avoid duplication from
those in `analyze` step. further analysis warn/errors are
included in analysis scan summary.
--no-colors to disable ANSI escape codes
--batch to disable interactivity. normally, if an `sbt` command fails, it'll drop into repl mode: --batch will disable the repl.
stdout for "sbt makePom" looks something like:
> ...
> [info] Wrote /absolute/path/to/pom.xml
> [info] Wrote /absolute/path/to/other/pom.xml
> ...
| | The scala strategy leverages the machinery from maven - pom .
module Strategy.Scala (
discover,
findProjects,
ScalaProject (..),
) where
import App.Fossa.Analyze.Types (AnalyzeProject (analyzeProject'), analyzeProject)
import Control.Effect.Diagnostics (
Diagnostics,
errCtx,
fatalText,
fromMaybeText,
recover,
warnOnErr,
(<||>),
)
import Control.Effect.Reader (Reader)
import Control.Effect.Stack (context)
import Data.Aeson (KeyValue ((.=)), ToJSON (toJSON), object)
import Data.ByteString.Lazy (ByteString)
import Data.Maybe (fromMaybe, listToMaybe, mapMaybe)
import Data.String.Conversion (ConvertUtf8 (decodeUtf8), toString, toText)
import Data.Text (Text)
import Data.Text qualified as Text
import Data.Text.Lazy qualified as TL
import Diag.Common (MissingDeepDeps (MissingDeepDeps))
import Discovery.Filters (AllFilters)
import Discovery.Simple (simpleDiscover)
import Discovery.Walk (
WalkStep (WalkContinue, WalkSkipAll),
findFileNamed,
walkWithFilters',
)
import Effect.Exec (
AllowErr (Never),
Command (..),
Exec,
Has,
execThrow,
)
import Effect.Logger (Logger, logDebug, viaShow)
import Effect.ReadFS (ReadFS, readContentsXML)
import GHC.Generics (Generic)
import Path (
Abs,
Dir,
File,
Path,
parent,
parseAbsFile,
toFilePath,
)
import Strategy.Maven.Pom qualified as Pom
import Strategy.Maven.Pom.Closure (MavenProjectClosure, buildProjectClosures, closurePath)
import Strategy.Maven.Pom.PomFile (RawPom (rawPomArtifact, rawPomGroup, rawPomVersion))
import Strategy.Maven.Pom.Resolver (buildGlobalClosure)
import Strategy.Scala.Errors (FailedToListProjects (FailedToListProjects), MaybeWithoutDependencyTreeTask (MaybeWithoutDependencyTreeTask), MissingFullDependencyPlugin (MissingFullDependencyPlugin))
import Strategy.Scala.Plugin (genTreeJson, hasDependencyPlugins)
import Strategy.Scala.SbtDependencyTree (SbtArtifact (SbtArtifact), analyze, sbtDepTreeCmd)
import Strategy.Scala.SbtDependencyTreeJson qualified as TreeJson
import Types (
DependencyResults (..),
DiscoveredProject (..),
DiscoveredProjectType (ScalaProjectType),
GraphBreadth (Complete, Partial),
)
discover ::
( Has Exec sig m
, Has ReadFS sig m
, Has Diagnostics sig m
, Has (Reader AllFilters) sig m
) =>
Path Abs Dir ->
m [DiscoveredProject ScalaProject]
discover = simpleDiscover findProjects' mkProject ScalaProjectType
where
findProjects' dir = concatMap toScalaProjects <$> (findProjects dir)
toScalaProjects :: SbtTargets -> [ScalaProject]
toScalaProjects (SbtTargets maybeSbtDepTree treeJsonPaths closure) =
map
(mkScalaProject (SbtTargets maybeSbtDepTree treeJsonPaths closure))
closure
mkScalaProject :: SbtTargets -> MavenProjectClosure -> ScalaProject
mkScalaProject (SbtTargets maybeSbtDepTree treeJsonPaths _) cls =
ScalaProject maybeSbtDepTree (findRelevantDependencyTreeJson cls treeJsonPaths) cls
findRelevantDependencyTreeJson :: MavenProjectClosure -> [Path Abs File] -> Maybe (Path Abs File)
findRelevantDependencyTreeJson closure paths = do
let clsPath = parent $ closurePath closure
module can only have one pom closure , and one or none tree json file .
let matchingPaths = filter (\p -> parent p == clsPath || parent p == parent clsPath) paths
listToMaybe matchingPaths
data ScalaProject = ScalaProject
{ rawSbtDepTree :: Maybe ByteString
, unScalaProjectDepTreeJson :: Maybe (Path Abs File)
, unScalaProject :: MavenProjectClosure
}
deriving (Eq, Ord, Show, Generic)
instance ToJSON ScalaProject where
toJSON scalaProject =
object
[ "unScalaProject" .= unScalaProject scalaProject
]
instance AnalyzeProject ScalaProject where
analyzeProject _ = getDeps
analyzeProject' _ = const $ fatalText "Cannot analyze scala project statically"
mkProject :: ScalaProject -> DiscoveredProject ScalaProject
mkProject (ScalaProject sbtBuildDir sbtTreeJson closure) =
DiscoveredProject
{ projectType = ScalaProjectType
, projectPath = parent $ closurePath closure
, projectBuildTargets = mempty
, projectData = ScalaProject sbtBuildDir sbtTreeJson closure
}
getDeps :: (Has Exec sig m, Has ReadFS sig m, Has Diagnostics sig m, Has Logger sig m) => ScalaProject -> m DependencyResults
getDeps project =
warnOnErr MissingDeepDeps (analyzeWithDepTreeJson project <||> analyzeWithSbtDepTree project)
<||> analyzeWithPoms project
pathToText :: Path ar fd -> Text
pathToText = toText . toFilePath
data SbtTargets = SbtTargets (Maybe ByteString) [Path Abs File] [MavenProjectClosure]
findProjects ::
( Has Exec sig m
, Has ReadFS sig m
, Has Diagnostics sig m
, Has (Reader AllFilters) sig m
) =>
Path Abs Dir ->
m [SbtTargets]
findProjects = walkWithFilters' $ \dir _ files -> do
case findFileNamed "build.sbt" files of
Nothing -> pure ([], WalkContinue)
Just _ -> do
projectsRes <-
recover
. warnOnErr (FailedToListProjects dir)
. context ("Listing sbt projects at " <> pathToText dir)
$ genPoms dir
(miniDepPlugin, depPlugin) <- hasDependencyPlugins dir
case (projectsRes, miniDepPlugin, depPlugin) of
(Nothing, _, _) -> pure ([], WalkSkipAll)
(Just projects, False, False) -> pure ([SbtTargets Nothing [] projects], WalkSkipAll)
(Just projects, _, True) -> do
treeJSONs <- recover $ genTreeJson dir
pure ([SbtTargets Nothing (fromMaybe [] treeJSONs) projects], WalkSkipAll)
(Just projects, True, _) -> do
depTreeStdOut <-
recover $
context ("inferring dependencies") $
errCtx (MaybeWithoutDependencyTreeTask) $
execThrow dir sbtDepTreeCmd
case (length projects > 1, depTreeStdOut) of
(True, _) -> pure ([SbtTargets Nothing [] projects], WalkSkipAll)
(_, Just _) -> pure ([SbtTargets depTreeStdOut [] projects], WalkSkipAll)
(_, _) -> pure ([], WalkSkipAll)
analyzeWithPoms :: (Has Diagnostics sig m) => ScalaProject -> m DependencyResults
analyzeWithPoms (ScalaProject _ _ closure) = context "Analyzing sbt dependencies with generated pom" $ do
pure $
DependencyResults
{ dependencyGraph = Pom.analyze' closure
, dependencyGraphBreadth = Partial
, dependencyManifestFiles = [closurePath closure]
}
analyzeWithDepTreeJson :: (Has ReadFS sig m, Has Diagnostics sig m) => ScalaProject -> m DependencyResults
analyzeWithDepTreeJson (ScalaProject _ treeJson closure) = context "Analyzing sbt dependencies using dependencyBrowseTreeHTML" $ do
treeJson' <- errCtx MissingFullDependencyPlugin $ fromMaybeText "Could not retrieve output from sbt dependencyBrowseTreeHTML" treeJson
projectGraph <- TreeJson.analyze treeJson'
pure $
DependencyResults
{ dependencyGraph = projectGraph
, dependencyGraphBreadth = Complete
, dependencyManifestFiles = [closurePath closure]
}
analyzeWithSbtDepTree :: (Has Exec sig m, Has ReadFS sig m, Has Logger sig m, Has Diagnostics sig m) => ScalaProject -> m DependencyResults
analyzeWithSbtDepTree (ScalaProject maybeDepTree _ closure) = context "Analyzing sbt dependencies using dependencyTree" $ do
projectArtifact <- pomSbtArtifact
logDebug $ "identified artifact whose descendent to include when graphing: " <> viaShow projectArtifact
projectGraph <- analyze maybeDepTree projectArtifact
pure $
DependencyResults
{ dependencyGraph = projectGraph
, dependencyGraphBreadth = Complete
, dependencyManifestFiles = [closurePath closure]
}
where
pomSbtArtifact :: (Has ReadFS sig m, Has Diagnostics sig m) => m SbtArtifact
pomSbtArtifact = do
let pomPath = closurePath closure
maybeRawPom <- recover (readContentsXML @RawPom pomPath)
groupId <- fromMaybeText ("Could not retrieve project group from generated pom file:" <> toText pomPath) (rawPomGroup =<< maybeRawPom)
artifactId <- fromMaybeText ("Could not retrieve project artifact from generated pom file:" <> toText pomPath) (rawPomArtifact <$> maybeRawPom)
version <- fromMaybeText ("Could not retrieve project version from generated pom file:" <> toText pomPath) (rawPomVersion =<< maybeRawPom)
pure $ SbtArtifact groupId artifactId version
makePomCmd :: Command
makePomCmd =
Command
{ cmdName = "sbt"
cmdArgs = ["--no-colors", "--batch", "makePom"]
, cmdAllowErr = Never
}
genPoms :: (Has Exec sig m, Has ReadFS sig m, Has Diagnostics sig m) => Path Abs Dir -> m [MavenProjectClosure]
genPoms projectDir = do
stdoutBL <- context "Generating poms" $ execThrow projectDir makePomCmd
let stdoutLText = decodeUtf8 stdoutBL
stdout = TL.toStrict stdoutLText
stdoutLines :: [Text]
stdoutLines = Text.lines stdout
pomLines :: [Text]
pomLines = mapMaybe (Text.stripPrefix "[info] Wrote ") stdoutLines
pomLocations :: Maybe [Path Abs File]
pomLocations = traverse (parseAbsFile . toString) pomLines
case pomLocations of
Nothing -> fatalText ("Could not parse pom paths from:\n" <> Text.unlines pomLines)
Just [] -> fatalText "No sbt projects found"
Just paths -> do
globalClosure <- buildGlobalClosure paths
pure $ buildProjectClosures projectDir globalClosure
|
2d47be89f9c4a01abd1c11c00f51c2a319d005d8c4f19caceb9f657e77fbee1b | ocamllabs/opam-doc | html_utils.mli | (** Utility functions to generate html tags and html pages (+ quick hacks to
be removed) *)
open Cow
* { 3 Html tags generators }
* Generate a < div class="info">xxx</div > tag or if None
val make_info : Html.t option -> Html.t
(** Convert options to nil **)
val opt_to_nil: Html.t option -> Html.t
(** Generate a <div class="ocaml_summary">xxx</div> tag *)
val make_summary : Html.t -> Html.t
(** Wrap the content into a <span class="keyword"> html tag *)
val keyword : string -> Html.t
(** Wrap the column list into a <table class="typetable"> by wraping the
elements with <tr> tags. The elements should already have the appropriates
<td> tags *)
val make_type_table : Html.t list -> Html.t
* Wrap the content into a < span > that will contain a class attribute with
the unique i d used by the javascript to lookup types , classes , values , ...
The html inlining prevents using i d because two tags with the attribute :
id="TYPEt " could also be defined in submodules and therefore false the
search .
[ Html_utils.generate_mark mark element_name content ]
the unique id used by the javascript to lookup types, classes, values, ...
The html inlining prevents using id because two tags with the attribute :
id="TYPEt" could also be defined in submodules and therefore false the
search.
[Html_utils.generate_mark mark element_name content]
*)
val generate_mark : Opam_doc_config.mark -> string -> Html.t -> Html.t
(** Generate the <td> for a variant element *)
val make_variant_cell :
string -> string -> Html.t list -> Html.t option -> Html.t
(** Generate the <td> for a record element *)
val make_record_label_cell :
string -> string -> bool -> Html.t -> Html.t option -> Html.t
* Generate the type parameters for a type signature .
e.g : ( + ' a,-'b ) for the type ( + ' a,-'b ) t
e.g: (+'a,-'b) for the type (+'a,-'b) t *)
val html_of_type_param_list :
Html.t list -> [< `Negative | `None | `Positive ] list -> Html.t
* Generate the class parameters for a class signature .
e.g : ( + ' a,-'b ) for the type ( + ' a,-'b ) t
e.g: (+'a,-'b) for the type (+'a,-'b) t *)
val html_of_type_class_param_list :
Html.t list -> [< `Negative | `None | `Positive ] list -> Html.t
* the include 's module_type to retrieve the contained elements and
return it as a json array
return it as a json array *)
val js_array_of_include_items : Types.signature -> string
* the include 's module_type to retrieve the contained types and
return it as a json array
return it as a json array *)
val js_array_of_include_types : Types.signature -> string
* { 3 Html page generation }
(** Writes the style file if non-existent *)
val output_style_file : unit -> unit
(** Writes the script file if non-existent *)
val output_script_file : unit -> unit
(** Generate the current package summary. Elements should be of the form:
name * description *)
val generate_package_summary : string -> (string * Html.t) list -> unit
(** Generate the current package index *)
val generate_package_index : string -> unit
(** Generate the main page that displays every package contained in the global
index file *)
val generate_global_packages_index : Index.global -> unit
(** Wrap the module signature elements into a special <div> used by the
javascript lookup *)
val create_signature_content : Html.t list -> Html.t
(** Wrap the class signature elements into a special <div> used by the
javascript lookup *)
val create_class_signature_content : Html.t list -> Html.t
* Create a class container used by the javascript inliner
val create_class_container : string -> Html.t -> Html.t -> Gentyp.path option
-> Html.t
* Extract the first sentence out of an info
val cut_first_sentence : Info.t -> Info.t
| null | https://raw.githubusercontent.com/ocamllabs/opam-doc/4f5a332750177e3016b82d5923a681510335af4c/src/opam-doc-index/html_utils.mli | ocaml | * Utility functions to generate html tags and html pages (+ quick hacks to
be removed)
* Convert options to nil *
* Generate a <div class="ocaml_summary">xxx</div> tag
* Wrap the content into a <span class="keyword"> html tag
* Wrap the column list into a <table class="typetable"> by wraping the
elements with <tr> tags. The elements should already have the appropriates
<td> tags
* Generate the <td> for a variant element
* Generate the <td> for a record element
* Writes the style file if non-existent
* Writes the script file if non-existent
* Generate the current package summary. Elements should be of the form:
name * description
* Generate the current package index
* Generate the main page that displays every package contained in the global
index file
* Wrap the module signature elements into a special <div> used by the
javascript lookup
* Wrap the class signature elements into a special <div> used by the
javascript lookup |
open Cow
* { 3 Html tags generators }
* Generate a < div class="info">xxx</div > tag or if None
val make_info : Html.t option -> Html.t
val opt_to_nil: Html.t option -> Html.t
val make_summary : Html.t -> Html.t
val keyword : string -> Html.t
val make_type_table : Html.t list -> Html.t
* Wrap the content into a < span > that will contain a class attribute with
the unique i d used by the javascript to lookup types , classes , values , ...
The html inlining prevents using i d because two tags with the attribute :
id="TYPEt " could also be defined in submodules and therefore false the
search .
[ Html_utils.generate_mark mark element_name content ]
the unique id used by the javascript to lookup types, classes, values, ...
The html inlining prevents using id because two tags with the attribute :
id="TYPEt" could also be defined in submodules and therefore false the
search.
[Html_utils.generate_mark mark element_name content]
*)
val generate_mark : Opam_doc_config.mark -> string -> Html.t -> Html.t
val make_variant_cell :
string -> string -> Html.t list -> Html.t option -> Html.t
val make_record_label_cell :
string -> string -> bool -> Html.t -> Html.t option -> Html.t
* Generate the type parameters for a type signature .
e.g : ( + ' a,-'b ) for the type ( + ' a,-'b ) t
e.g: (+'a,-'b) for the type (+'a,-'b) t *)
val html_of_type_param_list :
Html.t list -> [< `Negative | `None | `Positive ] list -> Html.t
* Generate the class parameters for a class signature .
e.g : ( + ' a,-'b ) for the type ( + ' a,-'b ) t
e.g: (+'a,-'b) for the type (+'a,-'b) t *)
val html_of_type_class_param_list :
Html.t list -> [< `Negative | `None | `Positive ] list -> Html.t
* the include 's module_type to retrieve the contained elements and
return it as a json array
return it as a json array *)
val js_array_of_include_items : Types.signature -> string
* the include 's module_type to retrieve the contained types and
return it as a json array
return it as a json array *)
val js_array_of_include_types : Types.signature -> string
* { 3 Html page generation }
val output_style_file : unit -> unit
val output_script_file : unit -> unit
val generate_package_summary : string -> (string * Html.t) list -> unit
val generate_package_index : string -> unit
val generate_global_packages_index : Index.global -> unit
val create_signature_content : Html.t list -> Html.t
val create_class_signature_content : Html.t list -> Html.t
* Create a class container used by the javascript inliner
val create_class_container : string -> Html.t -> Html.t -> Gentyp.path option
-> Html.t
* Extract the first sentence out of an info
val cut_first_sentence : Info.t -> Info.t
|
9484745394697becf04e02bd29a7b9367cb2cb689bf9472b6aba365cda61b370 | roelvandijk/numerals | BUL.hs | |
[ @ISO639 - 1@ ] bg
[ @ISO639 - 2@ ] bul
[ @ISO639 - 3@ ] bul
[ @Native name@ ]
[ @English name@ ] Bulgarian
[@ISO639-1@] bg
[@ISO639-2@] bul
[@ISO639-3@] bul
[@Native name@] Български език
[@English name@] Bulgarian
-}
module Text.Numeral.Language.BUL
( -- * Language entry
entry
-- * Conversions
, cardinal
-- * Structure
, struct
-- * Bounds
, bounds
) where
--------------------------------------------------------------------------------
-- Imports
--------------------------------------------------------------------------------
import "base" Data.Function ( fix )
import qualified "containers" Data.Map as M
import "this" Text.Numeral
import "this" Text.Numeral.Misc ( dec )
import "this" Text.Numeral.Entry
import qualified "this" Text.Numeral.BigNum as BN
import "text" Data.Text ( Text )
--------------------------------------------------------------------------------
BUL
--------------------------------------------------------------------------------
entry :: Entry
entry = emptyEntry
{ entIso639_1 = Just "bg"
, entIso639_2 = ["bul"]
, entIso639_3 = Just "bul"
, entNativeNames = ["Български език"]
, entEnglishName = Just "Bulgarian"
, entCardinal = Just Conversion
{ toNumeral = cardinal
, toStructure = struct
}
}
cardinal :: (Integral a) => Inflection -> a -> Maybe Text
cardinal inf = cardinalRepr inf . struct
struct :: (Integral a) => a -> Exp
struct = pos $ fix $ rule `combine` shortScale1_bg
where
rule = findRule ( 0, lit )
[ ( 13, add 10 L )
, ( 20, mul 10 R L )
, ( 100, step 100 10 R L)
, (1000, step 1000 1000 R L)
]
(dec 6 - 1)
-- | Like 'shortScale1' but forces the right-hand-side to have
-- masculine gender.
shortScale1_bg :: (Integral a) => Rule a
shortScale1_bg = mulScale_ bgMul 3 3 R L rule
where
bgMul f m scale' _ = masculineMul (f m) scale'
masculineMul x y = ChangeGender (Just Masculine) $ Mul x y
rule = findRule (1, lit) [] 14
bounds :: (Integral a) => (a, a)
bounds = let x = dec 48 - 1 in (negate x, x)
cardinalRepr :: Inflection -> Exp -> Maybe Text
cardinalRepr = render defaultRepr
{ reprValue = \inf n -> M.lookup n (syms inf)
, reprScale = shortScaleRepr
, reprAdd = Just (⊞)
, reprMul = Just (⊡)
, reprNeg = Just $ \_ _ -> "минус "
}
where
(Lit 100 ⊞ Lit _) _ = " и "
(_ ⊞ Add _ (Lit 10)) _ = " и "
(_ ⊞ Mul _ (Lit 10)) _ = " и "
(_ ⊞ Lit 10) _ = ""
((_ `Mul` Lit _) ⊞ Lit _) _ = " и "
(ChangeGender _ _ ⊞ Lit _) _ = " и "
(_ ⊞ _ ) _ = " "
(_ ⊡ Lit n) _ | n <= 100 = ""
(_ ⊡ _ ) _ = " "
syms inf =
M.fromList
[ (0, const "нула")
, (1, const $ if isFeminine inf
then "една"
else if isMasculine inf
then "един"
else "едно"
)
, (2, \c -> case c of
CtxMul _ (Lit 10) _ -> "два"
CtxMul _ (Lit 100) _ -> "две"
CtxMul _ (Lit 1000) _ -> "две"
_ | isMasculine inf -> "два"
| otherwise -> "две"
)
, (3, const "три")
, (4, const "четири")
, (5, const "пет")
, (6, const "шест")
, (7, const "седем")
, (8, const "осем")
, (9, const "девет")
, (10, \c -> case c of
CtxAdd _ (Lit n) _
| n <= 9 -> "надесет"
_ -> "десет"
)
, (11, const "единадесет")
, (12, const "дванадесет")
, (100, \c -> case c of
CtxMul _ (Lit 2) _ -> "ста"
CtxMul _ (Lit 3) _ -> "ста"
CtxMul R _ _ -> "стотин"
_ -> "сто"
)
, (1000, \c -> case c of
CtxMul R _ _ -> "хиляди"
_ -> "хиляда"
)
]
shortScaleRepr :: Inflection -> Integer -> Integer -> Exp -> Ctx Exp -> Maybe Text
shortScaleRepr inf b o e
= case e of
Lit 2 -> BN.scaleRepr (BN.quantityName "илиард" "илиарда") syms inf b o e
_ -> BN.scaleRepr (BN.quantityName "илион" "илиона") syms inf b o e
where
syms = [ ( 1, const "м")
, ( 2, const "м")
, ( 3, const "тр")
, ( 4, const "квадр")
, ( 5, const "квинт")
, ( 6, const "секст")
, ( 7, const "септ")
, ( 8, const "окт")
, ( 9, const "нон")
, (10, const "дец")
, (11, const "индец")
, (12, const "дуодец")
, (13, const "тридец")
, (14, const "куадродец")
]
| null | https://raw.githubusercontent.com/roelvandijk/numerals/b1e4121e0824ac0646a3230bd311818e159ec127/src/Text/Numeral/Language/BUL.hs | haskell | * Language entry
* Conversions
* Structure
* Bounds
------------------------------------------------------------------------------
Imports
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| Like 'shortScale1' but forces the right-hand-side to have
masculine gender. | |
[ @ISO639 - 1@ ] bg
[ @ISO639 - 2@ ] bul
[ @ISO639 - 3@ ] bul
[ @Native name@ ]
[ @English name@ ] Bulgarian
[@ISO639-1@] bg
[@ISO639-2@] bul
[@ISO639-3@] bul
[@Native name@] Български език
[@English name@] Bulgarian
-}
module Text.Numeral.Language.BUL
entry
, cardinal
, struct
, bounds
) where
import "base" Data.Function ( fix )
import qualified "containers" Data.Map as M
import "this" Text.Numeral
import "this" Text.Numeral.Misc ( dec )
import "this" Text.Numeral.Entry
import qualified "this" Text.Numeral.BigNum as BN
import "text" Data.Text ( Text )
BUL
entry :: Entry
entry = emptyEntry
{ entIso639_1 = Just "bg"
, entIso639_2 = ["bul"]
, entIso639_3 = Just "bul"
, entNativeNames = ["Български език"]
, entEnglishName = Just "Bulgarian"
, entCardinal = Just Conversion
{ toNumeral = cardinal
, toStructure = struct
}
}
cardinal :: (Integral a) => Inflection -> a -> Maybe Text
cardinal inf = cardinalRepr inf . struct
struct :: (Integral a) => a -> Exp
struct = pos $ fix $ rule `combine` shortScale1_bg
where
rule = findRule ( 0, lit )
[ ( 13, add 10 L )
, ( 20, mul 10 R L )
, ( 100, step 100 10 R L)
, (1000, step 1000 1000 R L)
]
(dec 6 - 1)
shortScale1_bg :: (Integral a) => Rule a
shortScale1_bg = mulScale_ bgMul 3 3 R L rule
where
bgMul f m scale' _ = masculineMul (f m) scale'
masculineMul x y = ChangeGender (Just Masculine) $ Mul x y
rule = findRule (1, lit) [] 14
bounds :: (Integral a) => (a, a)
bounds = let x = dec 48 - 1 in (negate x, x)
cardinalRepr :: Inflection -> Exp -> Maybe Text
cardinalRepr = render defaultRepr
{ reprValue = \inf n -> M.lookup n (syms inf)
, reprScale = shortScaleRepr
, reprAdd = Just (⊞)
, reprMul = Just (⊡)
, reprNeg = Just $ \_ _ -> "минус "
}
where
(Lit 100 ⊞ Lit _) _ = " и "
(_ ⊞ Add _ (Lit 10)) _ = " и "
(_ ⊞ Mul _ (Lit 10)) _ = " и "
(_ ⊞ Lit 10) _ = ""
((_ `Mul` Lit _) ⊞ Lit _) _ = " и "
(ChangeGender _ _ ⊞ Lit _) _ = " и "
(_ ⊞ _ ) _ = " "
(_ ⊡ Lit n) _ | n <= 100 = ""
(_ ⊡ _ ) _ = " "
syms inf =
M.fromList
[ (0, const "нула")
, (1, const $ if isFeminine inf
then "една"
else if isMasculine inf
then "един"
else "едно"
)
, (2, \c -> case c of
CtxMul _ (Lit 10) _ -> "два"
CtxMul _ (Lit 100) _ -> "две"
CtxMul _ (Lit 1000) _ -> "две"
_ | isMasculine inf -> "два"
| otherwise -> "две"
)
, (3, const "три")
, (4, const "четири")
, (5, const "пет")
, (6, const "шест")
, (7, const "седем")
, (8, const "осем")
, (9, const "девет")
, (10, \c -> case c of
CtxAdd _ (Lit n) _
| n <= 9 -> "надесет"
_ -> "десет"
)
, (11, const "единадесет")
, (12, const "дванадесет")
, (100, \c -> case c of
CtxMul _ (Lit 2) _ -> "ста"
CtxMul _ (Lit 3) _ -> "ста"
CtxMul R _ _ -> "стотин"
_ -> "сто"
)
, (1000, \c -> case c of
CtxMul R _ _ -> "хиляди"
_ -> "хиляда"
)
]
shortScaleRepr :: Inflection -> Integer -> Integer -> Exp -> Ctx Exp -> Maybe Text
shortScaleRepr inf b o e
= case e of
Lit 2 -> BN.scaleRepr (BN.quantityName "илиард" "илиарда") syms inf b o e
_ -> BN.scaleRepr (BN.quantityName "илион" "илиона") syms inf b o e
where
syms = [ ( 1, const "м")
, ( 2, const "м")
, ( 3, const "тр")
, ( 4, const "квадр")
, ( 5, const "квинт")
, ( 6, const "секст")
, ( 7, const "септ")
, ( 8, const "окт")
, ( 9, const "нон")
, (10, const "дец")
, (11, const "индец")
, (12, const "дуодец")
, (13, const "тридец")
, (14, const "куадродец")
]
|
0f5e3574c0f4411d1fab9650227d71b136f79c1ed38a40b9529d713e4e6fe0e7 | acowley/Frames | Melt.hs | # LANGUAGE ConstraintKinds , DataKinds , FlexibleContexts , FlexibleInstances ,
KindSignatures , MultiParamTypeClasses , PolyKinds ,
ScopedTypeVariables , TypeFamilies , TypeOperators ,
UndecidableInstances #
KindSignatures, MultiParamTypeClasses, PolyKinds,
ScopedTypeVariables, TypeFamilies, TypeOperators,
UndecidableInstances #-}
module Frames.Melt where
import Data.Proxy
import Data.Vinyl
import Data.Vinyl.CoRec (CoRec(..))
import Data.Vinyl.TypeLevel
import Frames.Col
import Frames.Frame (Frame(..), FrameRec)
import Frames.Rec
import Frames.RecF (ColumnHeaders(..))
type family Elem t ts :: Bool where
Elem t '[] = 'False
Elem t (t ': ts) = 'True
Elem t (s ': ts) = Elem t ts
type family Or (a :: Bool) (b :: Bool) :: Bool where
Or 'True b = 'True
Or a b = b
type family Not a :: Bool where
Not 'True = 'False
Not 'False = 'True
type family Disjoint ss ts :: Bool where
Disjoint '[] ts = 'True
Disjoint (s ': ss) ts = Or (Not (Elem s ts)) (Disjoint ss ts)
type ElemOf ts r = RElem r ts (RIndex r ts)
class RowToColumn ts rs where
rowToColumnAux :: Proxy ts -> Rec f rs -> [CoRec f ts]
instance RowToColumn ts '[] where
rowToColumnAux _ _ = []
instance (r ∈ ts, RowToColumn ts rs) => RowToColumn ts (r ': rs) where
rowToColumnAux p (x :& xs) = CoRec x : rowToColumnAux p xs
-- | Transform a record into a list of its fields, retaining proof
-- that each field is part of the whole.
rowToColumn :: RowToColumn ts ts => Rec f ts -> [CoRec f ts]
rowToColumn = rowToColumnAux Proxy
meltAux :: forall vs ss ts.
(vs ⊆ ts, ss ⊆ ts, Disjoint ss ts ~ 'True, ts ≅ (vs ++ ss),
ColumnHeaders vs, RowToColumn vs vs)
=> Record ts
-> [Record ("value" :-> CoRec ElField vs ': ss)]
meltAux r = map (\val -> Field val :& ids) (rowToColumn vals)
where ids = rcast r :: Record ss
vals = rcast r :: Record vs
type family RDeleteAll ss ts where
RDeleteAll '[] ts = ts
RDeleteAll (s ': ss) ts = RDeleteAll ss (RDelete s ts)
-- | This is 'melt', but the variables are at the front of the record,
-- which reads a bit odd.
meltRow' :: forall proxy vs ts ss. (vs ⊆ ts, ss ⊆ ts, vs ~ RDeleteAll ss ts,
Disjoint ss ts ~ 'True, ts ≅ (vs ++ ss),
ColumnHeaders vs, RowToColumn vs vs)
=> proxy ss
-> Record ts
-> [Record ("value" :-> CoRec ElField vs ': ss)]
meltRow' _ = meltAux
-- | Turn a cons into a snoc after the fact.
retroSnoc :: forall t ts. Record (t ': ts) -> Record (ts ++ '[t])
retroSnoc (x :& xs) = go xs
where go :: Record ss -> Record (ss ++ '[t])
go RNil = x :& RNil
go (y :& ys) = y :& go ys
| Like @melt@ in the @reshape2@ package for the @R@ language . It
-- stacks multiple columns into a single column over multiple
-- rows. Takes a specification of the id columns that remain
-- unchanged. The remaining columns will be stacked.
--
Suppose we have a record , @r : : Record [ Name , Age , If we
apply @melt [ pr1|Name| ] r@ , we get two values with type
[ Name , " value " :-> CoRec Identity [ Age , Weight]]@. The first will
contain @Age@ in the @value@ column , and the second will contain
-- @Weight@ in the @value@ column.
meltRow :: (vs ⊆ ts, ss ⊆ ts, vs ~ RDeleteAll ss ts,
Disjoint ss ts ~ 'True, ts ≅ (vs ++ ss),
ColumnHeaders vs, RowToColumn vs vs)
=> proxy ss
-> Record ts
-> [Record (ss ++ '["value" :-> CoRec ElField vs])]
meltRow = (map retroSnoc .) . meltRow'
class HasLength (ts :: [k]) where
hasLength :: proxy ts -> Int
instance HasLength '[] where hasLength _ = 0
instance forall t ts. HasLength ts => HasLength (t ': ts) where
hasLength _ = 1 + hasLength (Proxy :: Proxy ts)
| Applies ' meltRow ' to each row of a ' FrameRec ' .
melt :: forall vs ts ss proxy.
(vs ⊆ ts, ss ⊆ ts, vs ~ RDeleteAll ss ts, HasLength vs,
Disjoint ss ts ~ 'True, ts ≅ (vs ++ ss),
ColumnHeaders vs, RowToColumn vs vs)
=> proxy ss
-> FrameRec ts
-> FrameRec (ss ++ '["value" :-> CoRec ElField vs])
melt p (Frame n v) = Frame (n*numVs) go
where numVs = hasLength (Proxy :: Proxy vs)
go i = let (j,k) = i `quotRem` numVs
in meltRow p (v j) !! k
| null | https://raw.githubusercontent.com/acowley/Frames/aeca953fe608de38d827b8a078ebf2d329edae04/src/Frames/Melt.hs | haskell | | Transform a record into a list of its fields, retaining proof
that each field is part of the whole.
| This is 'melt', but the variables are at the front of the record,
which reads a bit odd.
| Turn a cons into a snoc after the fact.
stacks multiple columns into a single column over multiple
rows. Takes a specification of the id columns that remain
unchanged. The remaining columns will be stacked.
@Weight@ in the @value@ column. | # LANGUAGE ConstraintKinds , DataKinds , FlexibleContexts , FlexibleInstances ,
KindSignatures , MultiParamTypeClasses , PolyKinds ,
ScopedTypeVariables , TypeFamilies , TypeOperators ,
UndecidableInstances #
KindSignatures, MultiParamTypeClasses, PolyKinds,
ScopedTypeVariables, TypeFamilies, TypeOperators,
UndecidableInstances #-}
module Frames.Melt where
import Data.Proxy
import Data.Vinyl
import Data.Vinyl.CoRec (CoRec(..))
import Data.Vinyl.TypeLevel
import Frames.Col
import Frames.Frame (Frame(..), FrameRec)
import Frames.Rec
import Frames.RecF (ColumnHeaders(..))
type family Elem t ts :: Bool where
Elem t '[] = 'False
Elem t (t ': ts) = 'True
Elem t (s ': ts) = Elem t ts
type family Or (a :: Bool) (b :: Bool) :: Bool where
Or 'True b = 'True
Or a b = b
type family Not a :: Bool where
Not 'True = 'False
Not 'False = 'True
type family Disjoint ss ts :: Bool where
Disjoint '[] ts = 'True
Disjoint (s ': ss) ts = Or (Not (Elem s ts)) (Disjoint ss ts)
type ElemOf ts r = RElem r ts (RIndex r ts)
class RowToColumn ts rs where
rowToColumnAux :: Proxy ts -> Rec f rs -> [CoRec f ts]
instance RowToColumn ts '[] where
rowToColumnAux _ _ = []
instance (r ∈ ts, RowToColumn ts rs) => RowToColumn ts (r ': rs) where
rowToColumnAux p (x :& xs) = CoRec x : rowToColumnAux p xs
rowToColumn :: RowToColumn ts ts => Rec f ts -> [CoRec f ts]
rowToColumn = rowToColumnAux Proxy
meltAux :: forall vs ss ts.
(vs ⊆ ts, ss ⊆ ts, Disjoint ss ts ~ 'True, ts ≅ (vs ++ ss),
ColumnHeaders vs, RowToColumn vs vs)
=> Record ts
-> [Record ("value" :-> CoRec ElField vs ': ss)]
meltAux r = map (\val -> Field val :& ids) (rowToColumn vals)
where ids = rcast r :: Record ss
vals = rcast r :: Record vs
type family RDeleteAll ss ts where
RDeleteAll '[] ts = ts
RDeleteAll (s ': ss) ts = RDeleteAll ss (RDelete s ts)
meltRow' :: forall proxy vs ts ss. (vs ⊆ ts, ss ⊆ ts, vs ~ RDeleteAll ss ts,
Disjoint ss ts ~ 'True, ts ≅ (vs ++ ss),
ColumnHeaders vs, RowToColumn vs vs)
=> proxy ss
-> Record ts
-> [Record ("value" :-> CoRec ElField vs ': ss)]
meltRow' _ = meltAux
retroSnoc :: forall t ts. Record (t ': ts) -> Record (ts ++ '[t])
retroSnoc (x :& xs) = go xs
where go :: Record ss -> Record (ss ++ '[t])
go RNil = x :& RNil
go (y :& ys) = y :& go ys
| Like @melt@ in the @reshape2@ package for the @R@ language . It
Suppose we have a record , @r : : Record [ Name , Age , If we
apply @melt [ pr1|Name| ] r@ , we get two values with type
[ Name , " value " :-> CoRec Identity [ Age , Weight]]@. The first will
contain @Age@ in the @value@ column , and the second will contain
meltRow :: (vs ⊆ ts, ss ⊆ ts, vs ~ RDeleteAll ss ts,
Disjoint ss ts ~ 'True, ts ≅ (vs ++ ss),
ColumnHeaders vs, RowToColumn vs vs)
=> proxy ss
-> Record ts
-> [Record (ss ++ '["value" :-> CoRec ElField vs])]
meltRow = (map retroSnoc .) . meltRow'
class HasLength (ts :: [k]) where
hasLength :: proxy ts -> Int
instance HasLength '[] where hasLength _ = 0
instance forall t ts. HasLength ts => HasLength (t ': ts) where
hasLength _ = 1 + hasLength (Proxy :: Proxy ts)
| Applies ' meltRow ' to each row of a ' FrameRec ' .
melt :: forall vs ts ss proxy.
(vs ⊆ ts, ss ⊆ ts, vs ~ RDeleteAll ss ts, HasLength vs,
Disjoint ss ts ~ 'True, ts ≅ (vs ++ ss),
ColumnHeaders vs, RowToColumn vs vs)
=> proxy ss
-> FrameRec ts
-> FrameRec (ss ++ '["value" :-> CoRec ElField vs])
melt p (Frame n v) = Frame (n*numVs) go
where numVs = hasLength (Proxy :: Proxy vs)
go i = let (j,k) = i `quotRem` numVs
in meltRow p (v j) !! k
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.