_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
c7bb1173eb365d06167de05596a8769e0b79235f08b33e4c2b6bf31f5e9d3386 | fulcro-legacy/fulcro-todomvc | en_US.cljs | (ns fulcro-todomvc.i18n.en-US (:require fulcro.i18n) (:import goog.module.ModuleManager))
This file was generated by fulcro 's i18n leiningen plugin .
(def translations {})
(swap!
fulcro.i18n/*loaded-translations*
(fn [x] (assoc x "en-US" translations)))
(try
(-> goog.module.ModuleManager .getInstance (.setLoaded "en-US"))
(catch js/Object obj))
| null | https://raw.githubusercontent.com/fulcro-legacy/fulcro-todomvc/928ec1f5bf790e83093bfb595667d895f319068e/src/main/fulcro_todomvc/i18n/en_US.cljs | clojure | (ns fulcro-todomvc.i18n.en-US (:require fulcro.i18n) (:import goog.module.ModuleManager))
This file was generated by fulcro 's i18n leiningen plugin .
(def translations {})
(swap!
fulcro.i18n/*loaded-translations*
(fn [x] (assoc x "en-US" translations)))
(try
(-> goog.module.ModuleManager .getInstance (.setLoaded "en-US"))
(catch js/Object obj))
| |
d21d7a37edd51db95b1e17c99a656bd9de69fe5beee59882eb2f85407c390806 | Nutr1t07/wl-bot | Json.hs | # LANGUAGE FlexibleContexts #
{-# LANGUAGE RankNTypes #-}
module Utils.Json where
import Data.Aeson
import Data.Aeson.Types
import GHC.Generics
dropToJSON
:: forall a . (Generic a, GToJSON Zero (Rep a)) => Int -> a -> Value
dropToJSON prefix = genericToJSON defaultOptions
{ fieldLabelModifier = drop prefix
, omitNothingFields = True
}
dropParseJSON
:: forall a . (Generic a, GFromJSON Zero (Rep a)) => Int -> Value -> Parser a
dropParseJSON prefix =
genericParseJSON defaultOptions { fieldLabelModifier = drop prefix }
| null | https://raw.githubusercontent.com/Nutr1t07/wl-bot/4d9db61613a3819d0addbc7e04d77bb2f57892f0/src/Utils/Json.hs | haskell | # LANGUAGE RankNTypes # | # LANGUAGE FlexibleContexts #
module Utils.Json where
import Data.Aeson
import Data.Aeson.Types
import GHC.Generics
dropToJSON
:: forall a . (Generic a, GToJSON Zero (Rep a)) => Int -> a -> Value
dropToJSON prefix = genericToJSON defaultOptions
{ fieldLabelModifier = drop prefix
, omitNothingFields = True
}
dropParseJSON
:: forall a . (Generic a, GFromJSON Zero (Rep a)) => Int -> Value -> Parser a
dropParseJSON prefix =
genericParseJSON defaultOptions { fieldLabelModifier = drop prefix }
|
8d382b12b5255d30e1b22c7e87cb6b10964cd5d5330a16cd9409c1dd238af970 | marick/Midje | t_one_seven.clj | (ns midje.util.t-one-seven
(:require [midje.sweet :refer :all]
[midje.util
[laziness :refer :all]
[thread-safe-var-nesting :refer :all]]
[midje.test-util :refer :all]))
;; Justification for use of eagerly
(def counter (atom :needs-to-be-initialized))
(def #^:dynamic *mocked-function-produces-next-element* inc)
(defn function-under-test-produces-a-lazy-list []
(iterate *mocked-function-produces-next-element* 1))
(defn mock-use []
(binding [*mocked-function-produces-next-element* (fn [n] (swap! counter inc) (inc n))]
(eagerly (take 5 (function-under-test-produces-a-lazy-list)))))
(fact "1.7alpha bug: eagerly forces evaluation"
(reset! counter 1)
(mock-use)
@counter => 5
)
;; (midje.checking.facts/creation-time-check
;; (midje.data.compendium/record-fact-existence!
( clojure.core/with-meta
;; (clojure.core/fn
;; []
;; (midje.parsing.util.wrapping/midje-wrapped
( midje.data.prerequisite - state / with - installed - fakes
( midje.parsing.1 - to - explicit - form.parse - background / background - fakes )
( reset ! counter 1 )
;; (mock-use)
;; (midje.parsing.util.wrapping/midje-wrapped
;; (midje.checking.checkables/check-one
;; (clojure.core/merge
;; {:description (midje.data.nested-facts/descriptions),
: expected - result - form ' 5 ,
;; :check-expectation :expect-match,
;; :midje.parsing.lexical-maps/a-midje-checkable-map? true,
: function - under - test ( clojure.core/fn [ ] @counter ) ,
: expected - result 5 ,
;; :position
;; (pointer.core/line-number-known nil),
;; :namespace clojure.core/*ns*}
;; {:arrow '=>, :call-form '@counter}
;; (commons.clojure.core/hash-map-duplicates-ok
;; :position
( pointer.core/line-number-known 18 ) ) )
;; [])))))
;; (clojure.core/merge
{ : / guid " 9a01710b2962f9945f24630a0579bba82671d1ef " ,
: / name " eagerly forces evaluation " ,
: / description " eagerly forces evaluation " ,
: / source
;; '(fact
;; "eagerly forces evaluation"
( reset ! counter 1 )
;; (mock-use)
@counter
;; =>
;; 5),
: / namespace ' midje.util.t - one - seven ,
: / file " / util / t_one_seven.clj " ,
: / line 17 }
{ : / top - level - fact ? true } ) ) ) )
;; After justification, more facts.
(unfinished exploder)
;; The following is lazy, so it should not cause an error.
(map exploder [1 2 3])
(defrecord Foo [x y])
| null | https://raw.githubusercontent.com/marick/Midje/2b9bcb117442d3bd2d16446b47540888d683c717/test/midje/util/t_one_seven.clj | clojure | Justification for use of eagerly
(midje.checking.facts/creation-time-check
(midje.data.compendium/record-fact-existence!
(clojure.core/fn
[]
(midje.parsing.util.wrapping/midje-wrapped
(mock-use)
(midje.parsing.util.wrapping/midje-wrapped
(midje.checking.checkables/check-one
(clojure.core/merge
{:description (midje.data.nested-facts/descriptions),
:check-expectation :expect-match,
:midje.parsing.lexical-maps/a-midje-checkable-map? true,
:position
(pointer.core/line-number-known nil),
:namespace clojure.core/*ns*}
{:arrow '=>, :call-form '@counter}
(commons.clojure.core/hash-map-duplicates-ok
:position
[])))))
(clojure.core/merge
'(fact
"eagerly forces evaluation"
(mock-use)
=>
5),
After justification, more facts.
The following is lazy, so it should not cause an error. | (ns midje.util.t-one-seven
(:require [midje.sweet :refer :all]
[midje.util
[laziness :refer :all]
[thread-safe-var-nesting :refer :all]]
[midje.test-util :refer :all]))
(def counter (atom :needs-to-be-initialized))
(def #^:dynamic *mocked-function-produces-next-element* inc)
(defn function-under-test-produces-a-lazy-list []
(iterate *mocked-function-produces-next-element* 1))
(defn mock-use []
(binding [*mocked-function-produces-next-element* (fn [n] (swap! counter inc) (inc n))]
(eagerly (take 5 (function-under-test-produces-a-lazy-list)))))
(fact "1.7alpha bug: eagerly forces evaluation"
(reset! counter 1)
(mock-use)
@counter => 5
)
( clojure.core/with-meta
( midje.data.prerequisite - state / with - installed - fakes
( midje.parsing.1 - to - explicit - form.parse - background / background - fakes )
( reset ! counter 1 )
: expected - result - form ' 5 ,
: function - under - test ( clojure.core/fn [ ] @counter ) ,
: expected - result 5 ,
( pointer.core/line-number-known 18 ) ) )
{ : / guid " 9a01710b2962f9945f24630a0579bba82671d1ef " ,
: / name " eagerly forces evaluation " ,
: / description " eagerly forces evaluation " ,
: / source
( reset ! counter 1 )
@counter
: / namespace ' midje.util.t - one - seven ,
: / file " / util / t_one_seven.clj " ,
: / line 17 }
{ : / top - level - fact ? true } ) ) ) )
(unfinished exploder)
(map exploder [1 2 3])
(defrecord Foo [x y])
|
0acd72a66a749b708cbcbb3dcfe9549316330716c56899f1314bc2280014d8bf | cpichard/filesequence | Padding.hs | module System.FileSequence.Padding where
import System.FileSequence.FrameList (FrameNumber)
-- | Store Padding information
We consider 2 cases :
-- * PaddingFixed:
when the padding is fixed and we know it because the number is prefixed with zeroes
like 010 , 00010 , etc .
* :
-- when we are not sure the padding is fixed
like 677 100 400
-- however it can't be superior to the number of digits
data Padding = PaddingFixed Int
| PaddingMax Int
deriving (Show, Eq)
-- | Paddings can be merged if they are compatible.
-- To be compatible, they should be able to restitute the frame string
-- with a printf("%0xd")
mergePadding :: Padding -> Padding -> Maybe Padding
mergePadding (PaddingFixed a) (PaddingFixed b)
| a == b = Just $ PaddingFixed a -- same fixed padding
different padding ex : 010 ! = 0010
mergePadding (PaddingMax a) (PaddingFixed b)
| b <= a = Just $ PaddingFixed b
| otherwise = Nothing
mergePadding (PaddingFixed a) (PaddingMax b) = mergePadding (PaddingMax b) (PaddingFixed a)
mergePadding (PaddingMax a) (PaddingMax b) = Just $ PaddingMax (min a b)
-- | Deduce the padding from the frame number and the number of digit used to encode it
-- The digits does not take the minus.
deducePadding :: FrameNumber -> Int -> Padding
deducePadding frameNb numberDigits
| abs (toInteger frameNb) < 10^(numberDigits-1) = PaddingFixed numberDigits
| otherwise = PaddingMax numberDigits
| null | https://raw.githubusercontent.com/cpichard/filesequence/39cd8eb7dd0bc494c181c5b04fc9ff2fae5202d0/src/System/FileSequence/Padding.hs | haskell | | Store Padding information
* PaddingFixed:
when we are not sure the padding is fixed
however it can't be superior to the number of digits
| Paddings can be merged if they are compatible.
To be compatible, they should be able to restitute the frame string
with a printf("%0xd")
same fixed padding
| Deduce the padding from the frame number and the number of digit used to encode it
The digits does not take the minus. | module System.FileSequence.Padding where
import System.FileSequence.FrameList (FrameNumber)
We consider 2 cases :
when the padding is fixed and we know it because the number is prefixed with zeroes
like 010 , 00010 , etc .
* :
like 677 100 400
data Padding = PaddingFixed Int
| PaddingMax Int
deriving (Show, Eq)
mergePadding :: Padding -> Padding -> Maybe Padding
mergePadding (PaddingFixed a) (PaddingFixed b)
different padding ex : 010 ! = 0010
mergePadding (PaddingMax a) (PaddingFixed b)
| b <= a = Just $ PaddingFixed b
| otherwise = Nothing
mergePadding (PaddingFixed a) (PaddingMax b) = mergePadding (PaddingMax b) (PaddingFixed a)
mergePadding (PaddingMax a) (PaddingMax b) = Just $ PaddingMax (min a b)
deducePadding :: FrameNumber -> Int -> Padding
deducePadding frameNb numberDigits
| abs (toInteger frameNb) < 10^(numberDigits-1) = PaddingFixed numberDigits
| otherwise = PaddingMax numberDigits
|
b969c32e4ed7e6bd3165779ec579de6e8dd7e1c26c04945a0961d591336a3aab | oscoin/oscoin | Lenient.hs | | A version of the ' Nakamoto ' consensus more lenient on the validation
-- aspect , and suitable for development .
-- aspect, and suitable for development.
-}
module Oscoin.Consensus.Nakamoto.Lenient
( nakamotoConsensusLenient
, validateLenient
, mineLenient
) where
import Oscoin.Prelude
import Oscoin.Consensus.Nakamoto
( PoW
, blockTime
, chainDifficulty
, nakamotoConsensus
, validateBasic
)
import Oscoin.Consensus.Types
import Oscoin.Consensus.Validation
( validateDifficulty
, validateHeight
, validateParentHash
, validateTimestamp
)
import Oscoin.Crypto.Blockchain
import Oscoin.Crypto.Hash (Hash, Hashable)
import Oscoin.Node.Mempool.Class
import qualified Oscoin.Node.Mempool.Class as Mempool
import qualified Oscoin.Telemetry as Telemetry
import Oscoin.Time (seconds)
import Codec.Serialise (Serialise)
import Control.Monad.Except (liftEither, runExcept)
import qualified Crypto.Data.Auth.Tree.Class as AuthTree
import Data.ByteArray (ByteArrayAccess)
-- | Like 'nakamotoConsensus', but uses a looser validation function, more
-- suitable for development.
nakamotoConsensusLenient
:: forall c tx m.
( MonadIO m
, MonadMempool c tx m
, Serialise tx
, Serialise (Beneficiary c)
, AuthTree.MerkleHash (Hash c)
, ByteArrayAccess (BlockHash c)
, Hashable c (BlockHeader c (Sealed c PoW))
)
=> Telemetry.Tracer m
^ Block time lower bound in seconds ( see ' mineLenient ' )
-> Consensus c tx PoW m
nakamotoConsensusLenient probed blkTimeLower =
let nak = nakamotoConsensus probed :: Consensus c tx PoW m
in nak { cValidate = validateLenient
, cMiner = mineLenient blkTimeLower (cMiner nak)
}
-- | A more lenient validation function, where the \"age\" of block with
-- respect to the parent is not checked.
validateLenient
:: ( ByteArrayAccess (BlockHash c)
, Hashable c (BlockHeader c (Sealed c PoW))
, AuthTree.MerkleHash (Hash c)
, Serialise tx
, Serialise (Beneficiary c)
)
=> Validate c tx PoW
validateLenient [] blk =
validateBasic blk
validateLenient prefix@(parent:_) blk = runExcept $ do
validateHeight parent blk
validateParentHash parent blk
validateDifficulty chainDifficulty prefix blk
validateTimestamp parent blk
liftEither (validateBasic blk)
-- | Modify mining behaviour when the network does not produce transactions.
--
-- Currently, this simply delays mining an empty block by 'blockTime' or 'Word8'
-- seconds, whichever is lower. Mainly useful to avoid busy looping in an idle
-- network, while retaining correctness.
--
mineLenient
:: forall c tx m. (MonadIO m, MonadMempool c tx m)
Block time lower bound in seconds
-> Miner c PoW m
-> Miner c PoW m
mineLenient blkTimeLowerSecs inner getBlocks unsealedBlock = do
nTxs <- Mempool.numTxs
when (nTxs == 0) $
let
blkTime = min (fromIntegral blkTimeLowerSecs * seconds) blockTime
blkTimeMu = fromIntegral blkTime `div` 1000
in
liftIO $ threadDelay blkTimeMu
inner getBlocks unsealedBlock
| null | https://raw.githubusercontent.com/oscoin/oscoin/2eb5652c9999dd0f30c70b3ba6b638156c74cdb1/src/Oscoin/Consensus/Nakamoto/Lenient.hs | haskell | aspect , and suitable for development .
aspect, and suitable for development.
| Like 'nakamotoConsensus', but uses a looser validation function, more
suitable for development.
| A more lenient validation function, where the \"age\" of block with
respect to the parent is not checked.
| Modify mining behaviour when the network does not produce transactions.
Currently, this simply delays mining an empty block by 'blockTime' or 'Word8'
seconds, whichever is lower. Mainly useful to avoid busy looping in an idle
network, while retaining correctness.
| | A version of the ' Nakamoto ' consensus more lenient on the validation
-}
module Oscoin.Consensus.Nakamoto.Lenient
( nakamotoConsensusLenient
, validateLenient
, mineLenient
) where
import Oscoin.Prelude
import Oscoin.Consensus.Nakamoto
( PoW
, blockTime
, chainDifficulty
, nakamotoConsensus
, validateBasic
)
import Oscoin.Consensus.Types
import Oscoin.Consensus.Validation
( validateDifficulty
, validateHeight
, validateParentHash
, validateTimestamp
)
import Oscoin.Crypto.Blockchain
import Oscoin.Crypto.Hash (Hash, Hashable)
import Oscoin.Node.Mempool.Class
import qualified Oscoin.Node.Mempool.Class as Mempool
import qualified Oscoin.Telemetry as Telemetry
import Oscoin.Time (seconds)
import Codec.Serialise (Serialise)
import Control.Monad.Except (liftEither, runExcept)
import qualified Crypto.Data.Auth.Tree.Class as AuthTree
import Data.ByteArray (ByteArrayAccess)
nakamotoConsensusLenient
:: forall c tx m.
( MonadIO m
, MonadMempool c tx m
, Serialise tx
, Serialise (Beneficiary c)
, AuthTree.MerkleHash (Hash c)
, ByteArrayAccess (BlockHash c)
, Hashable c (BlockHeader c (Sealed c PoW))
)
=> Telemetry.Tracer m
^ Block time lower bound in seconds ( see ' mineLenient ' )
-> Consensus c tx PoW m
nakamotoConsensusLenient probed blkTimeLower =
let nak = nakamotoConsensus probed :: Consensus c tx PoW m
in nak { cValidate = validateLenient
, cMiner = mineLenient blkTimeLower (cMiner nak)
}
validateLenient
:: ( ByteArrayAccess (BlockHash c)
, Hashable c (BlockHeader c (Sealed c PoW))
, AuthTree.MerkleHash (Hash c)
, Serialise tx
, Serialise (Beneficiary c)
)
=> Validate c tx PoW
validateLenient [] blk =
validateBasic blk
validateLenient prefix@(parent:_) blk = runExcept $ do
validateHeight parent blk
validateParentHash parent blk
validateDifficulty chainDifficulty prefix blk
validateTimestamp parent blk
liftEither (validateBasic blk)
mineLenient
:: forall c tx m. (MonadIO m, MonadMempool c tx m)
Block time lower bound in seconds
-> Miner c PoW m
-> Miner c PoW m
mineLenient blkTimeLowerSecs inner getBlocks unsealedBlock = do
nTxs <- Mempool.numTxs
when (nTxs == 0) $
let
blkTime = min (fromIntegral blkTimeLowerSecs * seconds) blockTime
blkTimeMu = fromIntegral blkTime `div` 1000
in
liftIO $ threadDelay blkTimeMu
inner getBlocks unsealedBlock
|
71e950b6e2725bce59277c120df51dedacbea8eeb3cc928d86e37f0e88a52cad | GaloisInc/what4 | ExprBuilderSMTLib2.hs | # LANGUAGE DataKinds #
# LANGUAGE ExistentialQuantification #
{-# LANGUAGE ExplicitForAll #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE PatternSynonyms #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeApplications #
for instance
import ProbeSolvers
import Test.Tasty
import Test.Tasty.Checklist as TC
import Test.Tasty.ExpectedFailure
import Test.Tasty.Hedgehog.Alt
import Test.Tasty.HUnit
import Control.Exception (bracket, try, finally, SomeException)
import Control.Monad (void)
import Control.Monad.IO.Class (MonadIO(..))
import qualified Data.BitVector.Sized as BV
import Data.Foldable
import qualified Data.Map as Map
import Data.Maybe ( fromMaybe )
import Data.Parameterized.Context ( pattern Empty, pattern (:>) )
import qualified Data.Text as Text
import qualified Hedgehog as H
import qualified Hedgehog.Gen as HGen
import qualified Hedgehog.Range as HRange
import qualified Prettyprinter as PP
import System.Environment ( lookupEnv )
import qualified Data.Parameterized.Context as Ctx
import Data.Parameterized.Nonce
import Data.Parameterized.Some
import System.IO
import LibBF
import What4.BaseTypes
import What4.Config
import What4.Expr
import What4.Interface
import What4.InterpretedFloatingPoint
import What4.Protocol.Online
import What4.Protocol.SMTLib2
import What4.SatResult
import What4.Solver.Adapter
import qualified What4.Solver.CVC4 as CVC4
import qualified What4.Solver.Z3 as Z3
import qualified What4.Solver.Yices as Yices
import qualified What4.Utils.BVDomain as WUB
import qualified What4.Utils.BVDomain.Arith as WUBA
import qualified What4.Utils.ResolveBounds.BV as WURB
import What4.Utils.StringLiteral
import What4.Utils.Versions (ver, SolverBounds(..), emptySolverBounds)
data SomePred = forall t . SomePred (BoolExpr t)
deriving instance Show SomePred
type SimpleExprBuilder t fs = ExprBuilder t EmptyExprBuilderState fs
instance TestShow Text.Text where testShow = show
instance TestShow (StringLiteral Unicode) where testShow = show
debugOutputFiles :: Bool
debugOutputFiles = False
--debugOutputFiles = True
maybeClose :: Maybe Handle -> IO ()
maybeClose Nothing = return ()
maybeClose (Just h) = hClose h
userSymbol' :: String -> SolverSymbol
userSymbol' s = case userSymbol s of
Left e -> error $ show e
Right symbol -> symbol
withSym :: FloatModeRepr fm -> (forall t . SimpleExprBuilder t (Flags fm) -> IO a) -> IO a
withSym floatMode pred_gen = withIONonceGenerator $ \gen ->
pred_gen =<< newExprBuilder floatMode EmptyExprBuilderState gen
withYices :: (forall t. SimpleExprBuilder t (Flags FloatReal) -> SolverProcess t Yices.Connection -> IO a) -> IO a
withYices action = withSym FloatRealRepr $ \sym ->
do extendConfig Yices.yicesOptions (getConfiguration sym)
bracket
(do h <- if debugOutputFiles then Just <$> openFile "yices.out" WriteMode else return Nothing
s <- startSolverProcess Yices.yicesDefaultFeatures h sym
return (h,s))
(\(h,s) -> void $ try @SomeException (shutdownSolverProcess s `finally` maybeClose h))
(\(_,s) -> action sym s)
withZ3 :: (forall t . SimpleExprBuilder t (Flags FloatIEEE) -> Session t Z3.Z3 -> IO ()) -> IO ()
withZ3 action = withIONonceGenerator $ \nonce_gen -> do
sym <- newExprBuilder FloatIEEERepr EmptyExprBuilderState nonce_gen
extendConfig Z3.z3Options (getConfiguration sym)
Z3.withZ3 sym "z3" defaultLogData { logCallbackVerbose = (\_ -> putStrLn) } (action sym)
withOnlineZ3
:: (forall t . SimpleExprBuilder t (Flags FloatIEEE) -> SolverProcess t (Writer Z3.Z3) -> IO a)
-> IO a
withOnlineZ3 action = withSym FloatIEEERepr $ \sym -> do
extendConfig Z3.z3Options (getConfiguration sym)
bracket
(do h <- if debugOutputFiles then Just <$> openFile "z3.out" WriteMode else return Nothing
s <- startSolverProcess (defaultFeatures Z3.Z3) h sym
return (h,s))
(\(h,s) -> void $ try @SomeException (shutdownSolverProcess s `finally` maybeClose h))
(\(_,s) -> action sym s)
withCVC4
:: (forall t . SimpleExprBuilder t (Flags FloatReal) -> SolverProcess t (Writer CVC4.CVC4) -> IO a)
-> IO a
withCVC4 action = withSym FloatRealRepr $ \sym -> do
extendConfig CVC4.cvc4Options (getConfiguration sym)
bracket
(do h <- if debugOutputFiles then Just <$> openFile "cvc4.out" WriteMode else return Nothing
s <- startSolverProcess (defaultFeatures CVC4.CVC4) h sym
return (h,s))
(\(h,s) -> void $ try @SomeException (shutdownSolverProcess s `finally` maybeClose h))
(\(_,s) -> action sym s)
withModel
:: Session t Z3.Z3
-> BoolExpr t
-> ((forall tp . What4.Expr.Expr t tp -> IO (GroundValue tp)) -> IO ())
-> IO ()
withModel s p action = do
assume (sessionWriter s) p
runCheckSat s $ \case
Sat (GroundEvalFn {..}, _) -> action groundEval
Unsat _ -> "unsat" @?= ("sat" :: String)
Unknown -> "unknown" @?= ("sat" :: String)
-- exists y . (x + 2.0) + (x + 2.0) < y
iFloatTestPred
:: ( forall t
. (IsInterpretedFloatExprBuilder (SimpleExprBuilder t fs))
=> SimpleExprBuilder t fs
-> IO SomePred
)
iFloatTestPred sym = do
x <- freshFloatConstant sym (userSymbol' "x") SingleFloatRepr
e0 <- iFloatLitSingle sym 2.0
e1 <- iFloatAdd @_ @SingleFloat sym RNE x e0
e2 <- iFloatAdd @_ @SingleFloat sym RTZ e1 e1
y <- freshFloatBoundVar sym (userSymbol' "y") SingleFloatRepr
e3 <- iFloatLt @_ @SingleFloat sym e2 $ varExpr sym y
SomePred <$> existsPred sym y e3
floatSinglePrecision :: FloatPrecisionRepr Prec32
floatSinglePrecision = knownRepr
floatDoublePrecision :: FloatPrecisionRepr Prec64
floatDoublePrecision = knownRepr
floatSingleType :: BaseTypeRepr (BaseFloatType Prec32)
floatSingleType = BaseFloatRepr floatSinglePrecision
floatDoubleType :: BaseTypeRepr (BaseFloatType Prec64)
floatDoubleType = BaseFloatRepr floatDoublePrecision
testInterpretedFloatReal :: TestTree
testInterpretedFloatReal = testCase "Float interpreted as real" $ do
actual <- withSym FloatRealRepr iFloatTestPred
expected <- withSym FloatRealRepr $ \sym -> do
x <- freshConstant sym (userSymbol' "x") knownRepr
e0 <- realLit sym 2.0
e1 <- realAdd sym x e0
e2 <- realAdd sym e1 e1
y <- freshBoundVar sym (userSymbol' "y") knownRepr
e3 <- realLt sym e2 $ varExpr sym y
SomePred <$> existsPred sym y e3
show actual @?= show expected
testFloatUninterpreted :: TestTree
testFloatUninterpreted = testCase "Float uninterpreted" $ do
actual <- withSym FloatUninterpretedRepr iFloatTestPred
expected <- withSym FloatUninterpretedRepr $ \sym -> do
let bvtp = BaseBVRepr $ knownNat @32
rne_rm <- intLit sym $ toInteger $ fromEnum RNE
rtz_rm <- intLit sym $ toInteger $ fromEnum RTZ
x <- freshConstant sym (userSymbol' "x") knownRepr
-- Floating point literal: 2.0
e1 <- bvLit sym knownRepr (BV.mkBV knownRepr (bfToBits (float32 NearEven) (bfFromInt 2)))
add_fn <- freshTotalUninterpFn
sym
(userSymbol' "uninterpreted_float_add")
(Ctx.empty Ctx.:> BaseIntegerRepr Ctx.:> bvtp Ctx.:> bvtp)
bvtp
e2 <- applySymFn sym add_fn $ Ctx.empty Ctx.:> rne_rm Ctx.:> x Ctx.:> e1
e3 <- applySymFn sym add_fn $ Ctx.empty Ctx.:> rtz_rm Ctx.:> e2 Ctx.:> e2
y <- freshBoundVar sym (userSymbol' "y") knownRepr
lt_fn <- freshTotalUninterpFn sym
(userSymbol' "uninterpreted_float_lt")
(Ctx.empty Ctx.:> bvtp Ctx.:> bvtp)
BaseBoolRepr
e4 <- applySymFn sym lt_fn $ Ctx.empty Ctx.:> e3 Ctx.:> varExpr sym y
SomePred <$> existsPred sym y e4
show actual @?= show expected
testInterpretedFloatIEEE :: TestTree
testInterpretedFloatIEEE = testCase "Float interpreted as IEEE float" $ do
actual <- withSym FloatIEEERepr iFloatTestPred
expected <- withSym FloatIEEERepr $ \sym -> do
x <- freshConstant sym (userSymbol' "x") knownRepr
e0 <- floatLitRational sym floatSinglePrecision 2.0
e1 <- floatAdd sym RNE x e0
e2 <- floatAdd sym RTZ e1 e1
y <- freshBoundVar sym (userSymbol' "y") knownRepr
e3 <- floatLt sym e2 $ varExpr sym y
SomePred <$> existsPred sym y e3
show actual @?= show expected
x < = 0.5 & & x > = 1.5
testFloatUnsat0 :: TestTree
testFloatUnsat0 = testCase "Unsat float formula" $ withZ3 $ \sym s -> do
x <- freshConstant sym (userSymbol' "x") knownRepr
e0 <- floatLitRational sym floatSinglePrecision 0.5
e1 <- floatLitRational sym knownRepr 1.5
p0 <- floatLe sym x e0
p1 <- floatGe sym x e1
assume (sessionWriter s) p0
assume (sessionWriter s) p1
runCheckSat s $ \res -> isUnsat res @? "unsat"
x * x < 0
testFloatUnsat1 :: TestTree
testFloatUnsat1 = testCase "Unsat float formula" $ withZ3 $ \sym s -> do
x <- freshConstant sym (userSymbol' "x") floatSingleType
e0 <- floatMul sym RNE x x
p0 <- floatIsNeg sym e0
assume (sessionWriter s) p0
runCheckSat s $ \res -> isUnsat res @? "unsat"
x + y > = x & & x ! = infinity & & y > 0 with rounding to + infinity
testFloatUnsat2 :: TestTree
testFloatUnsat2 = testCase "Sat float formula" $ withZ3 $ \sym s -> do
x <- freshConstant sym (userSymbol' "x") floatSingleType
y <- freshConstant sym (userSymbol' "y") knownRepr
p0 <- notPred sym =<< floatIsInf sym x
p1 <- floatIsPos sym y
p2 <- notPred sym =<< floatIsZero sym y
e0 <- floatAdd sym RTP x y
p3 <- floatGe sym x e0
p4 <- foldlM (andPred sym) (truePred sym) [p1, p2, p3]
assume (sessionWriter s) p4
runCheckSat s $ \res -> isSat res @? "sat"
assume (sessionWriter s) p0
runCheckSat s $ \res -> isUnsat res @? "unsat"
x = = 2.5 & & y = = + infinity
testFloatSat0 :: TestTree
testFloatSat0 = testCase "Sat float formula" $ withZ3 $ \sym s -> do
x <- freshConstant sym (userSymbol' "x") knownRepr
e0 <- floatLitRational sym floatSinglePrecision 2.5
p0 <- floatEq sym x e0
y <- freshConstant sym (userSymbol' "y") knownRepr
e1 <- floatPInf sym floatSinglePrecision
p1 <- floatEq sym y e1
p2 <- andPred sym p0 p1
withModel s p2 $ \groundEval -> do
(@?=) (bfFromDouble 2.5) =<< groundEval x
y_val <- groundEval y
assertBool ("expected y = +infinity, actual y = " ++ show y_val) $
bfIsInf y_val && bfIsPos y_val
x > = 0.5 & & x < = 1.5
testFloatSat1 :: TestTree
testFloatSat1 = testCase "Sat float formula" $ withZ3 $ \sym s -> do
x <- freshConstant sym (userSymbol' "x") knownRepr
e0 <- floatLitRational sym floatSinglePrecision 0.5
e1 <- floatLitRational sym knownRepr 1.5
p0 <- floatGe sym x e0
p1 <- floatLe sym x e1
p2 <- andPred sym p0 p1
withModel s p2 $ \groundEval -> do
x_val <- groundEval x
assertBool ("expected x in [0.5, 1.5], actual x = " ++ show x_val) $
bfFromDouble 0.5 <= x_val && x_val <= bfFromDouble 1.5
testFloatToBinary :: TestTree
testFloatToBinary = testCase "float to binary" $ withZ3 $ \sym s -> do
x <- freshConstant sym (userSymbol' "x") knownRepr
y <- freshConstant sym (userSymbol' "y") knownRepr
e0 <- floatToBinary sym x
e1 <- bvAdd sym e0 y
e2 <- floatFromBinary sym floatSinglePrecision e1
p0 <- floatNe sym x e2
assume (sessionWriter s) p0
runCheckSat s $ \res -> isSat res @? "sat"
p1 <- notPred sym =<< bvIsNonzero sym y
assume (sessionWriter s) p1
runCheckSat s $ \res -> isUnsat res @? "unsat"
testFloatFromBinary :: TestTree
testFloatFromBinary = testCase "float from binary" $ withZ3 $ \sym s -> do
x <- freshConstant sym (userSymbol' "x") knownRepr
e0 <- floatFromBinary sym floatSinglePrecision x
e1 <- floatToBinary sym e0
p0 <- bvNe sym x e1
assume (sessionWriter s) p0
runCheckSat s $ \res -> isSat res @? "sat"
p1 <- notPred sym =<< floatIsNaN sym e0
assume (sessionWriter s) p1
runCheckSat s $ \res -> isUnsat res @? "unsat"
testFloatBinarySimplification :: TestTree
testFloatBinarySimplification = testCase "float binary simplification" $
withSym FloatIEEERepr $ \sym -> do
x <- freshConstant sym (userSymbol' "x") knownRepr
e0 <- floatToBinary sym x
e1 <- floatFromBinary sym floatSinglePrecision e0
e1 @?= x
testRealFloatBinarySimplification :: TestTree
testRealFloatBinarySimplification =
testCase "real float binary simplification" $
withSym FloatRealRepr $ \sym -> do
x <- freshFloatConstant sym (userSymbol' "x") SingleFloatRepr
e0 <- iFloatToBinary sym SingleFloatRepr x
e1 <- iFloatFromBinary sym SingleFloatRepr e0
e1 @?= x
testFloatCastSimplification :: TestTree
testFloatCastSimplification = testCase "float cast simplification" $
withSym FloatIEEERepr $ \sym -> do
x <- freshConstant sym (userSymbol' "x") floatSingleType
e0 <- floatCast sym floatDoublePrecision RNE x
e1 <- floatCast sym floatSinglePrecision RNE e0
e1 @?= x
testFloatCastNoSimplification :: TestTree
testFloatCastNoSimplification = testCase "float cast no simplification" $
withSym FloatIEEERepr $ \sym -> do
x <- freshConstant sym (userSymbol' "x") floatDoubleType
e0 <- floatCast sym floatSinglePrecision RNE x
e1 <- floatCast sym floatDoublePrecision RNE e0
e1 /= x @? ""
testBVSelectShl :: TestTree
testBVSelectShl = testCase "select shl simplification" $
withSym FloatIEEERepr $ \sym -> do
x <- freshConstant sym (userSymbol' "x") knownRepr
e0 <- bvLit sym (knownNat @64) (BV.zero knownNat)
e1 <- bvConcat sym e0 x
e2 <- bvShl sym e1 =<< bvLit sym knownRepr (BV.mkBV knownNat 64)
e3 <- bvSelect sym (knownNat @64) (knownNat @64) e2
e3 @?= x
testBVSelectLshr :: TestTree
testBVSelectLshr = testCase "select lshr simplification" $
withSym FloatIEEERepr $ \sym -> do
x <- freshConstant sym (userSymbol' "x") knownRepr
e0 <- bvConcat sym x =<< bvLit sym (knownNat @64) (BV.zero knownNat)
e1 <- bvLshr sym e0 =<< bvLit sym knownRepr (BV.mkBV knownNat 64)
e2 <- bvSelect sym (knownNat @0) (knownNat @64) e1
e2 @?= x
testBVOrShlZext :: TestTree
testBVOrShlZext = testCase "bv or-shl-zext -> concat simplification" $
withSym FloatIEEERepr $ \sym -> do
x <- freshConstant sym (userSymbol' "x") (BaseBVRepr $ knownNat @8)
y <- freshConstant sym (userSymbol' "y") (BaseBVRepr $ knownNat @8)
e0 <- bvZext sym (knownNat @16) x
e1 <- bvShl sym e0 =<< bvLit sym knownRepr (BV.mkBV knownNat 8)
e2 <- bvZext sym (knownNat @16) y
e3 <- bvOrBits sym e1 e2
show e3 @?= "bvConcat cx@0:bv cy@1:bv"
e4 <- bvOrBits sym e2 e1
show e4 @?= show e3
arrayCopyTest :: TestTree
arrayCopyTest = testCase "arrayCopy" $ withZ3 $ \sym s -> do
a <- freshConstant sym (userSymbol' "a") (BaseArrayRepr (Ctx.singleton (BaseBVRepr $ knownNat @64)) (BaseBVRepr $ knownNat @8))
b <- freshConstant sym (userSymbol' "b") knownRepr
i <- freshConstant sym (userSymbol' "i") (BaseBVRepr $ knownNat @64)
j <- freshConstant sym (userSymbol' "j") knownRepr
k <- freshConstant sym (userSymbol' "k") knownRepr
n <- freshConstant sym (userSymbol' "n") knownRepr
copy_a_i_b_j_n <- arrayCopy sym a i b j n
add_i_k <- bvAdd sym i k
copy_a_i_b_j_n_at_add_i_k <- arrayLookup sym copy_a_i_b_j_n (Ctx.singleton add_i_k)
add_j_k <- bvAdd sym j k
b_at_add_j_k <- arrayLookup sym b (Ctx.singleton add_j_k)
assume (sessionWriter s) =<< bvUle sym i =<< bvLit sym knownRepr (BV.mkBV knownNat 1024)
assume (sessionWriter s) =<< bvUle sym j =<< bvLit sym knownRepr (BV.mkBV knownNat 1024)
assume (sessionWriter s) =<< bvUle sym n =<< bvLit sym knownRepr (BV.mkBV knownNat 1024)
assume (sessionWriter s) =<< bvNe sym copy_a_i_b_j_n_at_add_i_k b_at_add_j_k
runCheckSat s $ \res -> isSat res @? "sat"
assume (sessionWriter s) =<< bvUlt sym k n
runCheckSat s $ \res -> isUnsat res @? "unsat"
arraySetTest :: TestTree
arraySetTest = testCase "arraySet" $ withZ3 $ \sym s -> do
a <- freshConstant sym (userSymbol' "a") knownRepr
i <- freshConstant sym (userSymbol' "i") (BaseBVRepr $ knownNat @64)
j <- freshConstant sym (userSymbol' "j") knownRepr
n <- freshConstant sym (userSymbol' "n") knownRepr
v <- freshConstant sym (userSymbol' "v") (BaseBVRepr $ knownNat @8)
set_a_i_v_n <- arraySet sym a i v n
add_i_j <- bvAdd sym i j
set_a_i_v_n_at_add_i_j <- arrayLookup sym set_a_i_v_n (Ctx.singleton add_i_j)
assume (sessionWriter s) =<< bvUle sym i =<< bvLit sym knownRepr (BV.mkBV knownNat 1024)
assume (sessionWriter s) =<< bvUle sym n =<< bvLit sym knownRepr (BV.mkBV knownNat 1024)
assume (sessionWriter s) =<< bvNe sym v set_a_i_v_n_at_add_i_j
runCheckSat s $ \res -> isSat res @? "sat"
assume (sessionWriter s) =<< bvUlt sym j n
runCheckSat s $ \res -> isUnsat res @? "unsat"
arrayCopySetTest :: TestTree
arrayCopySetTest = testCase "arrayCopy/arraySet" $ withZ3 $ \sym s -> do
a <- freshConstant sym (userSymbol' "a") knownRepr
i <- freshConstant sym (userSymbol' "i") (BaseBVRepr $ knownNat @64)
n <- freshConstant sym (userSymbol' "n") knownRepr
v <- freshConstant sym (userSymbol' "v") (BaseBVRepr $ knownNat @8)
const_v <- constantArray sym (Ctx.singleton (BaseBVRepr $ knownNat @64)) v
z <- bvLit sym knownRepr $ BV.mkBV knownNat 0
copy_a_i_v_n <- arrayCopy sym a i const_v z n
set_a_i_v_n <- arraySet sym a i v n
assume (sessionWriter s) =<< bvUle sym i =<< bvLit sym knownRepr (BV.mkBV knownNat 1024)
assume (sessionWriter s) =<< bvUle sym n =<< bvLit sym knownRepr (BV.mkBV knownNat 1024)
p <- notPred sym =<< arrayEq sym copy_a_i_v_n set_a_i_v_n
assume (sessionWriter s) p
runCheckSat s $ \res -> isUnsat res @? "unsat"
-- | An uninterpreted function must stay congruent across queries on the same
-- online session: @x = y /\ f x /= f y@ is unsatisfiable, and remains so when
-- the identical query is issued a second time (the function declaration must
-- survive the first check).
testUninterpretedFunctionScope :: TestTree
testUninterpretedFunctionScope = testCase "uninterpreted function scope" $
  withOnlineZ3 $ \sym s -> do
    fn <- freshTotalUninterpFn sym (userSymbol' "f") knownRepr BaseIntegerRepr
    x <- freshConstant sym (userSymbol' "x") BaseIntegerRepr
    y <- freshConstant sym (userSymbol' "y") BaseIntegerRepr
    e0 <- applySymFn sym fn (Ctx.empty Ctx.:> x)
    e1 <- applySymFn sym fn (Ctx.empty Ctx.:> y)
    p0 <- intEq sym x y
    p1 <- notPred sym =<< intEq sym e0 e1
    p2 <- andPred sym p0 p1
    res1 <- checkSatisfiable s "test" p2
    isUnsat res1 @? "unsat"
    -- Second identical query: f must still be declared in the solver.
    res2 <- checkSatisfiable s "test" p2
    isUnsat res2 @? "unsat"
-- | Build a 32-bit vector whose every bit is a nested three-way boolean ite
-- over fresh constants, then ask the solver to set bit 0.  Exercises that
-- deeply nested ites translate to SMTLib without blowing up, and the result
-- is satisfiable.
testBVIteNesting :: TestTree
testBVIteNesting = testCase "nested bitvector ites" $ withZ3 $ \sym s -> do
  bv0 <- bvLit sym (knownNat @32) (BV.zero knownNat)
  let setSymBit bv idx = do
        -- Three selectors and four candidate bit values per position.
        c1 <- freshConstant sym (userSymbol' ("c1_" ++ show idx)) knownRepr
        c2 <- freshConstant sym (userSymbol' ("c2_" ++ show idx)) knownRepr
        c3 <- freshConstant sym (userSymbol' ("c3_" ++ show idx)) knownRepr
        tt1 <- freshConstant sym (userSymbol' ("tt1_" ++ show idx)) knownRepr
        tt2 <- freshConstant sym (userSymbol' ("tt2_" ++ show idx)) knownRepr
        tt3 <- freshConstant sym (userSymbol' ("tt3_" ++ show idx)) knownRepr
        tt4 <- freshConstant sym (userSymbol' ("tt4_" ++ show idx)) knownRepr
        ite1 <- itePred sym c1 tt1 tt2
        ite2 <- itePred sym c2 tt3 tt4
        ite3 <- itePred sym c3 ite1 ite2
        bvSet sym bv idx ite3
  bv1 <- foldlM setSymBit bv0 [0..31]
  p <- testBitBV sym 0 bv1
  assume (sessionWriter s) p
  runCheckSat s $ \res -> isSat res @? "sat"
-- | Rotations by literal amounts on a 32-bit value: rotating left by a total
-- of 32 bits (8+16+8) is the identity, as is rotating right by 24 after
-- rotating left by 24; rotating right by 28 instead leaves a net rotation,
-- so disagreement with the original is satisfiable.
testRotate1 :: TestTree
testRotate1 = testCase "rotate test1" $ withOnlineZ3 $ \sym s -> do
  bv <- freshConstant sym (userSymbol' "bv") (BaseBVRepr (knownNat @32))
  bv1 <- bvRol sym bv =<< bvLit sym knownNat (BV.mkBV knownNat 8)
  bv2 <- bvRol sym bv1 =<< bvLit sym knownNat (BV.mkBV knownNat 16)
  bv3 <- bvRol sym bv2 =<< bvLit sym knownNat (BV.mkBV knownNat 8)   -- net rol 32 == id
  bv4 <- bvRor sym bv2 =<< bvLit sym knownNat (BV.mkBV knownNat 24)  -- undoes rol 24
  bv5 <- bvRor sym bv2 =<< bvLit sym knownNat (BV.mkBV knownNat 28)  -- net rol -4
  res <- checkSatisfiable s "test" =<< notPred sym =<< bvEq sym bv bv3
  isUnsat res @? "unsat1"
  res1 <- checkSatisfiable s "test" =<< notPred sym =<< bvEq sym bv bv4
  isUnsat res1 @? "unsat2"
  res2 <- checkSatisfiable s "test" =<< notPred sym =<< bvEq sym bv bv5
  isSat res2 @? "sat"
-- | Rotating left then right by the same /symbolic/ amount on a 32-bit value:
-- since 32 is a power of two, the builder simplifies @ror (rol bv amt) amt@
-- back to @bv@ syntactically, and the solver confirms the equality; a
-- rotation by a different literal amount remains distinguishable.
testRotate2 :: TestTree
testRotate2 = testCase "rotate test2" $ withOnlineZ3 $ \sym s -> do
  bv <- freshConstant sym (userSymbol' "bv") (BaseBVRepr (knownNat @32))
  amt <- freshConstant sym (userSymbol' "amt") (BaseBVRepr (knownNat @32))
  bv1 <- bvRol sym bv amt
  bv2 <- bvRor sym bv1 amt
  bv3 <- bvRol sym bv =<< bvLit sym knownNat (BV.mkBV knownNat 20)
  -- The round-trip rotation must simplify away entirely.
  bv == bv2 @? "syntactic equality"
  res1 <- checkSatisfiable s "test" =<< notPred sym =<< bvEq sym bv bv2
  isUnsat res1 @? "unsat"
  res2 <- checkSatisfiable s "test" =<< notPred sym =<< bvEq sym bv bv3
  isSat res2 @? "sat"
-- | Same round-trip rotation as 'testRotate2' but at width 7: the syntactic
-- simplification does not fire for non-power-of-two widths, yet the solver
-- still proves the semantic equality.
testRotate3 :: TestTree
testRotate3 = testCase "rotate test3" $ withOnlineZ3 $ \sym s -> do
  bv <- freshConstant sym (userSymbol' "bv") (BaseBVRepr (knownNat @7))
  amt <- freshConstant sym (userSymbol' "amt") (BaseBVRepr (knownNat @7))
  bv1 <- bvRol sym bv amt
  bv2 <- bvRor sym bv1 amt
  bv3 <- bvRol sym bv =<< bvLit sym knownNat (BV.mkBV knownNat 3)
  -- Note, because 7 is not a power of two, this simplification doesn't quite
  -- work out... it would probably be significant work to make it do so.
  -- bv == bv2 @? "syntactic equality"
  res1 <- checkSatisfiable s "test" =<< notPred sym =<< bvEq sym bv bv2
  isUnsat res1 @? "unsat"
  res2 <- checkSatisfiable s "test" =<< notPred sym =<< bvEq sym bv bv3
  isSat res2 @? "sat"
-- | Symbols containing a prime character (@x'@, @y'@) must be escaped so Z3
-- accepts them; a simple satisfiable constraint over them suffices to check.
testSymbolPrimeCharZ3 :: TestTree
testSymbolPrimeCharZ3 = testCase "z3 symbol prime (') char" $
  withZ3 $ \sym s -> do
    x <- freshConstant sym (userSymbol' "x'") knownRepr
    y <- freshConstant sym (userSymbol' "y'") knownRepr
    p <- intLt sym x y
    assume (sessionWriter s) p
    runCheckSat s $ \res -> isSat res @? "sat"
-- | Run an action and succeed only if it throws any exception; if the action
-- completes normally, fail the surrounding HUnit test.
expectFailure :: IO a -> IO ()
expectFailure f =
  try @SomeException f >>=
    either (const (return ())) (const (assertFailure "expectFailure"))
-- | Bound variables may only be treated as free constants inside an
-- 'inNewFrameWithVars' scope that declares them; using them outside that
-- scope must raise an exception.
testBoundVarAsFree :: TestTree
testBoundVarAsFree = testCase "boundvarasfree" $ withOnlineZ3 $ \sym s -> do
  x <- freshBoundVar sym (userSymbol' "x") BaseBoolRepr
  y <- freshBoundVar sym (userSymbol' "y") BaseBoolRepr
  pz <- freshConstant sym (userSymbol' "pz") BaseBoolRepr
  let px = varExpr sym x
  let py = varExpr sym y
  -- Bound vars used outside any declaring frame: both must fail.
  expectFailure $ checkSatisfiable s "test" px
  expectFailure $ checkSatisfiable s "test" py
  -- An ordinary free constant is fine.
  checkSatisfiable s "test" pz >>= \res -> isSat res @? "sat"
  inNewFrameWithVars s [Some x] $ do
    -- x is declared in this frame; y still is not.
    checkSatisfiable s "test" px >>= \res -> isSat res @? "sat"
    expectFailure $ checkSatisfiable s "test" py
  -- Outside the scope of inNewFrameWithVars we can no longer
  -- use the bound variable as free
  expectFailure $ checkSatisfiable s "test" px
  expectFailure $ checkSatisfiable s "test" py
-- | Verify the defining properties of the real-to-integer rounding operations
-- against the solver: error bounds (floor/ceiling/truncate within 1, the two
-- rounding modes within 1/2), direction (floor below, ceiling above),
-- magnitude (truncation never grows the absolute value), round-away ties,
-- and round-to-even ties.  Each property is checked by showing its negation
-- unsatisfiable.
roundingTest ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
roundingTest sym solver =
  do r <- freshConstant sym (userSymbol' "r") BaseRealRepr
     -- Shared driver: |r - round(r)| must satisfy errOp for every r.
     let runErrTest nm op errOp =
           do diff <- realAbs sym =<< realSub sym r =<< integerToReal sym =<< op sym r
              p' <- notPred sym =<< errOp diff
              res <- checkSatisfiable solver nm p'
              isUnsat res @? nm
     runErrTest "floor" realFloor (\diff -> realLt sym diff =<< realLit sym 1)
     runErrTest "ceiling" realCeil (\diff -> realLt sym diff =<< realLit sym 1)
     runErrTest "trunc" realTrunc (\diff -> realLt sym diff =<< realLit sym 1)
     runErrTest "rna" realRound (\diff -> realLe sym diff =<< realLit sym 0.5)
     runErrTest "rne" realRoundEven (\diff -> realLe sym diff =<< realLit sym 0.5)
     -- floor test
     do ri <- integerToReal sym =<< realFloor sym r
        p <- realLe sym ri r
        res <- checkSatisfiable solver "floorTest" =<< notPred sym p
        isUnsat res @? "floorTest"
     -- ceiling test
     do ri <- integerToReal sym =<< realCeil sym r
        p <- realLe sym r ri
        res <- checkSatisfiable solver "ceilingTest" =<< notPred sym p
        isUnsat res @? "ceilingTest"
     -- truncate test
     do ri <- integerToReal sym =<< realTrunc sym r
        rabs <- realAbs sym r
        riabs <- realAbs sym ri
        p <- realLe sym riabs rabs
        res <- checkSatisfiable solver "truncateTest" =<< notPred sym p
        isUnsat res @? "truncateTest"
     -- round away test: on a tie, the result never has smaller magnitude,
     -- so "tie and |result| > |r|" contradicts rounding away being at most
     -- half a unit; assert the tie cannot coexist with NOT rounding away.
     do ri <- integerToReal sym =<< realRound sym r
        diff <- realAbs sym =<< realSub sym r ri
        ptie <- realEq sym diff =<< realLit sym 0.5
        rabs <- realAbs sym r
        iabs <- realAbs sym ri
        plarge <- realGt sym iabs rabs
        res <- checkSatisfiable solver "rnaTest" =<<
          andPred sym ptie =<< notPred sym plarge
        isUnsat res @? "rnaTest"
     -- round-to-even test: on a tie the chosen integer must be even.
     do i <- realRoundEven sym r
        ri <- integerToReal sym i
        diff <- realAbs sym =<< realSub sym r ri
        ptie <- realEq sym diff =<< realLit sym 0.5
        ieven <- intDivisible sym i 2
        res <- checkSatisfiable solver "rneTest" =<<
          andPred sym ptie =<< notPred sym ieven
        isUnsat res @? "rneTest"
-- | The empty struct type has exactly one inhabitant, so an uninterpreted
-- function applied to a fresh empty-struct constant and to the literal empty
-- struct must agree: equality is sat and disequality unsat.
zeroTupleTest ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
zeroTupleTest sym solver =
  do u <- freshConstant sym (userSymbol' "u") (BaseStructRepr Ctx.Empty)
     s <- mkStruct sym Ctx.Empty
     f <- freshTotalUninterpFn sym (userSymbol' "f")
          (Ctx.Empty Ctx.:> BaseStructRepr Ctx.Empty)
          BaseBoolRepr
     fu <- applySymFn sym f (Ctx.Empty Ctx.:> u)
     fs <- applySymFn sym f (Ctx.Empty Ctx.:> s)
     p <- eqPred sym fu fs
     res1 <- checkSatisfiable solver "test" p
     isSat res1 @? "sat"
     -- u and s are necessarily equal, so f u /= f s is impossible.
     res2 <- checkSatisfiable solver "test" =<< notPred sym p
     isUnsat res2 @? "unsat"
-- | A one-field boolean struct has two inhabitants, so a fresh constant may
-- or may not equal the literal @(False)@ struct: both @f u = f s@ and its
-- negation must be satisfiable.
oneTupleTest ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
oneTupleTest sym solver =
  do u <- freshConstant sym (userSymbol' "u") (BaseStructRepr (Ctx.Empty Ctx.:> BaseBoolRepr))
     s <- mkStruct sym (Ctx.Empty Ctx.:> backendPred sym False)
     f <- freshTotalUninterpFn sym (userSymbol' "f")
          (Ctx.Empty Ctx.:> BaseStructRepr (Ctx.Empty Ctx.:> BaseBoolRepr))
          BaseBoolRepr
     fu <- applySymFn sym f (Ctx.Empty Ctx.:> u)
     fs <- applySymFn sym f (Ctx.Empty Ctx.:> s)
     p <- eqPred sym fu fs
     res1 <- checkSatisfiable solver "test" p
     isSat res1 @? "sat"
     res2 <- checkSatisfiable solver "test" =<< notPred sym p
     isSat res2 @? "neg sat"
-- | Structural equality on a two-field (bool, real) struct: a fresh constant
-- may equal the literal @(True, 42.0)@ or not, so both the equality and its
-- negation are satisfiable.  Exercises pair encoding in SMTLib.
pairTest ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
pairTest sym solver =
  do u <- freshConstant sym (userSymbol' "u") (BaseStructRepr (Ctx.Empty Ctx.:> BaseBoolRepr Ctx.:> BaseRealRepr))
     r <- realLit sym 42.0
     s <- mkStruct sym (Ctx.Empty Ctx.:> backendPred sym True Ctx.:> r)
     p <- structEq sym u s
     res1 <- checkSatisfiable solver "test" p
     isSat res1 @? "sat"
     res2 <- checkSatisfiable solver "test" =<< notPred sym p
     isSat res2 @? "neg sat"
-- | Concatenate @x ++ y ++ z ++ w@ where @y@ is symbolic and the literals
-- total 21 characters.  Constraining the overall length to 25 is satisfiable
-- (@y@ has length 4) and the model must keep the literal prefix and suffix;
-- constraining it to 20 is unsatisfiable (@y@ would need negative length).
--
-- NOTE: the three literal bindings were reconstructed from the identical
-- literals used in 'stringTest2' and the recorded lengths 9/7/5.
stringTest1 ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
stringTest1 sym solver = withChecklist "string1" $
  do let bsx = "asdf\nasdf"   -- length 9
     let bsz = "qwe\x1c\&rty" -- length 7
     let bsw = "QQ\"QQ"       -- length 5
     x <- stringLit sym (UnicodeLiteral bsx)
     y <- freshConstant sym (userSymbol' "str") (BaseStringRepr UnicodeRepr)
     z <- stringLit sym (UnicodeLiteral bsz)
     w <- stringLit sym (UnicodeLiteral bsw)
     s <- stringConcat sym x =<< stringConcat sym y z
     s' <- stringConcat sym s w
     l <- stringLength sym s'
     n <- intLit sym 25
     p <- intEq sym n l
     checkSatisfiableWithModel solver "test" p $ \case
       Sat fn ->
         do UnicodeLiteral slit <- groundEval fn s'
            llit <- groundEval fn n
            -- The model string must have the requested length and keep the
            -- literal parts in place around the symbolic middle.
            slit `checkValues`
              (Empty
              :> Val "model string length" (fromIntegral . Text.length) llit
              :> Got "expected prefix" (Text.isPrefixOf bsx)
              :> Got "expected suffix" (Text.isSuffixOf (bsz <> bsw))
              )
       _ -> fail "expected satisfiable model"
     -- Total length 20 < 21 literal characters: impossible.
     p2 <- intEq sym l =<< intLit sym 20
     checkSatisfiableWithModel solver "test" p2 $ \case
       Unsat () -> return ()
       _ -> fail "expected unsatisfiable model"
-- | Solve @x ++ a = b ++ (ite q z w)@ with @|ite q z w| < 7@: since @z@ has
-- length 7 and @w@ length 5, the length bound forces the ite to choose @w@,
-- i.e. the model must assign @q = False@, and both sides must evaluate to
-- the same string.
stringTest2 ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
stringTest2 sym solver = withChecklist "string2" $
  do let bsx = "asdf\nasdf"
     let bsz = "qwe\x1c\&rty"
     let bsw = "QQ\"QQ"
     q <- freshConstant sym (userSymbol' "q") BaseBoolRepr
     x <- stringLit sym (UnicodeLiteral bsx)
     z <- stringLit sym (UnicodeLiteral bsz)
     w <- stringLit sym (UnicodeLiteral bsw)
     a <- freshConstant sym (userSymbol' "stra") (BaseStringRepr UnicodeRepr)
     b <- freshConstant sym (userSymbol' "strb") (BaseStringRepr UnicodeRepr)
     ax <- stringConcat sym x a
     zw <- stringIte sym q z w
     bzw <- stringConcat sym b zw
     l <- stringLength sym zw
     n <- intLit sym 7
     p1 <- stringEq sym ax bzw
     p2 <- intLt sym l n
     p <- andPred sym p1 p2
     checkSatisfiableWithModel solver "test" p $ \case
       Sat fn ->
         do axlit <- groundEval fn ax
            bzwlit <- groundEval fn bzw
            qlit <- groundEval fn q
            TC.check "correct ite" (False ==) qlit
            TC.check "equal strings" (axlit ==) bzwlit
       _ -> fail "expected satisfable model"
-- | Prefix/suffix/contains over a fixed 12-character literal: @a@ is forced
-- to be its length-9 prefix, @b@ its length-9 suffix, and @c@ a length-6
-- infix containing neither @q@ nor @Q@ — which pins @c@ to the characters at
-- offsets 1..6.  The ground model is checked against those exact slices.
stringTest3 ::
  (OnlineSolver solver) =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
stringTest3 sym solver = withChecklist "string3" $
  do let bsz = "qwe\x1c\&rtyQQ\"QQ"
     z <- stringLit sym (UnicodeLiteral bsz)
     a <- freshConstant sym (userSymbol' "stra") (BaseStringRepr UnicodeRepr)
     b <- freshConstant sym (userSymbol' "strb") (BaseStringRepr UnicodeRepr)
     c <- freshConstant sym (userSymbol' "strc") (BaseStringRepr UnicodeRepr)
     pfx <- stringIsPrefixOf sym a z
     sfx <- stringIsSuffixOf sym b z
     cnt1 <- stringContains sym z c
     -- Exclude both the leading 'q' and the trailing 'Q's from c.
     cnt2 <- notPred sym =<< stringContains sym c =<< stringLit sym (UnicodeLiteral "Q")
     cnt3 <- notPred sym =<< stringContains sym c =<< stringLit sym (UnicodeLiteral "q")
     cnt <- andPred sym cnt1 =<< andPred sym cnt2 cnt3
     lena <- stringLength sym a
     lenb <- stringLength sym b
     lenc <- stringLength sym c
     n <- intLit sym 9
     rnga <- intEq sym lena n
     rngb <- intEq sym lenb n
     rngc <- intEq sym lenc =<< intLit sym 6
     rng <- andPred sym rnga =<< andPred sym rngb rngc
     p <- andPred sym pfx =<<
          andPred sym sfx =<<
          andPred sym cnt rng
     checkSatisfiableWithModel solver "test" p $ \case
       Sat fn ->
         do alit <- fromUnicodeLit <$> groundEval fn a
            blit <- fromUnicodeLit <$> groundEval fn b
            clit <- fromUnicodeLit <$> groundEval fn c
            bsz `checkValues`
              (Empty
              :> Val "correct prefix" (Text.take 9) alit
              :> Val "correct suffix" (Text.reverse . Text.take 9 . Text.reverse) blit
              :> Val "correct middle" (Text.take 6 . Text.drop 1) clit
              )
       _ -> fail "expected satisfable model"
-- | 'stringIndexOf' semantics: when a non-negative index is found starting
-- from position 5, @"str"@ must actually occur there; when additionally no
-- occurrence exists after position 5 (and @a@ is long enough for the query
-- to be meaningful), the result must be -1.
stringTest4 ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
stringTest4 sym solver = withChecklist "string4" $
  do let bsx = "str"
     x <- stringLit sym (UnicodeLiteral bsx)
     a <- freshConstant sym (userSymbol' "stra") (BaseStringRepr UnicodeRepr)
     i <- stringIndexOf sym a x =<< intLit sym 5
     zero <- intLit sym 0
     p <- intLe sym zero i
     checkSatisfiableWithModel solver "test" p $ \case
       Sat fn ->
         do alit <- fromUnicodeLit <$> groundEval fn a
            ilit <- groundEval fn i
            TC.check "correct index" (Text.isPrefixOf bsx) (Text.drop (fromIntegral ilit) alit)
            TC.check "index large enough" (>= 5) ilit
       _ -> fail "expected satisfable model"
     -- Negated case: no occurrence at or after index 5, but a is at least 10
     -- characters, so the search is well within bounds.
     np <- notPred sym p
     lena <- stringLength sym a
     fv <- intLit sym 10
     plen <- intLe sym fv lena
     q <- andPred sym np plen
     checkSatisfiableWithModel solver "test" q $ \case
       Sat fn ->
         do alit <- fromUnicodeLit <$> groundEval fn a
            ilit <- groundEval fn i
            TC.check "substring not found" (not . Text.isInfixOf bsx) (Text.drop 5 alit)
            TC.check "expected neg one index" (== (-1)) ilit
       _ -> fail "expected satisfable model"
-- | 'stringSubstring' semantics: require @substring a off len == "qwerty"@
-- with @off >= 5@ and @|a| >= 20@, then check the ground model by slicing
-- the evaluated string with 'Text.take'/'Text.drop' at the evaluated offset
-- and length.
stringTest5 ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
stringTest5 sym solver = withChecklist "string5" $
  do a <- freshConstant sym (userSymbol' "a") (BaseStringRepr UnicodeRepr)
     off <- freshConstant sym (userSymbol' "off") BaseIntegerRepr
     len <- freshConstant sym (userSymbol' "len") BaseIntegerRepr
     n5 <- intLit sym 5
     n20 <- intLit sym 20
     let qlit = "qwerty"
     sub <- stringSubstring sym a off len
     p1 <- stringEq sym sub =<< stringLit sym (UnicodeLiteral qlit)
     p2 <- intLe sym n5 off
     p3 <- intLe sym n20 =<< stringLength sym a
     p <- andPred sym p1 =<< andPred sym p2 p3
     checkSatisfiableWithModel solver "test" p $ \case
       Sat fn ->
         do alit <- fromUnicodeLit <$> groundEval fn a
            offlit <- groundEval fn off
            lenlit <- groundEval fn len
            let q = Text.take (fromIntegral lenlit) (Text.drop (fromIntegral offlit) alit)
            TC.check "correct substring" (qlit ==) q
       _ -> fail "expected satisfable model"
-- This test verifies that we can correctly round-trip the
-- '\' character. It is a bit of a corner case, since it
-- is involved in the codepoint escape sequences '\u{abcd}'.
-- | Round-trip the single character @\\@ through the solver: constrain a
-- length-1 string to equal the backslash literal and check the ground model
-- returns exactly that literal (backslash is the tricky case because it
-- introduces SMTLib escape sequences).
stringTest6 ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
stringTest6 sym solver = withChecklist "string6" $
  do let conn = solverConn solver
     x <- freshConstant sym (safeSymbol "x") (BaseStringRepr UnicodeRepr)
     l <- stringLength sym x
     intLit sym 1 >>= isEq sym l >>= assume conn
     stringLit sym (UnicodeLiteral (Text.pack "\\")) >>= isEq sym x >>= assume conn
     checkAndGetModel solver "test" >>= \case
       Sat ge -> do
         v <- groundEval ge x
         TC.check "correct string" (v ==) (UnicodeLiteral (Text.pack "\\"))
       _ -> fail "unsatisfiable"
-- This test asks the solver to produce a sequence of 200 unique characters.
-- This helps to ensure that we can correctly receive and send back to the
-- solver enough characters to exhaust the standard printable ASCII sequence,
-- which ensures that we are testing nontrivial escape sequences.
--
-- We don't verify that any particular string is returned because the solvers
-- make different choices about what characters to return.
-- | Ask the solver for 200 pairwise-distinct single-character strings via
-- 'getChars'.  Exhausting the printable ASCII range this way forces
-- nontrivial character escape sequences through the round-trip.  We only
-- count the characters, since solvers differ on which ones they pick.
stringTest7 ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
stringTest7 sym solver = withChecklist "string7" $
  -- Fixed: the checklist was mislabeled "string6" (copy-paste from the
  -- previous test), which made failures report against the wrong test.
  do chars <- getChars sym solver 200
     TC.check "correct number of characters" (length chars ==) 200
-- | Enumerate up to @bound@ distinct single characters from the solver: keep
-- a length-1 string variable, and after each model, assert the variable
-- differs from the value just seen so the next check yields a fresh
-- character.  Stops early (returning fewer characters) once no model exists.
getChars ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  Integer ->
  IO [Char]
getChars sym solver bound = do
  let conn = solverConn solver
  -- Create string var and constrain its length to 1
  x <- freshConstant sym (safeSymbol "x") (BaseStringRepr UnicodeRepr)
  l <- stringLength sym x
  intLit sym 1 >>= isEq sym l >>= assume conn
  -- Recursively generate characters
  let getModelsRecursive n
        | n >= bound = return ""
        | otherwise =
            checkAndGetModel solver "test" >>= \case
              Sat ge -> do
                v <- groundEval ge x
                -- Exclude value
                stringLit sym v >>= isEq sym x >>= notPred sym >>= assume conn
                let c = Text.head $ fromUnicodeLit v
                cs <- getModelsRecursive (n+1)
                return (c:cs)
              _ -> return []
  cs <- getModelsRecursive 0
  return cs
-- | Arrays with a multi-dimensional (two boolean) index: update the array at
-- @(false, false)@ and look up @(true, true)@ — the lookup is unconstrained
-- by the update, so a model must exist.  Exercises multi-index array
-- encoding.
multidimArrayTest ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
multidimArrayTest sym solver =
  do f <- freshConstant sym (userSymbol' "a") $
          BaseArrayRepr (Ctx.empty Ctx.:> BaseBoolRepr Ctx.:> BaseBoolRepr) BaseBoolRepr
     f' <- arrayUpdate sym f (Ctx.empty Ctx.:> falsePred sym Ctx.:> falsePred sym) (falsePred sym)
     p <- arrayLookup sym f' (Ctx.empty Ctx.:> truePred sym Ctx.:> truePred sym)
     checkSatisfiable solver "test" p >>= \case
       Sat _ -> return ()
       _ -> fail "expected satisfiable model"
-- | Universal quantification: @p = forall y. x \/ y@ is equivalent to @x@,
-- so any model of @p@ must assign @x = True@ and any model of @not p@ must
-- assign @x = False@.
forallTest ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
forallTest sym solver =
  do x <- freshConstant sym (userSymbol' "x") BaseBoolRepr
     y <- freshBoundVar sym (userSymbol' "y") BaseBoolRepr
     p <- forallPred sym y =<< orPred sym x (varExpr sym y)
     np <- notPred sym p
     checkSatisfiableWithModel solver "test" p $ \case
       Sat fn ->
         do b <- groundEval fn x
            (b == True) @? "true result"
       _ -> fail "expected satisfible model"
     checkSatisfiableWithModel solver "test" np $ \case
       Sat fn ->
         do b <- groundEval fn x
            (b == False) @? "false result"
       _ -> fail "expected satisfible model"
-- | Quantifying over a struct-typed variable: @exists v. true@ must be
-- satisfiable.  Checks that struct sorts are legal under binders in the
-- SMTLib translation.
binderTupleTest1 ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
binderTupleTest1 sym solver =
  do var <- freshBoundVar sym (safeSymbol "v")
            (BaseStructRepr (Ctx.Empty Ctx.:> BaseBoolRepr))
     p0 <- existsPred sym var (truePred sym)
     res <- checkSatisfiable solver "test" p0
     isSat res @? "sat"
-- | @forall x. x = x@ over a struct-typed variable is valid, so its negation
-- must be unsatisfiable.  Checks structural equality under a universal
-- binder.
binderTupleTest2 ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
binderTupleTest2 sym solver =
  do x <- freshBoundVar sym (userSymbol' "x")
          (BaseStructRepr (Ctx.Empty Ctx.:> BaseIntegerRepr Ctx.:> BaseBoolRepr))
     p <- forallPred sym x =<< structEq sym (varExpr sym x) (varExpr sym x)
     np <- notPred sym p
     checkSatisfiableWithModel solver "test" np $ \case
       Unsat _ -> return ()
       _ -> fail "expected UNSAT"
-- | A regression test for #182.
-- | Regression test for issue #182: evaluating an array lookup built *after*
-- the model was obtained (a fresh but structurally identical term) must
-- still produce the value the model assigned.
issue182Test ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
issue182Test sym solver = do
  let w = knownNat @64
  arr <- freshConstant sym (safeSymbol "arr")
         (BaseArrayRepr (Ctx.Empty Ctx.:> BaseIntegerRepr)
                        (BaseBVRepr w))
  idxInt <- intLit sym 0
  let idx = Ctx.Empty Ctx.:> idxInt
  -- Keep the lookup as an action so it can be rebuilt after solving.
  let arrLookup = arrayLookup sym arr idx
  elt <- arrLookup
  bvZero <- bvLit sym w (BV.zero w)
  p <- bvEq sym elt bvZero
  checkSatisfiableWithModel solver "test" p $ \case
    Sat fn ->
      do elt' <- arrLookup  -- fresh term, same lookup
         eltEval <- groundEval fn elt'
         (eltEval == BV.zero w) @? "non-zero result"
    _ -> fail "expected satisfible model"
-- | These tests simply ensure that no exceptions are raised.
testSolverInfo :: TestTree
testSolverInfo = testGroup "solver info queries" $
[ testCase "test get solver version" $ withOnlineZ3 $ \_ proc -> do
let conn = solverConn proc
getVersion conn
_ <- versionResult conn
pure ()
, testCase "test get solver name" $ withOnlineZ3 $ \_ proc -> do
let conn = solverConn proc
getName conn
nm <- nameResult conn
nm @?= "Z3"
]
-- | Check the solver-version-bound machinery: a lower bound of version 0
-- must be accepted by any Z3 that is actually installed.
testSolverVersion :: TestTree
testSolverVersion = testCase "test solver version bounds" $
  withOnlineZ3 $ \_ proc -> do
    let bnd = emptySolverBounds{ lower = Just $(ver "0") }
    checkSolverVersion' (Map.singleton "Z3" bnd) proc >> return ()
-- | The bitvector abstract domain should decide @(-zext x) - 1 > 256@
-- without a solver: for an 8-bit @x@ zero-extended to 16 bits, the
-- expression is always in the high range, so the comparison must fold to
-- the literal true predicate.
testBVDomainArithScale :: TestTree
testBVDomainArithScale = testCase "bv domain arith scale" $
  withSym FloatIEEERepr $ \sym -> do
    x <- freshConstant sym (userSymbol' "x") (BaseBVRepr $ knownNat @8)
    e0 <- bvZext sym (knownNat @16) x
    e1 <- bvNeg sym e0
    e2 <- bvSub sym e1 =<< bvLit sym knownRepr (BV.mkBV knownNat 1)
    e3 <- bvUgt sym e2 =<< bvLit sym knownRepr (BV.mkBV knownNat 256)
    e3 @?= truePred sym
-- | Byte-swapping the two-byte value 1 yields 256 (the low byte moves to the
-- high position), and the result must fold to a literal.
testBVSwap :: TestTree
testBVSwap = testCase "test bvSwap" $
  withSym FloatIEEERepr $ \sym -> do
    e0 <- bvSwap sym (knownNat @2) =<< bvLit sym knownRepr (BV.mkBV knownNat 1)
    e1 <- bvLit sym knownRepr (BV.mkBV knownNat 256)
    e0 @?= e1
-- | Reversing the bits of the 8-bit value 1 yields 128 (bit 0 moves to
-- bit 7), and the result must fold to a literal.
testBVBitreverse :: TestTree
testBVBitreverse = testCase "test bvBitreverse" $
  withSym FloatIEEERepr $ \sym -> do
    e0 <- bvBitreverse sym =<< bvLit sym (knownNat @8) (BV.mkBV knownNat 1)
    e1 <- bvLit sym knownRepr (BV.mkBV knownNat 128)
    e0 @?= e1
-- Test unsafeSetAbstractValue on a simple symbolic expression
-- | 'unsafeSetAbstractValue' on a simple (variable) expression: pinning the
-- abstract range of a fresh 8-bit constant to exactly 2 must be reflected by
-- 'unsignedBVBounds', and adding 1 must then constant-fold to 3.
testUnsafeSetAbstractValue1 :: TestTree
testUnsafeSetAbstractValue1 = testCase "test unsafeSetAbstractValue1" $
  withSym FloatIEEERepr $ \sym -> do
    let w = knownNat @8
    e1A <- freshConstant sym (userSymbol' "x1") (BaseBVRepr w)
    -- Claim (unsafely) that x1 is always exactly 2.
    let e1A' = unsafeSetAbstractValue (WUB.BVDArith (WUBA.range w 2 2)) e1A
    unsignedBVBounds e1A' @?= Just (2, 2)
    e1B <- bvAdd sym e1A' =<< bvLit sym w (BV.one w)
    case asBV e1B of
      Just bv -> bv @?= BV.mkBV w 3
      Nothing -> assertFailure $ unlines
        [ "unsafeSetAbstractValue doesn't work as expected for a"
        , "simple symbolic expression"
        ]
-- Test unsafeSetAbstractValue on a compound symbolic expression
-- | 'unsafeSetAbstractValue' on a compound expression (a sum of two
-- variables, then annotated): the pinned range must survive 'annotateTerm'
-- and still allow the subsequent addition to constant-fold.
testUnsafeSetAbstractValue2 :: TestTree
testUnsafeSetAbstractValue2 = testCase "test unsafeSetAbstractValue2" $
  withSym FloatIEEERepr $ \sym -> do
    let w = knownNat @8
    e2A <- freshConstant sym (userSymbol' "x2A") (BaseBVRepr w)
    e2B <- freshConstant sym (userSymbol' "x2B") (BaseBVRepr w)
    e2C <- bvAdd sym e2A e2B
    (_, e2C') <- annotateTerm sym $ unsafeSetAbstractValue (WUB.BVDArith (WUBA.range w 2 2)) e2C
    unsignedBVBounds e2C' @?= Just (2, 2)
    e2D <- bvAdd sym e2C' =<< bvLit sym w (BV.one w)
    case asBV e2D of
      Just bv -> bv @?= BV.mkBV w 3
      Nothing -> assertFailure $ unlines
        [ "unsafeSetAbstractValue doesn't work as expected for a"
        , "compound symbolic expression"
        ]
-- | Property test for 'WURB.resolveSymBV' under a given search strategy:
-- constrain an 8-bit value to a random interval @[lb, ub]@ and check the
-- resolver recovers exactly those bounds — as a concrete value when the
-- interval is a point, or as abstract-domain bounds otherwise.
testResolveSymBV :: WURB.SearchStrategy -> TestTree
testResolveSymBV searchStrat =
  testProperty ("test resolveSymBV (" ++ show (PP.pretty searchStrat) ++ ")") $
  H.property $ do
    let w = knownNat @8
    -- Random interval with lb <= ub.
    lb <- H.forAll $ HGen.word8 $ HRange.constant 0 maxBound
    ub <- H.forAll $ HGen.word8 $ HRange.constant lb maxBound
    rbv <- liftIO $ withYices $ \sym proc -> do
      bv <- freshConstant sym (safeSymbol "bv") knownRepr
      p1 <- bvUge sym bv =<< bvLit sym w (BV.mkBV w (toInteger lb))
      p2 <- bvUle sym bv =<< bvLit sym w (BV.mkBV w (toInteger ub))
      p3 <- andPred sym p1 p2
      assume (solverConn proc) p3
      WURB.resolveSymBV sym searchStrat w proc bv
    case rbv of
      WURB.BVConcrete bv -> do
        -- Only possible when the interval collapsed to a single value.
        let bv' = fromInteger $ BV.asUnsigned bv
        lb H.=== bv'
        ub H.=== bv'
      WURB.BVSymbolic bounds -> do
        let (lb', ub') = WUBA.ubounds bounds
        lb H.=== fromInteger lb'
        ub H.=== fromInteger ub'
----------------------------------------------------------------------
-- | Test-suite entry point: probe which solvers are installed, then assemble
-- per-solver groups.  Solver-specific groups are skipped (not failed) when
-- the solver binary is absent; individual tests are marked expected-failure
-- on solver versions known to lack required features.
main :: IO ()
main = do
  -- CI_TEST_LEVEL controls how verbosely solver versions are reported.
  testLevel <- TestLevel . fromMaybe "0" <$> lookupEnv "CI_TEST_LEVEL"
  let solverNames = SolverName <$> [ "cvc4", "cvc5", "yices", "z3" ]
  solvers <- reportSolverVersions testLevel id
             =<< (zip solverNames <$> mapM getSolverVersion solverNames)
  let z3Tests =
        -- Z3 before 4.8.11 escapes unicode strings differently; mark the
        -- affected string tests expected-failure there (or when the version
        -- could not be determined).
        let skipPre4_8_11 why =
              let shouldSkip = case lookup (SolverName "z3") solvers of
                    Just (SolverVersion v) -> any (`elem` [ "4.8.8", "4.8.9", "4.8.10" ]) $ words v
                    Nothing -> True
              in if shouldSkip then expectFailBecause why else id
            incompatZ3Strings = "unicode and string escaping not supported for older Z3 versions; upgrade to at least 4.8.11"
        in
        [
          testUninterpretedFunctionScope
        , testRotate1
        , testRotate2
        , testRotate3
        , testBoundVarAsFree
        , testSolverInfo
        , testSolverVersion
        , testFloatUnsat0
        , testFloatUnsat1
        , testFloatUnsat2
        , testFloatSat0
        , testFloatSat1
        , testFloatToBinary
        , testFloatFromBinary
        , testBVIteNesting
        , testSymbolPrimeCharZ3
        , testCase "Z3 0-tuple" $ withOnlineZ3 zeroTupleTest
        , testCase "Z3 1-tuple" $ withOnlineZ3 oneTupleTest
        , testCase "Z3 pair" $ withOnlineZ3 pairTest
        , testCase "Z3 forall binder" $ withOnlineZ3 forallTest
        , skipPre4_8_11 incompatZ3Strings $ testCase "Z3 string1" $ withOnlineZ3 stringTest1
        , testCase "Z3 string2" $ withOnlineZ3 stringTest2
        , skipPre4_8_11 incompatZ3Strings $ testCase "Z3 string3" $ withOnlineZ3 stringTest3
        , skipPre4_8_11 incompatZ3Strings $ testCase "Z3 string4" $ withOnlineZ3 stringTest4
        , skipPre4_8_11 incompatZ3Strings $ testCase "Z3 string5" $ withOnlineZ3 stringTest5
        , skipPre4_8_11 incompatZ3Strings $ testCase "Z3 string6" $ withOnlineZ3 stringTest6
          -- this test apparently passes on older Z3 despite the escaping changes...
        , testCase "Z3 string7" $ withOnlineZ3 stringTest7
        , testCase "Z3 binder tuple1" $ withOnlineZ3 binderTupleTest1
        , testCase "Z3 binder tuple2" $ withOnlineZ3 binderTupleTest2
        , testCase "Z3 rounding" $ withOnlineZ3 roundingTest
        , testCase "Z3 multidim array"$ withOnlineZ3 multidimArrayTest
        , testCase "Z3 #182 test case" $ withOnlineZ3 issue182Test
        , arrayCopyTest
        , arraySetTest
        , arrayCopySetTest
        ]
  let cvc4Tests =
        -- CVC4 1.7 lacks the newer string escaping; mark affected tests.
        let skipPre1_8 why =
              let shouldSkip = case lookup (SolverName "cvc4") solvers of
                    Just (SolverVersion v) -> any (`elem` [ "1.7" ]) $ words v
                    Nothing -> True
              in if shouldSkip then expectFailBecause why else id
            unsuppStrings = "unicode and string escaping not supported for older CVC4 versions; upgrade to at least 1.8"
        in
        [
          ignoreTestBecause "This test stalls the solver for some reason; line-buffering issue?" $
          testCase "CVC4 0-tuple" $ withCVC4 zeroTupleTest
        , testCase "CVC4 1-tuple" $ withCVC4 oneTupleTest
        , testCase "CVC4 pair" $ withCVC4 pairTest
        , testCase "CVC4 forall binder" $ withCVC4 forallTest
        , testCase "CVC4 string1" $ withCVC4 stringTest1
        , testCase "CVC4 string2" $ withCVC4 stringTest2
        , skipPre1_8 unsuppStrings $ testCase "CVC4 string3" $ withCVC4 stringTest3
        , testCase "CVC4 string4" $ withCVC4 stringTest4
        , testCase "CVC4 string5" $ withCVC4 stringTest5
        , skipPre1_8 unsuppStrings $ testCase "CVC4 string6" $ withCVC4 stringTest6
        , testCase "CVC4 string7" $ withCVC4 stringTest7
        , testCase "CVC4 binder tuple1" $ withCVC4 binderTupleTest1
        , testCase "CVC4 binder tuple2" $ withCVC4 binderTupleTest2
        , testCase "CVC4 rounding" $ withCVC4 roundingTest
        , testCase "CVC4 multidim array"$ withCVC4 multidimArrayTest
        , testCase "CVC4 #182 test case" $ withCVC4 issue182Test
        ]
  let yicesTests =
        [
          testResolveSymBV WURB.ExponentialSearch
        , testResolveSymBV WURB.BinarySearch
        , testCase "Yices 0-tuple" $ withYices zeroTupleTest
        , testCase "Yices 1-tuple" $ withYices oneTupleTest
        , testCase "Yices pair" $ withYices pairTest
        , testCase "Yices rounding" $ withYices roundingTest
        , testCase "Yices #182 test case" $ withYices issue182Test
        ]
  -- NOTE(review): cvc5Tests reuses cvc4Tests verbatim, so the "cvc5" group
  -- still invokes withCVC4 and carries "CVC4" labels — confirm intentional.
  let cvc5Tests = cvc4Tests
  let skipIfNotPresent nm = if SolverName nm `elem` (fst <$> solvers) then id
                            else fmap (ignoreTestBecause (nm <> " not present"))
  defaultMain $ testGroup "Tests" $
    [ testInterpretedFloatReal
    , testFloatUninterpreted
    , testInterpretedFloatIEEE
    , testFloatBinarySimplification
    , testRealFloatBinarySimplification
    , testFloatCastSimplification
    , testFloatCastNoSimplification
    , testBVSelectShl
    , testBVSelectLshr
    , testBVOrShlZext
    , testBVDomainArithScale
    , testBVSwap
    , testBVBitreverse
    , testUnsafeSetAbstractValue1
    , testUnsafeSetAbstractValue2
    ]
    <> (skipIfNotPresent "cvc4" cvc4Tests)
    <> (skipIfNotPresent "cvc5" cvc5Tests)
    <> (skipIfNotPresent "yices" yicesTests)
    <> (skipIfNotPresent "z3" z3Tests)
| null | https://raw.githubusercontent.com/GaloisInc/what4/79ad25c3a207503c8da36dd3ce9e8618f2704a60/what4/test/ExprBuilderSMTLib2.hs | haskell | # LANGUAGE ExplicitForAll #
# LANGUAGE GADTs #
# LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
debugOutputFiles = True
exists y . (x + 2.0) + (x + 2.0) < y
Floating point literal: 2.0
work out... it would probably be significant work to make it do so.
bv == bv2 @? "syntactic equality"
Outside the scope of inNewFrameWithVars we can no longer
use the bound variable as free
floor test
ceiling test
truncate test
round away test
round-to-even test
This test verifies that we can correctly round-trip the
'\' character. It is a bit of a corner case, since it
This helps to ensure that we can correclty recieve and send back to the
solver enough characters to exhaust the standard printable ASCII sequence,
which ensures that we are testing nontrivial escape sequences.
We don't verify that any particular string is returned because the solvers
make different choices about what characters to return.
Recursively generate characters
Exclude value
| These tests simply ensure that no exceptions are raised.
Test unsafeSetAbstractValue on a simple symbolic expression
Test unsafeSetAbstractValue on a compound symbolic expression
-------------------------------------------------------------------- | # LANGUAGE DataKinds #
# LANGUAGE ExistentialQuantification #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE LambdaCase #
# LANGUAGE PatternSynonyms #
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeApplications #
for instance
import ProbeSolvers
import Test.Tasty
import Test.Tasty.Checklist as TC
import Test.Tasty.ExpectedFailure
import Test.Tasty.Hedgehog.Alt
import Test.Tasty.HUnit
import Control.Exception (bracket, try, finally, SomeException)
import Control.Monad (void)
import Control.Monad.IO.Class (MonadIO(..))
import qualified Data.BitVector.Sized as BV
import Data.Foldable
import qualified Data.Map as Map
import Data.Maybe ( fromMaybe )
import Data.Parameterized.Context ( pattern Empty, pattern (:>) )
import qualified Data.Text as Text
import qualified Hedgehog as H
import qualified Hedgehog.Gen as HGen
import qualified Hedgehog.Range as HRange
import qualified Prettyprinter as PP
import System.Environment ( lookupEnv )
import qualified Data.Parameterized.Context as Ctx
import Data.Parameterized.Nonce
import Data.Parameterized.Some
import System.IO
import LibBF
import What4.BaseTypes
import What4.Config
import What4.Expr
import What4.Interface
import What4.InterpretedFloatingPoint
import What4.Protocol.Online
import What4.Protocol.SMTLib2
import What4.SatResult
import What4.Solver.Adapter
import qualified What4.Solver.CVC4 as CVC4
import qualified What4.Solver.Z3 as Z3
import qualified What4.Solver.Yices as Yices
import qualified What4.Utils.BVDomain as WUB
import qualified What4.Utils.BVDomain.Arith as WUBA
import qualified What4.Utils.ResolveBounds.BV as WURB
import What4.Utils.StringLiteral
import What4.Utils.Versions (ver, SolverBounds(..), emptySolverBounds)
-- | A boolean expression with its nonce type parameter existentially hidden.
data SomePred = forall t . SomePred (BoolExpr t)
deriving instance Show SomePred

-- | The expression builder used throughout these tests (no user state).
type SimpleExprBuilder t fs = ExprBuilder t EmptyExprBuilderState fs

-- Render text and string literals in checklist failure output.
instance TestShow Text.Text where testShow = show
instance TestShow (StringLiteral Unicode) where testShow = show
-- | When 'True', solver interaction transcripts are also written to files
-- such as "z3.out" / "yices.out" in the working directory (debugging aid).
debugOutputFiles :: Bool
debugOutputFiles = False
-- | Close the handle if one was opened; 'Nothing' is a no-op.
maybeClose :: Maybe Handle -> IO ()
maybeClose = maybe (pure ()) hClose
-- | Partial variant of 'userSymbol' that calls 'error' on an invalid name.
-- Convenient in tests where the symbol text is a known-good literal.
userSymbol' :: String -> SolverSymbol
userSymbol' = either (error . show) id . userSymbol
-- | Run an action against a fresh expression builder configured with the
-- given floating-point interpretation mode.
withSym :: FloatModeRepr fm -> (forall t . SimpleExprBuilder t (Flags fm) -> IO a) -> IO a
withSym floatMode pred_gen = withIONonceGenerator $ \gen ->
  pred_gen =<< newExprBuilder floatMode EmptyExprBuilderState gen
-- | Run an action with a fresh builder (floats interpreted as reals) and a
-- live Yices process; the process is shut down afterwards, and shutdown
-- failures are swallowed so teardown cannot mask the test result.
withYices :: (forall t. SimpleExprBuilder t (Flags FloatReal) -> SolverProcess t Yices.Connection -> IO a) -> IO a
withYices action = withSym FloatRealRepr $ \sym ->
  do extendConfig Yices.yicesOptions (getConfiguration sym)
     bracket
       (do h <- if debugOutputFiles then Just <$> openFile "yices.out" WriteMode else return Nothing
           s <- startSolverProcess Yices.yicesDefaultFeatures h sym
           return (h,s))
       -- best-effort teardown: ignore exceptions, but always close the log handle
       (\(h,s) -> void $ try @SomeException (shutdownSolverProcess s `finally` maybeClose h))
       (\(_,s) -> action sym s)
-- | Run an action with a fresh builder (IEEE floats) inside a one-shot Z3
-- 'Session' (used with 'assume' / 'runCheckSat' rather than the online API).
withZ3 :: (forall t . SimpleExprBuilder t (Flags FloatIEEE) -> Session t Z3.Z3 -> IO ()) -> IO ()
withZ3 action = withIONonceGenerator $ \nonce_gen -> do
  sym <- newExprBuilder FloatIEEERepr EmptyExprBuilderState nonce_gen
  extendConfig Z3.z3Options (getConfiguration sym)
  -- verbose solver output goes straight to stdout
  Z3.withZ3 sym "z3" defaultLogData { logCallbackVerbose = (\_ -> putStrLn) } (action sym)
-- | Run an action with a fresh builder (IEEE floats) and a live online Z3
-- process; the process is shut down afterwards with failures swallowed.
withOnlineZ3
  :: (forall t . SimpleExprBuilder t (Flags FloatIEEE) -> SolverProcess t (Writer Z3.Z3) -> IO a)
  -> IO a
withOnlineZ3 action = withSym FloatIEEERepr $ \sym -> do
  extendConfig Z3.z3Options (getConfiguration sym)
  bracket
    (do h <- if debugOutputFiles then Just <$> openFile "z3.out" WriteMode else return Nothing
        s <- startSolverProcess (defaultFeatures Z3.Z3) h sym
        return (h,s))
    -- best-effort teardown: ignore exceptions, but always close the log handle
    (\(h,s) -> void $ try @SomeException (shutdownSolverProcess s `finally` maybeClose h))
    (\(_,s) -> action sym s)
-- | Run an action with a fresh builder (floats interpreted as reals) and a
-- live online CVC4 process; the process is shut down afterwards with
-- failures swallowed.
withCVC4
  :: (forall t . SimpleExprBuilder t (Flags FloatReal) -> SolverProcess t (Writer CVC4.CVC4) -> IO a)
  -> IO a
withCVC4 action = withSym FloatRealRepr $ \sym -> do
  extendConfig CVC4.cvc4Options (getConfiguration sym)
  bracket
    (do h <- if debugOutputFiles then Just <$> openFile "cvc4.out" WriteMode else return Nothing
        s <- startSolverProcess (defaultFeatures CVC4.CVC4) h sym
        return (h,s))
    -- best-effort teardown: ignore exceptions, but always close the log handle
    (\(h,s) -> void $ try @SomeException (shutdownSolverProcess s `finally` maybeClose h))
    (\(_,s) -> action sym s)
-- | Assume the predicate in the Z3 session, check satisfiability, and hand a
-- ground-evaluation function to the continuation.  Fails the test (via @?=)
-- if the result is unsat or unknown.
withModel
  :: Session t Z3.Z3
  -> BoolExpr t
  -> ((forall tp . What4.Expr.Expr t tp -> IO (GroundValue tp)) -> IO ())
  -> IO ()
withModel s p action = do
  assume (sessionWriter s) p
  runCheckSat s $ \case
    Sat (GroundEvalFn {..}, _) -> action groundEval
    Unsat _ -> "unsat" @?= ("sat" :: String)
    Unknown -> "unknown" @?= ("sat" :: String)
-- | Build @exists y. (x +_RNE 2.0) +_RTZ (x +_RNE 2.0) < y@ over
-- single-precision floats using the interpretation-generic interface.
-- The different float modes build this same formula so their encodings
-- can be compared against direct constructions.
iFloatTestPred
  :: ( forall t
      . (IsInterpretedFloatExprBuilder (SimpleExprBuilder t fs))
     => SimpleExprBuilder t fs
     -> IO SomePred
     )
iFloatTestPred sym = do
  x <- freshFloatConstant sym (userSymbol' "x") SingleFloatRepr
  e0 <- iFloatLitSingle sym 2.0
  e1 <- iFloatAdd @_ @SingleFloat sym RNE x e0
  e2 <- iFloatAdd @_ @SingleFloat sym RTZ e1 e1
  y <- freshFloatBoundVar sym (userSymbol' "y") SingleFloatRepr
  e3 <- iFloatLt @_ @SingleFloat sym e2 $ varExpr sym y
  SomePred <$> existsPred sym y e3
-- Shared representatives for IEEE-754 single (binary32) and double
-- (binary64) precision, and the corresponding base-type representatives.
floatSinglePrecision :: FloatPrecisionRepr Prec32
floatSinglePrecision = knownRepr

floatDoublePrecision :: FloatPrecisionRepr Prec64
floatDoublePrecision = knownRepr

floatSingleType :: BaseTypeRepr (BaseFloatType Prec32)
floatSingleType = BaseFloatRepr floatSinglePrecision

floatDoubleType :: BaseTypeRepr (BaseFloatType Prec64)
floatDoubleType = BaseFloatRepr floatDoublePrecision
-- | In 'FloatRealRepr' mode, the generic float formula from
-- 'iFloatTestPred' must be structurally identical (by 'show') to the same
-- formula built directly over the reals.
testInterpretedFloatReal :: TestTree
testInterpretedFloatReal = testCase "Float interpreted as real" $ do
  actual <- withSym FloatRealRepr iFloatTestPred
  expected <- withSym FloatRealRepr $ \sym -> do
    x <- freshConstant sym (userSymbol' "x") knownRepr
    e0 <- realLit sym 2.0
    e1 <- realAdd sym x e0
    e2 <- realAdd sym e1 e1
    y <- freshBoundVar sym (userSymbol' "y") knownRepr
    e3 <- realLt sym e2 $ varExpr sym y
    SomePred <$> existsPred sym y e3
  show actual @?= show expected
-- | In 'FloatUninterpretedRepr' mode, the generic float formula must match
-- the same formula built by hand: floats become 32-bit bitvectors, rounding
-- modes become integer arguments, and the float operations become
-- uninterpreted functions.
testFloatUninterpreted :: TestTree
testFloatUninterpreted = testCase "Float uninterpreted" $ do
  actual <- withSym FloatUninterpretedRepr iFloatTestPred
  expected <- withSym FloatUninterpretedRepr $ \sym -> do
    let bvtp = BaseBVRepr $ knownNat @32
    -- rounding modes are encoded via their Enum index
    rne_rm <- intLit sym $ toInteger $ fromEnum RNE
    rtz_rm <- intLit sym $ toInteger $ fromEnum RTZ
    x <- freshConstant sym (userSymbol' "x") knownRepr
    -- the literal 2.0 becomes its binary32 bit pattern
    e1 <- bvLit sym knownRepr (BV.mkBV knownRepr (bfToBits (float32 NearEven) (bfFromInt 2)))
    add_fn <- freshTotalUninterpFn
      sym
      (userSymbol' "uninterpreted_float_add")
      (Ctx.empty Ctx.:> BaseIntegerRepr Ctx.:> bvtp Ctx.:> bvtp)
      bvtp
    e2 <- applySymFn sym add_fn $ Ctx.empty Ctx.:> rne_rm Ctx.:> x Ctx.:> e1
    e3 <- applySymFn sym add_fn $ Ctx.empty Ctx.:> rtz_rm Ctx.:> e2 Ctx.:> e2
    y <- freshBoundVar sym (userSymbol' "y") knownRepr
    lt_fn <- freshTotalUninterpFn sym
      (userSymbol' "uninterpreted_float_lt")
      (Ctx.empty Ctx.:> bvtp Ctx.:> bvtp)
      BaseBoolRepr
    e4 <- applySymFn sym lt_fn $ Ctx.empty Ctx.:> e3 Ctx.:> varExpr sym y
    SomePred <$> existsPred sym y e4
  show actual @?= show expected
-- | In 'FloatIEEERepr' mode, the generic float formula must match the same
-- formula built directly with the native IEEE float operations.
testInterpretedFloatIEEE :: TestTree
testInterpretedFloatIEEE = testCase "Float interpreted as IEEE float" $ do
  actual <- withSym FloatIEEERepr iFloatTestPred
  expected <- withSym FloatIEEERepr $ \sym -> do
    x <- freshConstant sym (userSymbol' "x") knownRepr
    e0 <- floatLitRational sym floatSinglePrecision 2.0
    e1 <- floatAdd sym RNE x e0
    e2 <- floatAdd sym RTZ e1 e1
    y <- freshBoundVar sym (userSymbol' "y") knownRepr
    e3 <- floatLt sym e2 $ varExpr sym y
    SomePred <$> existsPred sym y e3
  show actual @?= show expected
-- x <= 0.5 && x >= 1.5
testFloatUnsat0 :: TestTree
testFloatUnsat0 = testCase "Unsat float formula" $ withZ3 $ \sym s -> do
  x <- freshConstant sym (userSymbol' "x") knownRepr
  e0 <- floatLitRational sym floatSinglePrecision 0.5
  e1 <- floatLitRational sym knownRepr 1.5
  p0 <- floatLe sym x e0
  p1 <- floatGe sym x e1
  assume (sessionWriter s) p0
  assume (sessionWriter s) p1
  runCheckSat s $ \res -> isUnsat res @? "unsat"
-- x * x < 0   (a square is never negative, even with NaN: NaN is not negative)
testFloatUnsat1 :: TestTree
testFloatUnsat1 = testCase "Unsat float formula" $ withZ3 $ \sym s -> do
  x <- freshConstant sym (userSymbol' "x") floatSingleType
  e0 <- floatMul sym RNE x x
  p0 <- floatIsNeg sym e0
  assume (sessionWriter s) p0
  runCheckSat s $ \res -> isUnsat res @? "unsat"
-- x + y >= x && x != infinity && y > 0 with rounding to +infinity
-- NOTE(review): the testCase label says "Sat float formula" although the
-- function is named testFloatUnsat2; it checks sat first, then unsat after
-- the extra assumption p0 — presumably the label is a copy-paste leftover.
testFloatUnsat2 :: TestTree
testFloatUnsat2 = testCase "Sat float formula" $ withZ3 $ \sym s -> do
  x <- freshConstant sym (userSymbol' "x") floatSingleType
  y <- freshConstant sym (userSymbol' "y") knownRepr
  p0 <- notPred sym =<< floatIsInf sym x
  p1 <- floatIsPos sym y
  p2 <- notPred sym =<< floatIsZero sym y
  e0 <- floatAdd sym RTP x y
  -- x >= x + y: satisfiable only if x may be infinite
  p3 <- floatGe sym x e0
  p4 <- foldlM (andPred sym) (truePred sym) [p1, p2, p3]
  assume (sessionWriter s) p4
  runCheckSat s $ \res -> isSat res @? "sat"
  -- ruling out infinite x makes it unsat
  assume (sessionWriter s) p0
  runCheckSat s $ \res -> isUnsat res @? "unsat"
-- x == 2.5 && y == +infinity
testFloatSat0 :: TestTree
testFloatSat0 = testCase "Sat float formula" $ withZ3 $ \sym s -> do
  x <- freshConstant sym (userSymbol' "x") knownRepr
  e0 <- floatLitRational sym floatSinglePrecision 2.5
  p0 <- floatEq sym x e0
  y <- freshConstant sym (userSymbol' "y") knownRepr
  e1 <- floatPInf sym floatSinglePrecision
  p1 <- floatEq sym y e1
  p2 <- andPred sym p0 p1
  withModel s p2 $ \groundEval -> do
    (@?=) (bfFromDouble 2.5) =<< groundEval x
    y_val <- groundEval y
    assertBool ("expected y = +infinity, actual y = " ++ show y_val) $
      bfIsInf y_val && bfIsPos y_val
-- x >= 0.5 && x <= 1.5
testFloatSat1 :: TestTree
testFloatSat1 = testCase "Sat float formula" $ withZ3 $ \sym s -> do
  x <- freshConstant sym (userSymbol' "x") knownRepr
  e0 <- floatLitRational sym floatSinglePrecision 0.5
  e1 <- floatLitRational sym knownRepr 1.5
  p0 <- floatGe sym x e0
  p1 <- floatLe sym x e1
  p2 <- andPred sym p0 p1
  withModel s p2 $ \groundEval -> do
    x_val <- groundEval x
    assertBool ("expected x in [0.5, 1.5], actual x = " ++ show x_val) $
      bfFromDouble 0.5 <= x_val && x_val <= bfFromDouble 1.5
-- | Perturbing a float's binary encoding by y may change the float
-- (sat when y is unconstrained), but never when y is forced to zero.
testFloatToBinary :: TestTree
testFloatToBinary = testCase "float to binary" $ withZ3 $ \sym s -> do
  x <- freshConstant sym (userSymbol' "x") knownRepr
  y <- freshConstant sym (userSymbol' "y") knownRepr
  e0 <- floatToBinary sym x
  e1 <- bvAdd sym e0 y
  e2 <- floatFromBinary sym floatSinglePrecision e1
  p0 <- floatNe sym x e2
  assume (sessionWriter s) p0
  runCheckSat s $ \res -> isSat res @? "sat"
  -- y == 0 makes the round trip exact
  p1 <- notPred sym =<< bvIsNonzero sym y
  assume (sessionWriter s) p1
  runCheckSat s $ \res -> isUnsat res @? "unsat"
-- | bv -> float -> bv may fail to round trip (sat) only because NaN has
-- multiple bit patterns; excluding NaN makes the mismatch unsat.
testFloatFromBinary :: TestTree
testFloatFromBinary = testCase "float from binary" $ withZ3 $ \sym s -> do
  x <- freshConstant sym (userSymbol' "x") knownRepr
  e0 <- floatFromBinary sym floatSinglePrecision x
  e1 <- floatToBinary sym e0
  p0 <- bvNe sym x e1
  assume (sessionWriter s) p0
  runCheckSat s $ \res -> isSat res @? "sat"
  p1 <- notPred sym =<< floatIsNaN sym e0
  assume (sessionWriter s) p1
  runCheckSat s $ \res -> isUnsat res @? "unsat"
-- | float -> binary -> float must simplify syntactically back to the
-- original term (no solver involved).
testFloatBinarySimplification :: TestTree
testFloatBinarySimplification = testCase "float binary simplification" $
  withSym FloatIEEERepr $ \sym -> do
    fp <- freshConstant sym (userSymbol' "x") knownRepr
    roundTrip <- floatFromBinary sym floatSinglePrecision =<< floatToBinary sym fp
    roundTrip @?= fp
-- | Same round-trip simplification, via the interpretation-generic
-- interface with floats interpreted as reals.
testRealFloatBinarySimplification :: TestTree
testRealFloatBinarySimplification =
  testCase "real float binary simplification" $
    withSym FloatRealRepr $ \sym -> do
      fp <- freshFloatConstant sym (userSymbol' "x") SingleFloatRepr
      roundTrip <- iFloatFromBinary sym SingleFloatRepr
                     =<< iFloatToBinary sym SingleFloatRepr fp
      roundTrip @?= fp
-- | Widening a single to double and casting back is lossless, so it must
-- simplify syntactically back to the original term.
testFloatCastSimplification :: TestTree
testFloatCastSimplification = testCase "float cast simplification" $
  withSym FloatIEEERepr $ \sym -> do
    fp <- freshConstant sym (userSymbol' "x") floatSingleType
    widened <- floatCast sym floatDoublePrecision RNE fp
    narrowed <- floatCast sym floatSinglePrecision RNE widened
    narrowed @?= fp
-- | Narrowing a double to single is lossy, so the double -> single ->
-- double round trip must NOT simplify back to the original term.
testFloatCastNoSimplification :: TestTree
testFloatCastNoSimplification = testCase "float cast no simplification" $
  withSym FloatIEEERepr $ \sym -> do
    fp <- freshConstant sym (userSymbol' "x") floatDoubleType
    narrowed <- floatCast sym floatSinglePrecision RNE fp
    rewidened <- floatCast sym floatDoublePrecision RNE narrowed
    rewidened /= fp @? ""
-- | Selecting the upper half of (zero ++ x) << 64 must simplify back to x.
testBVSelectShl :: TestTree
testBVSelectShl = testCase "select shl simplification" $
  withSym FloatIEEERepr $ \sym -> do
    x <- freshConstant sym (userSymbol' "x") knownRepr
    zeros <- bvLit sym (knownNat @64) (BV.zero knownNat)
    widened <- bvConcat sym zeros x
    shiftAmt <- bvLit sym knownRepr (BV.mkBV knownNat 64)
    shifted <- bvShl sym widened shiftAmt
    upperHalf <- bvSelect sym (knownNat @64) (knownNat @64) shifted
    upperHalf @?= x
-- | Selecting the lower half of (x ++ zero) >> 64 must simplify back to x.
testBVSelectLshr :: TestTree
testBVSelectLshr = testCase "select lshr simplification" $
  withSym FloatIEEERepr $ \sym -> do
    x <- freshConstant sym (userSymbol' "x") knownRepr
    zeros <- bvLit sym (knownNat @64) (BV.zero knownNat)
    widened <- bvConcat sym x zeros
    shiftAmt <- bvLit sym knownRepr (BV.mkBV knownNat 64)
    shifted <- bvLshr sym widened shiftAmt
    lowerHalf <- bvSelect sym (knownNat @0) (knownNat @64) shifted
    lowerHalf @?= x
-- | (zext x << 8) | zext y must be recognized as the concatenation x ++ y,
-- in either argument order.  Checked against the exact printed form.
testBVOrShlZext :: TestTree
testBVOrShlZext = testCase "bv or-shl-zext -> concat simplification" $
  withSym FloatIEEERepr $ \sym -> do
    x <- freshConstant sym (userSymbol' "x") (BaseBVRepr $ knownNat @8)
    y <- freshConstant sym (userSymbol' "y") (BaseBVRepr $ knownNat @8)
    e0 <- bvZext sym (knownNat @16) x
    e1 <- bvShl sym e0 =<< bvLit sym knownRepr (BV.mkBV knownNat 8)
    e2 <- bvZext sym (knownNat @16) y
    e3 <- bvOrBits sym e1 e2
    show e3 @?= "bvConcat cx@0:bv cy@1:bv"
    -- commuted arguments must normalize to the same term
    e4 <- bvOrBits sym e2 e1
    show e4 @?= show e3
-- | arrayCopy a i b j n: within the copied window the result must agree
-- with the source, i.e. (copy)[i+k] == b[j+k] whenever k < n.  A mismatch
-- is sat while k is unconstrained, unsat once k < n is assumed.
arrayCopyTest :: TestTree
arrayCopyTest = testCase "arrayCopy" $ withZ3 $ \sym s -> do
  a <- freshConstant sym (userSymbol' "a") (BaseArrayRepr (Ctx.singleton (BaseBVRepr $ knownNat @64)) (BaseBVRepr $ knownNat @8))
  b <- freshConstant sym (userSymbol' "b") knownRepr
  i <- freshConstant sym (userSymbol' "i") (BaseBVRepr $ knownNat @64)
  j <- freshConstant sym (userSymbol' "j") knownRepr
  k <- freshConstant sym (userSymbol' "k") knownRepr
  n <- freshConstant sym (userSymbol' "n") knownRepr
  copy_a_i_b_j_n <- arrayCopy sym a i b j n
  add_i_k <- bvAdd sym i k
  copy_a_i_b_j_n_at_add_i_k <- arrayLookup sym copy_a_i_b_j_n (Ctx.singleton add_i_k)
  add_j_k <- bvAdd sym j k
  b_at_add_j_k <- arrayLookup sym b (Ctx.singleton add_j_k)
  -- bound the offsets so the additions cannot wrap around
  assume (sessionWriter s) =<< bvUle sym i =<< bvLit sym knownRepr (BV.mkBV knownNat 1024)
  assume (sessionWriter s) =<< bvUle sym j =<< bvLit sym knownRepr (BV.mkBV knownNat 1024)
  assume (sessionWriter s) =<< bvUle sym n =<< bvLit sym knownRepr (BV.mkBV knownNat 1024)
  assume (sessionWriter s) =<< bvNe sym copy_a_i_b_j_n_at_add_i_k b_at_add_j_k
  runCheckSat s $ \res -> isSat res @? "sat"
  assume (sessionWriter s) =<< bvUlt sym k n
  runCheckSat s $ \res -> isUnsat res @? "unsat"
-- | arraySet a i v n: within the written window the result must equal v,
-- i.e. (set)[i+j] == v whenever j < n.  A mismatch is sat while j is
-- unconstrained, unsat once j < n is assumed.
arraySetTest :: TestTree
arraySetTest = testCase "arraySet" $ withZ3 $ \sym s -> do
  a <- freshConstant sym (userSymbol' "a") knownRepr
  i <- freshConstant sym (userSymbol' "i") (BaseBVRepr $ knownNat @64)
  j <- freshConstant sym (userSymbol' "j") knownRepr
  n <- freshConstant sym (userSymbol' "n") knownRepr
  v <- freshConstant sym (userSymbol' "v") (BaseBVRepr $ knownNat @8)
  set_a_i_v_n <- arraySet sym a i v n
  add_i_j <- bvAdd sym i j
  set_a_i_v_n_at_add_i_j <- arrayLookup sym set_a_i_v_n (Ctx.singleton add_i_j)
  -- bound the offsets so the addition cannot wrap around
  assume (sessionWriter s) =<< bvUle sym i =<< bvLit sym knownRepr (BV.mkBV knownNat 1024)
  assume (sessionWriter s) =<< bvUle sym n =<< bvLit sym knownRepr (BV.mkBV knownNat 1024)
  assume (sessionWriter s) =<< bvNe sym v set_a_i_v_n_at_add_i_j
  runCheckSat s $ \res -> isSat res @? "sat"
  assume (sessionWriter s) =<< bvUlt sym j n
  runCheckSat s $ \res -> isUnsat res @? "unsat"
-- | Copying n cells from a constant-v array must be equivalent to
-- arraySet with the same v and n; the solver must prove the two arrays
-- equal (their disequality is unsat).
arrayCopySetTest :: TestTree
arrayCopySetTest = testCase "arrayCopy/arraySet" $ withZ3 $ \sym s -> do
  a <- freshConstant sym (userSymbol' "a") knownRepr
  i <- freshConstant sym (userSymbol' "i") (BaseBVRepr $ knownNat @64)
  n <- freshConstant sym (userSymbol' "n") knownRepr
  v <- freshConstant sym (userSymbol' "v") (BaseBVRepr $ knownNat @8)
  const_v <- constantArray sym (Ctx.singleton (BaseBVRepr $ knownNat @64)) v
  z <- bvLit sym knownRepr $ BV.mkBV knownNat 0
  copy_a_i_v_n <- arrayCopy sym a i const_v z n
  set_a_i_v_n <- arraySet sym a i v n
  -- bound the offsets so index arithmetic cannot wrap around
  assume (sessionWriter s) =<< bvUle sym i =<< bvLit sym knownRepr (BV.mkBV knownNat 1024)
  assume (sessionWriter s) =<< bvUle sym n =<< bvLit sym knownRepr (BV.mkBV knownNat 1024)
  p <- notPred sym =<< arrayEq sym copy_a_i_v_n set_a_i_v_n
  assume (sessionWriter s) p
  runCheckSat s $ \res -> isUnsat res @? "unsat"
-- | Functional congruence (x == y implies f x == f y) must hold, and must
-- keep holding on a second query — i.e. the uninterpreted function's
-- declaration survives across checkSatisfiable calls in an online session.
testUninterpretedFunctionScope :: TestTree
testUninterpretedFunctionScope = testCase "uninterpreted function scope" $
  withOnlineZ3 $ \sym s -> do
    fn <- freshTotalUninterpFn sym (userSymbol' "f") knownRepr BaseIntegerRepr
    x <- freshConstant sym (userSymbol' "x") BaseIntegerRepr
    y <- freshConstant sym (userSymbol' "y") BaseIntegerRepr
    e0 <- applySymFn sym fn (Ctx.empty Ctx.:> x)
    e1 <- applySymFn sym fn (Ctx.empty Ctx.:> y)
    p0 <- intEq sym x y
    p1 <- notPred sym =<< intEq sym e0 e1
    p2 <- andPred sym p0 p1
    res1 <- checkSatisfiable s "test" p2
    isUnsat res1 @? "unsat"
    -- same query again: the function declaration must not have been dropped
    res2 <- checkSatisfiable s "test" p2
    isUnsat res2 @? "unsat"
-- | Stress the encoding of deeply nested boolean ites inside bitvector
-- terms: every one of 32 bits is a 3-level ite over fresh booleans; the
-- resulting formula must still be sat.
testBVIteNesting :: TestTree
testBVIteNesting = testCase "nested bitvector ites" $ withZ3 $ \sym s -> do
  bv0 <- bvLit sym (knownNat @32) (BV.zero knownNat)
  let setSymBit bv idx = do
        c1 <- freshConstant sym (userSymbol' ("c1_" ++ show idx)) knownRepr
        c2 <- freshConstant sym (userSymbol' ("c2_" ++ show idx)) knownRepr
        c3 <- freshConstant sym (userSymbol' ("c3_" ++ show idx)) knownRepr
        tt1 <- freshConstant sym (userSymbol' ("tt1_" ++ show idx)) knownRepr
        tt2 <- freshConstant sym (userSymbol' ("tt2_" ++ show idx)) knownRepr
        tt3 <- freshConstant sym (userSymbol' ("tt3_" ++ show idx)) knownRepr
        tt4 <- freshConstant sym (userSymbol' ("tt4_" ++ show idx)) knownRepr
        ite1 <- itePred sym c1 tt1 tt2
        ite2 <- itePred sym c2 tt3 tt4
        ite3 <- itePred sym c3 ite1 ite2
        bvSet sym bv idx ite3
  bv1 <- foldlM setSymBit bv0 [0..31]
  p <- testBitBV sym 0 bv1
  assume (sessionWriter s) p
  runCheckSat s $ \res -> isSat res @? "sat"
-- | Constant rotations on a 32-bit vector: rotating left by 8+16+8 = 32
-- is the identity, as is rotating left 8+16 then right 24; rotating left
-- 24 then right 28 is a net rotation and must differ for some value.
testRotate1 :: TestTree
testRotate1 = testCase "rotate test1" $ withOnlineZ3 $ \sym s -> do
  bv <- freshConstant sym (userSymbol' "bv") (BaseBVRepr (knownNat @32))
  bv1 <- bvRol sym bv =<< bvLit sym knownNat (BV.mkBV knownNat 8)
  bv2 <- bvRol sym bv1 =<< bvLit sym knownNat (BV.mkBV knownNat 16)
  bv3 <- bvRol sym bv2 =<< bvLit sym knownNat (BV.mkBV knownNat 8)
  bv4 <- bvRor sym bv2 =<< bvLit sym knownNat (BV.mkBV knownNat 24)
  bv5 <- bvRor sym bv2 =<< bvLit sym knownNat (BV.mkBV knownNat 28)
  res <- checkSatisfiable s "test" =<< notPred sym =<< bvEq sym bv bv3
  isUnsat res @? "unsat1"
  res1 <- checkSatisfiable s "test" =<< notPred sym =<< bvEq sym bv bv4
  isUnsat res1 @? "unsat2"
  res2 <- checkSatisfiable s "test" =<< notPred sym =<< bvEq sym bv bv5
  isSat res2 @? "sat"
-- | Rotating by a symbolic amount and back must cancel — and on a
-- power-of-two width this is even a syntactic simplification, so bv2 is
-- expected to be the very same term as bv.
testRotate2 :: TestTree
testRotate2 = testCase "rotate test2" $ withOnlineZ3 $ \sym s -> do
  bv <- freshConstant sym (userSymbol' "bv") (BaseBVRepr (knownNat @32))
  amt <- freshConstant sym (userSymbol' "amt") (BaseBVRepr (knownNat @32))
  bv1 <- bvRol sym bv amt
  bv2 <- bvRor sym bv1 amt
  bv3 <- bvRol sym bv =<< bvLit sym knownNat (BV.mkBV knownNat 20)
  bv == bv2 @? "syntactic equality"
  res1 <- checkSatisfiable s "test" =<< notPred sym =<< bvEq sym bv bv2
  isUnsat res1 @? "unsat"
  res2 <- checkSatisfiable s "test" =<< notPred sym =<< bvEq sym bv bv3
  isSat res2 @? "sat"
-- | Same rotate-then-unrotate cancellation on a 7-bit vector.
testRotate3 :: TestTree
testRotate3 = testCase "rotate test3" $ withOnlineZ3 $ \sym s -> do
  bv <- freshConstant sym (userSymbol' "bv") (BaseBVRepr (knownNat @7))
  amt <- freshConstant sym (userSymbol' "amt") (BaseBVRepr (knownNat @7))
  bv1 <- bvRol sym bv amt
  bv2 <- bvRor sym bv1 amt
  bv3 <- bvRol sym bv =<< bvLit sym knownNat (BV.mkBV knownNat 3)
  -- Note, because 7 is not a power of two, this simplification doesn't quite
  -- apply syntactically, so only the semantic (solver-level) equality is
  -- checked here — no `bv == bv2` assertion as in testRotate2.
  res1 <- checkSatisfiable s "test" =<< notPred sym =<< bvEq sym bv bv2
  isUnsat res1 @? "unsat"
  res2 <- checkSatisfiable s "test" =<< notPred sym =<< bvEq sym bv bv3
  isSat res2 @? "sat"
-- | Symbol names containing a prime (') must survive the round trip to Z3.
testSymbolPrimeCharZ3 :: TestTree
testSymbolPrimeCharZ3 = testCase "z3 symbol prime (') char" $
  withZ3 $ \sym s -> do
    lhs <- freshConstant sym (userSymbol' "x'") knownRepr
    rhs <- freshConstant sym (userSymbol' "y'") knownRepr
    assume (sessionWriter s) =<< intLt sym lhs rhs
    runCheckSat s $ \res -> isSat res @? "sat"
-- | Assert that the given action throws (any) exception; fails the test
-- if it returns normally.
expectFailure :: IO a -> IO ()
expectFailure f =
  try @SomeException f
    >>= either (\_ -> return ()) (\_ -> assertFailure "expectFailure")
-- | Bound variables may only be treated as free within an
-- 'inNewFrameWithVars' frame that declares them; outside such a frame
-- (or for undeclared variables inside one) the query must throw.
testBoundVarAsFree :: TestTree
testBoundVarAsFree = testCase "boundvarasfree" $ withOnlineZ3 $ \sym s -> do
  x <- freshBoundVar sym (userSymbol' "x") BaseBoolRepr
  y <- freshBoundVar sym (userSymbol' "y") BaseBoolRepr
  pz <- freshConstant sym (userSymbol' "pz") BaseBoolRepr
  let px = varExpr sym x
  let py = varExpr sym y
  -- bound vars outside any frame: must fail
  expectFailure $ checkSatisfiable s "test" px
  expectFailure $ checkSatisfiable s "test" py
  -- ordinary free constants always work
  checkSatisfiable s "test" pz >>= \res -> isSat res @? "sat"
  inNewFrameWithVars s [Some x] $ do
    -- x is declared in this frame, y is not
    checkSatisfiable s "test" px >>= \res -> isSat res @? "sat"
    expectFailure $ checkSatisfiable s "test" py
  -- after the frame is popped, x is bound-only again
  expectFailure $ checkSatisfiable s "test" px
  expectFailure $ checkSatisfiable s "test" py
-- | Check the five real-to-integer rounding operations against their
-- defining properties: each result is within the right distance of the
-- input, and each breaks ties / chooses direction correctly.
roundingTest ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
roundingTest sym solver =
  do r <- freshConstant sym (userSymbol' "r") BaseRealRepr

     -- |r - round(r)| must satisfy errOp for every r (negation is unsat)
     let runErrTest nm op errOp =
           do diff <- realAbs sym =<< realSub sym r =<< integerToReal sym =<< op sym r
              p' <- notPred sym =<< errOp diff
              res <- checkSatisfiable solver nm p'
              isUnsat res @? nm

     runErrTest "floor" realFloor (\diff -> realLt sym diff =<< realLit sym 1)
     runErrTest "ceiling" realCeil (\diff -> realLt sym diff =<< realLit sym 1)
     runErrTest "trunc" realTrunc (\diff -> realLt sym diff =<< realLit sym 1)
     runErrTest "rna" realRound (\diff -> realLe sym diff =<< realLit sym 0.5)
     runErrTest "rne" realRoundEven (\diff -> realLe sym diff =<< realLit sym 0.5)

     -- floor rounds down: floor(r) <= r
     do ri <- integerToReal sym =<< realFloor sym r
        p <- realLe sym ri r
        res <- checkSatisfiable solver "floorTest" =<< notPred sym p
        isUnsat res @? "floorTest"

     -- ceiling rounds up: r <= ceil(r)
     do ri <- integerToReal sym =<< realCeil sym r
        p <- realLe sym r ri
        res <- checkSatisfiable solver "ceilingTest" =<< notPred sym p
        isUnsat res @? "ceilingTest"

     -- truncation rounds toward zero: |trunc(r)| <= |r|
     do ri <- integerToReal sym =<< realTrunc sym r
        rabs <- realAbs sym r
        riabs <- realAbs sym ri
        p <- realLe sym riabs rabs
        res <- checkSatisfiable solver "truncateTest" =<< notPred sym p
        isUnsat res @? "truncateTest"

     -- round-to-nearest-away: on a tie, the result moves away from zero
     do ri <- integerToReal sym =<< realRound sym r
        diff <- realAbs sym =<< realSub sym r ri
        ptie <- realEq sym diff =<< realLit sym 0.5
        rabs <- realAbs sym r
        iabs <- realAbs sym ri
        plarge <- realGt sym iabs rabs
        res <- checkSatisfiable solver "rnaTest" =<<
          andPred sym ptie =<< notPred sym plarge
        isUnsat res @? "rnaTest"

     -- round-to-nearest-even: on a tie, the result is even
     do i <- realRoundEven sym r
        ri <- integerToReal sym i
        diff <- realAbs sym =<< realSub sym r ri
        ptie <- realEq sym diff =<< realLit sym 0.5
        ieven <- intDivisible sym i 2
        res <- checkSatisfiable solver "rneTest" =<<
          andPred sym ptie =<< notPred sym ieven
        isUnsat res @? "rneTest"
-- | The empty struct type has exactly one value, so a fresh constant of
-- that type must equal the literal empty struct: f u == f () is valid
-- (its negation unsat) by congruence.
zeroTupleTest ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
zeroTupleTest sym solver =
  do u <- freshConstant sym (userSymbol' "u") (BaseStructRepr Ctx.Empty)
     s <- mkStruct sym Ctx.Empty
     f <- freshTotalUninterpFn sym (userSymbol' "f")
             (Ctx.Empty Ctx.:> BaseStructRepr Ctx.Empty)
             BaseBoolRepr
     fu <- applySymFn sym f (Ctx.Empty Ctx.:> u)
     fs <- applySymFn sym f (Ctx.Empty Ctx.:> s)
     p <- eqPred sym fu fs
     res1 <- checkSatisfiable solver "test" p
     isSat res1 @? "sat"
     res2 <- checkSatisfiable solver "test" =<< notPred sym p
     isUnsat res2 @? "unsat"
-- | A one-field struct type has two values, so f u == f (False) is
-- contingent: both it and its negation must be satisfiable.
oneTupleTest ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
oneTupleTest sym solver =
  do u <- freshConstant sym (userSymbol' "u") (BaseStructRepr (Ctx.Empty Ctx.:> BaseBoolRepr))
     s <- mkStruct sym (Ctx.Empty Ctx.:> backendPred sym False)
     f <- freshTotalUninterpFn sym (userSymbol' "f")
             (Ctx.Empty Ctx.:> BaseStructRepr (Ctx.Empty Ctx.:> BaseBoolRepr))
             BaseBoolRepr
     fu <- applySymFn sym f (Ctx.Empty Ctx.:> u)
     fs <- applySymFn sym f (Ctx.Empty Ctx.:> s)
     p <- eqPred sym fu fs
     res1 <- checkSatisfiable solver "test" p
     isSat res1 @? "sat"
     res2 <- checkSatisfiable solver "test" =<< notPred sym p
     isSat res2 @? "neg sat"
-- | Structural equality on a two-field (Bool, Real) struct is contingent
-- for a fresh constant: both it and its negation must be satisfiable.
pairTest ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
pairTest sym solver =
  do u <- freshConstant sym (userSymbol' "u") (BaseStructRepr (Ctx.Empty Ctx.:> BaseBoolRepr Ctx.:> BaseRealRepr))
     r <- realLit sym 42.0
     s <- mkStruct sym (Ctx.Empty Ctx.:> backendPred sym True Ctx.:> r )
     p <- structEq sym u s
     res1 <- checkSatisfiable solver "test" p
     isSat res1 @? "sat"
     res2 <- checkSatisfiable solver "test" =<< notPred sym p
     isSat res2 @? "neg sat"
-- | Concatenate literal strings around a free string variable and pin the
-- total length: @s' = bsx ++ y ++ bsz ++ bsw@ with |s'| == 25 forces
-- |y| == 4; the model must keep the literal prefix/suffix.  Length 20 is
-- infeasible (it would need |y| == -1).
--
-- NOTE(review): the three @let@ bindings were corrupted in this copy of the
-- file (only their trailing length comments survived).  They are
-- reconstructed here from those comments (lengths 9/7/5) using the same
-- literals stringTest2 uses, which match those lengths exactly — confirm
-- against upstream what4.
stringTest1 ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
stringTest1 sym solver = withChecklist "string1" $
  do let bsx = "asdf\nasdf"     -- length 9
     let bsz = "qwe\x1c\&rty"   -- length 7
     let bsw = "QQ\"QQ"         -- length 5
     x <- stringLit sym (UnicodeLiteral bsx)
     y <- freshConstant sym (userSymbol' "str") (BaseStringRepr UnicodeRepr)
     z <- stringLit sym (UnicodeLiteral bsz)
     w <- stringLit sym (UnicodeLiteral bsw)
     s <- stringConcat sym x =<< stringConcat sym y z
     s' <- stringConcat sym s w
     l <- stringLength sym s'
     n <- intLit sym 25
     p <- intEq sym n l
     checkSatisfiableWithModel solver "test" p $ \case
       Sat fn ->
         do UnicodeLiteral slit <- groundEval fn s'
            llit <- groundEval fn n
            slit `checkValues`
              (Empty
               :> Val "model string length" (fromIntegral . Text.length) llit
               :> Got "expected prefix" (Text.isPrefixOf bsx)
               :> Got "expected suffix" (Text.isSuffixOf (bsz <> bsw))
              )
       _ -> fail "expected satisfiable model"
     -- 9 + |y| + 7 + 5 == 20 has no solution
     p2 <- intEq sym l =<< intLit sym 20
     checkSatisfiableWithModel solver "test" p2 $ \case
       Unsat () -> return ()
       _ -> fail "expected unsatifiable model"
-- | Solve @bsx ++ a == b ++ ite(q, bsz, bsw)@ with |ite(q,bsz,bsw)| < 7.
-- Since |bsz| == 7 and |bsw| == 5, the length bound forces q == False;
-- also exercises literals with newline, control and quote characters.
stringTest2 ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
stringTest2 sym solver = withChecklist "string2" $
  do let bsx = "asdf\nasdf"
     let bsz = "qwe\x1c\&rty"
     let bsw = "QQ\"QQ"
     q <- freshConstant sym (userSymbol' "q") BaseBoolRepr
     x <- stringLit sym (UnicodeLiteral bsx)
     z <- stringLit sym (UnicodeLiteral bsz)
     w <- stringLit sym (UnicodeLiteral bsw)
     a <- freshConstant sym (userSymbol' "stra") (BaseStringRepr UnicodeRepr)
     b <- freshConstant sym (userSymbol' "strb") (BaseStringRepr UnicodeRepr)
     ax <- stringConcat sym x a
     zw <- stringIte sym q z w
     bzw <- stringConcat sym b zw
     l <- stringLength sym zw
     n <- intLit sym 7
     p1 <- stringEq sym ax bzw
     p2 <- intLt sym l n
     p <- andPred sym p1 p2
     checkSatisfiableWithModel solver "test" p $ \case
       Sat fn ->
         do axlit <- groundEval fn ax
            bzwlit <- groundEval fn bzw
            qlit <- groundEval fn q
            TC.check "correct ite" (False ==) qlit
            TC.check "equal strings" (axlit ==) bzwlit
       _ -> fail "expected satisfable model"
-- | Exercise prefix/suffix/contains: for the 13-character literal bsz,
-- a must be its length-9 prefix, b its length-9 suffix, and c a length-6
-- substring that avoids both 'Q' and 'q' — which pins c to characters
-- 1..6 of the literal.
stringTest3 ::
  (OnlineSolver solver) =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
stringTest3 sym solver = withChecklist "string3" $
  do let bsz = "qwe\x1c\&rtyQQ\"QQ"
     z <- stringLit sym (UnicodeLiteral bsz)
     a <- freshConstant sym (userSymbol' "stra") (BaseStringRepr UnicodeRepr)
     b <- freshConstant sym (userSymbol' "strb") (BaseStringRepr UnicodeRepr)
     c <- freshConstant sym (userSymbol' "strc") (BaseStringRepr UnicodeRepr)
     pfx <- stringIsPrefixOf sym a z
     sfx <- stringIsSuffixOf sym b z
     cnt1 <- stringContains sym z c
     cnt2 <- notPred sym =<< stringContains sym c =<< stringLit sym (UnicodeLiteral "Q")
     cnt3 <- notPred sym =<< stringContains sym c =<< stringLit sym (UnicodeLiteral "q")
     cnt <- andPred sym cnt1 =<< andPred sym cnt2 cnt3
     lena <- stringLength sym a
     lenb <- stringLength sym b
     lenc <- stringLength sym c
     n <- intLit sym 9
     rnga <- intEq sym lena n
     rngb <- intEq sym lenb n
     rngc <- intEq sym lenc =<< intLit sym 6
     rng <- andPred sym rnga =<< andPred sym rngb rngc
     p <- andPred sym pfx =<<
          andPred sym sfx =<<
          andPred sym cnt rng
     checkSatisfiableWithModel solver "test" p $ \case
       Sat fn ->
         do alit <- fromUnicodeLit <$> groundEval fn a
            blit <- fromUnicodeLit <$> groundEval fn b
            clit <- fromUnicodeLit <$> groundEval fn c
            bsz `checkValues`
              (Empty
               :> Val "correct prefix" (Text.take 9) alit
               :> Val "correct suffix" (Text.reverse . Text.take 9 . Text.reverse) blit
               :> Val "correct middle" (Text.take 6 . Text.drop 1) clit
              )
       _ -> fail "expected satisfable model"
-- | Exercise stringIndexOf with a starting offset of 5: when the index is
-- non-negative it marks an occurrence of "str" at or after position 5;
-- when no occurrence exists (and a is long enough to be non-trivial),
-- the index must be -1.
stringTest4 ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
stringTest4 sym solver = withChecklist "string4" $
  do let bsx = "str"
     x <- stringLit sym (UnicodeLiteral bsx)
     a <- freshConstant sym (userSymbol' "stra") (BaseStringRepr UnicodeRepr)
     i <- stringIndexOf sym a x =<< intLit sym 5
     zero <- intLit sym 0
     p <- intLe sym zero i
     checkSatisfiableWithModel solver "test" p $ \case
       Sat fn ->
         do alit <- fromUnicodeLit <$> groundEval fn a
            ilit <- groundEval fn i
            TC.check "correct index" (Text.isPrefixOf bsx) (Text.drop (fromIntegral ilit) alit)
            TC.check "index large enough" (>= 5) ilit
       _ -> fail "expected satisfable model"
     -- no occurrence at/after 5, but |a| >= 10: index must be -1
     np <- notPred sym p
     lena <- stringLength sym a
     fv <- intLit sym 10
     plen <- intLe sym fv lena
     q <- andPred sym np plen
     checkSatisfiableWithModel solver "test" q $ \case
       Sat fn ->
         do alit <- fromUnicodeLit <$> groundEval fn a
            ilit <- groundEval fn i
            TC.check "substring not found" (not . Text.isInfixOf bsx) (Text.drop 5 alit)
            TC.check "expected neg one index" (== (-1)) ilit
       _ -> fail "expected satisfable model"
-- | Exercise stringSubstring: require substring(a, off, len) == "qwerty"
-- with off >= 5 and |a| >= 20, then confirm the model's slice really is
-- "qwerty" by recomputing it with Data.Text.
stringTest5 ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
stringTest5 sym solver = withChecklist "string5" $
  do a <- freshConstant sym (userSymbol' "a") (BaseStringRepr UnicodeRepr)
     off <- freshConstant sym (userSymbol' "off") BaseIntegerRepr
     len <- freshConstant sym (userSymbol' "len") BaseIntegerRepr
     n5 <- intLit sym 5
     n20 <- intLit sym 20
     let qlit = "qwerty"
     sub <- stringSubstring sym a off len
     p1 <- stringEq sym sub =<< stringLit sym (UnicodeLiteral qlit)
     p2 <- intLe sym n5 off
     p3 <- intLe sym n20 =<< stringLength sym a
     p <- andPred sym p1 =<< andPred sym p2 p3
     checkSatisfiableWithModel solver "test" p $ \case
       Sat fn ->
         do alit <- fromUnicodeLit <$> groundEval fn a
            offlit <- groundEval fn off
            lenlit <- groundEval fn len
            let q = Text.take (fromIntegral lenlit) (Text.drop (fromIntegral offlit) alit)
            TC.check "correct substring" (qlit ==) q
       _ -> fail "expected satisfable model"
-- Check that a backslash can round trip through the solver; the backslash
-- is involved in the codepoint escape sequences '\u{abcd}', so it needs
-- careful escaping in the SMT-LIB string encoding.
stringTest6 ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
stringTest6 sym solver = withChecklist "string6" $
  do let conn = solverConn solver
     x <- freshConstant sym (safeSymbol "x") (BaseStringRepr UnicodeRepr)
     l <- stringLength sym x
     intLit sym 1 >>= isEq sym l >>= assume conn
     -- force x to be exactly a single backslash
     stringLit sym (UnicodeLiteral (Text.pack "\\")) >>= isEq sym x >>= assume conn
     checkAndGetModel solver "test" >>= \case
       Sat ge -> do
         v <- groundEval ge x
         TC.check "correct string" (v ==) (UnicodeLiteral (Text.pack "\\"))
       _ -> fail "unsatisfiable"
-- This test asks the solver to produce a sequence of 200 unique characters
-- (each model excludes all previously seen one-character strings), which
-- exercises decoding of many different escape forms in solver output.
stringTest7 ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
-- Fixed checklist label: it previously said "string6", which mislabeled
-- any failure from this test as coming from stringTest6.
stringTest7 sym solver = withChecklist "string7" $
  do chars <- getChars sym solver 200
     TC.check "correct number of characters" (length chars ==) 200
-- | Enumerate up to @bound@ distinct single-character strings from the
-- solver: after each model, block that exact string and ask again.
-- Returns fewer characters if the solver runs out of models (the formula
-- becomes unsat) before the bound is reached.
getChars ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  Integer ->
  IO [Char]
getChars sym solver bound = do
  let conn = solverConn solver
  -- Create string var and constrain its length to 1
  x <- freshConstant sym (safeSymbol "x") (BaseStringRepr UnicodeRepr)
  l <- stringLength sym x
  intLit sym 1 >>= isEq sym l >>= assume conn
  let getModelsRecursive n
        | n >= bound = return ""
        | otherwise =
            checkAndGetModel solver "test" >>= \case
              Sat ge -> do
                v <- groundEval ge x
                -- exclude this model's value before asking for the next one
                stringLit sym v >>= isEq sym x >>= notPred sym >>= assume conn
                let c = Text.head $ fromUnicodeLit v
                cs <- getModelsRecursive (n+1)
                return (c:cs)
              _ -> return []
  cs <- getModelsRecursive 0
  return cs
-- | A two-index (Bool, Bool) array: updating one cell leaves the other
-- cells unconstrained, so looking up a different cell is still satisfiable.
multidimArrayTest ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
multidimArrayTest sym solver =
  do f <- freshConstant sym (userSymbol' "a") $
            BaseArrayRepr (Ctx.empty Ctx.:> BaseBoolRepr Ctx.:> BaseBoolRepr) BaseBoolRepr
     f' <- arrayUpdate sym f (Ctx.empty Ctx.:> falsePred sym Ctx.:> falsePred sym) (falsePred sym)
     p <- arrayLookup sym f' (Ctx.empty Ctx.:> truePred sym Ctx.:> truePred sym)
     checkSatisfiable solver "test" p >>= \case
       Sat _ -> return ()
       _ -> fail "expected satisfiable model"
-- | @forall y. x || y@ is equivalent to x, so any model of the formula
-- must set x = True, and any model of its negation must set x = False.
forallTest ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
forallTest sym solver =
  do x <- freshConstant sym (userSymbol' "x") BaseBoolRepr
     y <- freshBoundVar sym (userSymbol' "y") BaseBoolRepr
     p <- forallPred sym y =<< orPred sym x (varExpr sym y)
     np <- notPred sym p
     checkSatisfiableWithModel solver "test" p $ \case
       Sat fn ->
         do b <- groundEval fn x
            (b == True) @? "true result"
       _ -> fail "expected satisfible model"
     checkSatisfiableWithModel solver "test" np $ \case
       Sat fn ->
         do b <- groundEval fn x
            (b == False) @? "false result"
       _ -> fail "expected satisfible model"
-- | An existential over a struct-typed variable with a trivially true
-- body must be satisfiable (exercises struct sorts under quantifiers).
binderTupleTest1 ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
binderTupleTest1 sym solver =
  do var <- freshBoundVar sym (safeSymbol "v")
              (BaseStructRepr (Ctx.Empty Ctx.:> BaseBoolRepr))
     p0 <- existsPred sym var (truePred sym)
     res <- checkSatisfiable solver "test" p0
     isSat res @? "sat"
-- | @forall x. x == x@ over a struct-typed variable is valid, so its
-- negation must be unsat (exercises struct equality under quantifiers).
binderTupleTest2 ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
binderTupleTest2 sym solver =
  do x <- freshBoundVar sym (userSymbol' "x")
            (BaseStructRepr (Ctx.Empty Ctx.:> BaseIntegerRepr Ctx.:> BaseBoolRepr))
     p <- forallPred sym x =<< structEq sym (varExpr sym x) (varExpr sym x)
     np <- notPred sym p
     checkSatisfiableWithModel solver "test" np $ \case
       Unsat _ -> return ()
       _ -> fail "expected UNSAT"
-- | A regression test for #182: looking up the same array index twice
-- must produce the same (ground-evaluable) term, so the second lookup
-- evaluates to the value the model assigned to the first.
issue182Test ::
  OnlineSolver solver =>
  SimpleExprBuilder t fs ->
  SolverProcess t solver ->
  IO ()
issue182Test sym solver = do
  let w = knownNat @64
  arr <- freshConstant sym (safeSymbol "arr")
           (BaseArrayRepr (Ctx.Empty Ctx.:> BaseIntegerRepr)
                          (BaseBVRepr w))
  idxInt <- intLit sym 0
  let idx = Ctx.Empty Ctx.:> idxInt
  -- the lookup is deliberately an action so it can be re-run below
  let arrLookup = arrayLookup sym arr idx
  elt <- arrLookup
  bvZero <- bvLit sym w (BV.zero w)
  p <- bvEq sym elt bvZero
  checkSatisfiableWithModel solver "test" p $ \case
    Sat fn ->
      do elt' <- arrLookup
         eltEval <- groundEval fn elt'
         (eltEval == BV.zero w) @? "non-zero result"
    _ -> fail "expected satisfible model"
-- | Exercise the solver metadata queries against an online Z3 process:
-- the version query must return, and the name query must answer "Z3".
testSolverInfo :: TestTree
testSolverInfo = testGroup "solver info queries"
  [ testCase "test get solver version" $ withOnlineZ3 $ \_ proc ->
      let conn = solverConn proc
      in do getVersion conn
            _ <- versionResult conn
            pure ()
  , testCase "test get solver name" $ withOnlineZ3 $ \_ proc ->
      let conn = solverConn proc
      in do getName conn
            nm <- nameResult conn
            nm @?= "Z3"
  ]
-- | Sanity-check the version-bound machinery: every Z3 satisfies a
-- lower bound of version 0, so the check must succeed.
testSolverVersion :: TestTree
testSolverVersion = testCase "test solver version bounds" $
  withOnlineZ3 $ \_ proc -> do
    -- Lower bound only; $(ver "0") is a compile-time parsed version.
    let bnd = emptySolverBounds{ lower = Just $(ver "0") }
    checkSolverVersion' (Map.singleton "Z3" bnd) proc >> return ()
-- | The bitvector abstract domain must discharge this inequality
-- without a solver: for an 8-bit @x@ zero-extended to 16 bits,
-- @(-x) - 1@ is always unsigned-greater-than 256, so the builder
-- should fold the comparison to the literal true predicate.
testBVDomainArithScale :: TestTree
testBVDomainArithScale = testCase "bv domain arith scale" $
  withSym FloatIEEERepr $ \sym -> do
    x <- freshConstant sym (userSymbol' "x") (BaseBVRepr $ knownNat @8)
    wide <- bvZext sym (knownNat @16) x
    negated <- bvNeg sym wide
    one <- bvLit sym knownRepr (BV.mkBV knownNat 1)
    decremented <- bvSub sym negated one
    bound <- bvLit sym knownRepr (BV.mkBV knownNat 256)
    cmp <- bvUgt sym decremented bound
    cmp @?= truePred sym
-- | Byte-swapping the two-byte value 1 moves the low byte to the high
-- byte, yielding 256.
testBVSwap :: TestTree
testBVSwap = testCase "test bvSwap" $
  withSym FloatIEEERepr $ \sym -> do
    input <- bvLit sym knownRepr (BV.mkBV knownNat 1)
    swapped <- bvSwap sym (knownNat @2) input
    expected <- bvLit sym knownRepr (BV.mkBV knownNat 256)
    swapped @?= expected
-- | Reversing the bits of the 8-bit value 1 puts the set bit in the
-- most-significant position: 0b00000001 becomes 0b10000000 = 128.
testBVBitreverse :: TestTree
testBVBitreverse = testCase "test bvBitreverse" $
  withSym FloatIEEERepr $ \sym -> do
    input <- bvLit sym (knownNat @8) (BV.mkBV knownNat 1)
    reversed <- bvBitreverse sym input
    expected <- bvLit sym knownRepr (BV.mkBV knownNat 128)
    reversed @?= expected
-- | 'unsafeSetAbstractValue' on a fresh constant: after pinning the
-- unsigned range of an 8-bit variable to exactly [2,2], the builder
-- should treat it as the literal 2, so adding 1 constant-folds to 3.
testUnsafeSetAbstractValue1 :: TestTree
testUnsafeSetAbstractValue1 = testCase "test unsafeSetAbstractValue1" $
  withSym FloatIEEERepr $ \sym -> do
    let w = knownNat @8
    e1A <- freshConstant sym (userSymbol' "x1") (BaseBVRepr w)
    -- Overwrite the abstract domain: claim x1 is always 2.
    let e1A' = unsafeSetAbstractValue (WUB.BVDArith (WUBA.range w 2 2)) e1A
    unsignedBVBounds e1A' @?= Just (2, 2)
    e1B <- bvAdd sym e1A' =<< bvLit sym w (BV.one w)
    -- asBV succeeds only if the sum folded to a concrete bitvector.
    case asBV e1B of
      Just bv -> bv @?= BV.mkBV w 3
      Nothing -> assertFailure $ unlines
        [ "unsafeSetAbstractValue doesn't work as expected for a"
        , "simple symbolic expression"
        ]
-- | 'unsafeSetAbstractValue' on a compound term: pin the sum of two
-- fresh 8-bit variables to the range [2,2] (annotating the term so the
-- override attaches to a distinct annotated expression), then check
-- that adding 1 constant-folds to 3.
testUnsafeSetAbstractValue2 :: TestTree
testUnsafeSetAbstractValue2 = testCase "test unsafeSetAbstractValue2" $
  withSym FloatIEEERepr $ \sym -> do
    let w = knownNat @8
    e2A <- freshConstant sym (userSymbol' "x2A") (BaseBVRepr w)
    e2B <- freshConstant sym (userSymbol' "x2B") (BaseBVRepr w)
    e2C <- bvAdd sym e2A e2B
    -- annotateTerm wraps the expression; the overridden abstract value
    -- lives on the annotated copy e2C'.
    (_, e2C') <- annotateTerm sym $ unsafeSetAbstractValue (WUB.BVDArith (WUBA.range w 2 2)) e2C
    unsignedBVBounds e2C' @?= Just (2, 2)
    e2D <- bvAdd sym e2C' =<< bvLit sym w (BV.one w)
    case asBV e2D of
      Just bv -> bv @?= BV.mkBV w 3
      Nothing -> assertFailure $ unlines
        [ "unsafeSetAbstractValue doesn't work as expected for a"
        , "compound symbolic expression"
        ]
-- | Property test for 'WURB.resolveSymBV' under the given search
-- strategy: constrain an 8-bit symbolic value to a random unsigned
-- interval [lb,ub] via Yices, then check the resolved result reports
-- exactly that interval (a concrete value iff lb == ub).
testResolveSymBV :: WURB.SearchStrategy -> TestTree
testResolveSymBV searchStrat =
  testProperty ("test resolveSymBV (" ++ show (PP.pretty searchStrat) ++ ")") $
    H.property $ do
      let w = knownNat @8
      -- Random interval with lb <= ub guaranteed by the generators.
      lb <- H.forAll $ HGen.word8 $ HRange.constant 0 maxBound
      ub <- H.forAll $ HGen.word8 $ HRange.constant lb maxBound
      rbv <- liftIO $ withYices $ \sym proc -> do
        bv <- freshConstant sym (safeSymbol "bv") knownRepr
        p1 <- bvUge sym bv =<< bvLit sym w (BV.mkBV w (toInteger lb))
        p2 <- bvUle sym bv =<< bvLit sym w (BV.mkBV w (toInteger ub))
        p3 <- andPred sym p1 p2
        assume (solverConn proc) p3
        WURB.resolveSymBV sym searchStrat w proc bv
      case rbv of
        -- Interval collapsed to a single value: both endpoints match it.
        WURB.BVConcrete bv -> do
          let bv' = fromInteger $ BV.asUnsigned bv
          lb H.=== bv'
          ub H.=== bv'
        -- Proper interval: resolved bounds must be exact.
        WURB.BVSymbolic bounds -> do
          let (lb', ub') = WUBA.ubounds bounds
          lb H.=== fromInteger lb'
          ub H.=== fromInteger ub'
-- | Test-suite entry point.  Discovers which solvers are installed,
-- reports their versions, and assembles the solver-independent tests
-- plus per-solver groups.  Groups for absent solvers are ignored, and
-- cases with known version incompatibilities are marked expected-fail.
main :: IO ()
main = do
  -- CI_TEST_LEVEL influences how version reporting is performed.
  testLevel <- TestLevel . fromMaybe "0" <$> lookupEnv "CI_TEST_LEVEL"
  let solverNames = SolverName <$> [ "cvc4", "cvc5", "yices", "z3" ]
  solvers <- reportSolverVersions testLevel id
               =<< (zip solverNames <$> mapM getSolverVersion solverNames)
  let z3Tests =
        let skipPre4_8_11 why =
              -- Z3 before 4.8.11 handles string escaping differently;
              -- mark affected cases expected-fail there (or when the
              -- installed version is unknown).
              let shouldSkip = case lookup (SolverName "z3") solvers of
                    Just (SolverVersion v) -> any (`elem` [ "4.8.8", "4.8.9", "4.8.10" ]) $ words v
                    Nothing -> True
              in if shouldSkip then expectFailBecause why else id
            incompatZ3Strings = "unicode and string escaping not supported for older Z3 versions; upgrade to at least 4.8.11"
        in
        [ testUninterpretedFunctionScope
        , testRotate1
        , testRotate2
        , testRotate3
        , testBoundVarAsFree
        , testSolverInfo
        , testSolverVersion
        , testFloatUnsat0
        , testFloatUnsat1
        , testFloatUnsat2
        , testFloatSat0
        , testFloatSat1
        , testFloatToBinary
        , testFloatFromBinary
        , testBVIteNesting
        , testSymbolPrimeCharZ3
        , testCase "Z3 0-tuple" $ withOnlineZ3 zeroTupleTest
        , testCase "Z3 1-tuple" $ withOnlineZ3 oneTupleTest
        , testCase "Z3 pair" $ withOnlineZ3 pairTest
        , testCase "Z3 forall binder" $ withOnlineZ3 forallTest
        , skipPre4_8_11 incompatZ3Strings $ testCase "Z3 string1" $ withOnlineZ3 stringTest1
        , testCase "Z3 string2" $ withOnlineZ3 stringTest2
        , skipPre4_8_11 incompatZ3Strings $ testCase "Z3 string3" $ withOnlineZ3 stringTest3
        , skipPre4_8_11 incompatZ3Strings $ testCase "Z3 string4" $ withOnlineZ3 stringTest4
        , skipPre4_8_11 incompatZ3Strings $ testCase "Z3 string5" $ withOnlineZ3 stringTest5
        , skipPre4_8_11 incompatZ3Strings $ testCase "Z3 string6" $ withOnlineZ3 stringTest6
          -- this test apparently passes on older Z3 despite the escaping changes...
        , testCase "Z3 string7" $ withOnlineZ3 stringTest7
        , testCase "Z3 binder tuple1" $ withOnlineZ3 binderTupleTest1
        , testCase "Z3 binder tuple2" $ withOnlineZ3 binderTupleTest2
        , testCase "Z3 rounding" $ withOnlineZ3 roundingTest
        , testCase "Z3 multidim array"$ withOnlineZ3 multidimArrayTest
        , testCase "Z3 #182 test case" $ withOnlineZ3 issue182Test
        , arrayCopyTest
        , arraySetTest
        , arrayCopySetTest
        ]
  let cvc4Tests =
        let skipPre1_8 why =
              -- CVC4 1.7 predates the string-escaping support.
              let shouldSkip = case lookup (SolverName "cvc4") solvers of
                    Just (SolverVersion v) -> any (`elem` [ "1.7" ]) $ words v
                    Nothing -> True
              in if shouldSkip then expectFailBecause why else id
            unsuppStrings = "unicode and string escaping not supported for older CVC4 versions; upgrade to at least 1.8"
        in
        [ ignoreTestBecause "This test stalls the solver for some reason; line-buffering issue?" $
            testCase "CVC4 0-tuple" $ withCVC4 zeroTupleTest
        , testCase "CVC4 1-tuple" $ withCVC4 oneTupleTest
        , testCase "CVC4 pair" $ withCVC4 pairTest
        , testCase "CVC4 forall binder" $ withCVC4 forallTest
        , testCase "CVC4 string1" $ withCVC4 stringTest1
        , testCase "CVC4 string2" $ withCVC4 stringTest2
        , skipPre1_8 unsuppStrings $ testCase "CVC4 string3" $ withCVC4 stringTest3
        , testCase "CVC4 string4" $ withCVC4 stringTest4
        , testCase "CVC4 string5" $ withCVC4 stringTest5
        , skipPre1_8 unsuppStrings $ testCase "CVC4 string6" $ withCVC4 stringTest6
        , testCase "CVC4 string7" $ withCVC4 stringTest7
        , testCase "CVC4 binder tuple1" $ withCVC4 binderTupleTest1
        , testCase "CVC4 binder tuple2" $ withCVC4 binderTupleTest2
        , testCase "CVC4 rounding" $ withCVC4 roundingTest
        , testCase "CVC4 multidim array"$ withCVC4 multidimArrayTest
        , testCase "CVC4 #182 test case" $ withCVC4 issue182Test
        ]
  let yicesTests =
        [ testResolveSymBV WURB.ExponentialSearch
        , testResolveSymBV WURB.BinarySearch
        , testCase "Yices 0-tuple" $ withYices zeroTupleTest
        , testCase "Yices 1-tuple" $ withYices oneTupleTest
        , testCase "Yices pair" $ withYices pairTest
        , testCase "Yices rounding" $ withYices roundingTest
        , testCase "Yices #182 test case" $ withYices issue182Test
        ]
  -- CVC5 runs the same test set as CVC4.
  let cvc5Tests = cvc4Tests
  -- Replace a group with ignored tests when the solver is missing.
  let skipIfNotPresent nm = if SolverName nm `elem` (fst <$> solvers) then id
                            else fmap (ignoreTestBecause (nm <> " not present"))
  defaultMain $ testGroup "Tests" $
    [ testInterpretedFloatReal
    , testFloatUninterpreted
    , testInterpretedFloatIEEE
    , testFloatBinarySimplification
    , testRealFloatBinarySimplification
    , testFloatCastSimplification
    , testFloatCastNoSimplification
    , testBVSelectShl
    , testBVSelectLshr
    , testBVOrShlZext
    , testBVDomainArithScale
    , testBVSwap
    , testBVBitreverse
    , testUnsafeSetAbstractValue1
    , testUnsafeSetAbstractValue2
    ]
    <> (skipIfNotPresent "cvc4" cvc4Tests)
    <> (skipIfNotPresent "cvc5" cvc5Tests)
    <> (skipIfNotPresent "yices" yicesTests)
    <> (skipIfNotPresent "z3" z3Tests)
|
d0309562488c7c21458351241036cac9ee73fee9d4f55f6b9323e6f6203cd39f | williamleferrand/accretio | ys_graph.ml |
* Accretio is an API , a sandbox and a runtime for social playbooks
*
* Copyright ( C ) 2015
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation , either version 3 of the
* License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Affero General Public License for more details .
*
* You should have received a copy of the GNU Affero General Public License
* along with this program . If not , see < / > .
* Accretio is an API, a sandbox and a runtime for social playbooks
*
* Copyright (C) 2015 William Le Ferrand
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see </>.
*)
(** Interface of graph vertices: an abstract vertex type plus a loader
    that fetches the vertex with the given 64-bit id, asynchronously
    via Lwt. *)
module type VERTEX =
sig
  type vertex
  val load : int64 -> vertex Lwt.t
end

(** Interface of graph edges: only the abstract edge type. *)
module type EDGE =
sig
  type edge
end
| null | https://raw.githubusercontent.com/williamleferrand/accretio/394f855e9c2a6a18f0c2da35058d5a01aacf6586/library/server/ys_graph.ml | ocaml |
* Accretio is an API , a sandbox and a runtime for social playbooks
*
* Copyright ( C ) 2015
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation , either version 3 of the
* License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Affero General Public License for more details .
*
* You should have received a copy of the GNU Affero General Public License
* along with this program . If not , see < / > .
* Accretio is an API, a sandbox and a runtime for social playbooks
*
* Copyright (C) 2015 William Le Ferrand
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see </>.
*)
module type VERTEX =
sig
type vertex
val load : int64 -> vertex Lwt.t
end
module type EDGE =
sig
type edge
end
| |
1f4691c3eea750a5610db821c6c659e5efb0aad91dacbc6daa4e6e51f986146e | facebookincubator/hsthrift | Strict.hs | Copyright ( c ) Facebook , Inc. and its affiliates .
module Util.HashMap.Strict
( mapKeys
) where
import Control.Arrow (first)
import Data.Hashable
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HashMap
-- | Transform a 'HashMap' by applying a function to every key.  Values
-- are untouched; if the function maps two keys to the same new key,
-- one of their values is kept (which one is unspecified).
mapKeys :: (Eq b, Hashable b) => (a -> b) -> HashMap a v -> HashMap b v
mapKeys mapper m =
  HashMap.fromList [ (mapper k, v) | (k, v) <- HashMap.toList m ]
| null | https://raw.githubusercontent.com/facebookincubator/hsthrift/d3ff75d487e9d0c2904d18327373b603456e7a01/common/util/Util/HashMap/Strict.hs | haskell | Copyright ( c ) Facebook , Inc. and its affiliates .
module Util.HashMap.Strict
( mapKeys
) where
import Control.Arrow (first)
import Data.Hashable
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HashMap
| Transform a HashMap by applying a function to every key .
mapKeys :: (Eq b, Hashable b) => (a -> b) -> HashMap a v -> HashMap b v
mapKeys mapper =
HashMap.fromList . map (first mapper) . HashMap.toList
| |
9d9c4304b465b93178c3fd14740f71c52728656ad36c69bfe833e078cb1189b6 | ku-fpg/hermit | Dictionary.hs | # LANGUAGE ScopedTypeVariables #
module HERMIT.Shell.Dictionary
( mkDictionary
, addToDictionary
, pp_dictionary
) where
import Data.Dynamic
import Data.List
import Data.Map (Map, fromList, toList, fromListWith)
import HERMIT.External
import HERMIT.PrettyPrinter.Common
import qualified HERMIT.PrettyPrinter.AST as AST
import qualified HERMIT.PrettyPrinter.Clean as Clean
import qualified HERMIT.PrettyPrinter.GHC as GHCPP
--------------------------------------------------------------------------
| A ' Dictionary ' is a collection of ' Dynamic 's .
Looking up a ' Dynamic ' ( via an ' ExternalName ' key ) returns a list , as there
-- can be multiple 'Dynamic's with the same name.
type Dictionary = Map ExternalName [Dynamic]
-- | Build a 'Data.Map' keyed by external name; externals sharing a
-- name have their 'Dynamic' values concatenated into one entry.
toDictionary :: [External] -> Dictionary
toDictionary externs = fromListWith (++) [ toEntry e | e <- externs ]
-- | The key/value pair contributed by one 'External'.
toEntry :: External -> (ExternalName, [Dynamic])
toEntry ext = (externName ext, [externDyn ext])
-- | Insert one more 'External' into a dictionary, merging with any
-- existing entries of the same name.
addToDictionary :: External -> Dictionary -> Dictionary
addToDictionary ex dict = fromListWith (++) (toEntry ex : toList dict)
-- | Create a dictionary from a list of 'External's, adding the two
-- built-in @help@ commands.  The help commands must describe the final
-- command list including themselves, hence the knot-tying through
-- @externs'@ (safe because 'help_command' is only forced lazily).
mkDictionary :: [External] -> Dictionary
mkDictionary externs = toDictionary externs'
  where
    -- Category names laid out at no more than 60 columns per line.
    msg = layoutTxt 60 (map (show . fst) dictionaryOfTags)
    externs' = externs ++
      [ external "help" (help_command externs' "help")
          [ "(this message)" ] .+ Query .+ Shell
      , external "help" (help_command externs')
          ([ "help <command>|<category>|categories|all|<search-string>"
           , "Displays help about a command, or all commands in a category."
           , "Multiple items may match."
           , ""
           , "Categories: " ++ head msg
           ] ++ map (" " ++) (tail msg)) .+ Query .+ Shell
      ]
--------------------------------------------------------------------------
-- | The pretty-printing dictionaries, keyed by the name used to select
-- a pretty-printer from the shell.
pp_dictionary :: Map String PrettyPrinter
pp_dictionary = fromList
  [ ("clean", Clean.pretty)
  , ("ast",   AST.pretty)
  , ("ghc",   GHCPP.pretty)
  ]
--------------------------------------------------------------------------
-- | Render the help text for every given external.
make_help :: [External] -> [String]
make_help externs = concatMap snd (toList (toHelp externs))
-- | Implement the @help@ shell command.  The argument may be a category
-- tag, the literal strings @"all"@ or @"categories"@, or otherwise a
-- substring to search for among command names.
help_command :: [External] -> String -> String
help_command exts m
  -- A parsable category tag: list that category's commands.
  | [(ct :: CmdTag,"")] <- reads m
  = unlines $ make_help $ filter (tagMatch ct) exts
help_command exts "all"
  = unlines $ make_help exts
-- Tabulate the category names with a dotted leader before each blurb.
help_command _ "categories" = unlines $
  [ "Categories" ] ++
  [ "----------" ] ++
  [ txt ++ " " ++ replicate (16 - length txt) '.' ++ " " ++ desc
  | (cmd,desc) <- dictionaryOfTags
  , let txt = show cmd
  ]
-- Fallback: substring search over command names.
help_command exts m = unlines $ make_help $ pathPrefix m
  where pathPrefix p = filter (isInfixOf p . externName) exts
-- | Greedily join words with single spaces into lines: a pair of
-- adjacent words is merged only while the sum of their lengths stays
-- below @n@ (so a merged line is at most @n@ characters including the
-- separator).
layoutTxt :: Int -> [String] -> [String]
layoutTxt width (word : next : rest)
  | length word + length next >= width = word : layoutTxt width (next : rest)
  | otherwise = layoutTxt width ((word ++ " " ++ next) : rest)
layoutTxt _ other = other
--------------------------------------------------------------------------
| null | https://raw.githubusercontent.com/ku-fpg/hermit/3e7be430fae74a9e3860b8b574f36efbf9648dec/src/HERMIT/Shell/Dictionary.hs | haskell | ------------------------------------------------------------------------
can be multiple 'Dynamic's with the same name.
| Build a 'Data.Map' from names to 'Dynamic' values.
------------------------------------------------------------------------
| The pretty-printing dictionaries.
------------------------------------------------------------------------
------------------------------------------------------------------------ | # LANGUAGE ScopedTypeVariables #
module HERMIT.Shell.Dictionary
( mkDictionary
, addToDictionary
, pp_dictionary
) where
import Data.Dynamic
import Data.List
import Data.Map (Map, fromList, toList, fromListWith)
import HERMIT.External
import HERMIT.PrettyPrinter.Common
import qualified HERMIT.PrettyPrinter.AST as AST
import qualified HERMIT.PrettyPrinter.Clean as Clean
import qualified HERMIT.PrettyPrinter.GHC as GHCPP
| A ' Dictionary ' is a collection of ' Dynamic 's .
Looking up a ' Dynamic ' ( via an ' ExternalName ' key ) returns a list , as there
type Dictionary = Map ExternalName [Dynamic]
toDictionary :: [External] -> Dictionary
toDictionary = fromListWith (++) . map toEntry
toEntry :: External -> (ExternalName, [Dynamic])
toEntry e = (externName e, [externDyn e])
addToDictionary :: External -> Dictionary -> Dictionary
addToDictionary ex d = fromListWith (++) $ toEntry ex : toList d
| Create a dictionary from a list of ' External 's .
mkDictionary :: [External] -> Dictionary
mkDictionary externs = toDictionary externs'
where
msg = layoutTxt 60 (map (show . fst) dictionaryOfTags)
externs' = externs ++
[ external "help" (help_command externs' "help")
[ "(this message)" ] .+ Query .+ Shell
, external "help" (help_command externs')
([ "help <command>|<category>|categories|all|<search-string>"
, "Displays help about a command, or all commands in a category."
, "Multiple items may match."
, ""
, "Categories: " ++ head msg
] ++ map (" " ++) (tail msg)) .+ Query .+ Shell
]
pp_dictionary :: Map String PrettyPrinter
pp_dictionary = fromList
[ ("clean", Clean.pretty)
, ("ast", AST.pretty)
, ("ghc", GHCPP.pretty)
]
make_help :: [External] -> [String]
make_help = concatMap snd . toList . toHelp
help_command :: [External] -> String -> String
help_command exts m
| [(ct :: CmdTag,"")] <- reads m
= unlines $ make_help $ filter (tagMatch ct) exts
help_command exts "all"
= unlines $ make_help exts
help_command _ "categories" = unlines $
[ "Categories" ] ++
[ "----------" ] ++
[ txt ++ " " ++ replicate (16 - length txt) '.' ++ " " ++ desc
| (cmd,desc) <- dictionaryOfTags
, let txt = show cmd
]
help_command exts m = unlines $ make_help $ pathPrefix m
where pathPrefix p = filter (isInfixOf p . externName) exts
layoutTxt :: Int -> [String] -> [String]
layoutTxt n (w1:w2:ws) | length w1 + length w2 >= n = w1 : layoutTxt n (w2:ws)
| otherwise = layoutTxt n ((w1 ++ " " ++ w2) : ws)
layoutTxt _ other = other
|
e568b36f8200bf15eeb6f426b8df1f4dc656d936554f64e77d73800cf00102d6 | vonzhou/LearnYouHaskellForGreatGood | example.hs | import Data.List
-- | Does the first list occur as a contiguous sublist of the second?
-- Every suffix of the haystack is examined for the needle as a prefix.
isIn :: (Eq a) => [a] -> [a] -> Bool
isIn needle haystack = any (isPrefixOf needle) (tails haystack)
| null | https://raw.githubusercontent.com/vonzhou/LearnYouHaskellForGreatGood/439d848deac53ef6da6df433078b7f1dcf54d18d/chapter6/example.hs | haskell | import Data.List
isIn :: (Eq a) => [a] -> [a] -> Bool
needle `isIn` haystack = any (needle `isPrefixOf`) (tails haystack)
| |
7ca6d3e09f37773b643e20eba2f462e45579cc3322681631e35cb784eecf7fdd | typelead/eta | tc069.hs | module ShouldSucceed where
-- Typechecker regression fixture: a simple Char binding...
x = 'a'
-- ...and a lazy pattern binding over a list literal whose where-clause
-- refers to the other top-level binding (p is unused but must typecheck).
(y:ys) = ['a','b','c'] where p = x
| null | https://raw.githubusercontent.com/typelead/eta/97ee2251bbc52294efbf60fa4342ce6f52c0d25c/tests/suite/typecheck/compile/tc069.hs | haskell | module ShouldSucceed where
x = 'a'
(y:ys) = ['a','b','c'] where p = x
| |
76108a97370b4171994f7ec6120709cf26930188d4917733758b1e577e85e190 | ksrky/Plato | SrcToPs.hs | module Plato.Transl.SrcToPs where
import Plato.Types.Error
import Plato.Types.Fixity
import Plato.Types.Location
import Plato.Types.Monad
import Plato.Types.Name
import Plato.Types.Name.Global
import Plato.Types.Name.Reader
import Plato.Parsing.FixResol
import Plato.Parsing.Monad
import Plato.Parsing.Parser
import Plato.Parsing.Rename
import Plato.Syntax.Parsing
import Control.Exception.Safe
import Control.Monad.RWS
import Control.Monad.Reader
import qualified Data.Map.Strict as M
import qualified Data.Text as T
-- | Parse Plato source text into an untyped parse tree, returning the
-- operator fixities declared in the file alongside the program.  Parse
-- failures are rethrown in the ambient monad.
src2ps :: MonadThrow m => T.Text -> Plato m (FixityEnv Name, Program RdrName)
src2ps inp = do
  file <- asks plt_fileName
  (res, st) <- eitherToMonadThrow (parse file inp parser)
  -- The fixities accumulated during parsing live in the parser's user state.
  return (ust_fixityEnv (parser_ust st), res)
-- | Canonicalise a parsed program: rename reader names to global names
-- (visible names restricted to the imported modules), merge the file's
-- fixities into the session environment, and resolve operator
-- precedence.  Updates the session's name and fixity environments as a
-- side effect.
psCanon :: MonadThrow m => [ModuleName] -> FixityEnv Name -> Program RdrName -> Plato m (Program GlbName)
psCanon imp_modns fixenv prg = do
  glbenv <- filterGlbNameEnv imp_modns <$> gets plt_glbNameEnv
  (prg', glbenv') <- renameTopDecls prg glbenv
  -- M.union is left-biased: this file's renamed fixities shadow the
  -- ones already in the session.
  fixenv' <- M.union (renameFixityEnv glbenv' fixenv) <$> gets plt_fixityEnv
  modify $ \s -> s{plt_fixityEnv = fixenv', plt_glbNameEnv = plt_glbNameEnv s `M.union` glbenv'}
  resolveFixity fixenv' prg'
-- | Parse a single interactive expression (REPL line) and wrap it as a
-- program consisting of one 'Eval' top-level declaration with no module
-- header or imports.
exp2ps :: MonadThrow m => T.Text -> Plato m (Program GlbName)
exp2ps inp = do
  (expr, _) <- eitherToMonadThrow (parseLine inp exprParser)
  glbenv <- gets plt_glbNameEnv
  -- Rename under the session's global environment; 0 is the initial
  -- counter handed to the renamer — TODO confirm its exact meaning.
  expr' <- runReaderT (rename `traverse` expr) (glbenv, 0)
  fixenv <- gets plt_fixityEnv
  topd <- Eval <$> runReaderT (resolve expr') fixenv
  return $ Program{ps_moduleDecl = Nothing, ps_importDecls = [], ps_topDecls = [noLoc topd]}
-- | Extract the declared module name.  Calling this on a program with
-- no module declaration (e.g. one built by 'exp2ps') is a programming
-- error and hits 'unreachable'.
getModuleName :: Program GlbName -> ModuleName
getModuleName prg = case ps_moduleDecl prg of
  Just (L _ modn) -> modn
  Nothing -> unreachable "getModuleName"
-- | The modules imported by a parsed program.
importModules :: Program RdrName -> [Located ModuleName]
importModules = ps_importDecls
import Plato.Types.Error
import Plato.Types.Fixity
import Plato.Types.Location
import Plato.Types.Monad
import Plato.Types.Name
import Plato.Types.Name.Global
import Plato.Types.Name.Reader
import Plato.Parsing.FixResol
import Plato.Parsing.Monad
import Plato.Parsing.Parser
import Plato.Parsing.Rename
import Plato.Syntax.Parsing
import Control.Exception.Safe
import Control.Monad.RWS
import Control.Monad.Reader
import qualified Data.Map.Strict as M
import qualified Data.Text as T
src2ps :: MonadThrow m => T.Text -> Plato m (FixityEnv Name, Program RdrName)
src2ps inp = do
file <- asks plt_fileName
(res, st) <- eitherToMonadThrow (parse file inp parser)
return (ust_fixityEnv (parser_ust st), res)
psCanon :: MonadThrow m => [ModuleName] -> FixityEnv Name -> Program RdrName -> Plato m (Program GlbName)
psCanon imp_modns fixenv prg = do
glbenv <- filterGlbNameEnv imp_modns <$> gets plt_glbNameEnv
(prg', glbenv') <- renameTopDecls prg glbenv
fixenv' <- M.union (renameFixityEnv glbenv' fixenv) <$> gets plt_fixityEnv
modify $ \s -> s{plt_fixityEnv = fixenv', plt_glbNameEnv = plt_glbNameEnv s `M.union` glbenv'}
resolveFixity fixenv' prg'
exp2ps :: MonadThrow m => T.Text -> Plato m (Program GlbName)
exp2ps inp = do
(expr, _) <- eitherToMonadThrow (parseLine inp exprParser)
glbenv <- gets plt_glbNameEnv
expr' <- runReaderT (rename `traverse` expr) (glbenv, 0)
fixenv <- gets plt_fixityEnv
topd <- Eval <$> runReaderT (resolve expr') fixenv
return $ Program{ps_moduleDecl = Nothing, ps_importDecls = [], ps_topDecls = [noLoc topd]}
getModuleName :: Program GlbName -> ModuleName
getModuleName prg = case ps_moduleDecl prg of
Just (L _ modn) -> modn
Nothing -> unreachable "getModuleName"
importModules :: Program RdrName -> [Located ModuleName]
importModules = ps_importDecls | |
a53f356473dbdb7d7aefd9c540760450e88e734db7ab3035ddefbe7e5b39789a | emina/rosette | itunes100_4.rkt | #lang rosette
(require (only-in racket/runtime-path define-runtime-path))
(require "../dom.rkt")
(require "../websynth.rkt")
(require "../websynthlib.rkt")
(define-runtime-path html (build-path ".." "html/itunes_top100_v2.html"))
(define dom (read-DOMNode html))
(define-tags (tags dom))
(define max_zpath_depth (depth dom))
; Record 0 fields
(define-symbolic r0f0zpath tag? #:length max_zpath_depth)
(define-symbolic r0f1zpath tag? #:length max_zpath_depth)
(define-symbolic r0fieldmask boolean? #:length max_zpath_depth)
; Record 1 fields
(define-symbolic r1f0zpath tag? #:length max_zpath_depth)
(define-symbolic r1f1zpath tag? #:length max_zpath_depth)
(define-symbolic r1fieldmask boolean? #:length max_zpath_depth)
; Record 2 fields
(define-symbolic r2f0zpath tag? #:length max_zpath_depth)
(define-symbolic r2f1zpath tag? #:length max_zpath_depth)
(define-symbolic r2fieldmask boolean? #:length max_zpath_depth)
; Record 3 fields
(define-symbolic r3f0zpath tag? #:length max_zpath_depth)
(define-symbolic r3f1zpath tag? #:length max_zpath_depth)
(define-symbolic r3fieldmask boolean? #:length max_zpath_depth)
; Cross-record Mask
(define-symbolic recordmask boolean? #:length max_zpath_depth)
(current-bitwidth #f)
; Constrain the symbolic zpaths so that each demonstrated (title, artist)
; pair is reachable in the DOM, then derive the per-record field masks
; and the cross-record mask used for generalization.
(define (demonstration)
  ; Record 0 zpath asserts
  (assert (path? r0f0zpath dom "Sail"))
  (assert (path? r0f1zpath dom "AWOLNATION"))
  ; Record 1 zpath asserts
  (assert (path? r1f0zpath dom "I Won't Give Up"))
  (assert (path? r1f1zpath dom "Jason Mraz"))
  ; Record 2 zpath asserts
  (assert (path? r2f0zpath dom "Diamonds"))
  (assert (path? r2f1zpath dom "Rihanna"))
  ; Record 3 zpath asserts
  (assert (path? r3f0zpath dom "What Christmas Means to Me"))
  (assert (path? r3f1zpath dom "Cee Lo Green"))
  ; Record 0 Field Mask Generation
  (generate-mask r0f0zpath r0f1zpath r0fieldmask max_zpath_depth)
  ; Record 1 Field Mask Generation
  (generate-mask r1f0zpath r1f1zpath r1fieldmask max_zpath_depth)
  ; Record 2 Field Mask Generation
  (generate-mask r2f0zpath r2f1zpath r2fieldmask max_zpath_depth)
  ; Record 3 Field Mask Generation
  (generate-mask r3f0zpath r3f1zpath r3fieldmask max_zpath_depth)
  ; Record Mask: relate field 0 of records 0 and 1 across records.
  (generate-mask r0f0zpath r1f0zpath recordmask max_zpath_depth))
; Solve the demonstration constraints, generalize record 0's field
; zpaths with the record mask, and extract every (title, artist) pair.
(define (scrape)
  (define sol (solve (demonstration)))
  ; Construct final zpaths from the model: concretize record 0's field
  ; zpaths, then blank out the record-varying positions via the mask.
  (define r0f0zpath_list (map label (evaluate r0f0zpath sol)))
  (define generalized_r0f0zpath_list
    (apply-mask r0f0zpath_list (evaluate recordmask sol)))
  (define field0_zpath (synthsis_solution->zpath generalized_r0f0zpath_list))
  (define r0f1zpath_list (map label (evaluate r0f1zpath sol)))
  (define generalized_r0f1zpath_list
    (apply-mask r0f1zpath_list (evaluate recordmask sol)))
  (define field1_zpath (synthsis_solution->zpath generalized_r0f1zpath_list))
  ; Pair up the flattened per-field extractions.
  (zip
   (DOM-Flatten (DOM-XPath dom field0_zpath))
   (DOM-Flatten (DOM-XPath dom field1_zpath))))
(scrape)
| null | https://raw.githubusercontent.com/emina/rosette/a64e2bccfe5876c5daaf4a17c5a28a49e2fbd501/sdsl/websynth/benchmarks/itunes100_4.rkt | racket | Record 0 fields
Record 1 fields
Record 2 fields
Record 3 fields
Cross-record Mask
Record 0 zpath asserts
Record 1 zpath asserts
Record 2 zpath asserts
Record 3 zpath asserts
Record 0 Field Mask Generation
Record 1 Field Mask Generation
Record 2 Field Mask Generation
Record 3 Field Mask Generation
Record Mask
Solve
Record 0 zpaths
Record 1 zpaths
Record 2 zpaths
Record 3 zpaths | #lang rosette
(require (only-in racket/runtime-path define-runtime-path))
(require "../dom.rkt")
(require "../websynth.rkt")
(require "../websynthlib.rkt")
(define-runtime-path html (build-path ".." "html/itunes_top100_v2.html"))
(define dom (read-DOMNode html))
(define-tags (tags dom))
(define max_zpath_depth (depth dom))
(define-symbolic r0f0zpath tag? #:length max_zpath_depth)
(define-symbolic r0f1zpath tag? #:length max_zpath_depth)
(define-symbolic r0fieldmask boolean? #:length max_zpath_depth)
(define-symbolic r1f0zpath tag? #:length max_zpath_depth)
(define-symbolic r1f1zpath tag? #:length max_zpath_depth)
(define-symbolic r1fieldmask boolean? #:length max_zpath_depth)
(define-symbolic r2f0zpath tag? #:length max_zpath_depth)
(define-symbolic r2f1zpath tag? #:length max_zpath_depth)
(define-symbolic r2fieldmask boolean? #:length max_zpath_depth)
(define-symbolic r3f0zpath tag? #:length max_zpath_depth)
(define-symbolic r3f1zpath tag? #:length max_zpath_depth)
(define-symbolic r3fieldmask boolean? #:length max_zpath_depth)
(define-symbolic recordmask boolean? #:length max_zpath_depth)
(current-bitwidth #f)
(define (demonstration)
(assert (path? r0f0zpath dom "Sail"))
(assert (path? r0f1zpath dom "AWOLNATION"))
(assert (path? r1f0zpath dom "I Won't Give Up"))
(assert (path? r1f1zpath dom "Jason Mraz"))
(assert (path? r2f0zpath dom "Diamonds"))
(assert (path? r2f1zpath dom "Rihanna"))
(assert (path? r3f0zpath dom "What Christmas Means to Me"))
(assert (path? r3f1zpath dom "Cee Lo Green"))
(generate-mask r0f0zpath r0f1zpath r0fieldmask max_zpath_depth)
(generate-mask r1f0zpath r1f1zpath r1fieldmask max_zpath_depth)
(generate-mask r2f0zpath r2f1zpath r2fieldmask max_zpath_depth)
(generate-mask r3f0zpath r3f1zpath r3fieldmask max_zpath_depth)
(generate-mask r0f0zpath r1f0zpath recordmask max_zpath_depth))
(define (scrape)
(define sol (solve (demonstration)))
Construct final zpaths
(define r0f0zpath_list (map label (evaluate r0f0zpath sol)))
(define generalized_r0f0zpath_list
(apply-mask r0f0zpath_list (evaluate recordmask sol)))
(define field0_zpath (synthsis_solution->zpath generalized_r0f0zpath_list))
(define r0f1zpath_list (map label (evaluate r0f1zpath sol)))
(define generalized_r0f1zpath_list
(apply-mask r0f1zpath_list (evaluate recordmask sol)))
(define field1_zpath (synthsis_solution->zpath generalized_r0f1zpath_list))
(zip
(DOM-Flatten (DOM-XPath dom field0_zpath))
(DOM-Flatten (DOM-XPath dom field1_zpath))
))
(scrape)
|
da7f853cdc013f0484d6a55dde272daf5d10d641c5de6ebb46781973f883c3db | sharplispers/linedit | complete.lisp | Copyright ( c ) 2003 , 2004 Nikodemus Siivola ,
;;;;
;;;; Permission is hereby granted, free of charge, to any person obtaining
;;;; a copy of this software and associated documentation files (the
" Software " ) , to deal in the Software without restriction , including
;;;; without limitation the rights to use, copy, modify, merge, publish,
distribute , sublicense , and/or sell copies of the Software , and to
permit persons to whom the Software is furnished to do so , subject to
;;;; the following conditions:
;;;;
;;;; The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software .
;;;;
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
;;;; EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT .
;;;; IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT ,
;;;; TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
;;;; SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
(in-package :linedit)
(defun pathname-directory-pathname (pathname)
  "Return PATHNAME with its name and type components stripped, i.e. the
pathname of the containing directory."
  (make-pathname :name nil :type nil
                 :defaults pathname))
(defun underlying-directory-p (pathname)
  "True if PATHNAME names a directory, either directly or as a symbolic
link whose target is a directory."
  (case (file-kind pathname)
    (:directory t)
    (:symbolic-link
     ;; Previously this returned the target's file kind itself, which is
     ;; truthy for ANY existing target -- so a symlink to a regular file
     ;; counted as a directory.  Require the target to be a directory.
     (eq (file-kind (merge-pathnames (read-link pathname) pathname))
         :directory))))
(defun logical-pathname-p (pathname)
  "True if PATHNAME (or the pathname it designates) is a logical pathname."
  (typep (pathname pathname) 'logical-pathname))
(defun logical-pathname-complete (string)
  "Completion stub for logical pathnames: offer STRING itself as the
only candidate.  A fuller implementation is sketched below under #+nil."
  (values (list string) (length string)))
#+nil
(defun logical-pathname-complete (string)
(let* ((host (pathname-host string))
(rest (subseq string (1+ (mismatch host string))))
(rules (remove-if-not (lambda (rule)
(mismatch rest (first rule)))))
(physicals (mapcar (lambda (rule)
(namestring
(translate-pathname string
(first rule)
(second rule))))
rules))
(matches (apply #'append (mapcar #'directory-complete physicals)))
(logicals (mapcar (lambda (physical)
(let ((rule (find-if (lambda (rule)
(misma
(flet ((maybe-translate-logical-pathname (string)
(handler-case
(translate-logical-pathname string)
(error ()
(return-from logical-pathname-complete (values nil 0))))))
(directory-complete
(namestring
(maybe-translate-logical-pathname string)))))
FIXME : refactor chared code with directory complete
(loop with all
with common
with max
for cand in matches
do (let ((diff (mismatch string cand)))
(when (and diff (> diff (length string)))
(setf common (if common
(subseq common 0 (mismatch common cand))
cand)
max (max max (length cand))
all (cons cand all))))
finally (if (or (null common)
(<= (length common) (length string)))
(return (values all max))
(return (values (list common) (length common))))))))))))))))
;;; We can't easily do zsh-style tab-completion of ~us into ~user, but
;;; at least we can expand ~ and ~user. The other bug here at the
moment is that ~nonexistant will complete to the same as ~.
(defun tilde-expand-string (string)
"Returns the supplied string, with a prefix of ~ or ~user expanded
to the appropriate home directory."
(if (and (> (length string) 0)
(eql (schar string 0) #\~))
(flet ((chop (s)
(subseq s 0 (1- (length s)))))
(let* ((slash-index (loop for i below (length string)
when (eql (schar string i) #\/)
return i))
(suffix (and slash-index (subseq string slash-index)))
(uname (subseq string 1 slash-index))
(homedir (or (cdr (assoc :home (user-info uname)))
(chop (namestring
(or (probe-file (user-homedir-pathname))
(return-from tilde-expand-string
string)))))))
(concatenate 'string homedir (or suffix ""))))
string))
(defun directory-complete (string)
(declare (simple-string string))
(let* ((common nil)
(all nil)
(max 0)
(string (tilde-expand-string string))
(dir (pathname-directory-pathname string))
(namefun (if (relative-pathname-p string)
#'namestring
(lambda (x) (namestring (merge-pathnames x))))))
(unless (and (underlying-directory-p dir)
(not (wild-pathname-p dir)))
(return-from directory-complete (values nil 0)))
(with-directory-iterator (next dir)
(loop for entry = (next)
while entry
do (let* ((full (funcall namefun entry))
(diff (mismatch string full)))
(dbg "~& completed: ~A, diff: ~A~%" full diff)
(unless (and diff (< diff (length string)))
(dbg "~& common ~A mismatch ~A~&" common
(mismatch common full))
(setf common (if common
(subseq common 0 (mismatch common full))
full)
max (max max (length full))
all (cons full all))))))
(dbg "~&common: ~A~%" common)
(if (or (null common)
(<= (length common) (length string)))
(values all max)
(values (list common) (length common)))))
(defun lisp-complete (string editor)
(declare (simple-string string))
(when (plusp (length string))
(if (in-quoted-string-p editor)
(if (logical-pathname-p string)
(logical-pathname-complete string)
(directory-complete string))
(let* ((length (length string))
(first-colon (position #\: string))
(last-colon (position #\: string :from-end t))
(state (and first-colon
(if (< first-colon last-colon)
:internal
:external)))
(package (and first-colon
(find-package (if (plusp first-colon)
(string-upcase
(subseq string 0 first-colon))
:keyword))))
(hash (make-hash-table :test #'equal))
(common nil)
(max-len 0))
(labels ((stringify (symbol)
(if (upper-case-p (schar string 0))
(string symbol)
(string-downcase (string symbol))))
(push-name (name)
(setf common (if common
(subseq name 0 (mismatch common name))
name)
max-len (max max-len (length name))
(gethash name hash) name))
(select-symbol (symbol match)
(let ((name (stringify symbol))
(end (length match)))
(equal match (subseq name 0 end)))
(push-name (concat string (subseq name end)))))))
;; Skip empty strings
(when (plusp length)
(if package
;; Symbols with explicit package prefixes.
(let* ((start (1+ last-colon))
(match (subseq string start)))
(ecase state
(:internal (do-internal-symbols (sym package)
(select-symbol sym match)))
(:external (do-external-symbols (sym package)
(select-symbol sym match)))))
Symbols without explicit package prefix
(dolist (package (list-all-packages))
(if (eq *package* package)
(do-symbols (sym)
(select-symbol sym string))
;; Package names
(dolist (name (cons (package-name package)
(package-nicknames package)))
(select-symbol name string))))))
;; Return list of matches to caller
(if (> (length common) (length string))
(values (list common) (length common))
(let (list)
(maphash (lambda (key val)
(declare (ignore val))
(push key list))
hash)
(values list max-len))))))))
| null | https://raw.githubusercontent.com/sharplispers/linedit/0561c97dfca2f5854fcc66558a567a9875ddcb8f/complete.lisp | lisp |
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
without limitation the rights to use, copy, modify, merge, publish,
the following conditions:
The above copyright notice and this permission notice shall be included
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
We can't easily do zsh-style tab-completion of ~us into ~user, but
at least we can expand ~ and ~user. The other bug here at the
Skip empty strings
Symbols with explicit package prefixes.
Package names
Return list of matches to caller | Copyright ( c ) 2003 , 2004 Nikodemus Siivola ,
" Software " ) , to deal in the Software without restriction , including
distribute , sublicense , and/or sell copies of the Software , and to
permit persons to whom the Software is furnished to do so , subject to
in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT .
CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT ,
(in-package :linedit)
(defun pathname-directory-pathname (pathname)
(make-pathname :name nil :type nil
:defaults pathname))
(defun underlying-directory-p (pathname)
(case (file-kind pathname)
(:directory t)
(:symbolic-link
(file-kind (merge-pathnames (read-link pathname) pathname)))))
(defun logical-pathname-p (pathname)
(typep (pathname pathname) 'logical-pathname))
(defun logical-pathname-complete (string)
(values (list string) (length string)))
#+nil
(defun logical-pathname-complete (string)
(let* ((host (pathname-host string))
(rest (subseq string (1+ (mismatch host string))))
(rules (remove-if-not (lambda (rule)
(mismatch rest (first rule)))))
(physicals (mapcar (lambda (rule)
(namestring
(translate-pathname string
(first rule)
(second rule))))
rules))
(matches (apply #'append (mapcar #'directory-complete physicals)))
(logicals (mapcar (lambda (physical)
(let ((rule (find-if (lambda (rule)
(misma
(flet ((maybe-translate-logical-pathname (string)
(handler-case
(translate-logical-pathname string)
(error ()
(return-from logical-pathname-complete (values nil 0))))))
(directory-complete
(namestring
(maybe-translate-logical-pathname string)))))
FIXME : refactor chared code with directory complete
(loop with all
with common
with max
for cand in matches
do (let ((diff (mismatch string cand)))
(when (and diff (> diff (length string)))
(setf common (if common
(subseq common 0 (mismatch common cand))
cand)
max (max max (length cand))
all (cons cand all))))
finally (if (or (null common)
(<= (length common) (length string)))
(return (values all max))
(return (values (list common) (length common))))))))))))))))
moment is that ~nonexistant will complete to the same as ~.
(defun tilde-expand-string (string)
"Returns the supplied string, with a prefix of ~ or ~user expanded
to the appropriate home directory."
(if (and (> (length string) 0)
(eql (schar string 0) #\~))
(flet ((chop (s)
(subseq s 0 (1- (length s)))))
(let* ((slash-index (loop for i below (length string)
when (eql (schar string i) #\/)
return i))
(suffix (and slash-index (subseq string slash-index)))
(uname (subseq string 1 slash-index))
(homedir (or (cdr (assoc :home (user-info uname)))
(chop (namestring
(or (probe-file (user-homedir-pathname))
(return-from tilde-expand-string
string)))))))
(concatenate 'string homedir (or suffix ""))))
string))
(defun directory-complete (string)
(declare (simple-string string))
(let* ((common nil)
(all nil)
(max 0)
(string (tilde-expand-string string))
(dir (pathname-directory-pathname string))
(namefun (if (relative-pathname-p string)
#'namestring
(lambda (x) (namestring (merge-pathnames x))))))
(unless (and (underlying-directory-p dir)
(not (wild-pathname-p dir)))
(return-from directory-complete (values nil 0)))
(with-directory-iterator (next dir)
(loop for entry = (next)
while entry
do (let* ((full (funcall namefun entry))
(diff (mismatch string full)))
(dbg "~& completed: ~A, diff: ~A~%" full diff)
(unless (and diff (< diff (length string)))
(dbg "~& common ~A mismatch ~A~&" common
(mismatch common full))
(setf common (if common
(subseq common 0 (mismatch common full))
full)
max (max max (length full))
all (cons full all))))))
(dbg "~&common: ~A~%" common)
(if (or (null common)
(<= (length common) (length string)))
(values all max)
(values (list common) (length common)))))
(defun lisp-complete (string editor)
(declare (simple-string string))
(when (plusp (length string))
(if (in-quoted-string-p editor)
(if (logical-pathname-p string)
(logical-pathname-complete string)
(directory-complete string))
(let* ((length (length string))
(first-colon (position #\: string))
(last-colon (position #\: string :from-end t))
(state (and first-colon
(if (< first-colon last-colon)
:internal
:external)))
(package (and first-colon
(find-package (if (plusp first-colon)
(string-upcase
(subseq string 0 first-colon))
:keyword))))
(hash (make-hash-table :test #'equal))
(common nil)
(max-len 0))
(labels ((stringify (symbol)
(if (upper-case-p (schar string 0))
(string symbol)
(string-downcase (string symbol))))
(push-name (name)
(setf common (if common
(subseq name 0 (mismatch common name))
name)
max-len (max max-len (length name))
(gethash name hash) name))
(select-symbol (symbol match)
(let ((name (stringify symbol))
(end (length match)))
(equal match (subseq name 0 end)))
(push-name (concat string (subseq name end)))))))
(when (plusp length)
(if package
(let* ((start (1+ last-colon))
(match (subseq string start)))
(ecase state
(:internal (do-internal-symbols (sym package)
(select-symbol sym match)))
(:external (do-external-symbols (sym package)
(select-symbol sym match)))))
Symbols without explicit package prefix
(dolist (package (list-all-packages))
(if (eq *package* package)
(do-symbols (sym)
(select-symbol sym string))
(dolist (name (cons (package-name package)
(package-nicknames package)))
(select-symbol name string))))))
(if (> (length common) (length string))
(values (list common) (length common))
(let (list)
(maphash (lambda (key val)
(declare (ignore val))
(push key list))
hash)
(values list max-len))))))))
|
4bd274a5e78a25f5f8f0b079a98ba33431e6e8411cbf23d76497f6729502cbc2 | yrashk/erlang | ex_slider.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2009 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
-module(ex_slider).
-behavoiur(wx_object).
-export([start/1, init/1, terminate/2, code_change/3,
handle_info/2, handle_call/3, handle_event/2]).
-include_lib("wx/include/wx.hrl").
-record(state,
{
parent,
config,
slider
}).
start(Config) ->
wx_object:start_link(?MODULE, Config, []).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
init(Config) ->
wx:batch(fun() -> do_init(Config) end).
do_init(Config) ->
Parent = proplists:get_value(parent, Config),
Panel = wxPanel:new(Parent, []),
%% Setup sizers
MainSizer = wxBoxSizer:new(?wxVERTICAL),
Sizer = wxStaticBoxSizer:new(?wxVERTICAL, Panel,
[{label, "wxSlider"}]),
Sizer2 = wxStaticBoxSizer:new(?wxVERTICAL, Panel,
[{label, "Inverse wxSlider"}]),
%% Setup slider
Min = 0,
Max = 100,
StartValue = 25,
Slider = wxSlider:new(Panel, 1, StartValue, Min, Max,
[{style, ?wxSL_HORIZONTAL bor
?wxSL_LABELS}]),
Slider2 = wxSlider:new(Panel, 2, StartValue, Min, Max,
[{style, ?wxSL_HORIZONTAL bor
?wxSL_LABELS bor
?wxSL_INVERSE}]),
%% Add to sizers
wxSizer:add(Sizer, Slider, [{flag, ?wxEXPAND}]),
wxSizer:add(Sizer2, Slider2, [{flag, ?wxEXPAND}]),
wxSizer:add(MainSizer, Sizer, [{flag, ?wxEXPAND}]),
wxSizer:add(MainSizer, Sizer2, [{flag, ?wxEXPAND}]),
wxPanel:setSizer(Panel, MainSizer),
{Panel, #state{parent=Panel, config=Config,
slider = Slider}}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Callbacks handled as normal gen_server callbacks
handle_info(Msg, State) ->
demo:format(State#state.config, "Got Info ~p\n",[Msg]),
{noreply, State}.
handle_call(Msg, _From, State) ->
demo:format(State#state.config,"Got Call ~p\n",[Msg]),
{reply, {error, nyi},State}.
%% Async Events are handled in handle_event as in handle_info
handle_event(#wx{event = #wxCommand{type = command_checkbox_clicked}},
State = #state{}) ->
{noreply, State};
handle_event(Ev = #wx{}, State = #state{}) ->
demo:format(State#state.config,"Got Event ~p\n",[Ev]),
{noreply, State}.
code_change(_, _, State) ->
{stop, ignore, State}.
terminate(_Reason, _State) ->
ok.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Local functions
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
| null | https://raw.githubusercontent.com/yrashk/erlang/e1282325ed75e52a98d58f5bd9fb0fa27896173f/lib/wx/examples/demo/ex_slider.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
Setup sizers
Setup slider
Add to sizers
Callbacks handled as normal gen_server callbacks
Async Events are handled in handle_event as in handle_info
Local functions
| Copyright Ericsson AB 2009 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(ex_slider).
-behavoiur(wx_object).
-export([start/1, init/1, terminate/2, code_change/3,
handle_info/2, handle_call/3, handle_event/2]).
-include_lib("wx/include/wx.hrl").
-record(state,
{
parent,
config,
slider
}).
start(Config) ->
wx_object:start_link(?MODULE, Config, []).
init(Config) ->
wx:batch(fun() -> do_init(Config) end).
do_init(Config) ->
Parent = proplists:get_value(parent, Config),
Panel = wxPanel:new(Parent, []),
MainSizer = wxBoxSizer:new(?wxVERTICAL),
Sizer = wxStaticBoxSizer:new(?wxVERTICAL, Panel,
[{label, "wxSlider"}]),
Sizer2 = wxStaticBoxSizer:new(?wxVERTICAL, Panel,
[{label, "Inverse wxSlider"}]),
Min = 0,
Max = 100,
StartValue = 25,
Slider = wxSlider:new(Panel, 1, StartValue, Min, Max,
[{style, ?wxSL_HORIZONTAL bor
?wxSL_LABELS}]),
Slider2 = wxSlider:new(Panel, 2, StartValue, Min, Max,
[{style, ?wxSL_HORIZONTAL bor
?wxSL_LABELS bor
?wxSL_INVERSE}]),
wxSizer:add(Sizer, Slider, [{flag, ?wxEXPAND}]),
wxSizer:add(Sizer2, Slider2, [{flag, ?wxEXPAND}]),
wxSizer:add(MainSizer, Sizer, [{flag, ?wxEXPAND}]),
wxSizer:add(MainSizer, Sizer2, [{flag, ?wxEXPAND}]),
wxPanel:setSizer(Panel, MainSizer),
{Panel, #state{parent=Panel, config=Config,
slider = Slider}}.
handle_info(Msg, State) ->
demo:format(State#state.config, "Got Info ~p\n",[Msg]),
{noreply, State}.
handle_call(Msg, _From, State) ->
demo:format(State#state.config,"Got Call ~p\n",[Msg]),
{reply, {error, nyi},State}.
handle_event(#wx{event = #wxCommand{type = command_checkbox_clicked}},
State = #state{}) ->
{noreply, State};
handle_event(Ev = #wx{}, State = #state{}) ->
demo:format(State#state.config,"Got Event ~p\n",[Ev]),
{noreply, State}.
code_change(_, _, State) ->
{stop, ignore, State}.
terminate(_Reason, _State) ->
ok.
|
e8a52bce71bf26d32cf36fea4b3acfa502899a58d8ef0b640984b31a1b7484cc | gregtatcam/imaplet-lwt | response.ml |
* Copyright ( c ) 2013 - 2014 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2013-2014 Gregory Tsipenyuk <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
open Imaplet_types
open Lwt
module StatusResponse : sig
type response_type = Ok|Bad|No|Preauth|Bye
val ok : ?tag:string -> ?code:responseCode option -> string -> string
val bad : ?tag:string -> ?code:responseCode option -> string -> string
val no : ?tag:string -> ?code:responseCode option -> string -> string
val preauth : ?code:responseCode option -> string -> string
val bye : ?code:responseCode option -> string -> string
val untagged : string -> string
val any : string -> string
val continue : ?text:string -> unit -> string
end = struct
type response_type = Ok|Bad|No|Preauth|Bye
* use for ? ? *
let to_str x = x
* have to change the brackets , review neew to build
* structures that then are printed , maybe sexp TBD
*
* structures that then are printed, maybe sexp TBD
**)
let response_code code = match code with
| Some rc -> (match rc with
| RespCode_Alert -> "[ALERT"
| RespCode_Badcharset -> "[BADCHARSET"
| RespCode_Capability -> "[CAPABILITY"
| RespCode_Parse -> "[PARSE"
| RespCode_Permanentflags -> "[PERMANENTFLAGS"
| RespCode_Read_only -> "[READ-ONLY"
| RespCode_Read_write -> "[READ-WRITE"
| RespCode_Trycreate -> "[TRYCREATE"
| RespCode_Uidnext -> "[UIDNEXT"
| RespCode_Uidvalidity -> "[UIDVALIDITY"
| RespCode_Unseen -> "[UNSEEN"
| RespCode_Highestmodseq -> "[HIGHESTMODSEQ")
| None -> ""
let get_rtype = function
| Ok -> "OK"
| Bad -> "BAD"
| No -> "NO"
| Preauth -> "PREAUTH"
| Bye -> "BYE"
let get_response ?(tag="*") ?(code=None) ~rtype text =
let l = [tag; get_rtype rtype; response_code code; text] in
let acc = List.fold_left
(fun acc s -> if acc = "" then s else if s = "" then acc else acc ^ Regex.space ^ s) "" l in
match code with
|None-> to_str acc
|Some _ -> to_str (acc ^ "]")
let ok ?(tag="*") ?(code=None) text = get_response ~tag ~code ~rtype:Ok text
let bad ?(tag="*") ?(code=None) text = get_response ~tag ~code ~rtype:Bad text
let no ?(tag="*") ?(code=None) text = get_response ~tag ~code ~rtype:No text
let preauth ?(code=None) text = get_response ~tag:"*" ~code ~rtype:Preauth text
let bye ?(code=None) text = get_response ~tag:"*" ~code ~rtype:Bye text
let untagged text = to_str ("*" ^ Regex.space ^ text)
let any text = to_str text
let continue ?text () =
let pl = "+" in
let str = (match text with
| None -> pl
| Some t -> pl ^ Regex.space ^ t) in
to_str str
end
let buff_size = 4096
let get_pr str =
let str_len = String.length str in
if str_len >= 40 then (
let head = String.sub str 0 40 in
let tail = String.sub str (str_len - 40) 40 in
head ^ "..." ^ tail
) else
str
let write_compressed_block w strm buff_in offset_in len_in buff_out len_out =
let (fi,used_in,used_out) = Zlib.deflate strm buff_in offset_in len_in
buff_out 0 len_out Zlib.Z_SYNC_FLUSH in
Log_.log `Info2 (Printf.sprintf " -- writing compressed data %b %d %d %d %d\n"
fi used_in used_out offset_in len_in);
Lwt_io.write w (String.sub buff_out 0 used_out) >>
Lwt_io.flush w >>
return (fi,used_in,used_out)
let write_compressed w strm resp =
let len_resp = String.length resp in
Log_.log `Info2 (Printf.sprintf "--> writing compressed data:start %d\n" len_resp);
Log_.log `Info2 (Printf.sprintf "--> un-compressed data:start %s$$$$\n%!" (get_pr resp));
let buffout = String.create buff_size in
let rec _compress offset len =
write_compressed_block w strm resp offset len
buffout buff_size >>= fun (fi,used_in,used_out) ->
let offset = offset + used_in in
let len = len - used_in in
if len = 0 then (
Log_.log `Info2 (Printf.sprintf "<-- compression complete %d %d\n" offset len);
return ()
) else
_compress offset len
in
_compress 0 len_resp
let write_resp compress id w ?(tag="*") resp =
let send_wcrlf w str =
Log_.log `Info3 (Printf.sprintf "<-- %s: %s\n" (Int64.to_string id) str);
match compress with
| None -> Lwt_io.write w (str ^ Regex.crlf)
| Some (_,strm,_,_) -> write_compressed w strm (str ^ Regex.crlf)
in
match resp with
| Resp_Ok (code, s) -> send_wcrlf w (StatusResponse.ok ~tag ~code s)
| Resp_No (code, s) -> send_wcrlf w (StatusResponse.no ~tag ~code s)
| Resp_Bad (code, s) -> send_wcrlf w (StatusResponse.bad ~tag ~code s)
| Resp_Bye (code, s) -> send_wcrlf w (StatusResponse.bye ~code s)
| Resp_Preauth (code, s) -> send_wcrlf w (StatusResponse.preauth ~code s)
| Resp_Cont (text) -> send_wcrlf w (StatusResponse.continue ~text ())
| Resp_Untagged (text) -> send_wcrlf w (StatusResponse.untagged text)
| Resp_Any (text) -> send_wcrlf w (StatusResponse.any text)
let write_resp_untagged compress id writer text =
write_resp compress id writer (Resp_Untagged text)
let write_resp_untagged_vector compress id w resp =
let l = List.concat [["* "];resp;[Regex.crlf]] in
match compress with
| Some (_,strm,_,_) ->
let buff = String.concat "" l in
write_compressed w strm buff
| None ->
Lwt_list.iter_s (Lwt_io.write w) l
| null | https://raw.githubusercontent.com/gregtatcam/imaplet-lwt/d7b51253e79cffa97e98ab899ed833cd7cb44bb6/lib/commands/response.ml | ocaml |
* Copyright ( c ) 2013 - 2014 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2013-2014 Gregory Tsipenyuk <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
open Imaplet_types
open Lwt
module StatusResponse : sig
type response_type = Ok|Bad|No|Preauth|Bye
val ok : ?tag:string -> ?code:responseCode option -> string -> string
val bad : ?tag:string -> ?code:responseCode option -> string -> string
val no : ?tag:string -> ?code:responseCode option -> string -> string
val preauth : ?code:responseCode option -> string -> string
val bye : ?code:responseCode option -> string -> string
val untagged : string -> string
val any : string -> string
val continue : ?text:string -> unit -> string
end = struct
type response_type = Ok|Bad|No|Preauth|Bye
* use for ? ? *
let to_str x = x
* have to change the brackets , review neew to build
* structures that then are printed , maybe sexp TBD
*
* structures that then are printed, maybe sexp TBD
**)
let response_code code = match code with
| Some rc -> (match rc with
| RespCode_Alert -> "[ALERT"
| RespCode_Badcharset -> "[BADCHARSET"
| RespCode_Capability -> "[CAPABILITY"
| RespCode_Parse -> "[PARSE"
| RespCode_Permanentflags -> "[PERMANENTFLAGS"
| RespCode_Read_only -> "[READ-ONLY"
| RespCode_Read_write -> "[READ-WRITE"
| RespCode_Trycreate -> "[TRYCREATE"
| RespCode_Uidnext -> "[UIDNEXT"
| RespCode_Uidvalidity -> "[UIDVALIDITY"
| RespCode_Unseen -> "[UNSEEN"
| RespCode_Highestmodseq -> "[HIGHESTMODSEQ")
| None -> ""
let get_rtype = function
| Ok -> "OK"
| Bad -> "BAD"
| No -> "NO"
| Preauth -> "PREAUTH"
| Bye -> "BYE"
let get_response ?(tag="*") ?(code=None) ~rtype text =
let l = [tag; get_rtype rtype; response_code code; text] in
let acc = List.fold_left
(fun acc s -> if acc = "" then s else if s = "" then acc else acc ^ Regex.space ^ s) "" l in
match code with
|None-> to_str acc
|Some _ -> to_str (acc ^ "]")
let ok ?(tag="*") ?(code=None) text = get_response ~tag ~code ~rtype:Ok text
let bad ?(tag="*") ?(code=None) text = get_response ~tag ~code ~rtype:Bad text
let no ?(tag="*") ?(code=None) text = get_response ~tag ~code ~rtype:No text
let preauth ?(code=None) text = get_response ~tag:"*" ~code ~rtype:Preauth text
let bye ?(code=None) text = get_response ~tag:"*" ~code ~rtype:Bye text
let untagged text = to_str ("*" ^ Regex.space ^ text)
let any text = to_str text
let continue ?text () =
let pl = "+" in
let str = (match text with
| None -> pl
| Some t -> pl ^ Regex.space ^ t) in
to_str str
end
let buff_size = 4096
let get_pr str =
let str_len = String.length str in
if str_len >= 40 then (
let head = String.sub str 0 40 in
let tail = String.sub str (str_len - 40) 40 in
head ^ "..." ^ tail
) else
str
let write_compressed_block w strm buff_in offset_in len_in buff_out len_out =
let (fi,used_in,used_out) = Zlib.deflate strm buff_in offset_in len_in
buff_out 0 len_out Zlib.Z_SYNC_FLUSH in
Log_.log `Info2 (Printf.sprintf " -- writing compressed data %b %d %d %d %d\n"
fi used_in used_out offset_in len_in);
Lwt_io.write w (String.sub buff_out 0 used_out) >>
Lwt_io.flush w >>
return (fi,used_in,used_out)
let write_compressed w strm resp =
let len_resp = String.length resp in
Log_.log `Info2 (Printf.sprintf "--> writing compressed data:start %d\n" len_resp);
Log_.log `Info2 (Printf.sprintf "--> un-compressed data:start %s$$$$\n%!" (get_pr resp));
let buffout = String.create buff_size in
let rec _compress offset len =
write_compressed_block w strm resp offset len
buffout buff_size >>= fun (fi,used_in,used_out) ->
let offset = offset + used_in in
let len = len - used_in in
if len = 0 then (
Log_.log `Info2 (Printf.sprintf "<-- compression complete %d %d\n" offset len);
return ()
) else
_compress offset len
in
_compress 0 len_resp
let write_resp compress id w ?(tag="*") resp =
let send_wcrlf w str =
Log_.log `Info3 (Printf.sprintf "<-- %s: %s\n" (Int64.to_string id) str);
match compress with
| None -> Lwt_io.write w (str ^ Regex.crlf)
| Some (_,strm,_,_) -> write_compressed w strm (str ^ Regex.crlf)
in
match resp with
| Resp_Ok (code, s) -> send_wcrlf w (StatusResponse.ok ~tag ~code s)
| Resp_No (code, s) -> send_wcrlf w (StatusResponse.no ~tag ~code s)
| Resp_Bad (code, s) -> send_wcrlf w (StatusResponse.bad ~tag ~code s)
| Resp_Bye (code, s) -> send_wcrlf w (StatusResponse.bye ~code s)
| Resp_Preauth (code, s) -> send_wcrlf w (StatusResponse.preauth ~code s)
| Resp_Cont (text) -> send_wcrlf w (StatusResponse.continue ~text ())
| Resp_Untagged (text) -> send_wcrlf w (StatusResponse.untagged text)
| Resp_Any (text) -> send_wcrlf w (StatusResponse.any text)
let write_resp_untagged compress id writer text =
write_resp compress id writer (Resp_Untagged text)
let write_resp_untagged_vector compress id w resp =
let l = List.concat [["* "];resp;[Regex.crlf]] in
match compress with
| Some (_,strm,_,_) ->
let buff = String.concat "" l in
write_compressed w strm buff
| None ->
Lwt_list.iter_s (Lwt_io.write w) l
| |
8a2a7321ebf67201a53dc129130b7ca58fc798fdd6c9f98a88fd854154c319ae | onyx-platform/onyx | liveness.clj | (ns onyx.peer.liveness
(:require [onyx.protocol.task-state :as t :refer [evict-peer! get-messenger]]
[onyx.messaging.protocols.status-publisher :as status-pub]
[onyx.messaging.protocols.subscriber :as sub]
[onyx.messaging.protocols.publisher :as pub]))
(defn upstream-timed-out-peers [subscriber liveness-timeout-ns]
(let [curr-time (System/nanoTime)]
(->> subscriber
(sub/status-pubs)
(sequence (comp (filter (fn [[peer-id spub]]
(< (+ (status-pub/get-heartbeat spub)
liveness-timeout-ns)
curr-time)))
(map key))))))
(defn downstream-timed-out-peers [publishers timeout-ms]
(let [curr-time (System/nanoTime)]
(sequence (comp (mapcat pub/statuses)
(filter (fn [[peer-id status]]
(< (+ (:heartbeat status) timeout-ms)
curr-time)))
(map key))
publishers)))
| null | https://raw.githubusercontent.com/onyx-platform/onyx/74f9ae58cdbcfcb1163464595f1e6ae6444c9782/src/onyx/peer/liveness.clj | clojure | (ns onyx.peer.liveness
(:require [onyx.protocol.task-state :as t :refer [evict-peer! get-messenger]]
[onyx.messaging.protocols.status-publisher :as status-pub]
[onyx.messaging.protocols.subscriber :as sub]
[onyx.messaging.protocols.publisher :as pub]))
(defn upstream-timed-out-peers [subscriber liveness-timeout-ns]
(let [curr-time (System/nanoTime)]
(->> subscriber
(sub/status-pubs)
(sequence (comp (filter (fn [[peer-id spub]]
(< (+ (status-pub/get-heartbeat spub)
liveness-timeout-ns)
curr-time)))
(map key))))))
(defn downstream-timed-out-peers [publishers timeout-ms]
(let [curr-time (System/nanoTime)]
(sequence (comp (mapcat pub/statuses)
(filter (fn [[peer-id status]]
(< (+ (:heartbeat status) timeout-ms)
curr-time)))
(map key))
publishers)))
| |
e6866dc974877450aef23f76f875d7afbaff7194d92c85f4fc272d3d8bb0c132 | NorfairKing/hastory | Cli.hs | # LANGUAGE FlexibleContexts #
module Hastory.Cli
( hastoryCli,
)
where
import Control.Monad.IO.Unlift (MonadUnliftIO)
import Control.Monad.Reader
import Hastory.Cli.Commands.ChangeDir (change)
import Hastory.Cli.Commands.Gather (gather)
import Hastory.Cli.Commands.GenChangeWrapper (genChangeWrapperScript)
import Hastory.Cli.Commands.GenGatherWrapper (genGatherWrapperScript)
import Hastory.Cli.Commands.ListDir (listRecentDirs)
import Hastory.Cli.Commands.Register (register)
import Hastory.Cli.Commands.SuggestAlias (suggest)
import Hastory.Cli.Commands.Sync (sync)
import Hastory.Cli.OptParse
-- | Entry point of the hastory command-line tool: parse the command line
-- into 'Instructions', then run the selected dispatch action with the
-- accompanying settings as the reader environment.
hastoryCli :: IO ()
hastoryCli =
  getInstructions >>= \(Instructions d sets) -> runReaderT (dispatch d) sets
-- | Route a parsed 'Dispatch' value to the handler that implements it.
-- This is pure routing: each branch delegates to the corresponding command
-- module, lifting plain 'IO' actions where necessary.
dispatch :: (MonadReader Settings m, MonadUnliftIO m) => Dispatch -> m ()
dispatch d =
  case d of
    DispatchGather _ -> void gather
    DispatchGenGatherWrapperScript _ -> liftIO genGatherWrapperScript
    DispatchChangeDir cds -> change cds
    DispatchListRecentDirs lrds -> listRecentDirs lrds
    DispatchGenChangeWrapperScript _ -> liftIO genChangeWrapperScript
    DispatchSuggestAlias _ -> suggest
    DispatchSync ss -> sync ss
    DispatchRegister rs -> register rs
| null | https://raw.githubusercontent.com/NorfairKing/hastory/35abbc79155bc7c5a0f6e0f3618c8b8bcd3889a1/hastory-cli/src/Hastory/Cli.hs | haskell | # LANGUAGE FlexibleContexts #
module Hastory.Cli
( hastoryCli,
)
where
import Control.Monad.IO.Unlift (MonadUnliftIO)
import Control.Monad.Reader
import Hastory.Cli.Commands.ChangeDir (change)
import Hastory.Cli.Commands.Gather (gather)
import Hastory.Cli.Commands.GenChangeWrapper (genChangeWrapperScript)
import Hastory.Cli.Commands.GenGatherWrapper (genGatherWrapperScript)
import Hastory.Cli.Commands.ListDir (listRecentDirs)
import Hastory.Cli.Commands.Register (register)
import Hastory.Cli.Commands.SuggestAlias (suggest)
import Hastory.Cli.Commands.Sync (sync)
import Hastory.Cli.OptParse
hastoryCli :: IO ()
hastoryCli = do
Instructions d sets <- getInstructions
runReaderT (dispatch d) sets
dispatch :: (MonadReader Settings m, MonadUnliftIO m) => Dispatch -> m ()
dispatch (DispatchGather _) = void gather
dispatch (DispatchGenGatherWrapperScript _) = liftIO genGatherWrapperScript
dispatch (DispatchChangeDir changeDirSettings) = change changeDirSettings
dispatch (DispatchListRecentDirs lrds) = listRecentDirs lrds
dispatch (DispatchGenChangeWrapperScript _) = liftIO genChangeWrapperScript
dispatch (DispatchSuggestAlias _) = suggest
dispatch (DispatchSync syncSettings) = sync syncSettings
dispatch (DispatchRegister registerSettings) = register registerSettings
| |
6eef3fbb0fa0370b4b58ad2e952d93b4ee399bcf83bdd97f310e488ee8135db0 | ocsigen/ocaml-eliom | test_nats.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
open Test;;
open Nat;;
Can compare nats less than 2**32
(* Structural equality on nats: compares [n1] and [n2] over their
   significant digits only, as reported by [num_digits_nat] starting at
   offset 0.  Used by the tests below in place of physical equality. *)
let equal_nat n1 n2 =
  eq_nat n1 0 (num_digits_nat n1 0 1)
         n2 0 (num_digits_nat n2 0 1);;
testing_function "num_digits_nat";;
test (-1) eq (false,not true);;
test 0 eq (true,not false);;
test 1
eq_int
(let r = make_nat 2 in
set_digit_nat r 1 1;
num_digits_nat r 0 1,1);;
testing_function "length_nat";;
test 1
eq_int
(let r = make_nat 2 in
set_digit_nat r 0 1;
length_nat r,2);;
testing_function "equal_nat";;
let zero_nat = make_nat 1 in
test 1
equal_nat (zero_nat,zero_nat);;
test 2
equal_nat (nat_of_int 1,nat_of_int 1);;
test 3
equal_nat (nat_of_string "2",nat_of_string "2");;
test 4
eq (equal_nat (nat_of_string "2")(nat_of_string "3"),false);;
testing_function "incr_nat";;
let zero = nat_of_int 0 in
let res = incr_nat zero 0 1 1 in
test 1
equal_nat (zero, nat_of_int 1) &&
test 2
eq (res,0);;
let n = nat_of_int 1 in
let res = incr_nat n 0 1 1 in
test 3
equal_nat (n, nat_of_int 2) &&
test 4
eq (res,0);;
testing_function "decr_nat";;
let n = nat_of_int 1 in
let res = decr_nat n 0 1 0 in
test 1
equal_nat (n, nat_of_int 0) &&
test 2
eq (res,1);;
let n = nat_of_int 2 in
let res = decr_nat n 0 1 0 in
test 3
equal_nat (n, nat_of_int 1) &&
test 4
eq (res,1);;
testing_function "is_zero_nat";;
let n = nat_of_int 1 in
test 1 eq (is_zero_nat n 0 1,false) &&
test 2 eq (is_zero_nat (make_nat 1) 0 1, true) &&
test 3 eq (is_zero_nat (make_nat 2) 0 2, true) &&
(let r = make_nat 2 in
set_digit_nat r 1 1;
test 4 eq (is_zero_nat r 0 1, true))
;;
testing_function "string_of_nat";;
let n = make_nat 4;;
test 1 eq_string (string_of_nat n, "0");;
complement_nat n 0 (if sixtyfour then 2 else 4);;
test 2 eq_string (string_of_nat n, "340282366920938463463374607431768211455");;
testing_function "string_of_nat && nat_of_string";;
for i = 1 to 20 do
let s = String.make i '0' in
String.set s 0 '1';
ignore (test i eq_string (string_of_nat (nat_of_string s), s))
done;;
(* Wrapper around [mult_digit_nat] that discards its returned carry;
   used below when only the in-place effect on [n1] matters. *)
let set_mult_digit_nat n1 d1 l1 n2 d2 l2 n3 d3 =
  ignore (mult_digit_nat n1 d1 l1 n2 d2 l2 n3 d3)
;;
let s =
"33333333333333333333333333333333333333333333333333333333333333333333\
33333333333333333333333333333333333333333333333333333333333333333333"
in
test 21 equal_nat (
nat_of_string s,
(let nat = make_nat 15 in
set_digit_nat nat 0 3;
set_mult_digit_nat nat 0 15
(nat_of_string (String.sub s 0 135)) 0 14
(nat_of_int 10) 0;
nat))
;;
test 22 eq_string (string_of_nat(nat_of_string "1073741824"), "1073741824");;
testing_function "gcd_nat";;
for i = 1 to 20 do
let n1 = Random.int 1000000000
and n2 = Random.int 100000 in
let nat1 = nat_of_int n1
and nat2 = nat_of_int n2 in
ignore (gcd_nat nat1 0 1 nat2 0 1);
ignore (test i eq (int_of_nat nat1, gcd_int n1 n2))
done
;;
testing_function "sqrt_nat";;
test 1 equal_nat (sqrt_nat (nat_of_int 1) 0 1, nat_of_int 1);;
test 2 equal_nat (let n = nat_of_string "8589934592" in
sqrt_nat n 0 (length_nat n),
nat_of_string "92681");;
test 3 equal_nat (let n = nat_of_string "4294967295" in
sqrt_nat n 0 (length_nat n),
nat_of_string "65535");;
test 4 equal_nat (let n = nat_of_string "18446744065119617025" in
sqrt_nat n 0 (length_nat n),
nat_of_string "4294967295");;
test 5 equal_nat (sqrt_nat (nat_of_int 15) 0 1,
nat_of_int 3);;
| null | https://raw.githubusercontent.com/ocsigen/ocaml-eliom/497c6707f477cb3086dc6d8124384e74a8c379ae/testsuite/tests/lib-num/test_nats.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************ | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Test;;
open Nat;;
Can compare nats less than 2**32
let equal_nat n1 n2 =
eq_nat n1 0 (num_digits_nat n1 0 1)
n2 0 (num_digits_nat n2 0 1);;
testing_function "num_digits_nat";;
test (-1) eq (false,not true);;
test 0 eq (true,not false);;
test 1
eq_int
(let r = make_nat 2 in
set_digit_nat r 1 1;
num_digits_nat r 0 1,1);;
testing_function "length_nat";;
test 1
eq_int
(let r = make_nat 2 in
set_digit_nat r 0 1;
length_nat r,2);;
testing_function "equal_nat";;
let zero_nat = make_nat 1 in
test 1
equal_nat (zero_nat,zero_nat);;
test 2
equal_nat (nat_of_int 1,nat_of_int 1);;
test 3
equal_nat (nat_of_string "2",nat_of_string "2");;
test 4
eq (equal_nat (nat_of_string "2")(nat_of_string "3"),false);;
testing_function "incr_nat";;
let zero = nat_of_int 0 in
let res = incr_nat zero 0 1 1 in
test 1
equal_nat (zero, nat_of_int 1) &&
test 2
eq (res,0);;
let n = nat_of_int 1 in
let res = incr_nat n 0 1 1 in
test 3
equal_nat (n, nat_of_int 2) &&
test 4
eq (res,0);;
testing_function "decr_nat";;
let n = nat_of_int 1 in
let res = decr_nat n 0 1 0 in
test 1
equal_nat (n, nat_of_int 0) &&
test 2
eq (res,1);;
let n = nat_of_int 2 in
let res = decr_nat n 0 1 0 in
test 3
equal_nat (n, nat_of_int 1) &&
test 4
eq (res,1);;
testing_function "is_zero_nat";;
let n = nat_of_int 1 in
test 1 eq (is_zero_nat n 0 1,false) &&
test 2 eq (is_zero_nat (make_nat 1) 0 1, true) &&
test 3 eq (is_zero_nat (make_nat 2) 0 2, true) &&
(let r = make_nat 2 in
set_digit_nat r 1 1;
test 4 eq (is_zero_nat r 0 1, true))
;;
testing_function "string_of_nat";;
let n = make_nat 4;;
test 1 eq_string (string_of_nat n, "0");;
complement_nat n 0 (if sixtyfour then 2 else 4);;
test 2 eq_string (string_of_nat n, "340282366920938463463374607431768211455");;
testing_function "string_of_nat && nat_of_string";;
for i = 1 to 20 do
let s = String.make i '0' in
String.set s 0 '1';
ignore (test i eq_string (string_of_nat (nat_of_string s), s))
done;;
let set_mult_digit_nat n1 d1 l1 n2 d2 l2 n3 d3 =
ignore (mult_digit_nat n1 d1 l1 n2 d2 l2 n3 d3)
;;
let s =
"33333333333333333333333333333333333333333333333333333333333333333333\
33333333333333333333333333333333333333333333333333333333333333333333"
in
test 21 equal_nat (
nat_of_string s,
(let nat = make_nat 15 in
set_digit_nat nat 0 3;
set_mult_digit_nat nat 0 15
(nat_of_string (String.sub s 0 135)) 0 14
(nat_of_int 10) 0;
nat))
;;
test 22 eq_string (string_of_nat(nat_of_string "1073741824"), "1073741824");;
testing_function "gcd_nat";;
for i = 1 to 20 do
let n1 = Random.int 1000000000
and n2 = Random.int 100000 in
let nat1 = nat_of_int n1
and nat2 = nat_of_int n2 in
ignore (gcd_nat nat1 0 1 nat2 0 1);
ignore (test i eq (int_of_nat nat1, gcd_int n1 n2))
done
;;
testing_function "sqrt_nat";;
test 1 equal_nat (sqrt_nat (nat_of_int 1) 0 1, nat_of_int 1);;
test 2 equal_nat (let n = nat_of_string "8589934592" in
sqrt_nat n 0 (length_nat n),
nat_of_string "92681");;
test 3 equal_nat (let n = nat_of_string "4294967295" in
sqrt_nat n 0 (length_nat n),
nat_of_string "65535");;
test 4 equal_nat (let n = nat_of_string "18446744065119617025" in
sqrt_nat n 0 (length_nat n),
nat_of_string "4294967295");;
test 5 equal_nat (sqrt_nat (nat_of_int 15) 0 1,
nat_of_int 3);;
|
2233e70b89edbd1246c2da77525632cc74e0da44e46895ad0dc6c3488b3742b4 | gas2serra/mcclim-desktop | application.lisp | (in-package :desktop-internals)
;;;;
;;;; Global Variables
;;;;
(defvar *application* nil
"The current application")
;;;;
;;;; Application Classes
;;;;
;;;
;;; Application
;;;
(defclass application ()
((name :initarg :name
:reader application-name)
(pretty-name :initarg :pretty-name
:accessor application-pretty-name
:initform nil)
(icon :initarg :icon
:accessor application-icon
:initform nil)
(menu-p :initarg :menu-p
:initform t
:accessor application-menu-p)
(requires-args-p :initarg :requires-args-p
:initform nil
:accessor application-requires-args-p)
(configured-p :reader application-configured-p
:initform nil)))
;;;
;;; Application protocols
;;;
(defgeneric run-application (application &rest args))
(defgeneric launch-application (application &key args cb-fn))
(defgeneric note-application-start-running (application &rest args))
(defgeneric note-application-end-running (application &rest args))
(defgeneric configure-application (application &optional force-p))
(defgeneric ensure-application-configured (application))
(defgeneric need-reconfigure-application (application))
(defgeneric note-application-configured (application))
;;; protocol: launch/running
;; Run APPLICATION on a freshly spawned thread (named after the
;; application).  When CB-FN is supplied it is called with the result of
;; RUN-APPLICATION, the application object and the launch args.
;; NOTE(review): this UNWIND-PROTECT has no cleanup forms, so it protects
;; nothing as written -- either cleanup code is missing or the
;; UNWIND-PROTECT can be removed.  Confirm the original intent.
(defmethod launch-application ((application application) &key args cb-fn)
  (with-slots (name) application
    (bt:make-thread
     #'(lambda ()
         (unwind-protect
             (let ((res (apply #'run-application application args)))
               (when cb-fn
                 (funcall cb-fn res application args)))))
     :name name)))
;; Wrap every RUN-APPLICATION call: make sure the application is
;; configured first, log the start, and guarantee the end-of-run hook
;; fires even if the primary method signals an error.
(defmethod run-application :around ((application application) &rest args)
  (ensure-application-configured application)
  (note-application-start-running application args)
  (unwind-protect
      (call-next-method)
    (note-application-end-running application args)))
(defmethod note-application-start-running ((application application) &rest args)
(declare (ignore args))
(with-slots (name) application
(log-info (format nil "Start running ~A application" name))))
(defmethod note-application-end-running ((application application) &rest args)
(declare (ignore args))
(with-slots (name) application
(log-info (format nil "End runnig ~A application" name))))
;;; protocol: configure
(defmethod configure-application ((application application) &optional (force-p nil))
(declare (ignore application force-p)))
(defmethod configure-application :around ((application application) &optional (force-p nil))
(with-slots (configured-p) application
(when (or force-p (not configured-p))
(call-next-method)
(setf configured-p t)
(note-application-configured application))))
(defmethod ensure-application-configured ((application application))
(with-slots (configured-p) application
(when (not configured-p)
(configure-application application))))
(defmethod need-reconfigure-application ((application application))
(with-slots (configured-p) application
(setf configured-p nil)))
(defmethod note-application-configured ((application application))
(with-slots (name) application
(log-info (format nil "Configured ~A application" name))))
;;; initialize
;; After construction, default the user-visible PRETTY-NAME slot to the
;; application's NAME when no explicit :pretty-name initarg was given.
(defmethod initialize-instance :after ((application application) &rest initargs)
  (declare (ignore initargs))
  (with-slots (name pretty-name) application
    (unless pretty-name
      (setf pretty-name name))))
;;; print-object
(defmethod print-object ((obj application) stream)
(print-unreadable-object (obj stream :type t :identity t)
(princ (application-name obj) stream)))
;;;
CL Application
;;;
(defclass cl-application (application)
((home-page :initarg :home-page
:accessor application-home-page
:initform nil)
(git-repo :initarg :git-repo
:accessor application-git-repo
:initform nil)
(system-name :initarg :system-name
:accessor application-system-name
:initform nil)
(debug-system-p :initarg :debug-system-p
:accessor application-debug-system-p
:initform nil)
(loaded-p :reader application-loaded-p
:initform nil)
(installed-p :reader application-installed-p
:initform nil)))
;;;
;;; protocols
;;;
(defgeneric load-application (application &optional force-p))
(defgeneric ensure-application-loaded (application))
(defgeneric need-reload-application (application))
(defgeneric note-application-loaded (application))
(defgeneric install-application (application &optional force-p))
(defgeneric ensure-application-installed (application))
(defgeneric need-reinstall-application (application))
(defgeneric note-application-installed (application))
;;; protocol: running
(defmethod run-application :around ((application cl-application) &rest args)
(declare (ignore args))
(swank/backend:call-with-debugger-hook
#'debugger-hook
(lambda ()
(call-next-method))))
;;; protocol: config
(defmethod configure-application :around ((application cl-application) &optional (force-p nil))
(declare (ignore force-p))
(ensure-application-loaded application)
(call-next-method))
;;; protocol: loading
(defmethod load-application :around ((application cl-application) &optional (force-p nil))
(ensure-application-installed application)
(with-slots (loaded-p) application
(when (or force-p (not loaded-p))
(call-next-method)
(setf loaded-p t)
(need-reconfigure-application application)
(note-application-loaded application))))
(defmethod ensure-application-loaded ((application cl-application))
(with-slots (loaded-p) application
(when (not loaded-p)
(load-application application))))
(defmethod need-reload-application ((application cl-application))
(with-slots (loaded-p) application
(setf loaded-p nil))
(need-reconfigure-application application))
(defmethod note-application-loaded ((application cl-application))
(with-slots (name) application
(log-info (format nil "Loaded ~A application" name))))
;;; protocol: installing
(defmethod install-application :around ((application cl-application) &optional (force-p nil))
(with-slots (installed-p system-name) application
(when (or force-p (not installed-p))
(unless (asdf:find-system system-name nil)
(call-next-method))
(setf installed-p t)
(need-reload-application application)
(note-application-installed application))))
(defmethod ensure-application-installed ((application cl-application))
(with-slots (installed-p) application
(when (not installed-p)
(install-application application))))
(defmethod need-reinstall-application ((application cl-application))
(with-slots (installed-p) application
(setf installed-p nil))
(need-reload-application application))
(defmethod note-application-installed ((application cl-application))
(with-slots (name) application
(log-info (format nil "Installed ~A application" name))))
;;;
;;; McClim Application
;;;
(defclass mcclim-application (cl-application)
((frame-class :initarg :frame-class
:accessor application-frame-class
:initform nil)))
;;;
;;; Link/Alias/Proxy Applications
;;;
(defclass link-application (application)
((reference :initarg :reference
:initform nil
:accessor application-link-reference)))
(defclass alias-application (link-application)
())
(defmethod launch-application ((application alias-application) &key args cb-fn)
(with-slots (reference) application
(funcall #'launch-application reference :args args :cb-fn cb-fn)))
(defmethod run-application ((application alias-application) &rest args)
(with-slots (reference) application
(apply #'run-application reference args)))
(defmethod configure-application ((application alias-application) &optional force-p)
(with-slots (reference) application
(configure-application reference force-p)))
(defclass proxy-application (link-application)
())
;;;
Shell Application
;;;
(defclass shell-application (application)
())
| null | https://raw.githubusercontent.com/gas2serra/mcclim-desktop/f85d19c57d76322ae3c05f98ae43bfc8c0d0a554/Core/src/application.lisp | lisp |
Global Variables
Application Classes
Application
Application protocols
protocol: launch/running
protocol: configure
initialize
print-object
protocols
protocol: running
protocol: config
protocol: loading
protocol: installing
McClim Application
Link/Alias/Proxy Applications
| (in-package :desktop-internals)
(defvar *application* nil
"The current application")
(defclass application ()
((name :initarg :name
:reader application-name)
(pretty-name :initarg :pretty-name
:accessor application-pretty-name
:initform nil)
(icon :initarg :icon
:accessor application-icon
:initform nil)
(menu-p :initarg :menu-p
:initform t
:accessor application-menu-p)
(requires-args-p :initarg :requires-args-p
:initform nil
:accessor application-requires-args-p)
(configured-p :reader application-configured-p
:initform nil)))
(defgeneric run-application (application &rest args))
(defgeneric launch-application (application &key args cb-fn))
(defgeneric note-application-start-running (application &rest args))
(defgeneric note-application-end-running (application &rest args))
(defgeneric configure-application (application &optional force-p))
(defgeneric ensure-application-configured (application))
(defgeneric need-reconfigure-application (application))
(defgeneric note-application-configured (application))
(defmethod launch-application ((application application) &key args cb-fn)
(with-slots (name) application
(bt:make-thread
#'(lambda ()
(unwind-protect
(let ((res (apply #'run-application application args)))
(when cb-fn
(funcall cb-fn res application args)))))
:name name)))
(defmethod run-application :around ((application application) &rest args)
(ensure-application-configured application)
(note-application-start-running application args)
(unwind-protect
(call-next-method)
(note-application-end-running application args)))
(defmethod note-application-start-running ((application application) &rest args)
(declare (ignore args))
(with-slots (name) application
(log-info (format nil "Start running ~A application" name))))
(defmethod note-application-end-running ((application application) &rest args)
(declare (ignore args))
(with-slots (name) application
(log-info (format nil "End runnig ~A application" name))))
(defmethod configure-application ((application application) &optional (force-p nil))
(declare (ignore application force-p)))
(defmethod configure-application :around ((application application) &optional (force-p nil))
(with-slots (configured-p) application
(when (or force-p (not configured-p))
(call-next-method)
(setf configured-p t)
(note-application-configured application))))
(defmethod ensure-application-configured ((application application))
(with-slots (configured-p) application
(when (not configured-p)
(configure-application application))))
(defmethod need-reconfigure-application ((application application))
(with-slots (configured-p) application
(setf configured-p nil)))
(defmethod note-application-configured ((application application))
(with-slots (name) application
(log-info (format nil "Configured ~A application" name))))
(defmethod initialize-instance :after ((application application) &rest initargs)
(declare (ignore initargs))
(with-slots (name pretty-name) application
(unless pretty-name
(setf pretty-name name))))
(defmethod print-object ((obj application) stream)
(print-unreadable-object (obj stream :type t :identity t)
(princ (application-name obj) stream)))
CL Application
(defclass cl-application (application)
((home-page :initarg :home-page
:accessor application-home-page
:initform nil)
(git-repo :initarg :git-repo
:accessor application-git-repo
:initform nil)
(system-name :initarg :system-name
:accessor application-system-name
:initform nil)
(debug-system-p :initarg :debug-system-p
:accessor application-debug-system-p
:initform nil)
(loaded-p :reader application-loaded-p
:initform nil)
(installed-p :reader application-installed-p
:initform nil)))
(defgeneric load-application (application &optional force-p))
(defgeneric ensure-application-loaded (application))
(defgeneric need-reload-application (application))
(defgeneric note-application-loaded (application))
(defgeneric install-application (application &optional force-p))
(defgeneric ensure-application-installed (application))
(defgeneric need-reinstall-application (application))
(defgeneric note-application-installed (application))
(defmethod run-application :around ((application cl-application) &rest args)
(declare (ignore args))
(swank/backend:call-with-debugger-hook
#'debugger-hook
(lambda ()
(call-next-method))))
(defmethod configure-application :around ((application cl-application) &optional (force-p nil))
(declare (ignore force-p))
(ensure-application-loaded application)
(call-next-method))
(defmethod load-application :around ((application cl-application) &optional (force-p nil))
(ensure-application-installed application)
(with-slots (loaded-p) application
(when (or force-p (not loaded-p))
(call-next-method)
(setf loaded-p t)
(need-reconfigure-application application)
(note-application-loaded application))))
(defmethod ensure-application-loaded ((application cl-application))
(with-slots (loaded-p) application
(when (not loaded-p)
(load-application application))))
(defmethod need-reload-application ((application cl-application))
(with-slots (loaded-p) application
(setf loaded-p nil))
(need-reconfigure-application application))
(defmethod note-application-loaded ((application cl-application))
(with-slots (name) application
(log-info (format nil "Loaded ~A application" name))))
(defmethod install-application :around ((application cl-application) &optional (force-p nil))
(with-slots (installed-p system-name) application
(when (or force-p (not installed-p))
(unless (asdf:find-system system-name nil)
(call-next-method))
(setf installed-p t)
(need-reload-application application)
(note-application-installed application))))
(defmethod ensure-application-installed ((application cl-application))
(with-slots (installed-p) application
(when (not installed-p)
(install-application application))))
(defmethod need-reinstall-application ((application cl-application))
(with-slots (installed-p) application
(setf installed-p nil))
(need-reload-application application))
(defmethod note-application-installed ((application cl-application))
(with-slots (name) application
(log-info (format nil "Installed ~A application" name))))
(defclass mcclim-application (cl-application)
((frame-class :initarg :frame-class
:accessor application-frame-class
:initform nil)))
(defclass link-application (application)
((reference :initarg :reference
:initform nil
:accessor application-link-reference)))
(defclass alias-application (link-application)
())
(defmethod launch-application ((application alias-application) &key args cb-fn)
(with-slots (reference) application
(funcall #'launch-application reference :args args :cb-fn cb-fn)))
(defmethod run-application ((application alias-application) &rest args)
(with-slots (reference) application
(apply #'run-application reference args)))
(defmethod configure-application ((application alias-application) &optional force-p)
(with-slots (reference) application
(configure-application reference force-p)))
(defclass proxy-application (link-application)
())
Shell Application
(defclass shell-application (application)
())
|
043c701ecda52e74a11ccb08246a33505fa2b29fc74a709bf513ce286e468dba | avik-das/garlic | argv.scm | ; While it would be possible to create a system where test programs are called
; with certain command line arguments, that's a lot of complexity for little
; benefit. Instead, just check that the `*argv*` variable exists.
;
; The name of the executable is constant, so we can rely on that.
; Demonstrates lexical shadowing: binds *argv* locally to the symbol
; 'argv and displays it; the global *argv* is untouched outside the let.
(define (shadow-args)
  (let ((*argv* 'argv))
    (display *argv*) (newline) ))
(display *argv*) (newline)
(shadow-args)
(display *argv*) (newline)
| null | https://raw.githubusercontent.com/avik-das/garlic/5545f5a70f33c2ff9ec449ef66e6acc7881419dc/test/success/argv.scm | scheme | While it would be possible to create a system where test programs are called
with certain command line arguments, that's a lot of complexity for little
benefit. Instead, just check that the `*argv*` variable exists.
The name of the executable is constant, so we can rely on that. |
(define (shadow-args)
(let ((*argv* 'argv))
(display *argv*) (newline) ))
(display *argv*) (newline)
(shadow-args)
(display *argv*) (newline)
|
964187f12c1b91829ac6419af33e38cd399136ca75a954c209eaede4b431a2dd | tonyg/rmacs | topsort.rkt | #lang racket/base
(provide topsort)
(require racket/match)
;; Topologically sort the directed graph given as a list of (source target)
;; edges.  Returns a list of nodes in which every edge's source precedes its
;; target, or #f when the graph contains a cycle.  #:comparison chooses the
;; node comparison and must be equal? (default) or eq?.
(define (topsort edges
                 #:comparison [comparison equal?])
  (define hash-ctor (cond [(eq? comparison equal?) hash]
                          [(eq? comparison eq?) hasheq]
                          [else (error 'topsort "Invalid comparison ~v" comparison)]))
  ;; fwd: source -> set of targets; rev: target -> set of sources.
  (define-values (fwd rev)
    (for/fold [(fwd (hash-ctor)) (rev (hash-ctor))]
        [(edge edges)]
      (match-define (list source target) edge)
      (values (hash-set fwd source (hash-set (hash-ref fwd source hash-ctor) target #t))
              (hash-set rev target (hash-set (hash-ref rev target hash-ctor) source #t)))))
  ;; Roots are nodes with no incoming edge; the DFS starts from these.
  (define roots (for/fold [(roots (hash-ctor))]
        [(source (in-hash-keys fwd))]
                  (if (hash-has-key? rev source)
                      roots
                      (hash-set roots source #t))))
  ;; Every node mentioned by some edge.  Needed to detect components that
  ;; the root-driven DFS never reaches: such components necessarily contain
  ;; a cycle (they have no root).
  (define all-nodes
    (for/fold [(ns (hash-ctor))]
        [(n (in-list (append (hash-keys fwd) (hash-keys rev))))]
      (hash-set ns n #t)))
  (if (hash-empty? roots)
      (if (and (hash-empty? fwd) (hash-empty? rev))
          '() ;; no nodes at all
          #f) ;; no nodes without incoming edges -> cycle
      (let/ec return
        (define seen (hash-ctor))
        (define busy (hash-ctor))
        (define acc '())
        (define (visit-nodes nodes)
          (for ((n nodes))
            (when (hash-has-key? busy n) (return #f)) ;; back edge -> cycle
            (when (not (hash-has-key? seen n))
              (set! busy (hash-set busy n #t))
              (visit-nodes (hash-keys (hash-ref fwd n hash-ctor)))
              (set! seen (hash-set seen n #t))
              (set! busy (hash-remove busy n))
              (set! acc (cons n acc)))))
        (visit-nodes (hash-keys roots))
        ;; BUG FIX: a cyclic component disconnected from every root used to
        ;; be dropped silently (e.g. '((0 1) (2 3) (3 2)) returned '(0 1)).
        ;; If the DFS did not reach every node, the unreached nodes all lie
        ;; on cycles, so report failure instead of an incomplete ordering.
        (if (= (hash-count seen) (hash-count all-nodes))
            acc
            #f))))
(module+ test
(require rackunit)
(check-equal? (topsort '()) '())
(check-equal? (topsort '((1 1))) #f)
(check-equal? (topsort '((1 0) (0 1))) #f)
(check-equal? (topsort '((1 2) (1 3) (3 2) (3 4) (4 0) (0 1))) #f)
(check-equal? (topsort '((1 2) (1 3) (3 2) (3 4) (4 1) (0 1))) #f)
(define (topsort-output-correct? input)
(define output (topsort input))
(for/and [(edge (in-list input))]
(match-define (list src dst) edge)
(positive? (- (length (member src output))
(length (member dst output))))))
(check-true (topsort-output-correct? '((1 2) (1 3) (3 2) (3 4) (0 1))))
)
| null | https://raw.githubusercontent.com/tonyg/rmacs/8c99dd5dfa22f1f34707bbe957de268dc6a7a632/rmacs/topsort.rkt | racket | no nodes at all
no nodes without incoming edges -> cycle
cycle | #lang racket/base
(provide topsort)
(require racket/match)
(define (topsort edges
#:comparison [comparison equal?])
(define hash-ctor (cond [(eq? comparison equal?) hash]
[(eq? comparison eq?) hasheq]
[else (error 'topsort "Invalid comparison ~v" comparison)]))
(define-values (fwd rev)
(for/fold [(fwd (hash-ctor)) (rev (hash-ctor))]
[(edge edges)]
(match-define (list source target) edge)
(values (hash-set fwd source (hash-set (hash-ref fwd source hash-ctor) target #t))
(hash-set rev target (hash-set (hash-ref rev target hash-ctor) source #t)))))
(define roots (for/fold [(roots (hash-ctor))]
[(source (in-hash-keys fwd))]
(if (hash-has-key? rev source)
roots
(hash-set roots source #t))))
(if (hash-empty? roots)
(if (and (hash-empty? fwd) (hash-empty? rev))
(let/ec return
(define seen (hash-ctor))
(define busy (hash-ctor))
(define acc '())
(define (visit-nodes nodes)
(for ((n nodes))
(when (not (hash-has-key? seen n))
(set! busy (hash-set busy n #t))
(visit-nodes (hash-keys (hash-ref fwd n hash-ctor)))
(set! seen (hash-set seen n #t))
(set! busy (hash-remove busy n))
(set! acc (cons n acc)))))
(visit-nodes (hash-keys roots))
acc)))
(module+ test
(require rackunit)
(check-equal? (topsort '()) '())
(check-equal? (topsort '((1 1))) #f)
(check-equal? (topsort '((1 0) (0 1))) #f)
(check-equal? (topsort '((1 2) (1 3) (3 2) (3 4) (4 0) (0 1))) #f)
(check-equal? (topsort '((1 2) (1 3) (3 2) (3 4) (4 1) (0 1))) #f)
(define (topsort-output-correct? input)
(define output (topsort input))
(for/and [(edge (in-list input))]
(match-define (list src dst) edge)
(positive? (- (length (member src output))
(length (member dst output))))))
(check-true (topsort-output-correct? '((1 2) (1 3) (3 2) (3 4) (0 1))))
)
|
4bf46be32faa31d270567db399a4ec7a896ee2d91762bbfd4ec407d8a0bed165 | haskell-mafia/boris | Queue.hs | # LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
module Boris.Http.Resource.Queue (
collection
) where
import Airship (Resource (..), defaultResource)
import Boris.Http.Airship
import Boris.Http.Representation.Queue
import Boris.Http.Version
import Boris.Queue (BuildQueue (..))
import qualified Boris.Queue as Q
import Mismi (runAWS, renderError)
import Mismi.Amazonka (Env)
import qualified Network.HTTP.Types as HTTP
import P
import System.IO (IO)
-- | Airship resource for the build-queue collection endpoint.  Only GET is
-- allowed.  For API version V1 it reads the current queue size from AWS
-- ('Q.size' run in the given 'Env') and responds with a 'GetQueue' JSON
-- document; AWS errors are rendered through 'webT' / 'renderError'.
collection :: Env -> BuildQueue -> Resource IO
collection env q =
  defaultResource {
      allowedMethods = pure [HTTP.methodGet]
    , contentTypesProvided = return . join $ [
        withVersionJson $ \v -> case v of
          V1 -> do
            s <- webT renderError . runAWS env $ Q.size q
            pure . jsonResponse $ GetQueue s
      ]
    }
| null | https://raw.githubusercontent.com/haskell-mafia/boris/fb670071600e8b2d8dbb9191fcf6bf8488f83f5a/boris-http/src/Boris/Http/Resource/Queue.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE NoImplicitPrelude #
module Boris.Http.Resource.Queue (
collection
) where
import Airship (Resource (..), defaultResource)
import Boris.Http.Airship
import Boris.Http.Representation.Queue
import Boris.Http.Version
import Boris.Queue (BuildQueue (..))
import qualified Boris.Queue as Q
import Mismi (runAWS, renderError)
import Mismi.Amazonka (Env)
import qualified Network.HTTP.Types as HTTP
import P
import System.IO (IO)
collection :: Env -> BuildQueue -> Resource IO
collection env q =
defaultResource {
allowedMethods = pure [HTTP.methodGet]
, contentTypesProvided = return . join $ [
withVersionJson $ \v -> case v of
V1 -> do
s <- webT renderError . runAWS env $ Q.size q
pure . jsonResponse $ GetQueue s
]
}
|
b1372f05845d249c53eb40e6a202e7a769cfc2818c9d86e2cdf934179bf127da | peak6/mmd_core | service_locations.erl | -module(service_locations).
-export([start_link/0]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-behaviour(gen_server2).
-define(SERVER,?MODULE).
-define(SERVICE,'service.locations').
-include("mmd.hrl").
%%%===================================================================
%%% gen_server2 callbacks
%%%===================================================================
%% @doc Starts this gen_server registered locally as ?SERVER.
start_link() ->
    gen_server:start_link({local,?SERVER},?MODULE,[],[]).

%% @doc Registers the global 'service.locations' MMD service.
%% The server keeps no meaningful state (the atom 'nostate').
init([]) ->
    services:regGlobal(?SERVICE),
    {ok,nostate}.
%% Clause order matters below: raw bodies are decoded and re-dispatched,
%% the "detail" call is matched before the generic call clause, and the
%% final clause is a catch-all that only logs.

%% Raw-encoded call body: decode it fully, then re-dispatch to the
%% matching clause.
handle_call({mmd,From,Msg=#channel_create{type=call,body=?raw(Bin)}},_From,State) ->
    handle_call({mmd,From,Msg#channel_create{body=mmd_decode:decodeRawFull(Bin)}},_From,State);
%% "detail" call: reply with every service record (unfiltered), grouped
%% by service name via dict:append/3.
handle_call({mmd,From,Msg=#channel_create{type=call,body= <<"detail">>}},_From,State) ->
    UF = services:allServicesUnfiltered(),
    Ret = dict:to_list(lists:foldl(
                         fun(S=#service{name=Name},Dict) ->
                                 {service,DR} = ?DUMP_REC(service,S),
                                 dict:append(Name,?map(DR),Dict)
                         end,
                         dict:new(),
                         UF)),
    mmd_msg:reply(From,Msg,?map(Ret)),
    {reply,ok,State};
%% Any other call: reply with the service -> nodes mapping.
handle_call({mmd,From,Msg=#channel_create{type=call}},_From,State) ->
    mmd_msg:reply(From,Msg,?map(services:service2Nodes())),
    {reply,ok,State};
%% Subscriptions are not supported by this service; reply with an error.
handle_call({mmd,From,Msg=#channel_create{type=sub}},_From,State) ->
    mmd_msg:error(From,Msg,?INVALID_REQUEST,"Subscribe not supported for: ~s",[?SERVICE]),
    {reply,ok,State};
%% Channel close should never reach us here; log it.
handle_call(M={mmd,_,#channel_close{}},_From,State) ->
    ?lwarn("Should have never received this: ~p",[M]),
    {reply,ok,State};
%% Catch-all: log unexpected calls and keep running.
handle_call(Request, From, State) ->
    ?lwarn("Unexpected handle_call(~p, ~p, ~p)",[Request,From,State]),
    {reply, ok, State}.
%% No casts are expected; log and continue.
handle_cast(Msg, State) ->
    ?lwarn("Unexpected handle_cast(~p, ~p)",[Msg,State]),
    {noreply, State}.

%% No other messages are expected; log and continue.
handle_info(Info,State) ->
    ?lwarn("Unexpected handle_info(~p, ~p)",[Info,State]),
    {noreply, State}.

%% Nothing to clean up on termination.
terminate(_Reason, _State) ->
    ok.

%% Hot code upgrade: state is carried over unchanged.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
| null | https://raw.githubusercontent.com/peak6/mmd_core/f90469ea9eac8cd607aa6ec5b9ad6ff003a35572/src/service_locations.erl | erlang | ===================================================================
gen_server2 callbacks
=================================================================== | -module(service_locations).
-export([start_link/0]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-behaviour(gen_server2).
-define(SERVER,?MODULE).
-define(SERVICE,'service.locations').
-include("mmd.hrl").
start_link() ->
gen_server:start_link({local,?SERVER},?MODULE,[],[]).
init([]) ->
services:regGlobal(?SERVICE),
{ok,nostate}.
handle_call({mmd,From,Msg=#channel_create{type=call,body=?raw(Bin)}},_From,State) ->
handle_call({mmd,From,Msg#channel_create{body=mmd_decode:decodeRawFull(Bin)}},_From,State);
handle_call({mmd,From,Msg=#channel_create{type=call,body= <<"detail">>}},_From,State) ->
UF = services:allServicesUnfiltered(),
Ret = dict:to_list(lists:foldl(
fun(S=#service{name=Name},Dict) ->
{service,DR} = ?DUMP_REC(service,S),
dict:append(Name,?map(DR),Dict)
end,
dict:new(),
UF)),
mmd_msg:reply(From,Msg,?map(Ret)),
{reply,ok,State};
handle_call({mmd,From,Msg=#channel_create{type=call}},_From,State) ->
mmd_msg:reply(From,Msg,?map(services:service2Nodes())),
{reply,ok,State};
handle_call({mmd,From,Msg=#channel_create{type=sub}},_From,State) ->
mmd_msg:error(From,Msg,?INVALID_REQUEST,"Subscribe not supported for: ~s",[?SERVICE]),
{reply,ok,State};
handle_call(M={mmd,_,#channel_close{}},_From,State) ->
?lwarn("Should have never received this: ~p",[M]),
{reply,ok,State};
handle_call(Request, From, State) ->
?lwarn("Unexpected handle_call(~p, ~p, ~p)",[Request,From,State]),
{reply, ok, State}.
handle_cast(Msg, State) ->
?lwarn("Unexpected handle_cast(~p, ~p)",[Msg,State]),
{noreply, State}.
handle_info(Info,State) ->
?lwarn("Unexpected handle_info(~p, ~p)",[Info,State]),
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
|
39158d7ed8706e305d9fa5e5ed42a9a90a43818f4d338c5bc15696a99c6465db | roglo/mlrogue | init.mli | $ I d : init.mli , v 1.14 2010/04/27 10:15:30 deraugla Exp $
open Rogue;
open Rfield;
(* How the program should begin: start a new game, restore a game
   (the string is presumably the save-file name -- confirm against
   init.ml), or only display the score board. *)
type init = [ NewGame of game | RestoreGame of string | ScoreOnly ];
(* [f argv] parses the command-line arguments and returns the startup
   configuration.  NOTE(review): the tuple components are unnamed here;
   see init.ml for their exact meaning before relying on them. *)
value f :
  array string ->
    (string * init * option (player_species * bool) * option (string * int) *
     bool * bool * bool)
;
| null | https://raw.githubusercontent.com/roglo/mlrogue/b73238bbbc8cd88c83579c3b72772a8c418020e5/init.mli | ocaml | $ I d : init.mli , v 1.14 2010/04/27 10:15:30 deraugla Exp $
open Rogue;
open Rfield;
type init = [ NewGame of game | RestoreGame of string | ScoreOnly ];
value f :
array string ->
(string * init * option (player_species * bool) * option (string * int) *
bool * bool * bool)
;
| |
fd77bfb62f60b4f8ca68f955f89b95c2257739e61368b118fe21a25f847124fc | gsakkas/rite | 3563.ml |
(* [clone x n] builds a list of n copies of x (empty when n <= 0). *)
let rec clone x n = if n > 0 then x :: (clone x (n - 1)) else [];;

(* Left-pads the shorter of l1/l2 with zeros so both lists have the
   same length; returns the pair (padded l1, padded l2). *)
let padZero l1 l2 =
  if (List.length l1) > (List.length l2)
  then (l1, (List.append (clone 0 ((List.length l1) - (List.length l2))) l2))
  else ((List.append (clone 0 ((List.length l2) - (List.length l1))) l1), l2);;

(* Drops leading zeros: [0;0;1;2] -> [1;2]; an all-zero list -> []. *)
let rec removeZero l = match l with | 0::t -> removeZero t | _ -> l;;

(* Adds two non-negative big integers represented as digit lists, most
   significant digit first, e.g. bigAdd [9;9] [1] = [1;0;0]. *)
let bigAdd l1 l2 =
  let add (l1,l2) =
    (* Fold step: a = (carry, digits-so-far); x = (d, e), the next pair
       of digits (least significant first, since args is reversed). *)
    let f a x =
      match (a, x) with
      | ((b,c),(d,e)) ->
          ((((d + e) + b) / 10), ((((d + e) + b) mod 10) :: c)) in
    let base = (0, []) in
    (* BUG FIX: the original built ((List.combine 0) :: (l1 0) :: l2),
       which neither type-checks nor pairs the digits.  Prepend a 0 to
       each (equal-length) list -- room for a final carry -- and zip
       them element-wise before reversing. *)
    let args = List.rev (List.combine (0 :: l1) (0 :: l2)) in
    let (_,res) = List.fold_left f base args in res in
  removeZero (add (padZero l1 l2));;
let rec clone x n = if n > 0 then x : : ( clone x ( n - 1 ) ) else [ ] ; ;
let =
if ( l1 ) > ( l2 )
then ( l1 , ( List.append ( clone 0 ( ( List.length l1 ) - ( List.length l2 ) ) ) l2 ) )
else ( ( List.append ( clone 0 ( ( List.length l2 ) - ( List.length l1 ) ) ) l1 ) , l2 ) ; ;
let rec removeZero l = match l with | 0::t - > removeZero t | _ - > l ; ;
let bigAdd l1 l2 =
let add ( l1,l2 ) =
let f a x =
match ( a , x ) with
| ( ( b , c),(d , e ) ) - >
( ( ( ( d + e ) + b ) / 10 ) , ( ( ( ( d + e ) + b ) mod 10 ) : : c ) ) in
let base = ( 0 , [ ] ) in
let args = List.rev ( List.combine ( 0 : : l1 ) ( 0 : : l2 ) ) in
let ( _ , res ) = List.fold_left f base args in res in
removeZero ( add ( ) ) ; ;
let rec clone x n = if n > 0 then x :: (clone x (n - 1)) else [];;
let padZero l1 l2 =
if (List.length l1) > (List.length l2)
then (l1, (List.append (clone 0 ((List.length l1) - (List.length l2))) l2))
else ((List.append (clone 0 ((List.length l2) - (List.length l1))) l1), l2);;
let rec removeZero l = match l with | 0::t -> removeZero t | _ -> l;;
let bigAdd l1 l2 =
let add (l1,l2) =
let f a x =
match (a, x) with
| ((b,c),(d,e)) ->
((((d + e) + b) / 10), ((((d + e) + b) mod 10) :: c)) in
let base = (0, []) in
let args = List.rev (List.combine (0 :: l1) (0 :: l2)) in
let (_,res) = List.fold_left f base args in res in
removeZero (add (padZero l1 l2));;
*)
changed spans
( 18,25)-(18,59 )
List.combine ( 0 : : l1 )
( 0 : : l2 )
AppG [ AppG [ EmptyG , EmptyG],AppG [ EmptyG , EmptyG ] ]
(18,25)-(18,59)
List.combine (0 :: l1)
(0 :: l2)
AppG [AppG [EmptyG,EmptyG],AppG [EmptyG,EmptyG]]
*)
type error slice
( 13,5)-(19,52 )
( 13,11)-(16,64 )
( 13,13)-(16,64 )
( 14,7)-(16,64 )
( 14,13)-(14,19 )
( 14,17)-(14,18 )
( 18,5)-(19,52 )
( 18,16)-(18,24 )
( 18,16)-(18,59 )
( 18,25)-(18,59 )
( 18,26)-(18,42 )
( 18,27)-(18,39 )
( 18,40)-(18,41 )
( 19,19)-(19,33 )
( 19,19)-(19,45 )
( 19,34)-(19,35 )
( 19,41)-(19,45 )
(13,5)-(19,52)
(13,11)-(16,64)
(13,13)-(16,64)
(14,7)-(16,64)
(14,13)-(14,19)
(14,17)-(14,18)
(18,5)-(19,52)
(18,16)-(18,24)
(18,16)-(18,59)
(18,25)-(18,59)
(18,26)-(18,42)
(18,27)-(18,39)
(18,40)-(18,41)
(19,19)-(19,33)
(19,19)-(19,45)
(19,34)-(19,35)
(19,41)-(19,45)
*)
| null | https://raw.githubusercontent.com/gsakkas/rite/958a0ad2460e15734447bc07bd181f5d35956d3b/data/sp14_min/3563.ml | ocaml |
let rec clone x n = if n > 0 then x :: (clone x (n - 1)) else [];;
let padZero l1 l2 =
if (List.length l1) > (List.length l2)
then (l1, (List.append (clone 0 ((List.length l1) - (List.length l2))) l2))
else ((List.append (clone 0 ((List.length l2) - (List.length l1))) l1), l2);;
let rec removeZero l = match l with | 0::t -> removeZero t | _ -> l;;
let bigAdd l1 l2 =
let add (l1,l2) =
let f a x =
match (a, x) with
| ((b,c),(d,e)) ->
((((d + e) + b) / 10), ((((d + e) + b) mod 10) :: c)) in
let base = (0, []) in
let args = List.rev ((List.combine 0) :: (l1 0) :: l2) in
let (_,res) = List.fold_left f base args in res in
removeZero (add (padZero l1 l2));;
fix
let rec clone x n = if n > 0 then x : : ( clone x ( n - 1 ) ) else [ ] ; ;
let =
if ( l1 ) > ( l2 )
then ( l1 , ( List.append ( clone 0 ( ( List.length l1 ) - ( List.length l2 ) ) ) l2 ) )
else ( ( List.append ( clone 0 ( ( List.length l2 ) - ( List.length l1 ) ) ) l1 ) , l2 ) ; ;
let rec removeZero l = match l with | 0::t - > removeZero t | _ - > l ; ;
let bigAdd l1 l2 =
let add ( l1,l2 ) =
let f a x =
match ( a , x ) with
| ( ( b , c),(d , e ) ) - >
( ( ( ( d + e ) + b ) / 10 ) , ( ( ( ( d + e ) + b ) mod 10 ) : : c ) ) in
let base = ( 0 , [ ] ) in
let args = List.rev ( List.combine ( 0 : : l1 ) ( 0 : : l2 ) ) in
let ( _ , res ) = List.fold_left f base args in res in
removeZero ( add ( ) ) ; ;
let rec clone x n = if n > 0 then x :: (clone x (n - 1)) else [];;
let padZero l1 l2 =
if (List.length l1) > (List.length l2)
then (l1, (List.append (clone 0 ((List.length l1) - (List.length l2))) l2))
else ((List.append (clone 0 ((List.length l2) - (List.length l1))) l1), l2);;
let rec removeZero l = match l with | 0::t -> removeZero t | _ -> l;;
let bigAdd l1 l2 =
let add (l1,l2) =
let f a x =
match (a, x) with
| ((b,c),(d,e)) ->
((((d + e) + b) / 10), ((((d + e) + b) mod 10) :: c)) in
let base = (0, []) in
let args = List.rev (List.combine (0 :: l1) (0 :: l2)) in
let (_,res) = List.fold_left f base args in res in
removeZero (add (padZero l1 l2));;
*)
changed spans
( 18,25)-(18,59 )
List.combine ( 0 : : l1 )
( 0 : : l2 )
AppG [ AppG [ EmptyG , EmptyG],AppG [ EmptyG , EmptyG ] ]
(18,25)-(18,59)
List.combine (0 :: l1)
(0 :: l2)
AppG [AppG [EmptyG,EmptyG],AppG [EmptyG,EmptyG]]
*)
type error slice
( 13,5)-(19,52 )
( 13,11)-(16,64 )
( 13,13)-(16,64 )
( 14,7)-(16,64 )
( 14,13)-(14,19 )
( 14,17)-(14,18 )
( 18,5)-(19,52 )
( 18,16)-(18,24 )
( 18,16)-(18,59 )
( 18,25)-(18,59 )
( 18,26)-(18,42 )
( 18,27)-(18,39 )
( 18,40)-(18,41 )
( 19,19)-(19,33 )
( 19,19)-(19,45 )
( 19,34)-(19,35 )
( 19,41)-(19,45 )
(13,5)-(19,52)
(13,11)-(16,64)
(13,13)-(16,64)
(14,7)-(16,64)
(14,13)-(14,19)
(14,17)-(14,18)
(18,5)-(19,52)
(18,16)-(18,24)
(18,16)-(18,59)
(18,25)-(18,59)
(18,26)-(18,42)
(18,27)-(18,39)
(18,40)-(18,41)
(19,19)-(19,33)
(19,19)-(19,45)
(19,34)-(19,35)
(19,41)-(19,45)
*)
| |
e752b2dccbb639eb4b980353bdc9861101cf7c0e81c8a03e6cd7218f574ef876 | FreeProving/free-compiler | Scanner.hs | -- | This module contains a scanner for the intermediate language that takes
-- the source code and converts it into a token stream.
--
-- We convert the source code to a token stream such that
" FreeC.Frontend . IR.Parser " does not have to handle whitespace explicitly .
module FreeC.Frontend.IR.Scanner ( TokenWithPos(..), scan ) where
import Data.Char ( isNumber, isPunctuation, isSymbol )
import Text.Parsec ( (<|>), Parsec )
import qualified Text.Parsec as Parsec
import qualified Text.Parsec.Token as Parsec
import FreeC.Frontend.IR.Token
import FreeC.IR.SrcSpan
import FreeC.Monad.Reporter
import FreeC.Pretty
import FreeC.Util.Parsec
import FreeC.Util.Predicate ( (.&&.), (.||.) )
-------------------------------------------------------------------------------
-- Type Synonyms --
-------------------------------------------------------------------------------
-- | Type of parsers for IR lexemes of type @a@.
type Scanner a = Parsec String () a

-- | A 'Token' and its position in the source code.
data TokenWithPos
  = TokenWithPos { getTokenPos :: Parsec.SourcePos, getToken :: Token }

-- | We need a show instance for tokens with positions such that the parser
--   can print unexpected tokens.
instance Show TokenWithPos where
  show = showPretty . getToken
-- | Wraps a token scanner such that the scanned token is paired with
--   the source position at which scanning started.
tokenWithPos :: Scanner Token -> Scanner TokenWithPos
tokenWithPos scanner = do
  pos <- Parsec.getPosition
  tok <- scanner
  return (TokenWithPos pos tok)
-------------------------------------------------------------------------------
-- Character Classes --
-------------------------------------------------------------------------------
-- | Scanner for a lowercase character.
--
--   > <lower> ::= "a" | … | "z" | <any lowercase Unicode letter>
lowerScanner :: Scanner Char
lowerScanner = Parsec.lower

-- | Scanner for an uppercase character.
--
--   > <upper> ::= "A" | … | "Z" | <any upper- or titlecase letter>
upperScanner :: Scanner Char
upperScanner = Parsec.upper

-- | Scanner for a Unicode numeric character.
--
--   > <numeric> ::= <digit> | <any Unicode numeric character>
numericScanner :: Scanner Char
numericScanner = Parsec.satisfy isNumber
-------------------------------------------------------------------------------
-- Language Definition --
-------------------------------------------------------------------------------
-- | Block comments start with @"{- "@ and can be nested.
--
--   Block comments start and end with a space such that we are still
--   able to parse pragmas, which start with @"{-#"@ and end with @"#-}"@.
blockCommentStart :: String
blockCommentStart = "{- "

-- | Block comments end with @" -}"@ and can be nested.
--
--   (See 'blockCommentStart' for why the delimiter includes a space.)
blockCommentEnd :: String
blockCommentEnd = " -}"

-- | Line comments start with @"-- "@ and span the remaining line.
lineCommentStart :: String
lineCommentStart = "-- "

-- | Valid start characters of variable identifiers
--   (see 'VarIdent' for the definition of @<varid>@).
--
--   It matches the start of the identifier only, i.e., @<lower> | "_"@.
--   The remaining characters are scanned by 'identLetter'.
varIdentStart :: Scanner Char
varIdentStart = lowerScanner <|> Parsec.char '_'

-- | Valid start characters of constructor identifiers
--   (see 'ConIdent' for the definition of @<conid>@).
--
--   It matches the start of the identifier only, i.e., @<upper>@.
--   The remaining characters are scanned by 'identLetter'.
conIdentStart :: Scanner Char
conIdentStart = upperScanner
-- | Valid non-start characters of identifiers.
--
-- This scanner is used for both @<varid>@s and @<conid>@s
( see ' VarIdent ' and ' ConIdent ' respectively ) .
--
It matches only one character at a time and only the characters after
the first letter .
--
-- > <identletter> ::= <lower> | <upper> | <numeric> | "_" | "'"
--
-- The start of identifiers is scanned by 'varIdentStart' and 'conIdentStart'
-- respectively.
-- | Scanner for a single non-leading identifier character: a letter,
--   a numeric character, an underscore or a prime.
--
--   Used for the tails of both @<varid>@s and @<conid>@s; the leading
--   character is scanned by 'varIdentStart' / 'conIdentStart'.
identLetter :: Scanner Char
identLetter = Parsec.choice
  [lowerScanner, upperScanner, numericScanner, Parsec.oneOf "_'"]
| Valid characters in symbolic names ( i.e. , in @<varsym>@ and @<consym>@ ,
see also VarIdent and ' ConIdent ' ) .
--
-- All Unicode symbol and punctuation characters except for parenthesis
-- are allowed in symbolic names. Parenthesis are not allowed since the
-- symbolic names are wrapped in parenthesis themselves.
--
> < symbol > : : = < any Unicode symbol or punctuation >
-- > <namesymbol> ::= <symbol> \ ( "(" | ")" )
-- | Scanner for a character that may occur in a symbolic name: any
--   Unicode symbol or punctuation character except the parentheses
--   that delimit symbolic names.
nameSymbolChar :: Scanner Char
nameSymbolChar = Parsec.satisfy isNameSymbol
 where
  isNameSymbol c = (isSymbol c || isPunctuation c) && c /= '(' && c /= ')'
-- | Language definition for the intermediate language.
--
--   Contains the parameters for the 'tokenParser' for the IR.
languageDef :: Parsec.LanguageDef ()
languageDef = Parsec.LanguageDef
  { Parsec.commentStart = blockCommentStart -- @"{- "@
  , Parsec.commentEnd = blockCommentEnd -- @" -}"@
  , Parsec.commentLine = lineCommentStart -- @"-- "@
  , Parsec.nestedComments = True -- block comments may be nested
  , Parsec.identStart = varIdentStart <|> conIdentStart
  , Parsec.identLetter = identLetter
  , Parsec.opStart = nameSymbolChar
  , Parsec.opLetter = nameSymbolChar
  , Parsec.reservedNames = [] -- Keywords are handled by 'identScanner'.
  , Parsec.reservedOpNames = [] -- Handled by order in 'tokenScanner'.
  , Parsec.caseSensitive = True
  }
-------------------------------------------------------------------------------
Generated Lexical Parsers --
-------------------------------------------------------------------------------
-- | Contains lexical parsers for the intermediate language, generated
--   from 'languageDef'.
tokenParser :: Parsec.TokenParser ()
tokenParser = Parsec.makeTokenParser languageDef

-- | Scanner for zero or more whitespace characters or comments.
whitespaceScanner :: Scanner ()
whitespaceScanner = Parsec.whiteSpace tokenParser

-- | Scanner for 'ConIdent' and 'VarIdent' tokens.
identScanner :: Scanner Token
identScanner = mkIdentToken <$> Parsec.identifier tokenParser

-- | Scanner for symbolic name tokens: a (possibly empty) operator
--   wrapped in parentheses (see 'mkSymbolToken').
symbolScanner :: Scanner Token
symbolScanner = Parsec.between (Parsec.char '(') (Parsec.char ')')
  (mkSymbolToken <$> Parsec.option "" (Parsec.operator tokenParser))

-- | Scanner for 'IntToken's.
integerScanner :: Scanner Token
integerScanner = IntToken <$> Parsec.integer tokenParser

-- | Scanner for 'StrToken's.
stringScanner :: Scanner Token
stringScanner = StrToken <$> Parsec.stringLiteral tokenParser

-- | Scanners for the tokens listed in 'specialSymbols'; each scanner
--   matches one symbol string and yields its token.
specialSymbolScanners :: [Scanner Token]
specialSymbolScanners = map
  (\(symbol, token) -> Parsec.symbol tokenParser symbol >> return token)
  specialSymbols
-- | Scanner for a single token (with its source position).
--
--   The fixed scanners are tried before the special-symbol scanners;
--   'Parsec.try' makes each alternative backtrack on failure.
tokenScanner :: Scanner TokenWithPos
tokenScanner = tokenWithPos (Parsec.choice (map Parsec.try scanners))
 where
  scanners = [identScanner, symbolScanner, integerScanner, stringScanner]
    ++ specialSymbolScanners
-- | A scanner for zero or more 'Token's, running to the end of input.
--
--   Whitespace and comments before and between tokens are ignored.
tokenListScanner :: Scanner [TokenWithPos]
tokenListScanner = do
  whitespaceScanner
  tokens <- Parsec.many (Parsec.lexeme tokenParser tokenScanner)
  Parsec.eof
  return tokens
-- | Converts the given IR source code to a stream of IR tokens.
--
--   The positions attached to the returned tokens come from Parsec's
--   'Parsec.getPosition' (see 'tokenWithPos').
--
--   Reports a fatal error if there are unknown tokens.
scan :: MonadReporter r => SrcFile -> r [TokenWithPos]
scan srcFile = runParsecOrFail srcFile (srcFileContents srcFile)
  tokenListScanner
| null | https://raw.githubusercontent.com/FreeProving/free-compiler/6931b9ca652a185a92dd824373f092823aea4ea9/src/lib/FreeC/Frontend/IR/Scanner.hs | haskell | | This module contains a scanner for the intermediate language that takes
the source code and converts it into a token stream.
We convert the source code to a token stream such that
-----------------------------------------------------------------------------
Type Synonyms --
-----------------------------------------------------------------------------
| A 'Token' and its position in the source code.
| We need a show instance for tokens with positions such that the parser
can print unexpected tokens.
| Converts the given scanner for a token to a scanner for the same token
that attaches source location information.
-----------------------------------------------------------------------------
Character Classes --
-----------------------------------------------------------------------------
| Scanner for a lowercase character.
| Scanner for an uppercase character.
| Scanner for an Unicode numeric character.
-----------------------------------------------------------------------------
Language Definition --
-----------------------------------------------------------------------------
| Block comments start with @"{- "@ and can be nested.
Block comments start and end with a space such that we are still
able to parser pragmas which start with @"{-#"@ and end with @"#-}"@
| Block comments end with @" -}"@ and can be nested.
Block comments start and end with a space such that we are still
able to parser pragmas which start with @"{-#"@ and end with @"#-}"@
| Line comments start with @"-- "@ and span the remaining line.
| Valid start characters of variable identifiers
It matches the start of the identifier only, i.e., @<lower> | "_"@.
The remaining characters are scanned by 'identLetter'.
| Valid start characters of constructor identifiers
It matches the start of the identifier only, i.e., @<upper>@.
The remaining characters are scanned by 'identLetter'.
| Valid non-start characters of identifiers.
This scanner is used for both @<varid>@s and @<conid>@s
> <identletter> ::= <lower> | <upper> | <numeric> | "_" | "'"
The start of identifiers is scanned by 'varIdentStart' and 'conIdentStart'
respectively.
All Unicode symbol and punctuation characters except for parenthesis
are allowed in symbolic names. Parenthesis are not allowed since the
symbolic names are wrapped in parenthesis themselves.
> <namesymbol> ::= <symbol> \ ( "(" | ")" )
| Language definition for the intermediate language.
Contains the parameters for the 'tokenParser' for the IR.
Keywords are handled by 'identScanner'.
Handled by order in 'tokenScanner'.
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
| Contains lexical parsers for the intermediate language.
| Scanners for tokens listed in 'specialSymbols'.
| Scanner for a single 'Token'.
Whitespaces and comments before and between tokens are ignored.
| Converts the given IR source code to a stream of IR tokens.
Reports a fatal error if there are unknown tokens. | " FreeC.Frontend . IR.Parser " does not have to handle whitespace explicitly .
module FreeC.Frontend.IR.Scanner ( TokenWithPos(..), scan ) where
import Data.Char ( isNumber, isPunctuation, isSymbol )
import Text.Parsec ( (<|>), Parsec )
import qualified Text.Parsec as Parsec
import qualified Text.Parsec.Token as Parsec
import FreeC.Frontend.IR.Token
import FreeC.IR.SrcSpan
import FreeC.Monad.Reporter
import FreeC.Pretty
import FreeC.Util.Parsec
import FreeC.Util.Predicate ( (.&&.), (.||.) )
| Type of parsers for IR lexeme of type
type Scanner a = Parsec String () a
data TokenWithPos
= TokenWithPos { getTokenPos :: Parsec.SourcePos, getToken :: Token }
instance Show TokenWithPos where
show = showPretty . getToken
tokenWithPos :: Scanner Token -> Scanner TokenWithPos
tokenWithPos scanner = TokenWithPos <$> Parsec.getPosition <*> scanner
> < lower > : : = " a " | … | " z " | < any lowercase Unicode letter >
lowerScanner :: Scanner Char
lowerScanner = Parsec.lower
> < upper > : : = " A " | … | " Z " | < any upper- or titlecase letter >
upperScanner :: Scanner Char
upperScanner = Parsec.upper
> < numeric > : : = < digit > | < any Unicode numeric character >
numericScanner :: Scanner Char
numericScanner = Parsec.satisfy isNumber
blockCommentStart :: String
blockCommentStart = "{- "
blockCommentEnd :: String
blockCommentEnd = " -}"
lineCommentStart :: String
lineCommentStart = "-- "
( see ' VarIdent ' for the definition of @<varid>@ ) .
varIdentStart :: Scanner Char
varIdentStart = lowerScanner <|> Parsec.char '_'
( see ' ConIdent ' for the definition of @<conid>@ ) .
conIdentStart :: Scanner Char
conIdentStart = upperScanner
( see ' VarIdent ' and ' ConIdent ' respectively ) .
It matches only one character at a time and only the characters after
the first letter .
identLetter :: Scanner Char
identLetter
= lowerScanner <|> upperScanner <|> numericScanner <|> Parsec.oneOf "_'"
| Valid characters in symbolic names ( i.e. , in @<varsym>@ and @<consym>@ ,
see also VarIdent and ' ConIdent ' ) .
> < symbol > : : = < any Unicode symbol or punctuation >
nameSymbolChar :: Scanner Char
nameSymbolChar = Parsec.satisfy
((isSymbol .||. isPunctuation) .&&. (`notElem` ['(', ')']))
languageDef :: Parsec.LanguageDef ()
languageDef = Parsec.LanguageDef
{ Parsec.commentStart = blockCommentStart
, Parsec.commentEnd = blockCommentEnd
, Parsec.commentLine = lineCommentStart
, Parsec.nestedComments = True
, Parsec.identStart = varIdentStart <|> conIdentStart
, Parsec.identLetter = identLetter
, Parsec.opStart = nameSymbolChar
, Parsec.opLetter = nameSymbolChar
, Parsec.caseSensitive = True
}
tokenParser :: Parsec.TokenParser ()
tokenParser = Parsec.makeTokenParser languageDef
| Scanner for zero or more whitespace characters or comments .
whitespaceScanner :: Scanner ()
whitespaceScanner = Parsec.whiteSpace tokenParser
| Scanner for ' ConIdent ' and ' VarIdent ' tokens .
identScanner :: Scanner Token
identScanner = mkIdentToken <$> Parsec.identifier tokenParser
| Scanner for ' ' and ' VarSymbol ' tokens .
symbolScanner :: Scanner Token
symbolScanner = Parsec.between (Parsec.char '(') (Parsec.char ')')
(mkSymbolToken <$> Parsec.option "" (Parsec.operator tokenParser))
| Scanner for ' IntToken 's .
integerScanner :: Scanner Token
integerScanner = IntToken <$> Parsec.integer tokenParser
| Scanner for ' StrToken 's .
stringScanner :: Scanner Token
stringScanner = StrToken <$> Parsec.stringLiteral tokenParser
specialSymbolScanners :: [Scanner Token]
specialSymbolScanners = map
(\(symbol, token) -> Parsec.symbol tokenParser symbol >> return token)
specialSymbols
tokenScanner :: Scanner TokenWithPos
tokenScanner = tokenWithPos
$ Parsec.choice
$ map Parsec.try
$ identScanner
: symbolScanner
: integerScanner
: stringScanner
: specialSymbolScanners
| A scanner for zero or more ' 's .
tokenListScanner :: Scanner [TokenWithPos]
tokenListScanner = whitespaceScanner
*> Parsec.many (Parsec.lexeme tokenParser tokenScanner)
<* Parsec.eof
scan :: MonadReporter r => SrcFile -> r [TokenWithPos]
scan srcFile = runParsecOrFail srcFile (srcFileContents srcFile)
tokenListScanner
|
3dd8370ab17bca009f8299f051dfeb68a8f01a39796ee7443affeae413ff2fa4 | jumarko/clojure-experiments | ch08_macros.clj | (ns clojure-experiments.books.joy-of-clojure.ch08-macros
(:require [clojure.walk :as walk]
[clojure.xml :as xml])
(:import [java.io BufferedReader InputStreamReader]
java.net.URL))
eval ( p. 175 )
(eval 42)
= > 42
(eval '(list 1 2))
= > ( 1 2 )
;; This will fail with ClassCastException
#_(eval (list 1 2))
;; Now something more exciting - evaluate a condition that is a function call
(eval (list (symbol "+") 1 2))
= > 3
;; it may help to see that this returns the list
(list (symbol "+") 1 2)
;; => (+ 1 2)
vs. just using this which is basically calling ` ( eval 3 ) `
(eval (+ 1 2))
= > 3
;; contextual-eval -> local bindings
;; to mitigate issues with `eval` which uses global bindings
;; This is also used in ch17 - section 17.4.2 (`break` macro)
(defn contextual-eval
  "Evaluates `expr` with the local bindings given by `ctx`, a map of
   symbol -> value, e.g. (contextual-eval '{a 1 b 2} '(+ a b)) ;=> 3.
   Each value is wrapped in a quote so it is spliced into the generated
   `let` as data rather than being re-evaluated."
  [ctx expr]
  (eval
   `(let [~@(mapcat (fn [[k v]] [k `'~v])
                    ctx)]
      ~expr)))
~expr)))
here the let bindings will be : [ a ( quote 1 ) b ( quote 2 ) ]
(contextual-eval '{a 1 b 2} '(+ a b))
= > 3
Control structures ( p. 178 )
;; do-until macro
(defmacro do-until
  "Takes test/expr pairs and expands into nested `when` forms: each expr
   is evaluated only while every test so far has been truthy, so
   evaluation stops at the first falsey test.  Throws at macroexpansion
   time when given an odd number of forms."
  [& clauses]
  (when clauses
    ;; Don't be confused: this `when` will get macroexpanded to `if` via `macroexpand-all`
    (list 'clojure.core/when (first clauses)
          ;; ... and this `if` will disappear because it's evaluated when the macro executes at compile time
          (if (next clauses)
            (second clauses)
            (throw (IllegalArgumentException. "do-until requires an even number of forms")))
          ;; "calling" do-until recursively
          (cons 'do-until (nnext clauses)))))
(macroexpand-1 '(do-until true (prn 1) false (prn 2)))
= > ( clojure.core/when true ( prn 1 ) ( do - until false ( prn 2 ) ) )
just for fun you can try decompile to Java code
(require '[clj-java-decompiler.core :refer [decompile disassemble] :as decompiler])
#_(decompile (do-until true (prn 1) false (prn 2)))
(walk/macroexpand-all '(do-until true (prn 1) false (prn 2)))
= > ( if true ( do ( prn 1 ) ( if false ( do ( prn 2 ) nil ) ) ) )
(do-until
(even? 2) (println "Even")
(odd? 3) (println "Odd")
(zero? 1) (println "You'll never see me")
:lollipop (println "Truthy thing"))
;; Even
;; Odd
;; => nil
(macroexpand-1
'(do-until
(even? 2) (println "Even")
(odd? 3) (println "Odd")
(zero? 1) (println "You'll never see me")
:lollipop (println "Truthy thing")))
= > ( clojure.core/when ( even ? 2 ) ( println " Even " ) ( do - until ( odd ? 3 ) ( println " Odd " ) ( zero ? 1 ) ( println " You 'll never see me " ) : lollipop ( println " thing " ) ) )
(walk/macroexpand-all
'(do-until
(even? 2) (println "Even")
(odd? 3) (println "Odd")
(zero? 1) (println "You'll never see me")
:lollipop (println "Truthy thing")))
= > ( if ( even ? 2 ) ( do ( println " Even " ) ( if ( odd ? 3 ) ( do ( println " Odd " ) ( if ( zero ? 1 ) ( do ( println " You 'll never see me " ) ( if : lollipop ( do ( println " thing " ) nil ) ) ) ) ) ) ) )
;;; unless (p. 179)
;; First variant: delegate directly to `when-not`.
(defmacro unless [condition & body]
  `(when-not ~condition
     ~@body))

;; implementation from the book uses if + not + do
;; (this second definition immediately replaces the one above)
(defmacro unless [condition & body]
  `(if (not ~condition)
     (do ~@body)))
(unless (even? 3) "Now we see it...")
;; => "Now we see it..."
(unless (even? 2) "Now we don't.")
;; => nil
def - watched ( p. 181 )
;;; - calling a function whenever a var changes
(defmacro def-watched [name & value]
`(do
(def ~name ~@value)
(add-watch (var ~name)
:re-bind
(fn [~'key ~'r old# new#]
(println old# " -> " new#)))))
(def-watched x 2)
(alter-var-root #'x inc)
(macroexpand-1 '(def-watched x 2))
;; => (do
( def x 2 )
;; (clojure.core/add-watch
;; #'x
;; :re-bind
;; (clojure.core/fn
[ key r old__25921__auto _ _ _ _ ]
;; (clojure.core/println old__25921__auto__ " -> " new__25922__auto__))))
Using macros to change forms ( p. 182 - 185 )
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; internally, we'll use this structure:
(comment
{:tag <form>
:attrs {}
:content [<nodes]})
;;; Modeling domain - start with outer-level element
(defmacro domain
{:style/indent 1}
[name & body]
;; notice how we quote the map
`{:tag :domain
:attrs {:name (str '~name)}
body has to be inside vector otherwise we would n't be able to have more than one group inside a domain
;; next we'll define `grouping`s that go inside the domain's body
(declare handle-things)
(defmacro grouping
{:style/indent 1}
[name & body]
`{:tag :grouping
:attrs {:name (str '~name)}
:content [~@(handle-things body)]})
(declare grok-attrs grok-props)
(defn handle-things [things]
(for [t things]
{:tag :thing
:attrs (grok-attrs (take-while (comp not vector?) t))
:content (if-let [c (grok-props (drop-while (comp not vector?) t))]
[c]
[])}))
(defn grok-attrs [attrs]
(into {:name (str (first attrs))}
(for [a (rest attrs)]
(cond
(list? a) [:isa (str (second a))]
(string? a) [:comment a]))))
(defn grok-props [props]
(when props
{:tag :properties
:attrs nil
:content (apply vector (for [p props]
{:tag :property
:attrs {:name (str (first p))}
:content nil}))}))
(def d
(domain man-vs-monster
(grouping people
(Human "A stock human")
(Man (isa Human)
"A man, baby"
[name]
[has-beard?]))
(grouping monsters
(Chupacabra
"A fierce, yet elusive creature"
[eats-goats?]))))
d
;;=>
{:content
[{:content
[{:tag :thing,
:attrs {:name "Human", :comment "A stock human"},
:content [{:tag :properties, :attrs nil, :content []}]}
{:tag :thing,
:attrs {:name "Man", :isa "Human", :comment "A man, baby"},
:content
[{:tag :properties,
:attrs nil,
:content
[{:tag :property, :attrs {:name "name"}, :content nil}
{:tag :property, :attrs {:name "has-beard?"}, :content nil}]}]}],
:attrs {:name "people"},
:tag :grouping}
{:content
[{:tag :thing,
:attrs {:name "Chupacabra", :comment "A fierce, yet elusive creature"},
:content
[{:tag :properties,
:attrs nil,
:content [{:tag :property, :attrs {:name "eats-goats?"}, :content nil}]}]}],
:attrs {:name "monsters"},
:tag :grouping}],
:attrs {:name "man-vs-monster"},
:tag :domain}
;; let's convert this to XML!
(xml/emit d)
;; <?xml version='1.0' encoding='UTF-8'?>
< domain - monster ' >
;; <grouping name='people'>
;; <thing name='Human' comment='A stock human'>
;; <properties>
;; </properties>
;; </thing>
< thing ' isa='Human ' comment='A man , baby ' >
;; <properties>
;; <property name='name'/>
< property name='has - beard?'/ >
;; </properties>
;; </thing>
;; </grouping>
;; <grouping name='monsters'>
;; <thing name='Chupacabra' comment='A fierce, yet elusive creature'>
;; <properties>
< property - goats?'/ >
;; </properties>
;; </thing>
;; </grouping>
;; </domain>
Using macros to control symbolic resolution time ( p. 186 - 189 )
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
Macroexpand this simple macro to understand how Clojure macros resolve symbols
(defmacro resolution [] `x)
(macroexpand '(resolution)) ; it doesn't matter here whether you use macroexpand-1 or macroexpand
;; => clojure-experiments.books.joy-of-clojure.ch08-macros/x
;; because the name is fully qualified this works without issues:
(def x 9)
(resolution)
= > 9
;; this would not work if the symbol was not fully qualified
(let [x 109] (resolution))
= > 9
Anaphora - awhen ( p. 187 )
Note : that Clojure provides when - let and if - let that do nest and are much more useful !
(defmacro awhen [expr & body]
`(let [~'it ~expr]
(when ~'it
~@body)))
(awhen [1 2 3] (it 2))
= > 3
8.6 Using macros to manage resources ( p. 188 - 189 )
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; We can use standard with-open when the thing is `java.io.Closeable`
(defn joc-www []
;; (-> "" ; doesn't work (timeout)
(-> ""
URL.
.openStream
InputStreamReader.
BufferedReader.
))
;; Note that this will most likely timeout!
(let [stream (joc-www)]
(with-open [page stream]
(println (.readLine page))
(print "The stream will now close...")
(println "but let's read from it anyway.")
(.readLine stream)) ; illegal after close
)
;; generic with-resource macro that can be used
;; when `with-open` not (ie. when the resource doesn't implemebt Closeable)
;; note that unlike `with-open` this doesn't accept multiple bindings
(defmacro with-resource [binding close-fn & body]
`(let ~binding
(try
~@body
(finally
(~close-fn ~(binding 0))))))
(let [stream (joc-www)]
(with-resource [page stream]
#(.close %)
(.readLine page)))
;; => "<!doctype html>"
8.7 . macros returning functions ( p. 190 - 193 )
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; we want to create `contract` macro that can be used like this:
(comment
(contract doubler
[x]
(:require (pos? x))
(:ensure (= (* 2 x) %)))
)
;; This macro will return a function.
It 's useful to first imagine what the function will look like :
(fn doubler ([f x]
{:pre [(pos? x)]
:post [(= (* 2 x) %)]}
(f x)))
(declare collect-bodies)
(defmacro contract
{:style/indent 1}
[name & forms]
(list* `fn name (collect-bodies forms)))
;; to allow for multi-arity function definition we start with collect-bodies
(declare build-contract)
(defn collect-bodies [forms]
for every form build a partition of 3 elements : arglist , " requires " contract , and " ensures " contract
(for [form (partition 3 forms)]
(build-contract form)))
(defn build-contract [c]
(let [args (first c)] ; grab args
build the arglist vector - fist arg is ` f ` and then all the explicit args
;; build the metadata map with `:pre`/`:post` keys
(apply merge
(for [con (rest c)]
(cond
(= 'require (first con))
(assoc {} :pre (vec (rest con)))
(= 'ensure (first con))
(assoc {} :post (vec (rest con)))
:else (throw (Exception. (str "Unknown tag " (first con)))))))
;; build the call site - this looks the same as `(cons 'f args)`
(list* 'f args))))
;; my simplified version - does it work?
(defn build-contract [c]
(let [args (first c)] ; grab args
build the arglist vector - fist arg is ` f ` and then all the explicit args
;; build the metadata map with `:pre`/`:post` keys
(apply merge
(for [con (rest c)
:let [tag (first con) conditions (vec (rest con))]]
(cond
(= 'require tag)
{:pre conditions}
(= 'ensure tag)
{:post conditions}
:else (throw (Exception. (str "Unknown tag " tag))))))
;; build the call site - this looks the same as `(cons 'f args)`
(list* 'f args))))
;; use it like this:
(def doubler-contract
(contract doubler
[x]
(require (pos? x))
(ensure (= (* 2 x) %))))
;; test correct use
(def times2 (partial doubler-contract #(* 2 %)))
(times2 9)
= > 18
;; test incorrect (:use [ :refer []])
(def times3 (partial doubler-contract #(* 3 %)))
#_(times3 9)
Execution error ( AssertionError ) at clojure - experiments.books.joy - of - clojure.ch08 - macros / doubler ( form-init8072950533536683414.clj:366 ) .
Assert failed : (= ( * 2 x ) % )
let 's extend doubler - contract to cover two arities
(def doubler-contract
(contract doubler
[x]
(require (pos? x))
(ensure (= (* 2 x) %))
[x y]
(require (pos? x) (pos? y))
(ensure (= % (* 2 (+ x y))))))
;; test a correct use
((partial doubler-contract #(+ %1 %1 %2 %2))
2 3)
= > 10
;; test an incorrect use
#_((partial doubler-contract #(* 3 (+ %1 %2)))
2 3)
Execution error ( AssertionError ) at clojure - experiments.books.joy - of - clojure.ch08 - macros / doubler ( form-init8072950533536683414.clj:406 ) .
Assert failed : (= % ( * 2 ( + x y ) ) )
| null | https://raw.githubusercontent.com/jumarko/clojure-experiments/01e9957749e5b2355c5d55203ece8ea1daed82c6/src/clojure_experiments/books/joy_of_clojure/ch08_macros.clj | clojure | This will fail with ClassCastException
Now something more exciting - evaluate a condition that is a function call
it may help to see that this returns the list
=> (+ 1 2)
contextual-eval -> local bindings
to mitigate issues with `eval` which uses global bindings
This is also used in ch17 - section 17.4.2 (`break` macro)
do-until macro
Don't be confused: this `when` will get macroexpanded to `if` via `macroexpand-all`
... and this `if` will disappear because it's evaluated when the macro executes at compile time
"calling" do-until recursively
Even
Odd
=> nil
implementation from the book uses if + not + do
=> "Now we see it..."
=> nil
- calling a function whenever a var changes
=> (do
(clojure.core/add-watch
#'x
:re-bind
(clojure.core/fn
(clojure.core/println old__25921__auto__ " -> " new__25922__auto__))))
internally, we'll use this structure:
Modeling domain - start with outer-level element
notice how we quote the map
next we'll define `grouping`s that go inside the domain's body
=>
let's convert this to XML!
<?xml version='1.0' encoding='UTF-8'?>
<grouping name='people'>
<thing name='Human' comment='A stock human'>
<properties>
</properties>
</thing>
<properties>
<property name='name'/>
</properties>
</thing>
</grouping>
<grouping name='monsters'>
<thing name='Chupacabra' comment='A fierce, yet elusive creature'>
<properties>
</properties>
</thing>
</grouping>
</domain>
it doesn't matter here whether you use macroexpand-1 or macroexpand
=> clojure-experiments.books.joy-of-clojure.ch08-macros/x
because the name is fully qualified this works without issues:
this would not work if the symbol was not fully qualified
We can use standard with-open when the thing is `java.io.Closeable`
(-> "" ; doesn't work (timeout)
Note that this will most likely timeout!
illegal after close
generic with-resource macro that can be used
when `with-open` not (ie. when the resource doesn't implemebt Closeable)
note that unlike `with-open` this doesn't accept multiple bindings
=> "<!doctype html>"
we want to create `contract` macro that can be used like this:
This macro will return a function.
to allow for multi-arity function definition we start with collect-bodies
grab args
build the metadata map with `:pre`/`:post` keys
build the call site - this looks the same as `(cons 'f args)`
my simplified version - does it work?
grab args
build the metadata map with `:pre`/`:post` keys
build the call site - this looks the same as `(cons 'f args)`
use it like this:
test correct use
test incorrect (:use [ :refer []])
test a correct use
test an incorrect use | (ns clojure-experiments.books.joy-of-clojure.ch08-macros
(:require [clojure.walk :as walk]
[clojure.xml :as xml])
(:import [java.io BufferedReader InputStreamReader]
java.net.URL))
eval ( p. 175 )
(eval 42)
= > 42
(eval '(list 1 2))
= > ( 1 2 )
#_(eval (list 1 2))
(eval (list (symbol "+") 1 2))
= > 3
(list (symbol "+") 1 2)
vs. just using this which is basically calling ` ( eval 3 ) `
(eval (+ 1 2))
= > 3
(defn contextual-eval [ctx expr]
(eval
`(let [~@(mapcat (fn [[k v]] [k `'~v])
ctx)]
~expr)))
here the let bindings will be : [ a ( quote 1 ) b ( quote 2 ) ]
(contextual-eval '{a 1 b 2} '(+ a b))
= > 3
Control structures ( p. 178 )
(defmacro do-until
[& clauses]
(when clauses
(list 'clojure.core/when (first clauses)
(if (next clauses)
(second clauses)
(throw (IllegalArgumentException. "do-until requires an even number of forms")))
(cons 'do-until (nnext clauses)))))
(macroexpand-1 '(do-until true (prn 1) false (prn 2)))
= > ( clojure.core/when true ( prn 1 ) ( do - until false ( prn 2 ) ) )
just for fun you can try decompile to Java code
(require '[clj-java-decompiler.core :refer [decompile disassemble] :as decompiler])
#_(decompile (do-until true (prn 1) false (prn 2)))
(walk/macroexpand-all '(do-until true (prn 1) false (prn 2)))
= > ( if true ( do ( prn 1 ) ( if false ( do ( prn 2 ) nil ) ) ) )
(do-until
(even? 2) (println "Even")
(odd? 3) (println "Odd")
(zero? 1) (println "You'll never see me")
:lollipop (println "Truthy thing"))
(macroexpand-1
'(do-until
(even? 2) (println "Even")
(odd? 3) (println "Odd")
(zero? 1) (println "You'll never see me")
:lollipop (println "Truthy thing")))
= > ( clojure.core/when ( even ? 2 ) ( println " Even " ) ( do - until ( odd ? 3 ) ( println " Odd " ) ( zero ? 1 ) ( println " You 'll never see me " ) : lollipop ( println " thing " ) ) )
(walk/macroexpand-all
'(do-until
(even? 2) (println "Even")
(odd? 3) (println "Odd")
(zero? 1) (println "You'll never see me")
:lollipop (println "Truthy thing")))
= > ( if ( even ? 2 ) ( do ( println " Even " ) ( if ( odd ? 3 ) ( do ( println " Odd " ) ( if ( zero ? 1 ) ( do ( println " You 'll never see me " ) ( if : lollipop ( do ( println " thing " ) nil ) ) ) ) ) ) ) )
unless ( p. 179 )
(defmacro unless [condition & body]
`(when-not ~condition
~@body))
(defmacro unless [condition & body]
`(if (not ~condition)
(do ~@body)))
(unless (even? 3) "Now we see it...")
(unless (even? 2) "Now we don't.")
def - watched ( p. 181 )
(defmacro def-watched [name & value]
`(do
(def ~name ~@value)
(add-watch (var ~name)
:re-bind
(fn [~'key ~'r old# new#]
(println old# " -> " new#)))))
(def-watched x 2)
(alter-var-root #'x inc)
(macroexpand-1 '(def-watched x 2))
( def x 2 )
[ key r old__25921__auto _ _ _ _ ]
Using macros to change forms ( p. 182 - 185 )
(comment
{:tag <form>
:attrs {}
:content [<nodes]})
(defmacro domain
{:style/indent 1}
[name & body]
`{:tag :domain
:attrs {:name (str '~name)}
body has to be inside vector otherwise we would n't be able to have more than one group inside a domain
(declare handle-things)
(defmacro grouping
{:style/indent 1}
[name & body]
`{:tag :grouping
:attrs {:name (str '~name)}
:content [~@(handle-things body)]})
(declare grok-attrs grok-props)
(defn handle-things [things]
(for [t things]
{:tag :thing
:attrs (grok-attrs (take-while (comp not vector?) t))
:content (if-let [c (grok-props (drop-while (comp not vector?) t))]
[c]
[])}))
(defn grok-attrs [attrs]
(into {:name (str (first attrs))}
(for [a (rest attrs)]
(cond
(list? a) [:isa (str (second a))]
(string? a) [:comment a]))))
(defn grok-props [props]
(when props
{:tag :properties
:attrs nil
:content (apply vector (for [p props]
{:tag :property
:attrs {:name (str (first p))}
:content nil}))}))
(def d
(domain man-vs-monster
(grouping people
(Human "A stock human")
(Man (isa Human)
"A man, baby"
[name]
[has-beard?]))
(grouping monsters
(Chupacabra
"A fierce, yet elusive creature"
[eats-goats?]))))
d
{:content
[{:content
[{:tag :thing,
:attrs {:name "Human", :comment "A stock human"},
:content [{:tag :properties, :attrs nil, :content []}]}
{:tag :thing,
:attrs {:name "Man", :isa "Human", :comment "A man, baby"},
:content
[{:tag :properties,
:attrs nil,
:content
[{:tag :property, :attrs {:name "name"}, :content nil}
{:tag :property, :attrs {:name "has-beard?"}, :content nil}]}]}],
:attrs {:name "people"},
:tag :grouping}
{:content
[{:tag :thing,
:attrs {:name "Chupacabra", :comment "A fierce, yet elusive creature"},
:content
[{:tag :properties,
:attrs nil,
:content [{:tag :property, :attrs {:name "eats-goats?"}, :content nil}]}]}],
:attrs {:name "monsters"},
:tag :grouping}],
:attrs {:name "man-vs-monster"},
:tag :domain}
(xml/emit d)
< domain - monster ' >
< thing ' isa='Human ' comment='A man , baby ' >
< property name='has - beard?'/ >
< property - goats?'/ >
Using macros to control symbolic resolution time ( p. 186 - 189 )
Macroexpand this simple macro to understand how Clojure macros resolve symbols
(defmacro resolution [] `x)
(def x 9)
(resolution)
= > 9
(let [x 109] (resolution))
= > 9
Anaphora - awhen ( p. 187 )
Note : that Clojure provides when - let and if - let that do nest and are much more useful !
(defmacro awhen [expr & body]
`(let [~'it ~expr]
(when ~'it
~@body)))
(awhen [1 2 3] (it 2))
= > 3
8.6 Using macros to manage resources ( p. 188 - 189 )
(defn joc-www []
(-> ""
URL.
.openStream
InputStreamReader.
BufferedReader.
))
(let [stream (joc-www)]
(with-open [page stream]
(println (.readLine page))
(print "The stream will now close...")
(println "but let's read from it anyway.")
)
(defmacro with-resource [binding close-fn & body]
`(let ~binding
(try
~@body
(finally
(~close-fn ~(binding 0))))))
(let [stream (joc-www)]
(with-resource [page stream]
#(.close %)
(.readLine page)))
8.7 . macros returning functions ( p. 190 - 193 )
(comment
(contract doubler
[x]
(:require (pos? x))
(:ensure (= (* 2 x) %)))
)
It 's useful to first imagine what the function will look like :
(fn doubler ([f x]
{:pre [(pos? x)]
:post [(= (* 2 x) %)]}
(f x)))
(declare collect-bodies)
(defmacro contract
{:style/indent 1}
[name & forms]
(list* `fn name (collect-bodies forms)))
(declare build-contract)
(defn collect-bodies [forms]
for every form build a partition of 3 elements : arglist , " requires " contract , and " ensures " contract
(for [form (partition 3 forms)]
(build-contract form)))
(defn build-contract [c]
build the arglist vector - fist arg is ` f ` and then all the explicit args
(apply merge
(for [con (rest c)]
(cond
(= 'require (first con))
(assoc {} :pre (vec (rest con)))
(= 'ensure (first con))
(assoc {} :post (vec (rest con)))
:else (throw (Exception. (str "Unknown tag " (first con)))))))
(list* 'f args))))
(defn build-contract [c]
build the arglist vector - fist arg is ` f ` and then all the explicit args
(apply merge
(for [con (rest c)
:let [tag (first con) conditions (vec (rest con))]]
(cond
(= 'require tag)
{:pre conditions}
(= 'ensure tag)
{:post conditions}
:else (throw (Exception. (str "Unknown tag " tag))))))
(list* 'f args))))
(def doubler-contract
(contract doubler
[x]
(require (pos? x))
(ensure (= (* 2 x) %))))
(def times2 (partial doubler-contract #(* 2 %)))
(times2 9)
= > 18
(def times3 (partial doubler-contract #(* 3 %)))
#_(times3 9)
Execution error ( AssertionError ) at clojure - experiments.books.joy - of - clojure.ch08 - macros / doubler ( form-init8072950533536683414.clj:366 ) .
Assert failed : (= ( * 2 x ) % )
let 's extend doubler - contract to cover two arities
(def doubler-contract
(contract doubler
[x]
(require (pos? x))
(ensure (= (* 2 x) %))
[x y]
(require (pos? x) (pos? y))
(ensure (= % (* 2 (+ x y))))))
((partial doubler-contract #(+ %1 %1 %2 %2))
2 3)
= > 10
#_((partial doubler-contract #(* 3 (+ %1 %2)))
2 3)
Execution error ( AssertionError ) at clojure - experiments.books.joy - of - clojure.ch08 - macros / doubler ( form-init8072950533536683414.clj:406 ) .
Assert failed : (= % ( * 2 ( + x y ) ) )
|
fc4ed4af79e9a100942589503669d18e0171c49d76015918196a387a7b276e22 | B-Lang-org/bsc | MakeOctalChars.hs | the program used to generate OctalChars.bsv
-- should it need to be changed
redirect stdout to OctalChars.bsv
showOct num = show first ++ show middle ++ show last
where (rest, last) = quotRem num 8
(first, middle) = quotRem rest 8
nums = [0..255]
display_num num = putStrLn ("$display(\"\\" ++ (showOct num) ++ "\");")
main = do
putStrLn("(* synthesize *)")
putStrLn("module sysOctalChars();")
putStrLn("rule test;")
mapM_ display_num nums
putStrLn("endrule")
putStrLn("endmodule")
| null | https://raw.githubusercontent.com/B-Lang-org/bsc/bd141b505394edc5a4bdd3db442a9b0a8c101f0f/testsuite/bsc.syntax/bsv05/strings/MakeOctalChars.hs | haskell | should it need to be changed | the program used to generate OctalChars.bsv
redirect stdout to OctalChars.bsv
showOct num = show first ++ show middle ++ show last
where (rest, last) = quotRem num 8
(first, middle) = quotRem rest 8
nums = [0..255]
display_num num = putStrLn ("$display(\"\\" ++ (showOct num) ++ "\");")
main = do
putStrLn("(* synthesize *)")
putStrLn("module sysOctalChars();")
putStrLn("rule test;")
mapM_ display_num nums
putStrLn("endrule")
putStrLn("endmodule")
|
57a490a238fb2c8f8b689fbc2190f684b733b1c4a74e8c738b2573e12b378da8 | elastic/eui-cljs | icon_logo_kafka.cljs | (ns eui.icon-logo-kafka
(:require ["@elastic/eui/lib/components/icon/assets/logo_kafka.js" :as eui]))
(def logoKafka eui/icon)
| null | https://raw.githubusercontent.com/elastic/eui-cljs/ad60b57470a2eb8db9bca050e02f52dd964d9f8e/src/eui/icon_logo_kafka.cljs | clojure | (ns eui.icon-logo-kafka
(:require ["@elastic/eui/lib/components/icon/assets/logo_kafka.js" :as eui]))
(def logoKafka eui/icon)
| |
3ec29c47fe37868d8e5434de413f6c4d5d8609a0a26c783f2374b8dc61034403 | larcenists/larceny | 16label3.scm | (bits 16)
(text
(with-win-lose foo bar
(zero? ax))
(nop)
(nop)
(nop)
(seq (label bar
(inv (iter (seq (dec ax)
(inc cx)
(>= ax 0)))))
(label foo (push cx))))
00000000 85C0 test ax , ax
00000002 0F840D00 jz near 0x13
00000006 E90300 jmp 0xc
00000009 90 nop
0000000A 90 nop
0000000B 90 nop
0000000C 48 dec ax
0000000D 41 inc cx
0000000E 3D0000 cmp ax,0x0
00000011 7DF9 jnl 0xc
00000013 51 push cx
| null | https://raw.githubusercontent.com/larcenists/larceny/fef550c7d3923deb7a5a1ccd5a628e54cf231c75/src/Lib/Sassy/tests/prims16/16label3.scm | scheme | (bits 16)
(text
(with-win-lose foo bar
(zero? ax))
(nop)
(nop)
(nop)
(seq (label bar
(inv (iter (seq (dec ax)
(inc cx)
(>= ax 0)))))
(label foo (push cx))))
00000000 85C0 test ax , ax
00000002 0F840D00 jz near 0x13
00000006 E90300 jmp 0xc
00000009 90 nop
0000000A 90 nop
0000000B 90 nop
0000000C 48 dec ax
0000000D 41 inc cx
0000000E 3D0000 cmp ax,0x0
00000011 7DF9 jnl 0xc
00000013 51 push cx
| |
67bfe337c1aecd3bf0c9aa6e82bf74db454698d7e4835b8485d973bf38b6e7f2 | haskell/aeson | BigProduct.hs | # LANGUAGE DeriveGeneric #
module Auto.G.BigProduct where
import Control.DeepSeq
import Data.Aeson
import GHC.Generics (Generic)
import Options
data BigProduct = BigProduct
!Int !Int !Int !Int !Int
!Int !Int !Int !Int !Int
!Int !Int !Int !Int !Int
!Int !Int !Int !Int !Int
!Int !Int !Int !Int !Int
deriving (Show, Eq, Generic)
instance NFData BigProduct where
rnf a = a `seq` ()
instance ToJSON BigProduct where
toJSON = genericToJSON opts
toEncoding = genericToEncoding opts
instance FromJSON BigProduct where
parseJSON = genericParseJSON opts
bigProduct :: BigProduct
bigProduct = BigProduct 1 2 3 4 5
6 7 8 9 10
11 12 13 14 15
16 17 18 19 20
21 22 23 24 25
| null | https://raw.githubusercontent.com/haskell/aeson/d711df76b826942f4a9e791712512c6b19b8c1c8/benchmarks/bench/Auto/G/BigProduct.hs | haskell | # LANGUAGE DeriveGeneric #
module Auto.G.BigProduct where
import Control.DeepSeq
import Data.Aeson
import GHC.Generics (Generic)
import Options
data BigProduct = BigProduct
!Int !Int !Int !Int !Int
!Int !Int !Int !Int !Int
!Int !Int !Int !Int !Int
!Int !Int !Int !Int !Int
!Int !Int !Int !Int !Int
deriving (Show, Eq, Generic)
instance NFData BigProduct where
rnf a = a `seq` ()
instance ToJSON BigProduct where
toJSON = genericToJSON opts
toEncoding = genericToEncoding opts
instance FromJSON BigProduct where
parseJSON = genericParseJSON opts
bigProduct :: BigProduct
bigProduct = BigProduct 1 2 3 4 5
6 7 8 9 10
11 12 13 14 15
16 17 18 19 20
21 22 23 24 25
| |
f08e4d93f0a3bd9a33d16037c919b009bc031cdecb7e3db5f55db622f638bdbf | whamtet/dogfort | session.cljs | (ns dogfort.middleware.session
"Middleware for maintaining browser sessions using cookies.
Sessions are stored using types that adhere to the
dogfort.middleware.session.store/SessionStore protocol.
Ring comes with two stores included:
dogfort.middleware.session.memory/memory-store
dogfort.middleware.session.cookie/cookie-store"
(:require [dogfort.middleware.cookies :as cookies]
[dogfort.middleware.session.store :as store]
[dogfort.middleware.session.memory :as mem]
[redlobster.promise :as p])
(:use-macros
[redlobster.macros :only [promise waitp let-realised]]
))
(defn- session-options
[options]
{:store (:store options (mem/memory-store))
:cookie-name (:cookie-name options "ring-session")
:cookie-attrs (merge {:path "/"
:http-only true}
(:cookie-attrs options)
(if-let [root (:root options)]
{:path root}))})
(defn- bare-session-request
[request & [{:keys [store cookie-name]}]]
(let [req-key (get-in request [:cookies cookie-name :value])
session (store/read-session store req-key)
session-key (if session req-key)]
(merge request {:session (or session {})
:session/key session-key})))
(defn session-request
"Reads current HTTP session map and adds it to :session key of the request.
See: wrap-session."
{:arglists '([request] [request options])
:added "1.2"}
[request & [options]]
(-> request
cookies/cookies-request
(bare-session-request options)))
(defn- bare-session-response
[response {session-key :session/key} & [{:keys [store cookie-name cookie-attrs]}]]
(let [new-session-key (if (contains? response :session)
(if-let [session (response :session)]
(if (:recreate (meta session))
(do
(store/delete-session store session-key)
(store/write-session store nil session))
(store/write-session store session-key session))
(if session-key
(store/delete-session store session-key))))
session-attrs (:session-cookie-attrs response)
cookie {cookie-name
(merge cookie-attrs
session-attrs
{:value (or new-session-key session-key)})}
response (dissoc response :session :session-cookie-attrs)]
(if (or (and new-session-key (not= session-key new-session-key))
(and session-attrs (or new-session-key session-key)))
(assoc response :cookies (merge (response :cookies) cookie))
response)))
(defn session-response
"Updates session based on :session key in response. See: wrap-session."
{:arglists '([response request] [response request options])
:added "1.2"}
[response request & [options]]
(let-realised
[response response]
(bare-session-response @response request options)))
(defn wrap-session
"Reads in the current HTTP session map, and adds it to the :session key on
the request. If a :session key is added to the response by the handler, the
session is updated with the new value. If the value is nil, the session is
deleted.
Accepts the following options:
:store - An implementation of the SessionStore protocol in the
dogfort.middleware.session.store namespace. This determines how
the session is stored. Defaults to in-memory storage using
dogfort.middleware.session.store/memory-store.
:root - The root path of the session. Any path above this will not be
able to see this session. Equivalent to setting the cookie's
path attribute. Defaults to \"/\".
:cookie-name - The name of the cookie that holds the session key. Defaults to
\"ring-session\"
:cookie-attrs - A map of attributes to associate with the session cookie.
Defaults to {:http-only true}."
([handler]
(wrap-session handler {}))
([handler options]
(let [options (session-options options)]
(fn [request]
(let [new-request (session-request request options)]
(-> (handler new-request)
(session-response new-request options)))))))
| null | https://raw.githubusercontent.com/whamtet/dogfort/75c2908355cc18bf350a5b761d2906e013ee9f94/src/dogfort/middleware/session.cljs | clojure | (ns dogfort.middleware.session
"Middleware for maintaining browser sessions using cookies.
Sessions are stored using types that adhere to the
dogfort.middleware.session.store/SessionStore protocol.
Ring comes with two stores included:
dogfort.middleware.session.memory/memory-store
dogfort.middleware.session.cookie/cookie-store"
(:require [dogfort.middleware.cookies :as cookies]
[dogfort.middleware.session.store :as store]
[dogfort.middleware.session.memory :as mem]
[redlobster.promise :as p])
(:use-macros
[redlobster.macros :only [promise waitp let-realised]]
))
(defn- session-options
[options]
{:store (:store options (mem/memory-store))
:cookie-name (:cookie-name options "ring-session")
:cookie-attrs (merge {:path "/"
:http-only true}
(:cookie-attrs options)
(if-let [root (:root options)]
{:path root}))})
(defn- bare-session-request
[request & [{:keys [store cookie-name]}]]
(let [req-key (get-in request [:cookies cookie-name :value])
session (store/read-session store req-key)
session-key (if session req-key)]
(merge request {:session (or session {})
:session/key session-key})))
(defn session-request
"Reads current HTTP session map and adds it to :session key of the request.
See: wrap-session."
{:arglists '([request] [request options])
:added "1.2"}
[request & [options]]
(-> request
cookies/cookies-request
(bare-session-request options)))
(defn- bare-session-response
[response {session-key :session/key} & [{:keys [store cookie-name cookie-attrs]}]]
(let [new-session-key (if (contains? response :session)
(if-let [session (response :session)]
(if (:recreate (meta session))
(do
(store/delete-session store session-key)
(store/write-session store nil session))
(store/write-session store session-key session))
(if session-key
(store/delete-session store session-key))))
session-attrs (:session-cookie-attrs response)
cookie {cookie-name
(merge cookie-attrs
session-attrs
{:value (or new-session-key session-key)})}
response (dissoc response :session :session-cookie-attrs)]
(if (or (and new-session-key (not= session-key new-session-key))
(and session-attrs (or new-session-key session-key)))
(assoc response :cookies (merge (response :cookies) cookie))
response)))
(defn session-response
"Updates session based on :session key in response. See: wrap-session."
{:arglists '([response request] [response request options])
:added "1.2"}
[response request & [options]]
(let-realised
[response response]
(bare-session-response @response request options)))
(defn wrap-session
"Reads in the current HTTP session map, and adds it to the :session key on
the request. If a :session key is added to the response by the handler, the
session is updated with the new value. If the value is nil, the session is
deleted.
Accepts the following options:
:store - An implementation of the SessionStore protocol in the
dogfort.middleware.session.store namespace. This determines how
the session is stored. Defaults to in-memory storage using
dogfort.middleware.session.store/memory-store.
:root - The root path of the session. Any path above this will not be
able to see this session. Equivalent to setting the cookie's
path attribute. Defaults to \"/\".
:cookie-name - The name of the cookie that holds the session key. Defaults to
\"ring-session\"
:cookie-attrs - A map of attributes to associate with the session cookie.
Defaults to {:http-only true}."
([handler]
(wrap-session handler {}))
([handler options]
(let [options (session-options options)]
(fn [request]
(let [new-request (session-request request options)]
(-> (handler new-request)
(session-response new-request options)))))))
| |
7682a3a2ea9a131c94605e203889eab0dc51553fc162875235ae2d4831f7a248 | slyrus/cl-bio | defpackage.lisp |
(in-package #:cl-user)
(defpackage #:bio-entrez
(:nicknames #:entrez)
(:use #:cl)
(:export #:*entrez-dictionary*
#:*entrez-xml-dictionary*
#:entrez-fetch
#:entrez-search
#:gb-set-get-gb-seqs
#:gb-seg-get-sequence
#:gb-seq-sequence-get-residues
#:generif
#:generif-text))
(defpackage #:entrez-user (:use #:cl #:bio #:entrez))
| null | https://raw.githubusercontent.com/slyrus/cl-bio/e6de2bc7f4accaa11466902407e43fae3184973f/entrez/defpackage.lisp | lisp |
(in-package #:cl-user)
(defpackage #:bio-entrez
(:nicknames #:entrez)
(:use #:cl)
(:export #:*entrez-dictionary*
#:*entrez-xml-dictionary*
#:entrez-fetch
#:entrez-search
#:gb-set-get-gb-seqs
#:gb-seg-get-sequence
#:gb-seq-sequence-get-residues
#:generif
#:generif-text))
(defpackage #:entrez-user (:use #:cl #:bio #:entrez))
| |
4339c2ca464bcac4e32b28e4d7ce78d4d9129cf94342fa61b8f71e8b486e0747 | hyperfiddle/electric | analyzer.cljc | (ns dustin.analyzer
(:require [clojure.tools.analyzer :as ana]
[clojure.tools.analyzer.env :as env]
[clojure.tools.analyzer.jvm :as clj]
[clojure.tools.analyzer.ast :as ast]
[clojure.tools.analyzer.passes.jvm.emit-form :as e]
[hyperfiddle.rcf :refer [tests]]))
Docs :
;
(tests
(clj/analyze '(inc 1))
:= '{:args [{:op :const,
:env {:context :ctx/expr, :locals {}, :ns dustin.analyzer, :column _, :line _, :file _},
:type :number,
:literal? true,
:val 1,
:form 1,
:o-tag long,
:tag long}],
:children [:args],
:method inc,
:op :static-call,
:env {:context :ctx/expr, :locals {}, :ns dustin.analyzer, :column _, :line _, :file _},
:o-tag long,
:class clojure.lang.Numbers,
:top-level true,
:form (. clojure.lang.Numbers (inc 1)),
:tag long,
:validated? true,
:raw-forms ((inc 1))})
(tests
(ast/children (clj/analyze '(inc 1)))
:= [{:op :const,
:env {:context :ctx/expr, :locals {}, :ns _, :file _},
:type :number,
:literal? true,
:val 1,
:form 1,
:o-tag _,
:tag _}])
(tests
"flatten"
(ast/nodes (clj/analyze '(inc 1)))
:= [{:op :static-call,
:method 'inc,
:form '(. clojure.lang.Numbers (inc 1)),
:raw-forms '((inc 1))
:args [{:op :const,
:env _,
:type :number,
:literal? true,
:val 1,
:form 1,
:o-tag long,
:tag long}],
:children [:args],
:env _,
:o-tag long,
:class clojure.lang.Numbers,
:top-level true,
:tag long,
:validated? true}
{:op :const,
:form 1,
:val 1,
:env _,
:type :number,
:literal? true,
:o-tag long,
:tag long}])
(tests
(e/emit-form (clj/analyze '(inc 1)))
:= '(clojure.lang.Numbers/inc 1))
(tests
(clj/analyze '(if true :a :b))
:= {:form '(if true :a :b),
:op :if,
:children [:test :then :else],
:else {:op :const,
:env _,
:type :keyword,
:literal? true,
:val :b,
:form :b,
:o-tag clojure.lang.Keyword,
:tag clojure.lang.Keyword}
:then {:op :const,
:env _,
:type :keyword,
:literal? true,
:val :a,
:form :a,
:o-tag clojure.lang.Keyword,
:tag clojure.lang.Keyword}
:test {:op :const,
:env _,
:type :bool,
:literal? true,
:val true,
:form true,
:o-tag java.lang.Boolean,
:tag java.lang.Boolean}
:env _,
:o-tag clojure.lang.Keyword,
:top-level true,
:tag clojure.lang.Keyword})
(tests
(clj/analyze '(let [a 1]))
:= _)
(tests
(def body '(loop [x 0]
(case (int x)
0 (recur (inc x))
1 42)))
(clj/analyze body)
:= _
(e/emit-form (clj/analyze body))
:= '(loop* [x 0]
(let* [?G__40534 (clojure.lang.RT/intCast x)]
; #L9006
(case* ?G__40534
0 0
(throw (new java.lang.IllegalArgumentException (clojure.core/str "No matching clause: " ?G__40534)))
{0 [0 (recur (clojure.lang.Numbers/inc x))],
1 [1 42]}
:compact
:int
nil))))
(tests
(clj/empty-env)
:= {:context :ctx/expr, :locals {}, :ns 'dustin.analyzer}
)
(defmacro foo [x] ::x)
(apply #'foo '(foo 1) {} [1])
| null | https://raw.githubusercontent.com/hyperfiddle/electric/1c6c3891cbf13123fef8d33e6555d300f0dac134/scratch/dustin/y2021/analyzer.cljc | clojure |
#L9006 | (ns dustin.analyzer
(:require [clojure.tools.analyzer :as ana]
[clojure.tools.analyzer.env :as env]
[clojure.tools.analyzer.jvm :as clj]
[clojure.tools.analyzer.ast :as ast]
[clojure.tools.analyzer.passes.jvm.emit-form :as e]
[hyperfiddle.rcf :refer [tests]]))
Docs :
(tests
(clj/analyze '(inc 1))
:= '{:args [{:op :const,
:env {:context :ctx/expr, :locals {}, :ns dustin.analyzer, :column _, :line _, :file _},
:type :number,
:literal? true,
:val 1,
:form 1,
:o-tag long,
:tag long}],
:children [:args],
:method inc,
:op :static-call,
:env {:context :ctx/expr, :locals {}, :ns dustin.analyzer, :column _, :line _, :file _},
:o-tag long,
:class clojure.lang.Numbers,
:top-level true,
:form (. clojure.lang.Numbers (inc 1)),
:tag long,
:validated? true,
:raw-forms ((inc 1))})
(tests
(ast/children (clj/analyze '(inc 1)))
:= [{:op :const,
:env {:context :ctx/expr, :locals {}, :ns _, :file _},
:type :number,
:literal? true,
:val 1,
:form 1,
:o-tag _,
:tag _}])
(tests
"flatten"
(ast/nodes (clj/analyze '(inc 1)))
:= [{:op :static-call,
:method 'inc,
:form '(. clojure.lang.Numbers (inc 1)),
:raw-forms '((inc 1))
:args [{:op :const,
:env _,
:type :number,
:literal? true,
:val 1,
:form 1,
:o-tag long,
:tag long}],
:children [:args],
:env _,
:o-tag long,
:class clojure.lang.Numbers,
:top-level true,
:tag long,
:validated? true}
{:op :const,
:form 1,
:val 1,
:env _,
:type :number,
:literal? true,
:o-tag long,
:tag long}])
(tests
(e/emit-form (clj/analyze '(inc 1)))
:= '(clojure.lang.Numbers/inc 1))
(tests
(clj/analyze '(if true :a :b))
:= {:form '(if true :a :b),
:op :if,
:children [:test :then :else],
:else {:op :const,
:env _,
:type :keyword,
:literal? true,
:val :b,
:form :b,
:o-tag clojure.lang.Keyword,
:tag clojure.lang.Keyword}
:then {:op :const,
:env _,
:type :keyword,
:literal? true,
:val :a,
:form :a,
:o-tag clojure.lang.Keyword,
:tag clojure.lang.Keyword}
:test {:op :const,
:env _,
:type :bool,
:literal? true,
:val true,
:form true,
:o-tag java.lang.Boolean,
:tag java.lang.Boolean}
:env _,
:o-tag clojure.lang.Keyword,
:top-level true,
:tag clojure.lang.Keyword})
(tests
(clj/analyze '(let [a 1]))
:= _)
(tests
(def body '(loop [x 0]
(case (int x)
0 (recur (inc x))
1 42)))
(clj/analyze body)
:= _
(e/emit-form (clj/analyze body))
:= '(loop* [x 0]
(let* [?G__40534 (clojure.lang.RT/intCast x)]
(case* ?G__40534
0 0
(throw (new java.lang.IllegalArgumentException (clojure.core/str "No matching clause: " ?G__40534)))
{0 [0 (recur (clojure.lang.Numbers/inc x))],
1 [1 42]}
:compact
:int
nil))))
(tests
(clj/empty-env)
:= {:context :ctx/expr, :locals {}, :ns 'dustin.analyzer}
)
(defmacro foo [x] ::x)
(apply #'foo '(foo 1) {} [1])
|
2310d5d701f035e22c435630c855f0304c99bbfb28d1bbd0df96f226aeb7aae0 | haskell-tools/haskell-tools | GADTsChecker.hs | {-# LANGUAGE MultiWayIf #-}
module Language.Haskell.Tools.Refactor.Builtin.ExtensionOrganizer.Checkers.GADTsChecker where
import Control.Reference ((^.), (&))
import Control.Monad.Trans.Maybe (MaybeT(..))
import Language.Haskell.Tools.AST
import Language.Haskell.Tools.Refactor
import Language.Haskell.Tools.Refactor.Builtin.ExtensionOrganizer.ExtMonad
| Checks a GADT - style constructor if GADTSyntax is turned on .
Sometimes GADTSyntax is sufficient and GADTs is not even needed .
chkGADTsGadtConDecl :: CheckNode GadtConDecl
chkGADTsGadtConDecl = conditional chkGADTsGadtConDecl' GADTSyntax
| Checks a data constructor declaration if GADTs or ExistentialQuantification is turned on .
This function is responsible for checking ExistentialQuantification as well .
-- (there is no separate checker for that extension)
chkConDeclForExistentials :: CheckNode ConDecl
chkConDeclForExistentials = conditionalAny chkConDeclForExistentials' [GADTs, ExistentialQuantification]
| Checks whether a GADTs - style constructor declaration requires GADTs .
If all data constructors are vanilla Haskell 98 data constructors
-- , then only GADTSyntax is needed. If any constructor's lookup fails
, we add MissingInformation .
chkGADTsGadtConDecl' :: CheckNode GadtConDecl
chkGADTsGadtConDecl' conDecl = do
let conNames = conDecl ^. (gadtConNames & annListElems)
mres <- mapM (runMaybeT . isVanillaDataConNameM) conNames
addEvidence_ GADTSyntax conDecl
if | any isNothing mres ->
addRelationMI (GADTs `lOr` ExistentialQuantification) conDecl
| any (not . fromJust) mres ->
addRelation (GADTs `lOr` ExistentialQuantification) conDecl
| otherwise -> return conDecl
| Extracts the name from a ConDecl , and checks whether it is a vanilla
data constructor . Ifthe lookup fails , adds MissingInformation .
chkConDeclForExistentials' :: CheckNode ConDecl
chkConDeclForExistentials' conDecl =
fromMaybeTM (addRelationMI (GADTs `lOr` ExistentialQuantification) conDecl) $
case conDecl ^. element of
UConDecl _ _ n _ -> chkName n
URecordDecl _ _ n _ -> chkName n
UInfixConDecl _ _ _ op _ -> chkName (op ^. operatorName)
where chkName :: HasNameInfo' n => n -> MaybeT ExtMonad ConDecl
chkName n = do
isVanilla <- isVanillaDataConNameM n
if isVanilla
then return conDecl
else lift . addRelation (GADTs `lOr` ExistentialQuantification) $ conDecl
| null | https://raw.githubusercontent.com/haskell-tools/haskell-tools/b1189ab4f63b29bbf1aa14af4557850064931e32/src/builtin-refactorings/Language/Haskell/Tools/Refactor/Builtin/ExtensionOrganizer/Checkers/GADTsChecker.hs | haskell | # LANGUAGE MultiWayIf #
(there is no separate checker for that extension)
, then only GADTSyntax is needed. If any constructor's lookup fails |
module Language.Haskell.Tools.Refactor.Builtin.ExtensionOrganizer.Checkers.GADTsChecker where
import Control.Reference ((^.), (&))
import Control.Monad.Trans.Maybe (MaybeT(..))
import Language.Haskell.Tools.AST
import Language.Haskell.Tools.Refactor
import Language.Haskell.Tools.Refactor.Builtin.ExtensionOrganizer.ExtMonad
| Checks a GADT - style constructor if GADTSyntax is turned on .
Sometimes GADTSyntax is sufficient and GADTs is not even needed .
chkGADTsGadtConDecl :: CheckNode GadtConDecl
chkGADTsGadtConDecl = conditional chkGADTsGadtConDecl' GADTSyntax
| Checks a data constructor declaration if GADTs or ExistentialQuantification is turned on .
This function is responsible for checking ExistentialQuantification as well .
chkConDeclForExistentials :: CheckNode ConDecl
chkConDeclForExistentials = conditionalAny chkConDeclForExistentials' [GADTs, ExistentialQuantification]
| Checks whether a GADTs - style constructor declaration requires GADTs .
If all data constructors are vanilla Haskell 98 data constructors
, we add MissingInformation .
chkGADTsGadtConDecl' :: CheckNode GadtConDecl
chkGADTsGadtConDecl' conDecl = do
let conNames = conDecl ^. (gadtConNames & annListElems)
mres <- mapM (runMaybeT . isVanillaDataConNameM) conNames
addEvidence_ GADTSyntax conDecl
if | any isNothing mres ->
addRelationMI (GADTs `lOr` ExistentialQuantification) conDecl
| any (not . fromJust) mres ->
addRelation (GADTs `lOr` ExistentialQuantification) conDecl
| otherwise -> return conDecl
| Extracts the name from a ConDecl , and checks whether it is a vanilla
data constructor . Ifthe lookup fails , adds MissingInformation .
chkConDeclForExistentials' :: CheckNode ConDecl
chkConDeclForExistentials' conDecl =
fromMaybeTM (addRelationMI (GADTs `lOr` ExistentialQuantification) conDecl) $
case conDecl ^. element of
UConDecl _ _ n _ -> chkName n
URecordDecl _ _ n _ -> chkName n
UInfixConDecl _ _ _ op _ -> chkName (op ^. operatorName)
where chkName :: HasNameInfo' n => n -> MaybeT ExtMonad ConDecl
chkName n = do
isVanilla <- isVanillaDataConNameM n
if isVanilla
then return conDecl
else lift . addRelation (GADTs `lOr` ExistentialQuantification) $ conDecl
|
61b040e4e41edc94ede0c3b0523a13ca59d066adde528784efa1a691d479b803 | ijp/guildhall | hash-utils.scm | ;;; hash-utils.scm ---
Copyright ( C ) 2009 < >
Author : < >
;; This program is free software, you can redistribute it and/or
modify it under the terms of the new - style BSD license .
You should have received a copy of the BSD license along with this
;; program. If not, see <>.
;;; Commentary:
;;; Code:
#!r6rs
(library (guildhall spells hash-utils)
(export hash-combine
hash-fold)
(import (rnrs base)
(rnrs arithmetic fixnums))
(define hash-bits (- (fixnum-width) 1))
(define hash-mask (fxnot (fxarithmetic-shift -1 hash-bits)))
(define (hash-combine h1 h2)
(fxxor (fxrotate-bit-field (fxand h1 hash-mask) 0 hash-bits 7)
(fxrotate-bit-field (fxand h2 hash-mask) 0 hash-bits (- hash-bits 6))))
(define (hash-fold hasher initial-hash lst)
(let loop ((hash initial-hash) (lst lst))
(if (null? lst)
hash
(loop (hash-combine hash (hasher (car lst)))
(cdr lst)))))
)
| null | https://raw.githubusercontent.com/ijp/guildhall/2fe2cc539f4b811bbcd69e58738db03eb5a2b778/guildhall/spells/hash-utils.scm | scheme | hash-utils.scm ---
This program is free software, you can redistribute it and/or
program. If not, see <>.
Commentary:
Code: |
Copyright ( C ) 2009 < >
Author : < >
modify it under the terms of the new - style BSD license .
You should have received a copy of the BSD license along with this
#!r6rs
(library (guildhall spells hash-utils)
(export hash-combine
hash-fold)
(import (rnrs base)
(rnrs arithmetic fixnums))
(define hash-bits (- (fixnum-width) 1))
(define hash-mask (fxnot (fxarithmetic-shift -1 hash-bits)))
(define (hash-combine h1 h2)
(fxxor (fxrotate-bit-field (fxand h1 hash-mask) 0 hash-bits 7)
(fxrotate-bit-field (fxand h2 hash-mask) 0 hash-bits (- hash-bits 6))))
(define (hash-fold hasher initial-hash lst)
(let loop ((hash initial-hash) (lst lst))
(if (null? lst)
hash
(loop (hash-combine hash (hasher (car lst)))
(cdr lst)))))
)
|
88fa68f92e5b03ad57e88c0d7a0244d3e6cb4eceab11bf6944a91b22d9346d86 | HunterYIboHu/htdp2-solution | ex183-list-to-cons.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-abbr-reader.ss" "lang")((modname ex183-list-to-cons) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
;; only use cons expr
(check-expect (cons "a" (list 0 #false))
(cons "a" (cons 0 (cons #false '()))))
(check-expect (list (cons 1 (cons 13 '())))
(cons (cons 1 (cons 13 '())) '()))
(check-expect (cons (list 1 (list 13 '())) '())
(cons (cons 1
(cons
(cons 13
(cons '()
'()))
'()))
'()))
(check-expect (list '() '() (cons 1 '()))
(cons '()
(cons '()
(cons (cons 1 '())
'()))))
(check-expect (cons "a" (cons (list 1) (list #false '())))
(cons "a" (cons (cons 1 '()) (cons #false (cons '() '())))))
;; only use list expr
(check-expect (cons "a" (list 0 #false))
(list "a" 0 #false))
(check-expect (list (cons 1 (cons 13 '())))
(list (list 1 13)))
(check-expect (cons (list 1 (list 13 '())) '())
(list (list 1 (list 13 '()))))
(check-expect (list '() '() (cons 1 '()))
(list '() '() (list 1)))
(check-expect (cons "a" (cons (list 1) (list #false '())))
(list "a" (list 1) #false '()))
| null | https://raw.githubusercontent.com/HunterYIboHu/htdp2-solution/6182b4c2ef650ac7059f3c143f639d09cd708516/Chapter2/Section12-design-by-composed/ex183-list-to-cons.rkt | racket | about the language level of this file in a form that our tools can easily process.
only use cons expr
only use list expr | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-abbr-reader.ss" "lang")((modname ex183-list-to-cons) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
(check-expect (cons "a" (list 0 #false))
(cons "a" (cons 0 (cons #false '()))))
(check-expect (list (cons 1 (cons 13 '())))
(cons (cons 1 (cons 13 '())) '()))
(check-expect (cons (list 1 (list 13 '())) '())
(cons (cons 1
(cons
(cons 13
(cons '()
'()))
'()))
'()))
(check-expect (list '() '() (cons 1 '()))
(cons '()
(cons '()
(cons (cons 1 '())
'()))))
(check-expect (cons "a" (cons (list 1) (list #false '())))
(cons "a" (cons (cons 1 '()) (cons #false (cons '() '())))))
(check-expect (cons "a" (list 0 #false))
(list "a" 0 #false))
(check-expect (list (cons 1 (cons 13 '())))
(list (list 1 13)))
(check-expect (cons (list 1 (list 13 '())) '())
(list (list 1 (list 13 '()))))
(check-expect (list '() '() (cons 1 '()))
(list '() '() (list 1)))
(check-expect (cons "a" (cons (list 1) (list #false '())))
(list "a" (list 1) #false '()))
|
4781c2651745d4595feddf79f42603f053043ba85081991f56b615bb5bd2daa0 | orbitz/scow | scow_server_state.mli | open Core.Std
open Async.Std
module Make :
functor (Statem : Scow_statem.S) ->
functor (Log : Scow_log.S) ->
functor (Store : Scow_store.S) ->
functor (Transport : Scow_transport.S with type Node.t = Store.node) ->
sig
type msg = Scow_server_msg.Make(Statem)(Log)(Transport).t
type op = Scow_server_msg.Make(Statem)(Log)(Transport).op
type errors =
[ `Invalid_log
| `Invalid_term_store
| `Invalid_vote_store
| `Not_found of Scow_log_index.t
| `Transport_error
]
type 's handler =
msg Gen_server.t ->
's ->
op ->
('s, errors) Deferred.Result.t
type t
module Append_entry : sig
type errors = [ `Not_master | `Append_failed | `Invalid_log ]
type ret = (Statem.ret, errors) Result.t
type t = { log_index : Scow_log_index.t
; ret : ret Ivar.t
}
end
module Init_args : sig
type t_ = { me : Transport.Node.t
; nodes : Transport.Node.t list
; statem : Statem.t
; transport : Transport.t
; log : Log.t
; store : Store.t
; timeout : Time.Span.t
; timeout_rand : Time.Span.t
; notify : Scow_notify.t
; follower : t handler
; candidate : t handler
; leader : t handler
}
type t = t_
end
val create : Init_args.t -> (t, [> `Invalid_vote_store | `Invalid_term_store ]) Deferred.Result.t
val handler : t -> t handler
val current_term : t -> Scow_term.t
val set_current_term : Scow_term.t -> t -> t
val transport : t -> Transport.t
val log : t -> Log.t
val store : t -> Store.t
val statem : t -> Statem.t
val notify : t -> Scow_notify.t
val commit_idx : t -> Scow_log_index.t
val set_commit_idx : Scow_log_index.t -> t -> t
val compute_highest_match_idx : t -> Scow_log_index.t
val last_applied : t -> Scow_log_index.t
val set_last_applied : Scow_log_index.t -> t -> t
val me : t -> Transport.Node.t
val nodes : t -> Transport.Node.t list
val leader : t -> Transport.Node.t option
val set_leader : Transport.Node.t option -> t -> t
val voted_for : t -> Transport.Node.t option
val set_voted_for : Transport.Node.t option -> t -> t
val set_heartbeat_timeout : msg Gen_server.t -> t -> t
val set_election_timeout : msg Gen_server.t -> t -> t
val cancel_election_timeout : t -> t
val cancel_heartbeat_timeout : t -> t
val set_state_follower : t -> t
val set_state_candidate : t -> t
val set_state_leader : t -> t
val record_vote : Transport.Node.t -> t -> t
val count_votes : t -> int
val clear_votes : t -> t
val add_append_entry : Append_entry.t -> t -> t
val remove_append_entry : Scow_log_index.t -> t -> (Append_entry.t option * t)
val remove_all_append_entries : t -> (Append_entry.t list * t)
val next_idx : Transport.Node.t -> t -> Scow_log_index.t option
val set_next_idx : Transport.Node.t -> Scow_log_index.t -> t -> t
val clear_next_idx : t -> t
val match_idx : Transport.Node.t -> t -> Scow_log_index.t option
val set_match_idx : Transport.Node.t -> Scow_log_index.t -> t -> t
val clear_match_idx : t -> t
end
| null | https://raw.githubusercontent.com/orbitz/scow/feb633ef94824f44f0ffc679a548f97288c8ae48/lib/scow/scow_server_state.mli | ocaml | open Core.Std
open Async.Std
module Make :
functor (Statem : Scow_statem.S) ->
functor (Log : Scow_log.S) ->
functor (Store : Scow_store.S) ->
functor (Transport : Scow_transport.S with type Node.t = Store.node) ->
sig
type msg = Scow_server_msg.Make(Statem)(Log)(Transport).t
type op = Scow_server_msg.Make(Statem)(Log)(Transport).op
type errors =
[ `Invalid_log
| `Invalid_term_store
| `Invalid_vote_store
| `Not_found of Scow_log_index.t
| `Transport_error
]
type 's handler =
msg Gen_server.t ->
's ->
op ->
('s, errors) Deferred.Result.t
type t
module Append_entry : sig
type errors = [ `Not_master | `Append_failed | `Invalid_log ]
type ret = (Statem.ret, errors) Result.t
type t = { log_index : Scow_log_index.t
; ret : ret Ivar.t
}
end
module Init_args : sig
type t_ = { me : Transport.Node.t
; nodes : Transport.Node.t list
; statem : Statem.t
; transport : Transport.t
; log : Log.t
; store : Store.t
; timeout : Time.Span.t
; timeout_rand : Time.Span.t
; notify : Scow_notify.t
; follower : t handler
; candidate : t handler
; leader : t handler
}
type t = t_
end
val create : Init_args.t -> (t, [> `Invalid_vote_store | `Invalid_term_store ]) Deferred.Result.t
val handler : t -> t handler
val current_term : t -> Scow_term.t
val set_current_term : Scow_term.t -> t -> t
val transport : t -> Transport.t
val log : t -> Log.t
val store : t -> Store.t
val statem : t -> Statem.t
val notify : t -> Scow_notify.t
val commit_idx : t -> Scow_log_index.t
val set_commit_idx : Scow_log_index.t -> t -> t
val compute_highest_match_idx : t -> Scow_log_index.t
val last_applied : t -> Scow_log_index.t
val set_last_applied : Scow_log_index.t -> t -> t
val me : t -> Transport.Node.t
val nodes : t -> Transport.Node.t list
val leader : t -> Transport.Node.t option
val set_leader : Transport.Node.t option -> t -> t
val voted_for : t -> Transport.Node.t option
val set_voted_for : Transport.Node.t option -> t -> t
val set_heartbeat_timeout : msg Gen_server.t -> t -> t
val set_election_timeout : msg Gen_server.t -> t -> t
val cancel_election_timeout : t -> t
val cancel_heartbeat_timeout : t -> t
val set_state_follower : t -> t
val set_state_candidate : t -> t
val set_state_leader : t -> t
val record_vote : Transport.Node.t -> t -> t
val count_votes : t -> int
val clear_votes : t -> t
val add_append_entry : Append_entry.t -> t -> t
val remove_append_entry : Scow_log_index.t -> t -> (Append_entry.t option * t)
val remove_all_append_entries : t -> (Append_entry.t list * t)
val next_idx : Transport.Node.t -> t -> Scow_log_index.t option
val set_next_idx : Transport.Node.t -> Scow_log_index.t -> t -> t
val clear_next_idx : t -> t
val match_idx : Transport.Node.t -> t -> Scow_log_index.t option
val set_match_idx : Transport.Node.t -> Scow_log_index.t -> t -> t
val clear_match_idx : t -> t
end
| |
371a8d413e740a28dfa570153c805bcc2b9d123cadaba65a515eccd4d294f430 | diagrams/diagrams-contrib | CirclePacking.hs | # LANGUAGE FlexibleContexts #
-----------------------------------------------------------------------------
-- |
-- Module : Diagrams.TwoD.Layout.CirclePacking
Copyright : ( c ) 2012
-- License : BSD-style (see LICENSE)
-- Maintainer :
--
-- A method for laying out diagrams using a circle packing algorithm. For
-- details on the algorithm, see "Optimisation.CirclePacking" in the module
-- circle-packing.
--
-- Here is an example:
--
-- > import Optimisation.CirclePacking
-- > import Diagrams.TwoD.Vector (e)
-- >
-- > colorize = zipWith fc $
> cycle [ red , blue , yellow , magenta , cyan , bisque , firebrick , indigo ]
-- >
-- > objects = colorize $
-- > [ circle r | r <- [0.1,0.2..1.6] ] ++
-- > [ hexagon r | r <- [0.1,0.2..0.7] ] ++
> [ decagon r | r < - [ 0.1,0.2 .. 0.7 ] ]
-- >
-- > -- Just a approximation, diagram objects do not have an exact radius
> radiusApproximation o = maximum [ radius ( e ( alpha @@ turn ) ) o | alpha < - [ 0,0.1 .. 1.0 ] ]
-- >
-- > circlePackingExample =
-- > position $ map (\(o,(x,y)) -> (p2 (x,y),o)) $
> packCircles radiusApproximation objects
--
-- <<diagrams/src_Diagrams_TwoD_Layout_CirclePacking_circlePackingExample.svg#diagram=circlePackingExample&width=400>>
module Diagrams.TwoD.Layout.CirclePacking
( renderCirclePacking
, createCirclePacking
, RadiusFunction
, approxRadius
, circleRadius ) where
import Optimisation.CirclePacking
import Diagrams.Core.Envelope
import Diagrams.Prelude
import Diagrams.TwoD.Vector (e)
-- | Combines the passed objects, whose radius is estimated using the given
-- 'RadiusFunction', so that they do not overlap (according to the radius
-- function) and otherwise form, as far as possible, a tight circle.
renderCirclePacking :: (Monoid' m, Floating (N b), Ord (N b)) => RadiusFunction b m -> [QDiagram b V2 (N b) m] -> QDiagram b V2 (N b) m
renderCirclePacking radiusFunc = createCirclePacking radiusFunc id
toFractional :: (Real a, Fractional b) => a -> b
toFractional = fromRational . toRational
-- | More general version of 'renderCirclePacking'. You can use this if you
-- have more information available in the values of type @a@ that allows you to
-- calculate the radius better (or even exactly).
createCirclePacking :: (Monoid' m, Ord (N b), Floating (N b)) => (a -> Double) -> (a -> QDiagram b V2 (N b) m) -> [a] -> QDiagram b V2 (N b) m
createCirclePacking radiusFunc diagramFunc =
position .
map (\(o,(x,y)) -> (p2 (toFractional x, toFractional y), diagramFunc o)) .
packCircles radiusFunc
-- | The type of radius-estimating functions for Diagrams such as
-- 'approxRadius' and 'circleRadius'. When you can calculate the radius better,
-- but not any more once you converted your data to a diagram, use 'createCirclePacking'.
type RadiusFunction b m = QDiagram b V2 (N b) m -> Double
-- | A safe approximation. Calculates the outer radius of the smallest
-- axis-aligned polygon with the given number of edges that contains the
object . A parameter of 4 up to 8 should be sufficient for most applications .
approxRadius :: (Monoid' m, Floating (N b), Real (N b), Ord (N b)) => Int -> RadiusFunction b m
approxRadius n =
if n < 3
then error "circleRadius: n needs to be at least 3"
else \o -> outByIn * maximum [ toFractional (envelopeS (e alpha) o)
| i <- [1..n]
, let alpha = (fromIntegral i + 0.5) / fromIntegral n @@ turn
]
incircle radius : a / ( 2 * tan ( tau / n ) )
outcircle radius : a / ( 2 * sin ( tau /n ) )
-- hence factor is : out/in = tan (tau/n) / sin (tau/n)
where
outByIn = Prelude.tan (pi / (2 * fromIntegral n)) / sin (pi / (2 * fromIntegral n))
--
-- | An unsafe approximation. This is the radius of the largest circle that
-- fits in the rectangular bounding box of the object, so it may be too small.
-- It is, however, exact for circles, and there is no function that is safe for
-- all diagrams and exact for circles.
circleRadius :: (Monoid' m, Floating (N b), Real (N b)) => RadiusFunction b m
circleRadius o = toFractional $ maximum [ envelopeS (e (alpha @@ turn)) o | alpha <- [0,0.25,0.5,0.75]]
| null | https://raw.githubusercontent.com/diagrams/diagrams-contrib/63f2aec4c070d1f558a5fcefd67b5315d64faaf5/src/Diagrams/TwoD/Layout/CirclePacking.hs | haskell | ---------------------------------------------------------------------------
|
Module : Diagrams.TwoD.Layout.CirclePacking
License : BSD-style (see LICENSE)
Maintainer :
A method for laying out diagrams using a circle packing algorithm. For
details on the algorithm, see "Optimisation.CirclePacking" in the module
circle-packing.
Here is an example:
> import Optimisation.CirclePacking
> import Diagrams.TwoD.Vector (e)
>
> colorize = zipWith fc $
>
> objects = colorize $
> [ circle r | r <- [0.1,0.2..1.6] ] ++
> [ hexagon r | r <- [0.1,0.2..0.7] ] ++
>
> -- Just a approximation, diagram objects do not have an exact radius
>
> circlePackingExample =
> position $ map (\(o,(x,y)) -> (p2 (x,y),o)) $
<<diagrams/src_Diagrams_TwoD_Layout_CirclePacking_circlePackingExample.svg#diagram=circlePackingExample&width=400>>
| Combines the passed objects, whose radius is estimated using the given
'RadiusFunction', so that they do not overlap (according to the radius
function) and otherwise form, as far as possible, a tight circle.
| More general version of 'renderCirclePacking'. You can use this if you
have more information available in the values of type @a@ that allows you to
calculate the radius better (or even exactly).
| The type of radius-estimating functions for Diagrams such as
'approxRadius' and 'circleRadius'. When you can calculate the radius better,
but not any more once you converted your data to a diagram, use 'createCirclePacking'.
| A safe approximation. Calculates the outer radius of the smallest
axis-aligned polygon with the given number of edges that contains the
hence factor is : out/in = tan (tau/n) / sin (tau/n)
| An unsafe approximation. This is the radius of the largest circle that
fits in the rectangular bounding box of the object, so it may be too small.
It is, however, exact for circles, and there is no function that is safe for
all diagrams and exact for circles. | # LANGUAGE FlexibleContexts #
Copyright : ( c ) 2012
> cycle [ red , blue , yellow , magenta , cyan , bisque , firebrick , indigo ]
> [ decagon r | r < - [ 0.1,0.2 .. 0.7 ] ]
> radiusApproximation o = maximum [ radius ( e ( alpha @@ turn ) ) o | alpha < - [ 0,0.1 .. 1.0 ] ]
> packCircles radiusApproximation objects
module Diagrams.TwoD.Layout.CirclePacking
( renderCirclePacking
, createCirclePacking
, RadiusFunction
, approxRadius
, circleRadius ) where
import Optimisation.CirclePacking
import Diagrams.Core.Envelope
import Diagrams.Prelude
import Diagrams.TwoD.Vector (e)
renderCirclePacking :: (Monoid' m, Floating (N b), Ord (N b)) => RadiusFunction b m -> [QDiagram b V2 (N b) m] -> QDiagram b V2 (N b) m
renderCirclePacking radiusFunc = createCirclePacking radiusFunc id
toFractional :: (Real a, Fractional b) => a -> b
toFractional = fromRational . toRational
createCirclePacking :: (Monoid' m, Ord (N b), Floating (N b)) => (a -> Double) -> (a -> QDiagram b V2 (N b) m) -> [a] -> QDiagram b V2 (N b) m
createCirclePacking radiusFunc diagramFunc =
position .
map (\(o,(x,y)) -> (p2 (toFractional x, toFractional y), diagramFunc o)) .
packCircles radiusFunc
type RadiusFunction b m = QDiagram b V2 (N b) m -> Double
object . A parameter of 4 up to 8 should be sufficient for most applications .
approxRadius :: (Monoid' m, Floating (N b), Real (N b), Ord (N b)) => Int -> RadiusFunction b m
approxRadius n =
if n < 3
then error "circleRadius: n needs to be at least 3"
else \o -> outByIn * maximum [ toFractional (envelopeS (e alpha) o)
| i <- [1..n]
, let alpha = (fromIntegral i + 0.5) / fromIntegral n @@ turn
]
incircle radius : a / ( 2 * tan ( tau / n ) )
outcircle radius : a / ( 2 * sin ( tau /n ) )
where
outByIn = Prelude.tan (pi / (2 * fromIntegral n)) / sin (pi / (2 * fromIntegral n))
circleRadius :: (Monoid' m, Floating (N b), Real (N b)) => RadiusFunction b m
circleRadius o = toFractional $ maximum [ envelopeS (e (alpha @@ turn)) o | alpha <- [0,0.25,0.5,0.75]]
|
ac6d84327a8ba4fa53abb0c96c2b1d7ade90bf34f7d950b15b6e7871f7050255 | basho/riak_test | riak_kv_eleveldb_backend_intercepts.erl | %% -------------------------------------------------------------------
%%
Copyright ( c ) 2015 Basho Technologies , Inc.
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%%-------------------------------------------------------------------
-module(riak_kv_eleveldb_backend_intercepts).
-compile(export_all).
-include("intercept.hrl").
-define(M, riak_kv_eleveldb_backend_orig).
corrupting_put(Bucket, Key, IndexSpecs, Val0, ModState) ->
Val =
case random:uniform(20) of
10 ->
corrupt_binary(Val0);
_ -> Val0
end,
?M:put_orig(Bucket, Key, IndexSpecs, Val, ModState).
corrupting_get(Bucket, Key, ModState) ->
case ?M:get_orig(Bucket, Key, ModState) of
{ok, BinVal0, UpdModState} ->
BinVal =
case random:uniform(20) of
10 ->
corrupt_binary(BinVal0);
_ -> BinVal0
end,
{ok, BinVal, UpdModState};
Else -> Else
end.
corrupt_binary(O) ->
crypto:rand_bytes(byte_size(O)).
batch_put(Context, Values, IndexSpecs, State) ->
Tally = riak_core_metadata:get(
{riak_test, backend_intercept},
self(),
[{default, 0}]),
riak_core_metadata:put(
{riak_test, backend_intercept},
self(),
Tally+1),
?M:batch_put_orig(Context, Values, IndexSpecs, State).
| null | https://raw.githubusercontent.com/basho/riak_test/8170137b283061ba94bc85bf42575021e26c929d/intercepts/riak_kv_eleveldb_backend_intercepts.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
------------------------------------------------------------------- | Copyright ( c ) 2015 Basho Technologies , Inc.
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(riak_kv_eleveldb_backend_intercepts).
-compile(export_all).
-include("intercept.hrl").
-define(M, riak_kv_eleveldb_backend_orig).
corrupting_put(Bucket, Key, IndexSpecs, Val0, ModState) ->
Val =
case random:uniform(20) of
10 ->
corrupt_binary(Val0);
_ -> Val0
end,
?M:put_orig(Bucket, Key, IndexSpecs, Val, ModState).
corrupting_get(Bucket, Key, ModState) ->
case ?M:get_orig(Bucket, Key, ModState) of
{ok, BinVal0, UpdModState} ->
BinVal =
case random:uniform(20) of
10 ->
corrupt_binary(BinVal0);
_ -> BinVal0
end,
{ok, BinVal, UpdModState};
Else -> Else
end.
corrupt_binary(O) ->
crypto:rand_bytes(byte_size(O)).
batch_put(Context, Values, IndexSpecs, State) ->
Tally = riak_core_metadata:get(
{riak_test, backend_intercept},
self(),
[{default, 0}]),
riak_core_metadata:put(
{riak_test, backend_intercept},
self(),
Tally+1),
?M:batch_put_orig(Context, Values, IndexSpecs, State).
|
dd4613461cf942a7b08726096e5ba49731d4a8e39c3e816a88fa4a3edc731451 | ocaml-multicore/tezos | node_wrapper.ml | module Lib = struct
type version = string
type t = Hacl of version | Secp256k1 of version
let to_string = function Hacl _ -> "hacl" | Secp256k1 _ -> "secp256k1"
let to_js_lib = function
| Hacl _ -> "hacl-wasm"
| Secp256k1 _ -> "@nomadic-labs/secp256k1wasm"
let version = function Hacl v | Secp256k1 v -> v
let to_load_ident x = Printf.sprintf "load_%s" (to_string x)
let to_js x =
let load_ident = to_load_ident x in
let js_lib = to_js_lib x in
match x with
| Hacl _ ->
Printf.sprintf
{|
function %s() {
/* We have to cheat to avoid the noise from hacl-wasm */
var old_log = console.log;
console.log = function () {};
var loader = require('%s');
console.log = old_log;
return loader.getInitializedHaclModule().then(function(loaded){
console.log('hacl loaded');
global._HACL = loaded})
}
|}
load_ident
js_lib
| Secp256k1 _ ->
Printf.sprintf
{|
function %s() {
var loader = require('%s');
return loader().then(function(loaded) {
console.log('secp256k1 loaded');
global._SECP256K1 = loaded})
}
|}
load_ident
js_lib
end
(* JavaScript files to execute, collected by [Arg.parse] below; stored in
   reverse command-line order and re-reversed before use. *)
let files = ref []
(* Libraries ([Lib.t] values) to install and preload, in reverse order. *)
let libs = ref []
(* Extra arguments forwarded to the scripts, in reverse order. *)
let args = ref []
(* Parse the command line: [--hacl]/[--secp256k1] select libraries (with
   their npm version), [--] forwards the remaining words to the scripts,
   and anonymous arguments are split on their extension: [.js] files are
   scripts to run, anything else is forwarded as a script argument. *)
let () =
  let usage =
    Printf.sprintf
      "%s [FLAGS] FILE.js -- args"
      (Filename.basename Sys.executable_name)
  in
  let anon arg =
    match Filename.extension arg with
    | ".js" -> files := arg :: !files
    | _ -> args := arg :: !args
  in
  let specs =
    [
      ( "--hacl",
        Arg.String (fun v -> libs := Lib.Hacl v :: !libs),
        "Load hacl-wasm" );
      ( "--secp256k1",
        Arg.String (fun v -> libs := Lib.Secp256k1 v :: !libs),
        "Load @nomadic-labs/secp256k1wasm" );
      ( "--",
        Arg.Rest_all (fun rest -> args := List.rev_append rest !args),
        "args to pass to the scripts" );
    ]
  in
  Arg.parse specs anon usage
(* Write a minimal [package.json] and an [.npmrc] in the current directory
   so that the subsequent [npm install] calls work. *)
let setup () =
  let write_file path contents =
    let oc = open_out_bin path in
    output_string oc contents ;
    close_out oc
  in
  let package_json =
    {|
{
"private": true,
"type": "commonjs",
"description": "n/a",
"license": "n/a"
}
|}
  in
  let npmrc =
    "@nomadic-labs:registry=/"
  in
  write_file "package.json" package_json ;
  write_file ".npmrc" npmrc
(* Install [lib] at its pinned version via [npm install]; fail hard if the
   command exits with a non-zero status. *)
let install lib =
  let command =
    Printf.sprintf "npm install %s@%s" (Lib.to_js_lib lib) (Lib.version lib)
  in
  if Sys.command command <> 0 then
    failwith
      (Printf.sprintf
         "unable to install %s (%s)"
         (Lib.to_js_lib lib)
         (Lib.to_string lib))
(* JS source of a function [run_<i>] that executes [file] with a faked
   [process.argv] of ["node"; file; args...].  An implicit file name is
   prefixed with "./" so that [require] resolves it as a path rather
   than as an npm package; the original argv is restored afterwards. *)
let run i file args =
  let argv = "node" :: file :: args in
  let file =
    if Filename.is_implicit file then Filename.concat "." file else file
  in
  (* Render argv as a comma-separated list of JS string literals
     (OCaml's %S escaping is close enough to JS for these values). *)
  let argv = String.concat ", " (List.map (Printf.sprintf "%S") argv) in
  Printf.sprintf
    {|
function run_%d () {
console.log('Ready to run %s with argv = [ %s ]');
var old_argv = process.argv;
process.argv = [%s];
require('%s');
process.argv = old_argv;
}
|}
    i
    file
    argv
    argv
    file
(* Entry point.
   1. Re-establish command-line order (the accumulators were built in
      reverse).
   2. Build the JS driver in a buffer: an uncaughtException logger, one
      loader function per requested library, one runner function per
      script, then a promise chain that loads every library first and
      runs every script in order; any rejection is logged and the
      process exits with status 1.
   3. If any library was requested, write package.json/.npmrc ([setup])
      and npm-install each library.
   4. Pipe the driver into a [node] child process and exit with node's
      own exit status (1 on signal/stop). *)
let () =
  let libs = List.rev !libs in
  let files = List.rev !files in
  let args = List.rev !args in
  let b = Buffer.create 1024 in
  Buffer.add_string
    b
    {|process.on('uncaughtException', function (error) {
console.log(error.stack);
});|} ;
  List.iter (fun lib -> Buffer.add_string b (Lib.to_js lib)) libs ;
  List.iteri (fun i file -> Buffer.add_string b (run i file args)) files ;
  let promises = List.map Lib.to_load_ident libs in
  Buffer.add_string b "Promise.resolve('Loading')" ;
  List.iter
    (fun p -> Buffer.add_string b (Printf.sprintf ".then(%s)" p))
    promises ;
  List.iteri
    (fun i _file -> Buffer.add_string b (Printf.sprintf ".then(run_%d)" i))
    files ;
  Buffer.add_string
    b
    ".catch(function (e) { console.log(e); process.exit(1) })\n" ;
  print_newline () ;
  (match libs with
  | [] -> ()
  | _ :: _ ->
      setup () ;
      List.iter install libs) ;
  let oc = Unix.open_process_out "node" in
  Buffer.output_buffer oc b ;
  flush_all () ;
  match Unix.close_process_out oc with
  | WEXITED x -> exit x
  | WSIGNALED _ -> exit 1
  | WSTOPPED _ -> exit 1
| null | https://raw.githubusercontent.com/ocaml-multicore/tezos/e4fd21a1cb02d194b3162ab42d512b7c985ee8a9/src/tooling/node_wrapper.ml | ocaml | module Lib = struct
type version = string
type t = Hacl of version | Secp256k1 of version
let to_string = function Hacl _ -> "hacl" | Secp256k1 _ -> "secp256k1"
let to_js_lib = function
| Hacl _ -> "hacl-wasm"
| Secp256k1 _ -> "@nomadic-labs/secp256k1wasm"
let version = function Hacl v | Secp256k1 v -> v
let to_load_ident x = Printf.sprintf "load_%s" (to_string x)
let to_js x =
let load_ident = to_load_ident x in
let js_lib = to_js_lib x in
match x with
| Hacl _ ->
Printf.sprintf
{|
function %s() {
/* We have to cheat to avoid the noise from hacl-wasm */
var old_log = console.log;
console.log = function () {};
var loader = require('%s');
console.log = old_log;
return loader.getInitializedHaclModule().then(function(loaded){
console.log('hacl loaded');
global._HACL = loaded})
}
|}
load_ident
js_lib
| Secp256k1 _ ->
Printf.sprintf
{|
function %s() {
var loader = require('%s');
return loader().then(function(loaded) {
console.log('secp256k1 loaded');
global._SECP256K1 = loaded})
}
|}
load_ident
js_lib
end
let files = ref []
let libs = ref []
let args = ref []
let () =
Arg.parse
[
("--hacl", String (fun s -> libs := Lib.Hacl s :: !libs), "Load hacl-wasm");
( "--secp256k1",
String (fun s -> libs := Lib.Secp256k1 s :: !libs),
"Load @nomadic-labs/secp256k1wasm" );
( "--",
Rest_all (fun l -> args := List.rev_append l !args),
"args to pass to the scripts" );
]
(fun a ->
match Filename.extension a with
| ".js" -> files := a :: !files
| _ -> args := a :: !args)
(Printf.sprintf
"%s [FLAGS] FILE.js -- args"
(Filename.basename Sys.executable_name))
let setup () =
let package_json =
{|
{
"private": true,
"type": "commonjs",
"description": "n/a",
"license": "n/a"
}
|}
in
let npmrc =
"@nomadic-labs:registry=/"
in
let write_file name content =
let oc = open_out_bin name in
output_string oc content ;
close_out oc
in
write_file "package.json" package_json ;
write_file ".npmrc" npmrc
let install x =
let cmd =
Printf.sprintf "npm install %s@%s" (Lib.to_js_lib x) (Lib.version x)
in
match Sys.command cmd with
| 0 -> ()
| _ ->
failwith
(Printf.sprintf
"unable to install %s (%s)"
(Lib.to_js_lib x)
(Lib.to_string x))
let run i file args =
let argv = "node" :: file :: args in
let file =
if Filename.is_implicit file then Filename.concat "." file else file
in
let argv = String.concat ", " (List.map (Printf.sprintf "%S") argv) in
Printf.sprintf
{|
function run_%d () {
console.log('Ready to run %s with argv = [ %s ]');
var old_argv = process.argv;
process.argv = [%s];
require('%s');
process.argv = old_argv;
}
|}
i
file
argv
argv
file
let () =
let libs = List.rev !libs in
let files = List.rev !files in
let args = List.rev !args in
let b = Buffer.create 1024 in
Buffer.add_string
b
{|process.on('uncaughtException', function (error) {
console.log(error.stack);
});|} ;
List.iter (fun lib -> Buffer.add_string b (Lib.to_js lib)) libs ;
List.iteri (fun i file -> Buffer.add_string b (run i file args)) files ;
let promises = List.map Lib.to_load_ident libs in
Buffer.add_string b "Promise.resolve('Loading')" ;
List.iter
(fun p -> Buffer.add_string b (Printf.sprintf ".then(%s)" p))
promises ;
List.iteri
(fun i _file -> Buffer.add_string b (Printf.sprintf ".then(run_%d)" i))
files ;
Buffer.add_string
b
".catch(function (e) { console.log(e); process.exit(1) })\n" ;
print_newline () ;
(match libs with
| [] -> ()
| _ :: _ ->
setup () ;
List.iter install libs) ;
let oc = Unix.open_process_out "node" in
Buffer.output_buffer oc b ;
flush_all () ;
match Unix.close_process_out oc with
| WEXITED x -> exit x
| WSIGNALED _ -> exit 1
| WSTOPPED _ -> exit 1
| |
04ba4b3f371728cbe245fa11dfd8058a526b472b29e03e6e13946fc0e740d67c | GlideAngle/flare-timing | Rational.hs | module Internal.Ellipsoid.PointToPoint.Rational
( distance
, inverse
, azimuthFwd
, azimuthRev
) where
import Data.UnitsOfMeasure (u)
import Data.UnitsOfMeasure.Internal (Quantity(..))
import Flight.LatLng (LatLng(..), AzimuthFwd, AzimuthRev)
import Flight.LatLng.Rational (Epsilon(..))
import Flight.Distance (QTaskDistance, SpanLatLng)
import Flight.Earth.Ellipsoid
( Ellipsoid(..)
, GeodeticInverse(..), GeodeticAccuracy(..)
)
import qualified Flight.Earth.Ellipsoid as E (Andoyer(..))
import Flight.Geodesy (EarthMath(..), InverseProblem(..), InverseSolution(..))
import qualified Internal.Ellipsoid.PointToPoint.Andoyer.Rational as A
import qualified Internal.Ellipsoid.PointToPoint.Vincenty.Rational as V
-- | Pick the rational implementation of point-to-point distance for the
-- chosen earth model.  Only ellipsoid models are meaningful here; the
-- flat-earth and sphere models are rejected outright.
distance
    :: (Real a, Fractional a, Show a)
    => EarthMath
    -> Ellipsoid a
    -> Epsilon
    -> SpanLatLng a
distance earthMath =
    case earthMath of
        Pythagorus -> error "Pythagorus on the Ellipsoid"
        Haversines -> error "Haversines on the Ellipsoid"
        Vincenty -> V.distance
        AndoyerLambert -> A.distance E.AndoyerLambert
        ForsytheAndoyerLambert -> A.distance E.ForsytheAndoyerLambert
        FsAndoyer -> A.distance E.FsAndoyer
-- | Pick the rational implementation of the inverse geodetic problem
-- for the chosen earth model.  Only ellipsoid models are meaningful
-- here; the flat-earth and sphere models are rejected outright.
inverse
    :: EarthMath
    -> Ellipsoid Rational
    -> Epsilon
    -> GeodeticAccuracy Rational
    -> InverseProblem (LatLng Rational [u| rad |])
    -> GeodeticInverse
        (InverseSolution
            (QTaskDistance Rational [u| m |])
            (Quantity Rational [u| rad |])
        )
inverse earthMath =
    case earthMath of
        Pythagorus -> error "Pythagorus on the Ellipsoid"
        Haversines -> error "Haversines on the Ellipsoid"
        Vincenty -> V.inverse
        AndoyerLambert -> A.inverse E.AndoyerLambert
        ForsytheAndoyerLambert -> A.inverse E.ForsytheAndoyerLambert
        FsAndoyer -> A.inverse E.FsAndoyer
-- | Pick the rational implementation of the forward azimuth for the
-- chosen earth model.  Only ellipsoid models are meaningful here.
azimuthFwd
    :: (Real a, Fractional a, Show a)
    => EarthMath
    -> Ellipsoid a
    -> Epsilon
    -> AzimuthFwd a
azimuthFwd earthMath =
    case earthMath of
        Pythagorus -> error "Pythagorus on the Ellipsoid"
        Haversines -> error "Haversines on the Ellipsoid"
        Vincenty -> V.azimuthFwd
        AndoyerLambert -> A.azimuthFwd E.AndoyerLambert
        ForsytheAndoyerLambert -> A.azimuthFwd E.ForsytheAndoyerLambert
        FsAndoyer -> A.azimuthFwd E.FsAndoyer
-- | Pick the rational implementation of the reverse azimuth for the
-- chosen earth model.  Only ellipsoid models are meaningful here.
azimuthRev
    :: (Real a, Fractional a, Show a)
    => EarthMath
    -> Ellipsoid a
    -> Epsilon
    -> AzimuthRev a
azimuthRev earthMath =
    case earthMath of
        Pythagorus -> error "Pythagorus on the Ellipsoid"
        Haversines -> error "Haversines on the Ellipsoid"
        Vincenty -> V.azimuthRev
        AndoyerLambert -> A.azimuthRev E.AndoyerLambert
        ForsytheAndoyerLambert -> A.azimuthRev E.ForsytheAndoyerLambert
        FsAndoyer -> A.azimuthRev E.FsAndoyer
| null | https://raw.githubusercontent.com/GlideAngle/flare-timing/27bd34c1943496987382091441a1c2516c169263/lang-haskell/earth/library/Internal/Ellipsoid/PointToPoint/Rational.hs | haskell | module Internal.Ellipsoid.PointToPoint.Rational
( distance
, inverse
, azimuthFwd
, azimuthRev
) where
import Data.UnitsOfMeasure (u)
import Data.UnitsOfMeasure.Internal (Quantity(..))
import Flight.LatLng (LatLng(..), AzimuthFwd, AzimuthRev)
import Flight.LatLng.Rational (Epsilon(..))
import Flight.Distance (QTaskDistance, SpanLatLng)
import Flight.Earth.Ellipsoid
( Ellipsoid(..)
, GeodeticInverse(..), GeodeticAccuracy(..)
)
import qualified Flight.Earth.Ellipsoid as E (Andoyer(..))
import Flight.Geodesy (EarthMath(..), InverseProblem(..), InverseSolution(..))
import qualified Internal.Ellipsoid.PointToPoint.Andoyer.Rational as A
import qualified Internal.Ellipsoid.PointToPoint.Vincenty.Rational as V
distance
:: (Real a, Fractional a, Show a)
=> EarthMath
-> Ellipsoid a
-> Epsilon
-> SpanLatLng a
distance Pythagorus = error "Pythagorus on the Ellipsoid"
distance Haversines = error "Haversines on the Ellipsoid"
distance Vincenty = V.distance
distance AndoyerLambert = A.distance E.AndoyerLambert
distance ForsytheAndoyerLambert = A.distance E.ForsytheAndoyerLambert
distance FsAndoyer = A.distance E.FsAndoyer
inverse
:: EarthMath
-> Ellipsoid Rational
-> Epsilon
-> GeodeticAccuracy Rational
-> InverseProblem (LatLng Rational [u| rad |])
-> GeodeticInverse
(InverseSolution
(QTaskDistance Rational [u| m |])
(Quantity Rational [u| rad |])
)
inverse Pythagorus = error "Pythagorus on the Ellipsoid"
inverse Haversines = error "Haversines on the Ellipsoid"
inverse Vincenty = V.inverse
inverse AndoyerLambert = A.inverse E.AndoyerLambert
inverse ForsytheAndoyerLambert = A.inverse E.ForsytheAndoyerLambert
inverse FsAndoyer = A.inverse E.FsAndoyer
azimuthFwd
:: (Real a, Fractional a, Show a)
=> EarthMath
-> Ellipsoid a
-> Epsilon
-> AzimuthFwd a
azimuthFwd Pythagorus = error "Pythagorus on the Ellipsoid"
azimuthFwd Haversines = error "Haversines on the Ellipsoid"
azimuthFwd Vincenty = V.azimuthFwd
azimuthFwd AndoyerLambert = A.azimuthFwd E.AndoyerLambert
azimuthFwd ForsytheAndoyerLambert = A.azimuthFwd E.ForsytheAndoyerLambert
azimuthFwd FsAndoyer = A.azimuthFwd E.FsAndoyer
azimuthRev
:: (Real a, Fractional a, Show a)
=> EarthMath
-> Ellipsoid a
-> Epsilon
-> AzimuthRev a
azimuthRev Pythagorus = error "Pythagorus on the Ellipsoid"
azimuthRev Haversines = error "Haversines on the Ellipsoid"
azimuthRev Vincenty = V.azimuthRev
azimuthRev AndoyerLambert = A.azimuthRev E.AndoyerLambert
azimuthRev ForsytheAndoyerLambert = A.azimuthRev E.ForsytheAndoyerLambert
azimuthRev FsAndoyer = A.azimuthRev E.FsAndoyer
| |
45954892bb9bd3adcb25dd38503ebeb4d02c74d4fcee12fe870726180bd73587 | aelve/guide | Routes.hs | module Guide.Routes
(
addRoute,
adminRoute,
authRoute,
deleteRoute,
haskellRoute,
feedRoute,
moveRoute,
renderRoute,
setRoute,
) where
import Web.Routing.Combinators (PathState (Open))
import Web.Spock (Path, (<//>))
haskellRoute :: Path '[] 'Open
haskellRoute = "haskell"
authRoute :: Path '[] 'Open
authRoute = "auth"
setRoute :: Path '[] 'Open
setRoute = haskellRoute <//> "set"
addRoute :: Path '[] 'Open
addRoute = haskellRoute <//> "add"
moveRoute :: Path '[] 'Open
moveRoute = haskellRoute <//> "move"
deleteRoute :: Path '[] 'Open
deleteRoute = haskellRoute <//> "delete"
feedRoute :: Path '[] 'Open
feedRoute = haskellRoute <//> "feed"
renderRoute :: Path '[] 'Open
renderRoute = haskellRoute <//> "render"
adminRoute :: Path '[] 'Open
adminRoute = "admin"
| null | https://raw.githubusercontent.com/aelve/guide/96a338d61976344d2405a16b11567e5464820a9e/back/src/Guide/Routes.hs | haskell | module Guide.Routes
(
addRoute,
adminRoute,
authRoute,
deleteRoute,
haskellRoute,
feedRoute,
moveRoute,
renderRoute,
setRoute,
) where
import Web.Routing.Combinators (PathState (Open))
import Web.Spock (Path, (<//>))
haskellRoute :: Path '[] 'Open
haskellRoute = "haskell"
authRoute :: Path '[] 'Open
authRoute = "auth"
setRoute :: Path '[] 'Open
setRoute = haskellRoute <//> "set"
addRoute :: Path '[] 'Open
addRoute = haskellRoute <//> "add"
moveRoute :: Path '[] 'Open
moveRoute = haskellRoute <//> "move"
deleteRoute :: Path '[] 'Open
deleteRoute = haskellRoute <//> "delete"
feedRoute :: Path '[] 'Open
feedRoute = haskellRoute <//> "feed"
renderRoute :: Path '[] 'Open
renderRoute = haskellRoute <//> "render"
adminRoute :: Path '[] 'Open
adminRoute = "admin"
| |
4e5a6396924c84f8bbcf3139451bec201584858159f64b51e01bb69c8e4a05af | seagreen/unison-code-explorer | Search.hs | module UCE.UI.Search where
import Concur.Core (Widget)
import Concur.Replica (HTML)
import qualified Concur.Replica.DOM as H
import qualified Concur.Replica.DOM.Events as P
import qualified Concur.Replica.DOM.Props as P
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import qualified Data.Text as Text
import UCE.Code
import UCE.Prelude
import UCE.UI.Declaration
import Unison.Name (Name)
import qualified Unison.Name as Name
import qualified Unison.Util.Relation as Relation
-- | The set of declaration names whose search results are currently
-- expanded in the UI.
newtype OpenNames = OpenNames {unOpenNames :: Set Name}
  deriving newtype (Semigroup, Monoid)
-- | Interactive search widget: a text box plus the list of declaration
-- names that contain the search string (case-insensitively).  The widget
-- loops on its own state -- typing reruns it with the new search string,
-- expanding/collapsing a result reruns it with the new open set -- and it
-- only returns once the user picks a concrete 'Reference'.
--
-- NOTE(review): 'One2'/'Two2' appear to be a two-constructor sum brought
-- in via UCE.Prelude -- confirm there.
search :: CodeInfo -> Text -> OpenNames -> Widget HTML Reference
search codeinfo searchStr openNames = do
  res <-
    H.div
      []
      [ One2 <$> searchBox,
        Two2 <$> results
      ]
  case res of
    One2 t ->
      -- The search string changed: rerun with it.
      search codeinfo t openNames
    Two2 (Left newOpenNames) ->
      -- A result was toggled open/closed: rerun with the new open set.
      search codeinfo searchStr newOpenNames
    Two2 (Right ref) ->
      -- A declaration was selected: done.
      pure ref
  where
    -- Text input; yields the full current contents on every keystroke.
    searchBox :: Widget HTML Text
    searchBox = do
      e <-
        H.div
          []
          [ H.input
              [ P.className "input search-box",
                P.autofocus True,
                P.placeholder "Search string",
                P.value searchStr,
                P.onInput,
                P.type_ "text"
              ]
          ]
      pure (P.targetValue (P.target e))

    -- All declaration names containing the (lowercased) search string,
    -- rendered in ascending name order.
    results :: Widget HTML (Either OpenNames Reference)
    results =
      H.ul
        []
        ( codeinfo
            & codeDeclarationNames
            & Relation.range
            & Map.filterWithKey (\n _ -> Text.isInfixOf strLower (Text.toLower (Name.toText n)))
            & Map.toAscList
            & map viewResult
        )
      where
        strLower :: Text
        strLower =
          Text.toLower searchStr

        -- One search hit: a +/- toggle button (which flips the name's
        -- membership in the open set) and, when open, the rendered
        -- declaration body for the name's references.
        viewResult :: (Name, Set Reference) -> Widget HTML (Either OpenNames Reference)
        viewResult (name, refs) = do
          H.li
            [P.className "search-result"]
            [ Left (OpenNames (setSwap name (unOpenNames openNames)))
                <$ H.button
                  [P.onClick, P.className "button"]
                  [ H.text (btn <> " " <> Name.toText name)
                  ],
              Right <$> body
            ]
          where
            isOpen :: Bool
            isOpen =
              Set.member name (unOpenNames openNames)

            -- Button label: "-" when expanded, "+" when collapsed.
            btn :: Text
            btn
              | isOpen = "-"
              | otherwise = "+"

            body :: Widget HTML Reference
            body
              | not isOpen = H.div [] []
              | otherwise = viewBody codeinfo refs
| null | https://raw.githubusercontent.com/seagreen/unison-code-explorer/9183f7ed4ee5d72a464ed5e3a041cd4733ccd353/src/UCE/UI/Search.hs | haskell | module UCE.UI.Search where
import Concur.Core (Widget)
import Concur.Replica (HTML)
import qualified Concur.Replica.DOM as H
import qualified Concur.Replica.DOM.Events as P
import qualified Concur.Replica.DOM.Props as P
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import qualified Data.Text as Text
import UCE.Code
import UCE.Prelude
import UCE.UI.Declaration
import Unison.Name (Name)
import qualified Unison.Name as Name
import qualified Unison.Util.Relation as Relation
newtype OpenNames = OpenNames {unOpenNames :: Set Name}
deriving newtype (Semigroup, Monoid)
search :: CodeInfo -> Text -> OpenNames -> Widget HTML Reference
search codeinfo searchStr openNames = do
res <-
H.div
[]
[ One2 <$> searchBox,
Two2 <$> results
]
case res of
One2 t ->
search codeinfo t openNames
Two2 (Left newOpenNames) ->
search codeinfo searchStr newOpenNames
Two2 (Right ref) ->
pure ref
where
searchBox :: Widget HTML Text
searchBox = do
e <-
H.div
[]
[ H.input
[ P.className "input search-box",
P.autofocus True,
P.placeholder "Search string",
P.value searchStr,
P.onInput,
P.type_ "text"
]
]
pure (P.targetValue (P.target e))
results :: Widget HTML (Either OpenNames Reference)
results =
H.ul
[]
( codeinfo
& codeDeclarationNames
& Relation.range
& Map.filterWithKey (\n _ -> Text.isInfixOf strLower (Text.toLower (Name.toText n)))
& Map.toAscList
& map viewResult
)
where
strLower :: Text
strLower =
Text.toLower searchStr
viewResult :: (Name, Set Reference) -> Widget HTML (Either OpenNames Reference)
viewResult (name, refs) = do
H.li
[P.className "search-result"]
[ Left (OpenNames (setSwap name (unOpenNames openNames)))
<$ H.button
[P.onClick, P.className "button"]
[ H.text (btn <> " " <> Name.toText name)
],
Right <$> body
]
where
isOpen :: Bool
isOpen =
Set.member name (unOpenNames openNames)
btn :: Text
btn
| isOpen = "-"
| otherwise = "+"
body :: Widget HTML Reference
body
| not isOpen = H.div [] []
| otherwise = viewBody codeinfo refs
| |
efdc684c7b4af5bb0c3fd733b89221fdf792b0a03578b09893dfbaae702fe1ed | Kakadu/fp2022 | demoParse.ml | * Copyright 2021 - 2022 ,
* SPDX - License - Identifier : CC0 - 1.0
open Bash_lib

(* Read a whole Bash script from stdin, parse it, and print either the
   resulting AST or the parse error to stdout. *)
let () =
  let input = Stdio.In_channel.input_all Caml.stdin in
  match Parser.parse input with
  | Result.Ok ast -> Format.printf "%a\n%!" Ast.pp_script ast
  | Error m -> Format.printf "Parse error %a\n%!" Parser.pp_error m
;;
| null | https://raw.githubusercontent.com/Kakadu/fp2022/6013a5ef227f41324295009a5a4d9d64f716723e/Bash/demos/demoParse.ml | ocaml | * Copyright 2021 - 2022 ,
* SPDX - License - Identifier : CC0 - 1.0
open Bash_lib
let () =
let input = Stdio.In_channel.input_all Caml.stdin in
match Parser.parse input with
| Result.Ok ast -> Format.printf "%a\n%!" Ast.pp_script ast
| Error m -> Format.printf "Parse error %a\n%!" Parser.pp_error m
;;
| |
60ed48cb36a1543a3c0f2465f998beafd2ba5e5a78fd09b838f1f4f6bb580f1e | roterski/syncrate-fulcro | comment_form.cljs | (ns app.comments.ui.comment-form
(:require
[app.comments.validations]
[app.ui.components :refer [field]]
[app.auth.ui.session :refer [Session]]
[goog.object :as gobj]
[com.fulcrologic.fulcro.dom :as dom :refer [div ul li p h1 h3 button]]
[com.fulcrologic.fulcro.dom.events :as evt]
[com.fulcrologic.fulcro.components :as prim :refer [defsc]]
[com.fulcrologic.fulcro.routing.dynamic-routing :as dr]
[com.fulcrologic.fulcro.components :as comp]
[com.fulcrologic.fulcro.mutations :as m :refer [defmutation]]
[taoensso.timbre :as log]
[com.fulcrologic.fulcro-css.css :as css]
[com.fulcrologic.fulcro.algorithms.form-state :as fs]
[com.fulcrologic.fulcro.algorithms.tempid :refer [tempid]]
[com.fulcrologic.fulcro.mutations :as m :refer [defmutation]]))
;; Insert an unsaved comment into the normalized client db and point the
;; parent entity's pending slot at it: :post/new-comment for a top-level
;; comment (no parent), :comment/new-comment for a reply.
(defn add-comment*
  [state-map {:comment/keys [id post parent] :as props}]
  (let [comment-ident [:comment/id id]
        parent-ident (if (nil? parent)
                       [:post/id post :post/new-comment]
                       [:comment/id parent :comment/new-comment])]
    (-> state-map
        (assoc-in parent-ident comment-ident)
        (assoc-in comment-ident props))))
(declare CommentForm)

;; Mutation: create an empty comment under a tempid, attach it to its
;; parent (post or comment), and register Fulcro form-state config for
;; it so the new form can be validated.
(defmutation add-comment-form
  [props]
  (action [{:keys [state]}]
    (let [comment-id (tempid)]
      (swap! state (fn [s]
                     (-> s
                         (add-comment* (merge {:comment/id comment-id :comment/body ""} props))
                         (fs/add-form-config* CommentForm [:comment/id comment-id])))))))
;; Remove the comment's table entry and the parent's pending
;; :*/new-comment pointer to it.  Used when the comment form is
;; cancelled before saving.
(defn remove-comment*
  [state-map {:comment/keys [id post parent] :as props}]
  (cond-> state-map
    true (update-in [:comment/id] dissoc id)
    (nil? parent) (update-in [:post/id post] dissoc :post/new-comment)
    (some? parent) (update-in [:comment/id parent] dissoc :comment/new-comment)))
;; Drop the Fulcro form-state config that was registered for this
;; comment's ident.
(defn remove-comment-form*
  [state-map {:comment/keys [id]}]
  (let [form-id {:table :comment/id :row id}]
    (-> state-map
        (update-in [::fs/forms-by-ident] dissoc form-id))))
;; Mutation: discard an unsaved comment together with its form state
;; (bound to the Cancel button).
(defmutation remove-comment
  [props]
  (action [{:keys [state]}]
    (swap! state (fn [s]
                   (-> s
                       (remove-comment* props)
                       (remove-comment-form* props))))))
;; Promote a comment out of the parent's pending :*/new-comment slot and
;; append its ident to the parent's persistent list (:post/comments for
;; top-level comments, :comment/children for replies).  Runs after the
;; server acknowledged the create.
(defn move-comment-from-new*
  [state-map {:comment/keys [id post parent] :as props}]
  (let [comment-ident [:comment/id id]
        post-comment? (nil? parent)]
    (cond-> state-map
      post-comment? (update-in [:post/id post] dissoc :post/new-comment)
      post-comment? (update-in [:post/id post :post/comments] (fnil conj []) comment-ident)
      (not post-comment?) (update-in [:comment/id parent] dissoc :comment/new-comment)
      (not post-comment?) (update-in [:comment/id parent :comment/children] (fnil conj []) comment-ident))))
;; Remote mutation: persist the comment on the server.  On success the
;; server's response carries a tempid remapping; we look up the real id,
;; attach the comment under its parent and drop its form state.  On
;; failure we only log the environment.
(defmutation create-comment! [{:comment/keys [tempid] :as props}]
  (action [{:keys [state]}]
    (log/info "Creating comment..."))
  (ok-action [{:keys [state result] :as env}]
    (log/info "...comment created successfully")
    (let [id (get-in result [:body `create-comment! :tempids tempid])]
      (swap! state (fn [s]
                     (-> s
                         (move-comment-from-new* (merge props {:comment/id id}))
                         (remove-comment-form* {:comment/id id}))))))
  (error-action [env]
    (log/error "...creating comment failed")
    (log/error env))
  (remote [{:keys [state] :as env}] true))
;; Form component for writing a new comment -- either on a post or as a
;; reply to another comment; the target post/parent comes in as computed
;; props.  Validation state comes from Fulcro form-state; submission
;; happens on Enter or the Create button and only fires the remote
;; mutation when the body is spec-valid.
(defsc CommentForm [this {:comment/keys [id body] :as props} {:keys [post parent]}]
  {:query [:comment/id :comment/body fs/form-config-join]
   :form-fields #{:comment/body}
   :ident :comment/id
   ;; Keep a stable ref callback so the input's DOM node can be focused
   ;; once the component mounts.
   :initLocalState (fn [this _]
                     {:save-ref (fn [r] (gobj/set this "input-ref" r))})
   :componentDidMount (fn [this]
                        (when-let [input-field (gobj/get this "input-ref")]
                          (.focus input-field)))}
  (let [validity (fs/get-spec-validity props :comment/body)
        ;; Submit on an Enter keydown, or when invoked directly with
        ;; `true` (from the Create button).  Marks the field complete
        ;; first so validation errors render, then fires the remote
        ;; create only if the field is :valid.
        submit! (fn [evt]
                  (when (or (identical? true evt) (evt/enter-key? evt))
                    (comp/transact! this `[(fs/mark-complete! {:field :comment/body})])
                    (when (contains? #{:valid} validity)
                      (comp/transact! this `[(create-comment! {:comment/tempid ~id :comment/body ~body :comment/post ~post :comment/parent ~parent})]))))
        ;; Cancel: discard the unsaved comment and its form state.
        cancel #(comp/transact! this `[(remove-comment {:comment/id ~id :comment/post ~post :comment/parent ~parent})])]
    (div
      (div :.ui.form {:classes [(when (contains? #{:invalid} (fs/get-spec-validity props)) "error")]}
        (field {:label "New Comment"
                :value (or body "")
                :valid? (contains? #{:valid :unchecked} validity)
                :error-message "Cannot be blank"
                :ref (comp/get-state this :save-ref)
                :onKeyDown submit!
                :autoComplete "off"
                :onChange #(do
                             (comp/transact! this `[(fs/mark-complete! {:field :comment/body})])
                             (m/set-string! this :comment/body :event %))})
        (dom/button :.ui.primary.button {:onClick #(submit! true) :disabled (contains? #{:invalid :unchecked} validity)}
          "Create")
        (dom/button :.ui.secondary.button {:onClick cancel} "Cancel")))))

;; React factory for CommentForm, keyed by the comment id.
(def ui-comment-form (comp/computed-factory CommentForm {:keyfn #(-> % :comment/id str)}))
| null | https://raw.githubusercontent.com/roterski/syncrate-fulcro/3fda40b12973e64c7ff976174498ec512b411323/src/main/app/comments/ui/comment_form.cljs | clojure | (ns app.comments.ui.comment-form
(:require
[app.comments.validations]
[app.ui.components :refer [field]]
[app.auth.ui.session :refer [Session]]
[goog.object :as gobj]
[com.fulcrologic.fulcro.dom :as dom :refer [div ul li p h1 h3 button]]
[com.fulcrologic.fulcro.dom.events :as evt]
[com.fulcrologic.fulcro.components :as prim :refer [defsc]]
[com.fulcrologic.fulcro.routing.dynamic-routing :as dr]
[com.fulcrologic.fulcro.components :as comp]
[com.fulcrologic.fulcro.mutations :as m :refer [defmutation]]
[taoensso.timbre :as log]
[com.fulcrologic.fulcro-css.css :as css]
[com.fulcrologic.fulcro.algorithms.form-state :as fs]
[com.fulcrologic.fulcro.algorithms.tempid :refer [tempid]]
[com.fulcrologic.fulcro.mutations :as m :refer [defmutation]]))
(defn add-comment*
[state-map {:comment/keys [id post parent] :as props}]
(let [comment-ident [:comment/id id]
parent-ident (if (nil? parent)
[:post/id post :post/new-comment]
[:comment/id parent :comment/new-comment])]
(-> state-map
(assoc-in parent-ident comment-ident)
(assoc-in comment-ident props))))
(declare CommentForm)
(defmutation add-comment-form
[props]
(action [{:keys [state]}]
(let [comment-id (tempid)]
(swap! state (fn [s]
(-> s
(add-comment* (merge {:comment/id comment-id :comment/body ""} props))
(fs/add-form-config* CommentForm [:comment/id comment-id])))))))
(defn remove-comment*
[state-map {:comment/keys [id post parent] :as props}]
(cond-> state-map
true (update-in [:comment/id] dissoc id)
(nil? parent) (update-in [:post/id post] dissoc :post/new-comment)
(some? parent) (update-in [:comment/id parent] dissoc :comment/new-comment)))
(defn remove-comment-form*
[state-map {:comment/keys [id]}]
(let [form-id {:table :comment/id :row id}]
(-> state-map
(update-in [::fs/forms-by-ident] dissoc form-id))))
(defmutation remove-comment
[props]
(action [{:keys [state]}]
(swap! state (fn [s]
(-> s
(remove-comment* props)
(remove-comment-form* props))))))
(defn move-comment-from-new*
[state-map {:comment/keys [id post parent] :as props}]
(let [comment-ident [:comment/id id]
post-comment? (nil? parent)]
(cond-> state-map
post-comment? (update-in [:post/id post] dissoc :post/new-comment)
post-comment? (update-in [:post/id post :post/comments] (fnil conj []) comment-ident)
(not post-comment?) (update-in [:comment/id parent] dissoc :comment/new-comment)
(not post-comment?) (update-in [:comment/id parent :comment/children] (fnil conj []) comment-ident))))
(defmutation create-comment! [{:comment/keys [tempid] :as props}]
(action [{:keys [state]}]
(log/info "Creating comment..."))
(ok-action [{:keys [state result] :as env}]
(log/info "...comment created successfully")
(let [id (get-in result [:body `create-comment! :tempids tempid])]
(swap! state (fn [s]
(-> s
(move-comment-from-new* (merge props {:comment/id id}))
(remove-comment-form* {:comment/id id}))))))
(error-action [env]
(log/error "...creating comment failed")
(log/error env))
(remote [{:keys [state] :as env}] true))
(defsc CommentForm [this {:comment/keys [id body] :as props} {:keys [post parent]}]
{:query [:comment/id :comment/body fs/form-config-join]
:form-fields #{:comment/body}
:ident :comment/id
:initLocalState (fn [this _]
{:save-ref (fn [r] (gobj/set this "input-ref" r))})
:componentDidMount (fn [this]
(when-let [input-field (gobj/get this "input-ref")]
(.focus input-field)))}
(let [validity (fs/get-spec-validity props :comment/body)
submit! (fn [evt]
(when (or (identical? true evt) (evt/enter-key? evt))
(comp/transact! this `[(fs/mark-complete! {:field :comment/body})])
(when (contains? #{:valid} validity)
(comp/transact! this `[(create-comment! {:comment/tempid ~id :comment/body ~body :comment/post ~post :comment/parent ~parent})]))))
cancel #(comp/transact! this `[(remove-comment {:comment/id ~id :comment/post ~post :comment/parent ~parent})])]
(div
(div :.ui.form {:classes [(when (contains? #{:invalid} (fs/get-spec-validity props)) "error")]}
(field {:label "New Comment"
:value (or body "")
:valid? (contains? #{:valid :unchecked} validity)
:error-message "Cannot be blank"
:ref (comp/get-state this :save-ref)
:onKeyDown submit!
:autoComplete "off"
:onChange #(do
(comp/transact! this `[(fs/mark-complete! {:field :comment/body})])
(m/set-string! this :comment/body :event %))})
(dom/button :.ui.primary.button {:onClick #(submit! true) :disabled (contains? #{:invalid :unchecked} validity)}
"Create")
(dom/button :.ui.secondary.button {:onClick cancel} "Cancel")))))
(def ui-comment-form (comp/computed-factory CommentForm {:keyfn #(-> % :comment/id str)}))
| |
32b87adf36af930ce37db2ad95298c8554be0294cc615414921eb5598475da05 | gadfly361/reagent-figwheel | app_state.cljs | (ns {{ns-name}}.app-state
(:require
[reagent.core :as reagent]))
(defonce main
(reagent/atom
{:app {:debug? ^boolean js/goog.DEBUG}}))
| null | https://raw.githubusercontent.com/gadfly361/reagent-figwheel/4c40657b31a2b358be5697add2e96e8cac6f8535/src/leiningen/new/reagent_figwheel/gadfly/src/app/app_state.cljs | clojure | (ns {{ns-name}}.app-state
(:require
[reagent.core :as reagent]))
(defonce main
(reagent/atom
{:app {:debug? ^boolean js/goog.DEBUG}}))
| |
b02ac0fb678a16d75f42a21bbee0c3e99276dc7fe117653187ff212528a8a5e6 | dcos/dcos-net | dcos_rest_sup.erl | -module(dcos_rest_sup).
-behaviour(supervisor).
-export([start_link/1, init/1]).
start_link(Enabled) ->
supervisor:start_link({local, ?MODULE}, ?MODULE, [Enabled]).
init([false]) ->
{ok, {#{}, []}};
init([true]) ->
setup_cowboy(),
{ok, {#{}, []}}.
setup_cowboy() ->
Dispatch = cowboy_router:compile([
{'_', [
{"/lashup/kv/[...]", dcos_rest_lashup_handler, []},
{"/lashup/key", dcos_rest_key_handler, []},
{"/v1/vips", dcos_rest_vips_handler, []},
{"/v1/nodes", dcos_rest_nodes_handler, []},
{"/v1/version", dcos_rest_dns_handler, [version]},
{"/v1/config", dcos_rest_dns_handler, [config]},
{"/v1/hosts/:host", dcos_rest_dns_handler, [hosts]},
{"/v1/services/:service", dcos_rest_dns_handler, [services]},
{"/v1/enumerate", dcos_rest_dns_handler, [enumerate]},
{"/v1/records", dcos_rest_dns_handler, [records]},
{"/v1/metrics/[:registry]", dcos_rest_metrics_handler, []}
]}
]),
{ok, Ip} = application:get_env(dcos_rest, ip),
{ok, Port} = application:get_env(dcos_rest, port),
{ok, _} = cowboy:start_clear(
http, [{ip, Ip}, {port, Port}], #{
env => #{dispatch => Dispatch}
}).
| null | https://raw.githubusercontent.com/dcos/dcos-net/7bd01ac237ff4b9a12a020ed443e71c45f7063f4/apps/dcos_rest/src/dcos_rest_sup.erl | erlang | -module(dcos_rest_sup).
-behaviour(supervisor).
-export([start_link/1, init/1]).
start_link(Enabled) ->
supervisor:start_link({local, ?MODULE}, ?MODULE, [Enabled]).
init([false]) ->
{ok, {#{}, []}};
init([true]) ->
setup_cowboy(),
{ok, {#{}, []}}.
setup_cowboy() ->
Dispatch = cowboy_router:compile([
{'_', [
{"/lashup/kv/[...]", dcos_rest_lashup_handler, []},
{"/lashup/key", dcos_rest_key_handler, []},
{"/v1/vips", dcos_rest_vips_handler, []},
{"/v1/nodes", dcos_rest_nodes_handler, []},
{"/v1/version", dcos_rest_dns_handler, [version]},
{"/v1/config", dcos_rest_dns_handler, [config]},
{"/v1/hosts/:host", dcos_rest_dns_handler, [hosts]},
{"/v1/services/:service", dcos_rest_dns_handler, [services]},
{"/v1/enumerate", dcos_rest_dns_handler, [enumerate]},
{"/v1/records", dcos_rest_dns_handler, [records]},
{"/v1/metrics/[:registry]", dcos_rest_metrics_handler, []}
]}
]),
{ok, Ip} = application:get_env(dcos_rest, ip),
{ok, Port} = application:get_env(dcos_rest, port),
{ok, _} = cowboy:start_clear(
http, [{ip, Ip}, {port, Port}], #{
env => #{dispatch => Dispatch}
}).
| |
8c7be81df221eff3caadc2097192721afed2f14614bd7f1601a9a348b97902c8 | mcapodici/badlanguage | Parser.hs | {-# LANGUAGE OverloadedStrings #-}
module Parser.Parser (program) where
import Parser.FloatParser
import Text.Parsec.Text
import Text.Parsec.Prim (try)
import Text.ParserCombinators.Parsec.Char
import Text.Parsec.Combinator
import Parser.Data
import Control.Applicative
-- | A complete program: exactly one expression followed by end of input.
program :: Parser Expression
program = program' <* eof

-- | One expression: either a parenthesised operator application
-- @(op e1 e2 ...)@ or a single atom.
program' :: Parser Expression
program' =
  (try list) <|> atom
  where
    -- Parenthesised form.  Sub-expressions are whitespace-separated and
    -- the trailing whitespace after the closing paren is consumed here.
    -- NOTE(review): 'sepBy program'' spaces' uses a separator that can
    -- succeed on empty input; it terminates because program' fails
    -- without consuming when no expression follows -- confirm if the
    -- grammar is extended.
    list :: Parser Expression
    list = do
      char '('
      op <- opParser
      spaces
      subs <- sepBy program' spaces
      spaces
      char ')'
      spaces
      return $ Sub op subs

    atom :: Parser Expression
    atom = Terminal <$> valParser

-- | Operator keyword at the head of a list form.  Each alternative is
-- wrapped in 'try' so a failed partial match backtracks to the next.
opParser :: Parser Operator
opParser =
  choice $ Prelude.map try
    [ string "+" >> return Add
    , string "*" >> return Multiply
    , string "and" >> return And
    , string "or" >> return Or
    , string "if" >> return If
    , string "set" >> return Set
    , string "get" >> return Get
    , string "do" >> return Do
    , string "eq" >> return Eq
    , string "neq" >> return Neq
    , string "while" >> return While
    , string "print" >> return Print
    , string "input" >> return Input
    ]

-- | A terminal: boolean literal, float literal, double-quoted string
-- (no escape sequences supported), or an alphanumeric variable name.
-- Order matters: literals are tried before variable names so that e.g.
-- @true@ parses as a boolean rather than as a variable.
valParser :: Parser LTerminal
valParser =
  choice $ Prelude.map try
    [ string "true" >> return (asBool True)
    , string "false" >> return (asBool False)
    , asFloat <$> float
    , asString <$> (char '"' *> many(noneOf "\"") <* char '"')
    , LTVar <$> many1 alphaNum
    ]
  where
    asBool = LTValue . LVBool
    asFloat = LTValue . LVFloat
    asString = LTValue . LVString
| null | https://raw.githubusercontent.com/mcapodici/badlanguage/dcf4662907671a9eb00ba5c0be98de9e81c7d8da/src/Parser/Parser.hs | haskell | # LANGUAGE OverloadedStrings # |
module Parser.Parser (program) where
import Parser.FloatParser
import Text.Parsec.Text
import Text.Parsec.Prim (try)
import Text.ParserCombinators.Parsec.Char
import Text.Parsec.Combinator
import Parser.Data
import Control.Applicative
program :: Parser Expression
program = program' <* eof
program' :: Parser Expression
program' =
(try list) <|> atom
where
list :: Parser Expression
list = do
char '('
op <- opParser
spaces
subs <- sepBy program' spaces
spaces
char ')'
spaces
return $ Sub op subs
atom :: Parser Expression
atom = Terminal <$> valParser
opParser :: Parser Operator
opParser =
choice $ Prelude.map try
[ string "+" >> return Add
, string "*" >> return Multiply
, string "and" >> return And
, string "or" >> return Or
, string "if" >> return If
, string "set" >> return Set
, string "get" >> return Get
, string "do" >> return Do
, string "eq" >> return Eq
, string "neq" >> return Neq
, string "while" >> return While
, string "print" >> return Print
, string "input" >> return Input
]
valParser :: Parser LTerminal
valParser =
choice $ Prelude.map try
[ string "true" >> return (asBool True)
, string "false" >> return (asBool False)
, asFloat <$> float
, asString <$> (char '"' *> many(noneOf "\"") <* char '"')
, LTVar <$> many1 alphaNum
]
where
asBool = LTValue . LVBool
asFloat = LTValue . LVFloat
asString = LTValue . LVString
|
ac79bae7626d7e97ef380a8d8f74f34dcf2e6788f0ea94ab5559347365b224dd | gbwey/predicate-typed | Iterator.hs | # LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeApplications #
# LANGUAGE DataKinds #
{-# LANGUAGE GADTs #-}
# LANGUAGE TypeFamilies #
# LANGUAGE PolyKinds #
# LANGUAGE ScopedTypeVariables #
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ConstraintKinds #-}
# LANGUAGE NoStarIsType #
{-# LANGUAGE EmptyDataDeriving #-}
-- | promoted iterator functions
module Predicate.Data.Iterator (
-- ** functions
Scanl
, ScanN
, ScanNA
, FoldN
, Foldl
, Unfoldr
, IterateUntil
, IterateWhile
, IterateNWhile
, IterateNUntil
, UnfoldN
, Para
, ParaN
, DoN
, Repeat
-- ** type families
, UnfoldrT
) where
import Predicate.Core
import Predicate.Misc
import Predicate.Util
import Predicate.Data.Tuple (type (***))
import Predicate.Data.Ordering (type (>))
import Predicate.Data.Enum (type (...), Pred)
import Predicate.Data.List (Last)
import Predicate.Data.Maybe (MaybeBool)
import GHC.TypeLits (Nat, KnownNat, ErrorMessage((:$$:),(:<>:)))
import qualified GHC.TypeLits as GL
import Data.Kind (Type)
import Control.Lens
import Data.Proxy (Proxy(Proxy))
import Data.Maybe (catMaybes)
import Control.Arrow (Arrow((&&&)))
import Data.Void (Void)
-- $setup
-- >>> import Predicate
-- >>> :set -XDataKinds
-- >>> :set -XTypeApplications
-- >>> :set -XTypeOperators
-- >>> :set -XAllowAmbiguousTypes
-- >>> :set -XOverloadedStrings
-- >>> :set -XFlexibleContexts
-- >>> import Data.Time
-- want to pass Proxy b to q but then we have no way to calculate 'b'
| similar to ' '
--
> > > pz @(Scanl ( Snd : + Fst ) ) ( [ 99],[1 .. 5 ] )
Val [ [ 99],[1,99],[2,1,99],[3,2,1,99],[4,3,2,1,99],[5,4,3,2,1,99 ] ]
--
> > > pl @(Scanl ( Snd : + Fst ) ) ( [ 99 ] , [ ] )
Present [ [ 99 ] ] ( [ [ 99 ] ] | b=[99 ] | as= [ ] )
Val [ [ 99 ] ]
--
data Scanl p q r deriving Show
-- scanr :: (a -> b -> b) -> b -> [a] -> [b]
result is but signature is flipped ( ( a , b ) - > b ) - > b - > [ a ] - > [ b ]
instance ( PP p (b,a) ~ b
, PP q x ~ b
, PP r x ~ [a]
, P p (b,a)
, P q x
, P r x
, Show b
, Show a
)
=> P (Scanl p q r) x where
type PP (Scanl p q r) x = [PP q x]
eval _ opts z = do
let msg0 = "Scanl"
lr <- runPQ NoInline msg0 (Proxy @q) (Proxy @r) opts z []
case lr of
Left e -> pure e
Right (q,r,qq,rr) ->
case chkSize opts msg0 r [hh rr] of
Left e -> pure e
Right _ -> do
let ff i b as' rs
| i >= getMaxRecursionValue opts = pure (rs, Left $ mkNode opts (Fail (msg0 <> ":recursion limit i=" <> show i)) ("(b,as')=" <> showL opts (b,as')) [])
| otherwise =
case as' of
+ + [ ( ( i , q ) , mkNode opts ( q ) ( msg0 < > " ( done ) " ) [ ] ) ] , Right ( ) )
a:as -> do
pp :: TT b <- evalHide @p opts (b,a)
case getValueLR NoInline opts (msg0 <> " i=" <> show i <> " a=" <> showL opts a) pp [] of
Left e -> pure (rs,Left e)
Right b' -> ff (i+1) b' as (rs ++ [((i,b), pp)])
(ts,lrx) :: ([((Int, b), TT b)], Either (TT [b]) ()) <- ff 1 q r []
pure $ case splitAndAlign opts msg0 (((0,q), mkNode opts (Val q) (msg0 <> "(initial)") []) : ts) of
Left e -> errorInProgram $ "Scanl e=" ++ show (hh e)
Right abcs ->
let vals = map (view _1) abcs
itts = map (view _2 &&& view _3) abcs
in case lrx of
Left e -> mkNodeCopy opts e msg0 (hh qq : hh rr : map (hh . prefixNumberToTT) itts)
Right () -> mkNode opts (Val vals) (show3' opts msg0 vals "b=" q <> showVerbose opts " | as=" r) (hh qq : hh rr : map (hh . prefixNumberToTT) itts)
-- | iterates n times keeping all the results
--
> > > pz @(ScanN 4 Succ I d ) ' c '
-- Val "cdefg"
--
> > > pz @(Dup > > 4 ( Pred * * * Succ ) I d ) ' g '
-- Val [('g','g'),('f','h'),('e','i'),('d','j'),('c','k')]
--
> > > pz @(ScanN 4 Succ I d ) 4
Val [ 4,5,6,7,8 ]
--
> > > pz @('(0,1 ) > > 20 ' ( Snd , Fst + Snd ) I d > > Map ) " sdf "
Val [ 0,1,1,2,3,5,8,13,21,34,55,89,144,233,377,610,987,1597,2584,4181,6765 ]
--
> > > pl @(ScanN 2 Succ I d ) 4
Present [ 4,5,6 ] ( [ 4,5,6 ] | b=4 | as=[1,2 ] )
-- Val [4,5,6]
--
> > > pl @(ScanN 5 I d I d ) 4
Present [ 4,4,4,4,4,4 ] ( [ 4,4,4,4,4,4 ] | b=4 | as=[1,2,3,4,5 ] )
Val [ 4,4,4,4,4,4 ]
--
> > > pl @(ScanN 2 Succ I d > > PadR 10 ( MEmptyT Ordering ) I d ) LT
Present [ LT , EQ , GT , EQ , EQ , EQ , EQ , EQ , EQ , EQ ] ( ( > > ) [ LT , EQ , GT , EQ , EQ , EQ , EQ , EQ , EQ , EQ ] | { PadR 10 pad = EQ [ LT , EQ , GT , EQ , EQ , EQ , EQ , EQ , EQ , EQ ] | [ LT , EQ , GT ] } )
Val [ LT , EQ , GT , EQ , EQ , EQ , EQ , EQ , EQ , EQ ]
--
> > > pl @(ScanN 4 Pred I d ) 99
Present [ 99,98,97,96,95 ] ( [ 99,98,97,96,95 ] | b=99 | as=[1,2,3,4 ] )
-- Val [99,98,97,96,95]
--
data ScanN n p q deriving Show
n times using q then run p
instance P (ScanNT n p q) x => P (ScanN n p q) x where
type PP (ScanN n p q) x = PP (ScanNT n p q) x
eval _ = eval (Proxy @(ScanNT n p q))
| tuple version of ' '
--
-- >>> pl @(ScanNA Succ) (4,'a')
Present " abcde " ( " abcde " | b='a ' | as=[1,2,3,4 ] )
-- Val "abcde"
--
> > > pl @(ScanNA Tail ) ( 4,"abcd " )
Present [ " abcd","bcd","cd","d " , " " ] ( [ " abcd","bcd","cd","d " , " " ] | b="abcd " | as=[1,2,3,4 ] )
-- Val ["abcd","bcd","cd","d",""]
--
> > > pl @(Len & & & I d > > ScanNA Tail ) " abcd "
Present [ " abcd","bcd","cd","d " , " " ] ( ( > > ) [ " abcd","bcd","cd","d " , " " ] | { [ " abcd","bcd","cd","d " , " " ] | b="abcd " | as=[1,2,3,4 ] } )
-- Val ["abcd","bcd","cd","d",""]
--
data ScanNA q deriving Show
type ScanNAT q = ScanN Fst q Snd
instance P (ScanNAT q) x => P (ScanNA q) x where
type PP (ScanNA q) x = PP (ScanNAT q) x
eval _ = eval (Proxy @(ScanNAT q))
-- | iterates n times keeping only the last result
--
> > > pz @(FoldN 4 Succ I d ) ' c '
-- Val 'g'
--
> > > pz @(ReadP Day I d > > I d ... FoldN 5 Succ I d ) " 2020 - 07 - 27 "
Val [ 2020 - 07 - 27,2020 - 07 - 28,2020 - 07 - 29,2020 - 07 - 30,2020 - 07 - 31,2020 - 08 - 01 ]
--
-- >>> pl @(FoldN 2 Succ Id) LT
Present GT ( ( > > ) GT | { Last GT | [ LT , EQ , GT ] } )
Val GT
--
-- >>> pl @(FoldN 30 Succ Id) LT
Error Succ IO e = Prelude . . Ordering.succ : bad argument ( )
Fail " Succ IO e = Prelude . . Ordering.succ : bad argument "
--
-- >>> pl @(FoldN 6 Succ Id) 'a'
-- Present 'g' ((>>) 'g' | {Last 'g' | "abcdefg"})
-- Val 'g'
--
-- >>> pl @(FoldN 6 Pred Id) 'a'
-- Present '[' ((>>) '[' | {Last '[' | "a`_^]\\["})
-- Val '['
--
-- >>> pl @(FoldN 0 Succ Id) LT
-- Present LT ((>>) LT | {Last LT | [LT]})
--
-- >>> pl @(FoldN 2 Succ Id >> FoldN 2 Pred Id) LT
Present LT ( ( > > ) LT | { Last LT | [ GT , EQ , LT ] } )
--
> > > pz @(FoldN 4 ( I d < > I d ) I d ) " abc " -- same as above
-- Val "abcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabc"
--
data FoldN n p q deriving Show
type FoldNT n p q = ScanN n p q >> Last
instance P (FoldNT n p q) x => P (FoldN n p q) x where
type PP (FoldN n p q) x = PP (FoldNT n p q) x
eval _ = eval (Proxy @(FoldNT n p q))
-- | Foldl similar to 'foldl'
--
> > > pl @(Foldl ( Fst + Snd ) 0 ( 1 ... 10 ) ) ( )
Present 55 ( ( > > ) 55 | { Last 55 | [ 0,1,3,6,10,15,21,28,36,45,55 ] } )
Val 55
--
> > > pz @(Foldl ( Snd : + Fst ) ' [ 99 ] ( 1 ... 10 ) ) ( )
Val [ 10,9,8,7,6,5,4,3,2,1,99 ]
--
> > > pl @(Foldl ' ( ) ( EnumFromTo 1 9999 ) ) ( )
Error list size exceeded ( max is 100 )
Fail " list size exceeded "
--
> > > pl @(Foldl ( Guard " someval " ( Fst < Snd ) > > Snd ) Head Tail ) [ 1,4,7,9,16 ]
Present 16 ( ( > > ) 16 | { Last 16 | [ 1,4,7,9,16 ] } )
-- Val 16
--
> > > pl @(Foldl ( Guard ( PrintT " % d not less than % d " I d ) ( Fst < Snd ) > > Snd ) Head Tail ) [ 1,4,7,6,16 ]
Error 7 not less than 6 ( )
Fail " 7 not less than 6 "
--
> > > pl @(Foldl ( If ( L11 & & ( Snd > L12 ) ) ' ( ' True , Snd ) ' ( ' False , L12 ) ) ' ( ' True , Head ) Tail ) [ 1,4,7,9,16 ]
-- Present (True,16) ((>>) (True,16) | {Last (True,16) | [(True,1),(True,4),(True,7),(True,9),(True,16)]})
-- Val (True,16)
--
> > > pl @(Foldl ( If ( L11 & & ( Snd > L12 ) ) ' ( ' True , Snd ) ' ( ' False , L12 ) ) ' ( ' True , Head ) Tail ) [ 1,4,7,9,16,2 ]
-- Present (False,16) ((>>) (False,16) | {Last (False,16) | [(True,1),(True,4),(True,7),(True,9),(True,16),(False,16)]})
-- Val (False,16)
--
> > > pl @(Foldl ( Snd : + Fst ) ( MEmptyT [ _ ] ) I d ) [ 1 .. 5 ]
Present [ 5,4,3,2,1 ] ( ( > > ) [ 5,4,3,2,1 ] | { Last [ 5,4,3,2,1 ] | [ [ ] , [ 1],[2,1],[3,2,1],[4,3,2,1],[5,4,3,2,1 ] ] } )
Val [ 5,4,3,2,1 ]
--
> > > pl @('Just Uncons > > ( If L11 ( If ( L12 < Snd ) ' ( ' True , Snd ) ' ( ' False , Snd ) ) ) ' ( ' True , ) Snd ) [ -10,-2,2,3,4,10,9,11 ]
Present ( False,9 ) ( ( > > ) ( False,9 ) | { Last ( False,9 ) | [ ( True,-10),(True,-2),(True,2),(True,3),(True,4),(True,10),(False,9),(False,9 ) ] } )
Val ( False,9 )
--
> > > pl @('Just Uncons > > ( If L11 ( If ( L12 < Snd ) ' ( ' True , Snd ) ' ( ' False , Snd ) ) ) ' ( ' True , ) Snd ) [ -10,2,3,4,10,11 ]
-- Present (True,11) ((>>) (True,11) | {Last (True,11) | [(True,-10),(True,2),(True,3),(True,4),(True,10),(True,11)]})
-- Val (True,11)
--
data Foldl p q r deriving Show
type FoldLT p q r = Scanl p q r >> Last
instance P (FoldLT p q r) x => P (Foldl p q r) x where
type PP (Foldl p q r) x = PP (FoldLT p q r) x
eval _ = eval (Proxy @(FoldLT p q r))
| similar to ' '
--
> > > pz @(Unfoldr ( ( Not Null ) ( SplitAt 2 I d ) ) I d ) [ 1 .. 5 ]
Val [ [ 1,2],[3,4],[5 ] ]
--
> > > pl @(Unfoldr ( If Null ( MkNothing _ ) ( ' ( Take 3 I d , Drop 1 I d ) > > MkJust I d ) ) I d ) " abcdefghi "
Present [ " abc","bcd","cde","def","efg","fgh","ghi","hi","i " ] ( " abcdefghi " [ " abc","bcd","cde","def","efg","fgh","ghi","hi","i " ] | s="abcdefghi " )
-- Val ["abc","bcd","cde","def","efg","fgh","ghi","hi","i"]
--
> > > pl @(Unfoldr ( If Null ( MkNothing _ ) ( Pure _ ( SplitAt 2 I d ) ) ) I d ) [ 1 .. 5 ]
Present [ [ 1,2],[3,4],[5 ] ] ( [ 1,2,3,4,5 ] [ [ 1,2],[3,4],[5 ] ] | s=[1,2,3,4,5 ] )
Val [ [ 1,2],[3,4],[5 ] ]
--
> > > pl @(Unfoldr ( ( Not Null ) ( SplitAt 2 I d ) ) I d ) [ 1 .. 5 ]
Present [ [ 1,2],[3,4],[5 ] ] ( [ 1,2,3,4,5 ] [ [ 1,2],[3,4],[5 ] ] | s=[1,2,3,4,5 ] )
Val [ [ 1,2],[3,4],[5 ] ]
--
> > > pl @(Unfoldr ( If Null ( MkNothing _ ) ( Guard " yy " ( Len < 3 ) > > Pure _ ( SplitAt 2 I d ) ) ) I d ) [ 1 .. 5 ]
Error yy ( [ 1,2,3,4,5 ] )
-- Fail "yy"
--
> > > pl @(Unfoldr ( ( Not Null ) ( Guard " yy " ( Len < 3 ) > > SplitAt 2 I d ) ) I d ) [ 1 .. 5 ]
Error yy ( [ 1,2,3,4,5 ] )
-- Fail "yy"
--
> > > pl @(Unfoldr ( Guard " xx " ( Len > 4 ) > > Uncons ) I d ) [ 1 .. 10 ]
Error xx ( [ 1,2,3,4,5,6,7,8,9,10 ] )
-- Fail "xx"
--
> > > pl @(Unfoldr Uncons I d ) [ 1 .. 10 ]
Present [ 1,2,3,4,5,6,7,8,9,10 ] ( [ 1,2,3,4,5,6,7,8,9,10 ] [ 1,2,3,4,5,6,7,8,9,10 ] | s=[1,2,3,4,5,6,7,8,9,10 ] )
Val [ 1,2,3,4,5,6,7,8,9,10 ]
--
> > > pan @(Unfoldr ( If ( I d < 1 ) ( MkNothing _ ) ( MkJust ( DivMod I d 2 > > Swap ) ) ) I d ) 8
P Unfoldr 8 [ 0,0,0,1 ]
-- |
-- +- P Id 8
-- |
-- +- P i=1: If 'False Just (0,4)
-- |
+ - P i=2 : If ' False Just ( 0,2 )
-- |
+ - P i=3 : If ' False Just ( 0,1 )
-- |
+ - P i=4 : If ' False Just ( 1,0 )
-- |
-- `- P i=5: If 'True Nothing
Val [ 0,0,0,1 ]
--
data Unfoldr p q deriving Show
instance ( PP q a ~ s
, PP p s ~ Maybe (b,s)
, P q a
, P p s
, Show s
, Show b
)
=> P (Unfoldr p q) a where
type PP (Unfoldr p q) a = [UnfoldrT (PP p (PP q a))]
eval _ opts z = do
let msg0 = "Unfoldr"
qq <- eval (Proxy @q) opts z
case getValueLR NoInline opts msg0 qq [] of
Left e -> pure e
Right q -> do
let msg1 = msg0 <> " " <> showL opts q
ff i s rs | i >= getMaxRecursionValue opts = pure (rs, Left $ mkNode opts (Fail (msg1 <> ":recursion limit i=" <> show i)) ("s=" <> showL opts s) [])
| otherwise = do
pp :: TT (PP p s) <- evalHide @p opts s
case getValueLR NoInline opts (msg1 <> " i=" <> show i <> " s=" <> show s) pp [] of
Left e -> pure (rs, Left e)
Right Nothing -> pure (rs ++ [((i,Nothing), pp)], Right ())
Right w@(Just (_b,s')) -> ff (i+1) s' (rs ++ [((i,w), pp)])
(ts,lr) :: ([((Int, PP p s), TT (PP p s))], Either (TT [b]) ()) <- ff 1 q []
pure $ case splitAndAlign opts msg1 ts of
Left e -> errorInProgram $ "Unfoldr e=" ++ show (hh e)
Right abcs ->
let vals = map (view _1) abcs
itts = map (view _2 &&& view _3) abcs
in case lr of
Left e -> mkNodeCopy opts e msg1 (hh qq : map (hh . prefixNumberToTT) itts)
Right () ->
let ret = fst <$> catMaybes vals
in mkNode opts (Val ret) (show3' opts msg1 ret "s=" q) (hh qq : map (hh . prefixNumberToTT) itts)
| calculate the return type for ' '
type family UnfoldrT (mbs :: Type) where
UnfoldrT (Maybe (b, _)) = b
UnfoldrT o = GL.TypeError (
'GL.Text "UnfoldrT: expected 'Maybe (b, _)' "
':$$: 'GL.Text "o = "
':<>: 'GL.ShowType o)
| run times with state @s@
data UnfoldN n p s deriving Show
-- have to rewrite (a,s) to (a,(s,n)) hence the L11 ...
type IterateNT n p s = Unfoldr (MaybeBool (Snd > 0) ((p *** Pred) >> '(L11,'(L12,Snd)))) '(s,n)
instance P (IterateNT n p s) x => P (UnfoldN n p s) x where
type PP (UnfoldN n p s) x = PP (IterateNT n p s) x
eval _ = eval (Proxy @(IterateNT n p s))
| unfolds a value applying @f@ until the condition @p@ is true
--
> > > pl @(IterateUntil ( I d < 90 ) Pred ) 94
Present [ 94,93,92,91,90 ] ( [ 94,93,92,91,90 ] | s=94 )
Val [ 94,93,92,91,90 ]
--
data IterateUntil p f deriving Show
type IterateUntilT p f = IterateWhile (Not p) f
instance P (IterateUntilT p f) x => P (IterateUntil p f) x where
type PP (IterateUntil p f) x = PP (IterateUntilT p f) x
eval _ = eval (Proxy @(IterateUntilT p f))
| unfolds a value applying @f@ while the condition @p@ is true
--
> > > pl @(IterateWhile ( I d > 90 ) Pred ) 94
Present [ 94,93,92,91 ] ( 94 [ 94,93,92,91 ] | s=94 )
Val [ 94,93,92,91 ]
--
data IterateWhile p f deriving Show
type IterateWhileT p f = Unfoldr (MaybeBool p '(Id, f)) Id
instance P (IterateWhileT p f) x => P (IterateWhile p f) x where
type PP (IterateWhile p f) x = PP (IterateWhileT p f) x
eval _ = eval (Proxy @(IterateWhileT p f))
| unfolds a value applying @f@ while the condition @p@ is true or @n@ times
--
> > > pl @(IterateNWhile 10 ( I d > 90 ) Pred ) 95
-- Present [95,94,93,92,91] ((>>) [95,94,93,92,91] | {Map [95,94,93,92,91] | [(10,95),(9,94),(8,93),(7,92),(6,91)]})
-- Val [95,94,93,92,91]
--
> > > pl @(IterateNWhile 3 ( I d > 90 ) Pred ) 95
Present [ 95,94,93 ] ( ( > > ) [ 95,94,93 ] | { Map [ 95,94,93 ] | [ ( 3,95),(2,94),(1,93 ) ] } )
-- Val [95,94,93]
--
data IterateNWhile n p f deriving Show
type IterateNWhileT n p f = '(n, Id) >> IterateWhile (Fst > 0 && (Snd >> p)) (Pred *** f) >> Map Snd
instance P (IterateNWhileT n p f) x => P (IterateNWhile n p f) x where
type PP (IterateNWhile n p f) x = PP (IterateNWhileT n p f) x
eval _ = eval (Proxy @(IterateNWhileT n p f))
| unfolds a value applying @f@ until the condition @p@ is true or @n@ times
--
> > > pl @(IterateNUntil 10 ( I d < = 90 ) Pred ) 95
-- Present [95,94,93,92,91] ((>>) [95,94,93,92,91] | {Map [95,94,93,92,91] | [(10,95),(9,94),(8,93),(7,92),(6,91)]})
-- Val [95,94,93,92,91]
--
> > > pl @(IterateNUntil 3 ( I d < = 90 ) Pred ) 95
Present [ 95,94,93 ] ( ( > > ) [ 95,94,93 ] | { Map [ 95,94,93 ] | [ ( 3,95),(2,94),(1,93 ) ] } )
-- Val [95,94,93]
--
> > > pl @(IterateNUntil 9999 ' False I d ) 1
Error Unfoldr ( 9999,1):recursion limit i=100 ( ( 9999,1 ) )
-- Fail "Unfoldr (9999,1):recursion limit i=100"
--
data IterateNUntil n p f deriving Show
type IterateNUntilT n p f = IterateNWhile n (Not p) f
instance P (IterateNUntilT n p f) x => P (IterateNUntil n p f) x where
type PP (IterateNUntil n p f) x = PP (IterateNUntilT n p f) x
eval _ = eval (Proxy @(IterateNUntilT n p f))
data ParaImpl (n :: Nat) (os :: [k]) deriving Show
-- | runs values in parallel unlike 'Do' which is serial
--
> > > pz @(Para ' [ Id , Id + 1,Id * 4 ] ) [ 10,20,30 ]
-- Val [10,21,120]
--
> > > pz @(Para ' [ Id , Id + 1,Id * 4 ] ) [ 10,20,30,40 ]
Fail " Para : invalid length(4 ) expected 3 "
--
> > > pl @(Para ' [ W ' True , , W ' False , Lt 2 ] ) [ 1,2,-99,-999 ]
Present [ True , False , False , True ] ( Para(0 ) [ True , False , False , True ] | [ 1,2,-99,-999 ] )
-- Val [True,False,False,True]
--
> > > pl @(Para ' [ W ' True , , W ' False , Lt 2 ] ) [ 1,2,-99 ]
Error Para : invalid length(3 ) expected 4
Fail " Para : invalid length(3 ) expected 4 "
--
> > > pl @(Para ' [ W ' True , , W ' False , Lt 2 ] ) [ 1,2,-99,-999,1,1,2 ]
Error Para : invalid length(7 ) expected 4
Fail " Para : invalid length(7 ) expected 4 "
--
data Para (ps :: [k]) deriving Show
passthru but adds the length of ps ( replaces LenT in the type synonym to avoid type synonyms being expanded out
instance ( [a] ~ x
, GetLen ps
, P (ParaImpl (LenT ps) ps) x
) => P (Para ps) x where
type PP (Para ps) x = PP (ParaImpl (LenT ps) ps) x
eval _ opts as' = do
let msg0 = "Para"
n = getLen @ps
case chkSize opts msg0 as' [] of
Left e -> pure e
Right (asLen,as)
| n == asLen -> eval (Proxy @(ParaImpl (LenT ps) ps)) opts as
| otherwise ->
let msg1 = msg0 <> badLength asLen n
in pure $ mkNode opts (Fail msg1) "" []
-- only allow non empty lists -- might need [a] ~ x but it seems fine
instance GL.TypeError ('GL.Text "ParaImpl '[] invalid: requires at least one value in the list")
=> P (ParaImpl n ('[] :: [k])) x where
type PP (ParaImpl n ('[] :: [k])) x = Void
eval _ _ _ = errorInProgram "ParaImpl empty list"
instance ( KnownNat n
, Show a
, Show (PP p a)
, P p a
, x ~ [a]
) => P (ParaImpl n '[p]) x where
type PP (ParaImpl n '[p]) x = [PP p (ExtractAFromTA x)]
eval _ opts as' = do
let msgbase0 = "Para"
msgbase1 = msgbase0 <> "(" <> show (n-1) <> ")"
n = nat @n @Int
case as' of
[a] -> do
pp <- eval (Proxy @p) opts a
pure $ case getValueLR NoInline opts msgbase1 pp [] of
Left e -> e
Right b ->
let ret = [b]
in mkNode opts (Val ret) (msgbase1 <> " " <> showL opts ret <> showVerbose opts " | " a) [hh pp]
_ -> errorInProgram $ "ParaImpl base case should have exactly one element but found " ++ show as'
instance ( KnownNat n
, GetLen ps
, P p a
, P (ParaImpl n (p1 ': ps)) x
, PP (ParaImpl n (p1 ': ps)) x ~ [PP p a]
, Show a
, Show (PP p a)
, x ~ [a]
)
=> P (ParaImpl n (p ': p1 ': ps)) x where
type PP (ParaImpl n (p ': p1 ': ps)) x = [PP p (ExtractAFromTA x)]
eval _ _ [] = errorInProgram "ParaImpl n+1 case has no data left"
eval _ opts (a:as) = do
let cpos = n-pos-1
msgbase0 = "Para(" <> show cpos <> " of " <> show (n-1) <> ")"
msgbase1 = "Para(" <> show cpos <> ")"
n = nat @n
pos = 1 + getLen @ps
pp <- eval (Proxy @p) opts a
case getValueLR NoInline opts msgbase0 pp [] of
Left e -> pure e
Right b -> do
qq <- eval (Proxy @(ParaImpl n (p1 ': ps))) opts as
pure $ case getValueLR Inline opts "" qq [hh pp] of
Left e -> e
Right bs -> mkNode opts (Val (b:bs)) (msgbase1 <> " " <> showL opts (b:bs) <> showVerbose opts " | " (a:as)) [hh pp, hh qq]
-- | leverages 'Para' for repeating expressions (passthrough method)
--
> > > pz @(ParaN 4 Succ ) [ 1 .. 4 ]
Val [ 2,3,4,5 ]
--
> > > pz @(ParaN 4 Succ ) " "
Fail " Para : invalid length(5 ) expected 4 "
--
-- >>> pz @(ParaN 4 Succ) "azwx"
-- Val "b{xy"
--
> > > pl @(ParaN 5 ( Guard " 0 - 255 " ( Between 0 255 I d ) ) ) [ 1,2,3,4,12 ]
Present [ 1,2,3,4,12 ] ( Para(0 ) [ 1,2,3,4,12 ] | [ 1,2,3,4,12 ] )
Val [ 1,2,3,4,12 ]
--
-- >>> pl @(ParaN 5 (Guard "0-255" (Between 0 255 Id))) [1,2,3,400,12]
Error 0 - 255 ( Guard | 400 | Para(3 of 4 ) )
-- Fail "0-255"
--
> > > pz @(ParaN 5 ( Guard ( PrintF " bad value % d " I d ) ( Between 0 255 I d ) ) ) [ 1,2,3,400,12 ]
Fail " bad value 400 "
--
> > > pl @(ParaN 4 ( PrintF " % 03d " I d ) ) [ 141,21,3,0 ]
Present [ " 141","021","003","000 " ] ( Para(0 ) [ " 141","021","003","000 " ] | [ 141,21,3,0 ] )
Val [ " 141","021","003","000 " ]
--
data ParaN (n :: Nat) p deriving Show
instance ( x ~ [a]
, P (ParaImpl (LenT (RepeatT n p)) (RepeatT n p)) x
, GetLen (RepeatT n p)
) => P (ParaN n p) x where
type PP (ParaN n p) x = PP (Para (RepeatT n p)) x
eval _ = eval (Proxy @(Para (RepeatT n p)))
| creates a promoted list of predicates and then evaluates them into a list . see PP instance for ' [ k ]
--
-- >>> pz @(Repeat 4 Succ) 'c'
-- Val "dddd"
--
> > > pz @(Repeat 4 " abc " ) ( )
-- Val ["abc","abc","abc","abc"]
--
> > > pl @(Repeat 4 " xy " ) 3
Present [ " xy","xy","xy","xy " ] ( ' [ " xy","xy","xy","xy " ] ( ' " xy " ) | 3 )
-- Val ["xy","xy","xy","xy"]
--
data Repeat (n :: Nat) p deriving Show
instance P (RepeatT n p) a => P (Repeat n p) a where
type PP (Repeat n p) a = PP (RepeatT n p) a
eval _ = eval (Proxy @(RepeatT n p))
-- | leverages 'Do' for repeating predicates (passthrough method)
same as @DoN n p = = FoldN n p Id@ but more efficient
--
-- >>> pz @(DoN 4 Succ) 'c'
-- Val 'g'
--
> > > pz @(DoN 4 ( I d < > " | " ) ) " abc "
-- Val "abc | | | | "
--
> > > pz @(DoN 4 ( I d < > " | " < > I d ) ) " abc "
-- Val "abc|abc|abc|abc|abc|abc|abc|abc|abc|abc|abc|abc|abc|abc|abc|abc"
--
> > > pl @(DoN 4 ( I d + 4 ) ) 1
Present 17 ( ( > > ) 17 | { 13 + 4 = 17 } )
-- Val 17
--
> > > pl @(DoN 4 ( I d + 7 ) ) 3
Present 31 ( ( > > ) 31 | { 24 + 7 = 31 } )
-- Val 31
--
-- >>> pl @(DoN 4 9) ()
-- Present 9 ((>>) 9 | {'9})
-- Val 9
--
> > > pl @(DoN 4 " xy " ) 3
-- Present "xy" ((>>) "xy" | {'"xy"})
-- Val "xy"
--
data DoN (n :: Nat) p deriving Show
type DoNT (n :: Nat) p = Do (RepeatT n p)
instance P (DoNT n p) a => P (DoN n p) a where
type PP (DoN n p) a = PP (DoNT n p) a
eval _ = eval (Proxy @(DoNT n p))
| null | https://raw.githubusercontent.com/gbwey/predicate-typed/51f8d51f662722e1109d2ff35644aea1e0371b42/src/Predicate/Data/Iterator.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE RankNTypes #
# LANGUAGE OverloadedStrings #
# LANGUAGE ConstraintKinds #
# LANGUAGE EmptyDataDeriving #
| promoted iterator functions
** functions
** type families
$setup
>>> import Predicate
>>> :set -XDataKinds
>>> :set -XTypeApplications
>>> :set -XTypeOperators
>>> :set -XAllowAmbiguousTypes
>>> :set -XOverloadedStrings
>>> :set -XFlexibleContexts
>>> import Data.Time
want to pass Proxy b to q but then we have no way to calculate 'b'
scanr :: (a -> b -> b) -> b -> [a] -> [b]
| iterates n times keeping all the results
Val "cdefg"
Val [('g','g'),('f','h'),('e','i'),('d','j'),('c','k')]
Val [4,5,6]
Val [99,98,97,96,95]
>>> pl @(ScanNA Succ) (4,'a')
Val "abcde"
Val ["abcd","bcd","cd","d",""]
Val ["abcd","bcd","cd","d",""]
| iterates n times keeping only the last result
Val 'g'
>>> pl @(FoldN 2 Succ Id) LT
>>> pl @(FoldN 30 Succ Id) LT
>>> pl @(FoldN 6 Succ Id) 'a'
Present 'g' ((>>) 'g' | {Last 'g' | "abcdefg"})
Val 'g'
>>> pl @(FoldN 6 Pred Id) 'a'
Present '[' ((>>) '[' | {Last '[' | "a`_^]\\["})
Val '['
>>> pl @(FoldN 0 Succ Id) LT
Present LT ((>>) LT | {Last LT | [LT]})
>>> pl @(FoldN 2 Succ Id >> FoldN 2 Pred Id) LT
same as above
Val "abcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabc"
| Foldl similar to 'foldl'
Val 16
Present (True,16) ((>>) (True,16) | {Last (True,16) | [(True,1),(True,4),(True,7),(True,9),(True,16)]})
Val (True,16)
Present (False,16) ((>>) (False,16) | {Last (False,16) | [(True,1),(True,4),(True,7),(True,9),(True,16),(False,16)]})
Val (False,16)
Present (True,11) ((>>) (True,11) | {Last (True,11) | [(True,-10),(True,2),(True,3),(True,4),(True,10),(True,11)]})
Val (True,11)
Val ["abc","bcd","cde","def","efg","fgh","ghi","hi","i"]
Fail "yy"
Fail "yy"
Fail "xx"
|
+- P Id 8
|
+- P i=1: If 'False Just (0,4)
|
|
|
|
`- P i=5: If 'True Nothing
have to rewrite (a,s) to (a,(s,n)) hence the L11 ...
Present [95,94,93,92,91] ((>>) [95,94,93,92,91] | {Map [95,94,93,92,91] | [(10,95),(9,94),(8,93),(7,92),(6,91)]})
Val [95,94,93,92,91]
Val [95,94,93]
Present [95,94,93,92,91] ((>>) [95,94,93,92,91] | {Map [95,94,93,92,91] | [(10,95),(9,94),(8,93),(7,92),(6,91)]})
Val [95,94,93,92,91]
Val [95,94,93]
Fail "Unfoldr (9999,1):recursion limit i=100"
| runs values in parallel unlike 'Do' which is serial
Val [10,21,120]
Val [True,False,False,True]
only allow non empty lists -- might need [a] ~ x but it seems fine
| leverages 'Para' for repeating expressions (passthrough method)
>>> pz @(ParaN 4 Succ) "azwx"
Val "b{xy"
>>> pl @(ParaN 5 (Guard "0-255" (Between 0 255 Id))) [1,2,3,400,12]
Fail "0-255"
>>> pz @(Repeat 4 Succ) 'c'
Val "dddd"
Val ["abc","abc","abc","abc"]
Val ["xy","xy","xy","xy"]
| leverages 'Do' for repeating predicates (passthrough method)
>>> pz @(DoN 4 Succ) 'c'
Val 'g'
Val "abc | | | | "
Val "abc|abc|abc|abc|abc|abc|abc|abc|abc|abc|abc|abc|abc|abc|abc|abc"
Val 17
Val 31
>>> pl @(DoN 4 9) ()
Present 9 ((>>) 9 | {'9})
Val 9
Present "xy" ((>>) "xy" | {'"xy"})
Val "xy"
| # LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeApplications #
# LANGUAGE DataKinds #
# LANGUAGE TypeFamilies #
# LANGUAGE PolyKinds #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE NoStarIsType #
module Predicate.Data.Iterator (
Scanl
, ScanN
, ScanNA
, FoldN
, Foldl
, Unfoldr
, IterateUntil
, IterateWhile
, IterateNWhile
, IterateNUntil
, UnfoldN
, Para
, ParaN
, DoN
, Repeat
, UnfoldrT
) where
import Predicate.Core
import Predicate.Misc
import Predicate.Util
import Predicate.Data.Tuple (type (***))
import Predicate.Data.Ordering (type (>))
import Predicate.Data.Enum (type (...), Pred)
import Predicate.Data.List (Last)
import Predicate.Data.Maybe (MaybeBool)
import GHC.TypeLits (Nat, KnownNat, ErrorMessage((:$$:),(:<>:)))
import qualified GHC.TypeLits as GL
import Data.Kind (Type)
import Control.Lens
import Data.Proxy (Proxy(Proxy))
import Data.Maybe (catMaybes)
import Control.Arrow (Arrow((&&&)))
import Data.Void (Void)
| similar to ' '
> > > pz @(Scanl ( Snd : + Fst ) ) ( [ 99],[1 .. 5 ] )
Val [ [ 99],[1,99],[2,1,99],[3,2,1,99],[4,3,2,1,99],[5,4,3,2,1,99 ] ]
> > > pl @(Scanl ( Snd : + Fst ) ) ( [ 99 ] , [ ] )
Present [ [ 99 ] ] ( [ [ 99 ] ] | b=[99 ] | as= [ ] )
Val [ [ 99 ] ]
data Scanl p q r deriving Show
result is but signature is flipped ( ( a , b ) - > b ) - > b - > [ a ] - > [ b ]
instance ( PP p (b,a) ~ b
, PP q x ~ b
, PP r x ~ [a]
, P p (b,a)
, P q x
, P r x
, Show b
, Show a
)
=> P (Scanl p q r) x where
type PP (Scanl p q r) x = [PP q x]
eval _ opts z = do
let msg0 = "Scanl"
lr <- runPQ NoInline msg0 (Proxy @q) (Proxy @r) opts z []
case lr of
Left e -> pure e
Right (q,r,qq,rr) ->
case chkSize opts msg0 r [hh rr] of
Left e -> pure e
Right _ -> do
let ff i b as' rs
| i >= getMaxRecursionValue opts = pure (rs, Left $ mkNode opts (Fail (msg0 <> ":recursion limit i=" <> show i)) ("(b,as')=" <> showL opts (b,as')) [])
| otherwise =
case as' of
+ + [ ( ( i , q ) , mkNode opts ( q ) ( msg0 < > " ( done ) " ) [ ] ) ] , Right ( ) )
a:as -> do
pp :: TT b <- evalHide @p opts (b,a)
case getValueLR NoInline opts (msg0 <> " i=" <> show i <> " a=" <> showL opts a) pp [] of
Left e -> pure (rs,Left e)
Right b' -> ff (i+1) b' as (rs ++ [((i,b), pp)])
(ts,lrx) :: ([((Int, b), TT b)], Either (TT [b]) ()) <- ff 1 q r []
pure $ case splitAndAlign opts msg0 (((0,q), mkNode opts (Val q) (msg0 <> "(initial)") []) : ts) of
Left e -> errorInProgram $ "Scanl e=" ++ show (hh e)
Right abcs ->
let vals = map (view _1) abcs
itts = map (view _2 &&& view _3) abcs
in case lrx of
Left e -> mkNodeCopy opts e msg0 (hh qq : hh rr : map (hh . prefixNumberToTT) itts)
Right () -> mkNode opts (Val vals) (show3' opts msg0 vals "b=" q <> showVerbose opts " | as=" r) (hh qq : hh rr : map (hh . prefixNumberToTT) itts)
> > > pz @(ScanN 4 Succ I d ) ' c '
> > > pz @(Dup > > 4 ( Pred * * * Succ ) I d ) ' g '
> > > pz @(ScanN 4 Succ I d ) 4
Val [ 4,5,6,7,8 ]
> > > pz @('(0,1 ) > > 20 ' ( Snd , Fst + Snd ) I d > > Map ) " sdf "
Val [ 0,1,1,2,3,5,8,13,21,34,55,89,144,233,377,610,987,1597,2584,4181,6765 ]
> > > pl @(ScanN 2 Succ I d ) 4
Present [ 4,5,6 ] ( [ 4,5,6 ] | b=4 | as=[1,2 ] )
> > > pl @(ScanN 5 I d I d ) 4
Present [ 4,4,4,4,4,4 ] ( [ 4,4,4,4,4,4 ] | b=4 | as=[1,2,3,4,5 ] )
Val [ 4,4,4,4,4,4 ]
> > > pl @(ScanN 2 Succ I d > > PadR 10 ( MEmptyT Ordering ) I d ) LT
Present [ LT , EQ , GT , EQ , EQ , EQ , EQ , EQ , EQ , EQ ] ( ( > > ) [ LT , EQ , GT , EQ , EQ , EQ , EQ , EQ , EQ , EQ ] | { PadR 10 pad = EQ [ LT , EQ , GT , EQ , EQ , EQ , EQ , EQ , EQ , EQ ] | [ LT , EQ , GT ] } )
Val [ LT , EQ , GT , EQ , EQ , EQ , EQ , EQ , EQ , EQ ]
> > > pl @(ScanN 4 Pred I d ) 99
Present [ 99,98,97,96,95 ] ( [ 99,98,97,96,95 ] | b=99 | as=[1,2,3,4 ] )
data ScanN n p q deriving Show
n times using q then run p
instance P (ScanNT n p q) x => P (ScanN n p q) x where
type PP (ScanN n p q) x = PP (ScanNT n p q) x
eval _ = eval (Proxy @(ScanNT n p q))
| tuple version of ' '
Present " abcde " ( " abcde " | b='a ' | as=[1,2,3,4 ] )
> > > pl @(ScanNA Tail ) ( 4,"abcd " )
Present [ " abcd","bcd","cd","d " , " " ] ( [ " abcd","bcd","cd","d " , " " ] | b="abcd " | as=[1,2,3,4 ] )
> > > pl @(Len & & & I d > > ScanNA Tail ) " abcd "
Present [ " abcd","bcd","cd","d " , " " ] ( ( > > ) [ " abcd","bcd","cd","d " , " " ] | { [ " abcd","bcd","cd","d " , " " ] | b="abcd " | as=[1,2,3,4 ] } )
data ScanNA q deriving Show
type ScanNAT q = ScanN Fst q Snd
instance P (ScanNAT q) x => P (ScanNA q) x where
type PP (ScanNA q) x = PP (ScanNAT q) x
eval _ = eval (Proxy @(ScanNAT q))
> > > pz @(FoldN 4 Succ I d ) ' c '
> > > pz @(ReadP Day I d > > I d ... FoldN 5 Succ I d ) " 2020 - 07 - 27 "
Val [ 2020 - 07 - 27,2020 - 07 - 28,2020 - 07 - 29,2020 - 07 - 30,2020 - 07 - 31,2020 - 08 - 01 ]
Present GT ( ( > > ) GT | { Last GT | [ LT , EQ , GT ] } )
Val GT
Error Succ IO e = Prelude . . Ordering.succ : bad argument ( )
Fail " Succ IO e = Prelude . . Ordering.succ : bad argument "
Present LT ( ( > > ) LT | { Last LT | [ GT , EQ , LT ] } )
data FoldN n p q deriving Show
type FoldNT n p q = ScanN n p q >> Last
instance P (FoldNT n p q) x => P (FoldN n p q) x where
type PP (FoldN n p q) x = PP (FoldNT n p q) x
eval _ = eval (Proxy @(FoldNT n p q))
> > > pl @(Foldl ( Fst + Snd ) 0 ( 1 ... 10 ) ) ( )
Present 55 ( ( > > ) 55 | { Last 55 | [ 0,1,3,6,10,15,21,28,36,45,55 ] } )
Val 55
> > > pz @(Foldl ( Snd : + Fst ) ' [ 99 ] ( 1 ... 10 ) ) ( )
Val [ 10,9,8,7,6,5,4,3,2,1,99 ]
> > > pl @(Foldl ' ( ) ( EnumFromTo 1 9999 ) ) ( )
Error list size exceeded ( max is 100 )
Fail " list size exceeded "
> > > pl @(Foldl ( Guard " someval " ( Fst < Snd ) > > Snd ) Head Tail ) [ 1,4,7,9,16 ]
Present 16 ( ( > > ) 16 | { Last 16 | [ 1,4,7,9,16 ] } )
> > > pl @(Foldl ( Guard ( PrintT " % d not less than % d " I d ) ( Fst < Snd ) > > Snd ) Head Tail ) [ 1,4,7,6,16 ]
Error 7 not less than 6 ( )
Fail " 7 not less than 6 "
> > > pl @(Foldl ( If ( L11 & & ( Snd > L12 ) ) ' ( ' True , Snd ) ' ( ' False , L12 ) ) ' ( ' True , Head ) Tail ) [ 1,4,7,9,16 ]
> > > pl @(Foldl ( If ( L11 & & ( Snd > L12 ) ) ' ( ' True , Snd ) ' ( ' False , L12 ) ) ' ( ' True , Head ) Tail ) [ 1,4,7,9,16,2 ]
> > > pl @(Foldl ( Snd : + Fst ) ( MEmptyT [ _ ] ) I d ) [ 1 .. 5 ]
Present [ 5,4,3,2,1 ] ( ( > > ) [ 5,4,3,2,1 ] | { Last [ 5,4,3,2,1 ] | [ [ ] , [ 1],[2,1],[3,2,1],[4,3,2,1],[5,4,3,2,1 ] ] } )
Val [ 5,4,3,2,1 ]
> > > pl @('Just Uncons > > ( If L11 ( If ( L12 < Snd ) ' ( ' True , Snd ) ' ( ' False , Snd ) ) ) ' ( ' True , ) Snd ) [ -10,-2,2,3,4,10,9,11 ]
Present ( False,9 ) ( ( > > ) ( False,9 ) | { Last ( False,9 ) | [ ( True,-10),(True,-2),(True,2),(True,3),(True,4),(True,10),(False,9),(False,9 ) ] } )
Val ( False,9 )
> > > pl @('Just Uncons > > ( If L11 ( If ( L12 < Snd ) ' ( ' True , Snd ) ' ( ' False , Snd ) ) ) ' ( ' True , ) Snd ) [ -10,2,3,4,10,11 ]
-- | left fold with step @p@, seed @q@ over the list @r@: like 'Scanl' but
--   only the final accumulator is returned.
data Foldl p q r deriving Show
-- 'Scanl' produces every intermediate accumulator; 'Last' keeps the final one
type FoldLT p q r = Scanl p q r >> Last
instance P (FoldLT p q r) x => P (Foldl p q r) x where
  type PP (Foldl p q r) x = PP (FoldLT p q r) x
  eval _ = eval (Proxy @(FoldLT p q r))
| similar to ' '
> > > pz @(Unfoldr ( ( Not Null ) ( SplitAt 2 I d ) ) I d ) [ 1 .. 5 ]
Val [ [ 1,2],[3,4],[5 ] ]
> > > pl @(Unfoldr ( If Null ( MkNothing _ ) ( ' ( Take 3 I d , Drop 1 I d ) > > MkJust I d ) ) I d ) " abcdefghi "
Present [ " abc","bcd","cde","def","efg","fgh","ghi","hi","i " ] ( " abcdefghi " [ " abc","bcd","cde","def","efg","fgh","ghi","hi","i " ] | s="abcdefghi " )
> > > pl @(Unfoldr ( If Null ( MkNothing _ ) ( Pure _ ( SplitAt 2 I d ) ) ) I d ) [ 1 .. 5 ]
Present [ [ 1,2],[3,4],[5 ] ] ( [ 1,2,3,4,5 ] [ [ 1,2],[3,4],[5 ] ] | s=[1,2,3,4,5 ] )
Val [ [ 1,2],[3,4],[5 ] ]
> > > pl @(Unfoldr ( ( Not Null ) ( SplitAt 2 I d ) ) I d ) [ 1 .. 5 ]
Present [ [ 1,2],[3,4],[5 ] ] ( [ 1,2,3,4,5 ] [ [ 1,2],[3,4],[5 ] ] | s=[1,2,3,4,5 ] )
Val [ [ 1,2],[3,4],[5 ] ]
> > > pl @(Unfoldr ( If Null ( MkNothing _ ) ( Guard " yy " ( Len < 3 ) > > Pure _ ( SplitAt 2 I d ) ) ) I d ) [ 1 .. 5 ]
Error yy ( [ 1,2,3,4,5 ] )
> > > pl @(Unfoldr ( ( Not Null ) ( Guard " yy " ( Len < 3 ) > > SplitAt 2 I d ) ) I d ) [ 1 .. 5 ]
Error yy ( [ 1,2,3,4,5 ] )
> > > pl @(Unfoldr ( Guard " xx " ( Len > 4 ) > > Uncons ) I d ) [ 1 .. 10 ]
Error xx ( [ 1,2,3,4,5,6,7,8,9,10 ] )
> > > pl @(Unfoldr Uncons I d ) [ 1 .. 10 ]
Present [ 1,2,3,4,5,6,7,8,9,10 ] ( [ 1,2,3,4,5,6,7,8,9,10 ] [ 1,2,3,4,5,6,7,8,9,10 ] | s=[1,2,3,4,5,6,7,8,9,10 ] )
Val [ 1,2,3,4,5,6,7,8,9,10 ]
> > > pan @(Unfoldr ( If ( I d < 1 ) ( MkNothing _ ) ( MkJust ( DivMod I d 2 > > Swap ) ) ) I d ) 8
P Unfoldr 8 [ 0,0,0,1 ]
+ - P i=2 : If ' False Just ( 0,2 )
+ - P i=3 : If ' False Just ( 0,1 )
+ - P i=4 : If ' False Just ( 1,0 )
Val [ 0,0,0,1 ]
-- | unfold a seed: @q@ produces the initial state @s@ and @p@ is applied
--   repeatedly, yielding @Maybe (b, s)@ -- 'Just' emits a value and a new
--   state, 'Nothing' stops the unfold.
data Unfoldr p q deriving Show
instance ( PP q a ~ s
         , PP p s ~ Maybe (b,s)
         , P q a
         , P p s
         , Show s
         , Show b
         )
  => P (Unfoldr p q) a where
  type PP (Unfoldr p q) a = [UnfoldrT (PP p (PP q a))]
  eval _ opts z = do
    let msg0 = "Unfoldr"
    qq <- eval (Proxy @q) opts z
    case getValueLR NoInline opts msg0 qq [] of
      Left e -> pure e
      Right q -> do
        let msg1 = msg0 <> " " <> showL opts q
            -- ff: i = iteration counter, s = current seed,
            -- rs = accumulated per-step evaluation trees.
            -- Bails out with a Fail node once the recursion limit is hit,
            -- so a non-terminating step predicate cannot loop forever.
            ff i s rs | i >= getMaxRecursionValue opts = pure (rs, Left $ mkNode opts (Fail (msg1 <> ":recursion limit i=" <> show i)) ("s=" <> showL opts s) [])
                      | otherwise = do
                          pp :: TT (PP p s) <- evalHide @p opts s
                          case getValueLR NoInline opts (msg1 <> " i=" <> show i <> " s=" <> show s) pp [] of
                            Left e -> pure (rs, Left e)
                            -- Nothing terminates the unfold normally
                            Right Nothing -> pure (rs ++ [((i,Nothing), pp)], Right ())
                            -- Just (b, s') emits b and continues from s'
                            Right w@(Just (_b,s')) -> ff (i+1) s' (rs ++ [((i,w), pp)])
        (ts,lr) :: ([((Int, PP p s), TT (PP p s))], Either (TT [b]) ()) <- ff 1 q []
        pure $ case splitAndAlign opts msg1 ts of
          Left e -> errorInProgram $ "Unfoldr e=" ++ show (hh e)
          Right abcs ->
            let vals = map (view _1) abcs
                itts = map (view _2 &&& view _3) abcs
            in case lr of
                 -- a step failed (or the recursion limit fired): propagate it,
                 -- keeping the per-iteration trees for the trace
                 Left e -> mkNodeCopy opts e msg1 (hh qq : map (hh . prefixNumberToTT) itts)
                 Right () ->
                   -- collect the emitted b's (the final Nothing contributes none)
                   let ret = fst <$> catMaybes vals
                   in mkNode opts (Val ret) (show3' opts msg1 ret "s=" q) (hh qq : map (hh . prefixNumberToTT) itts)
| calculate the return type for ' '
-- | calculate the element type produced by 'Unfoldr': extracts @b@ from
--   @'Maybe' (b, s)@ and raises a descriptive 'GL.TypeError' for anything else.
type family UnfoldrT (mbs :: Type) where
  UnfoldrT (Maybe (b, _)) = b
  UnfoldrT o = GL.TypeError (
      'GL.Text "UnfoldrT: expected 'Maybe (b, _)' "
      ':$$: 'GL.Text "o = "
      ':<>: 'GL.ShowType o)
| run times with state @s@
-- | run @p@ at most @n@ times starting from state @s@.
data UnfoldN n p s deriving Show
-- seed is the pair @'(s,n)@; the unfold continues while the countdown
-- (second component) is positive, applying @p@ and 'Pred' in lockstep
type IterateNT n p s = Unfoldr (MaybeBool (Snd > 0) ((p *** Pred) >> '(L11,'(L12,Snd)))) '(s,n)
instance P (IterateNT n p s) x => P (UnfoldN n p s) x where
  type PP (UnfoldN n p s) x = PP (IterateNT n p s) x
  eval _ = eval (Proxy @(IterateNT n p s))
| unfolds a value applying @f@ until the condition @p@ is true
> > > pl @(IterateUntil ( I d < 90 ) Pred ) 94
Present [ 94,93,92,91,90 ] ( [ 94,93,92,91,90 ] | s=94 )
Val [ 94,93,92,91,90 ]
-- | unfolds a value applying @f@ until the condition @p@ is true.
data IterateUntil p f deriving Show
-- the dual of 'IterateWhile': iterate while @p@ does NOT hold
type IterateUntilT p f = IterateWhile (Not p) f
instance P (IterateUntilT p f) x => P (IterateUntil p f) x where
  type PP (IterateUntil p f) x = PP (IterateUntilT p f) x
  eval _ = eval (Proxy @(IterateUntilT p f))
| unfolds a value applying @f@ while the condition @p@ is true
> > > pl @(IterateWhile ( I d > 90 ) Pred ) 94
Present [ 94,93,92,91 ] ( 94 [ 94,93,92,91 ] | s=94 )
Val [ 94,93,92,91 ]
-- | unfolds a value applying @f@ while the condition @p@ is true.
data IterateWhile p f deriving Show
-- 'MaybeBool' gates each step: while @p@ holds, emit the current value ('Id')
-- and continue with @f@ applied to it
type IterateWhileT p f = Unfoldr (MaybeBool p '(Id, f)) Id
instance P (IterateWhileT p f) x => P (IterateWhile p f) x where
  type PP (IterateWhile p f) x = PP (IterateWhileT p f) x
  eval _ = eval (Proxy @(IterateWhileT p f))
| unfolds a value applying @f@ while the condition @p@ is true or @n@ times
> > > pl @(IterateNWhile 10 ( I d > 90 ) Pred ) 95
> > > pl @(IterateNWhile 3 ( I d > 90 ) Pred ) 95
Present [ 95,94,93 ] ( ( > > ) [ 95,94,93 ] | { Map [ 95,94,93 ] | [ ( 3,95),(2,94),(1,93 ) ] } )
-- | unfolds a value applying @f@ while the condition @p@ is true, for at most
--   @n@ iterations.
data IterateNWhile n p f deriving Show
-- pair the value with a countdown @n@; iterate while the counter is positive
-- AND @p@ holds on the value, then project the values out with @Map Snd@
type IterateNWhileT n p f = '(n, Id) >> IterateWhile (Fst > 0 && (Snd >> p)) (Pred *** f) >> Map Snd
instance P (IterateNWhileT n p f) x => P (IterateNWhile n p f) x where
  type PP (IterateNWhile n p f) x = PP (IterateNWhileT n p f) x
  eval _ = eval (Proxy @(IterateNWhileT n p f))
| unfolds a value applying @f@ until the condition @p@ is true or @n@ times
> > > pl @(IterateNUntil 10 ( I d < = 90 ) Pred ) 95
> > > pl @(IterateNUntil 3 ( I d < = 90 ) Pred ) 95
Present [ 95,94,93 ] ( ( > > ) [ 95,94,93 ] | { Map [ 95,94,93 ] | [ ( 3,95),(2,94),(1,93 ) ] } )
> > > pl @(IterateNUntil 9999 ' False I d ) 1
Error Unfoldr ( 9999,1):recursion limit i=100 ( ( 9999,1 ) )
-- | unfolds a value applying @f@ until the condition @p@ is true, for at most
--   @n@ iterations.
data IterateNUntil n p f deriving Show
-- the dual of 'IterateNWhile'
type IterateNUntilT n p f = IterateNWhile n (Not p) f
instance P (IterateNUntilT n p f) x => P (IterateNUntil n p f) x where
  type PP (IterateNUntilT n p f) x = PP (IterateNUntilT n p f) x
  eval _ = eval (Proxy @(IterateNUntilT n p f))
data ParaImpl (n :: Nat) (os :: [k]) deriving Show
> > > pz @(Para ' [ Id , Id + 1,Id * 4 ] ) [ 10,20,30 ]
> > > pz @(Para ' [ Id , Id + 1,Id * 4 ] ) [ 10,20,30,40 ]
Fail " Para : invalid length(4 ) expected 3 "
> > > pl @(Para ' [ W ' True , , W ' False , Lt 2 ] ) [ 1,2,-99,-999 ]
Present [ True , False , False , True ] ( Para(0 ) [ True , False , False , True ] | [ 1,2,-99,-999 ] )
> > > pl @(Para ' [ W ' True , , W ' False , Lt 2 ] ) [ 1,2,-99 ]
Error Para : invalid length(3 ) expected 4
Fail " Para : invalid length(3 ) expected 4 "
> > > pl @(Para ' [ W ' True , , W ' False , Lt 2 ] ) [ 1,2,-99,-999,1,1,2 ]
Error Para : invalid length(7 ) expected 4
Fail " Para : invalid length(7 ) expected 4 "
-- | apply each predicate in @ps@ positionally to the corresponding element of
--   the input list; the list length must equal the number of predicates.
data Para (ps :: [k]) deriving Show
-- passthru but adds the length of ps (replaces LenT in the type synonym
-- to avoid type synonyms being expanded out)
instance ( [a] ~ x
         , GetLen ps
         , P (ParaImpl (LenT ps) ps) x
         ) => P (Para ps) x where
  type PP (Para ps) x = PP (ParaImpl (LenT ps) ps) x
  eval _ opts as' = do
    let msg0 = "Para"
        n = getLen @ps
    case chkSize opts msg0 as' [] of
      Left e -> pure e
      Right (asLen,as)
        -- only dispatch to ParaImpl when the arity matches exactly
        | n == asLen -> eval (Proxy @(ParaImpl (LenT ps) ps)) opts as
        | otherwise ->
            let msg1 = msg0 <> badLength asLen n
            in pure $ mkNode opts (Fail msg1) "" []
-- an empty predicate list is rejected at compile time with a custom TypeError;
-- 'eval' is unreachable and guarded by 'errorInProgram'
instance GL.TypeError ('GL.Text "ParaImpl '[] invalid: requires at least one value in the list")
  => P (ParaImpl n ('[] :: [k])) x where
  type PP (ParaImpl n ('[] :: [k])) x = Void
  eval _ _ _ = errorInProgram "ParaImpl empty list"
-- base case: a single remaining predicate must be matched by a single
-- remaining element; anything else indicates an internal error because
-- 'Para' already checked the lengths
instance ( KnownNat n
         , Show a
         , Show (PP p a)
         , P p a
         , x ~ [a]
         ) => P (ParaImpl n '[p]) x where
  type PP (ParaImpl n '[p]) x = [PP p (ExtractAFromTA x)]
  eval _ opts as' = do
    let msgbase0 = "Para"
        -- the last predicate sits at index n-1
        msgbase1 = msgbase0 <> "(" <> show (n-1) <> ")"
        n = nat @n @Int
    case as' of
      [a] -> do
        pp <- eval (Proxy @p) opts a
        pure $ case getValueLR NoInline opts msgbase1 pp [] of
          Left e -> e
          Right b ->
            let ret = [b]
            in mkNode opts (Val ret) (msgbase1 <> " " <> showL opts ret <> showVerbose opts " | " a) [hh pp]
      _ -> errorInProgram $ "ParaImpl base case should have exactly one element but found " ++ show as'
-- inductive case: evaluate the head predicate on the head element, then
-- recurse on the tail; results are prepended so output order matches input
instance ( KnownNat n
         , GetLen ps
         , P p a
         , P (ParaImpl n (p1 ': ps)) x
         , PP (ParaImpl n (p1 ': ps)) x ~ [PP p a]
         , Show a
         , Show (PP p a)
         , x ~ [a]
         )
  => P (ParaImpl n (p ': p1 ': ps)) x where
  type PP (ParaImpl n (p ': p1 ': ps)) x = [PP p (ExtractAFromTA x)]
  eval _ _ [] = errorInProgram "ParaImpl n+1 case has no data left"
  eval _ opts (a:as) = do
    let -- cpos is the zero-based index of this predicate, computed from the
        -- total length n and the number of predicates still to the right
        cpos = n-pos-1
        msgbase0 = "Para(" <> show cpos <> " of " <> show (n-1) <> ")"
        msgbase1 = "Para(" <> show cpos <> ")"
        n = nat @n
        pos = 1 + getLen @ps
    pp <- eval (Proxy @p) opts a
    case getValueLR NoInline opts msgbase0 pp [] of
      Left e -> pure e
      Right b -> do
        qq <- eval (Proxy @(ParaImpl n (p1 ': ps))) opts as
        pure $ case getValueLR Inline opts "" qq [hh pp] of
          Left e -> e
          Right bs -> mkNode opts (Val (b:bs)) (msgbase1 <> " " <> showL opts (b:bs) <> showVerbose opts " | " (a:as)) [hh pp, hh qq]
> > > pz @(ParaN 4 Succ ) [ 1 .. 4 ]
Val [ 2,3,4,5 ]
> > > pz @(ParaN 4 Succ ) " "
Fail " Para : invalid length(5 ) expected 4 "
> > > pl @(ParaN 5 ( Guard " 0 - 255 " ( Between 0 255 I d ) ) ) [ 1,2,3,4,12 ]
Present [ 1,2,3,4,12 ] ( Para(0 ) [ 1,2,3,4,12 ] | [ 1,2,3,4,12 ] )
Val [ 1,2,3,4,12 ]
Error 0 - 255 ( Guard | 400 | Para(3 of 4 ) )
> > > pz @(ParaN 5 ( Guard ( PrintF " bad value % d " I d ) ( Between 0 255 I d ) ) ) [ 1,2,3,400,12 ]
Fail " bad value 400 "
> > > pl @(ParaN 4 ( PrintF " % 03d " I d ) ) [ 141,21,3,0 ]
Present [ " 141","021","003","000 " ] ( Para(0 ) [ " 141","021","003","000 " ] | [ 141,21,3,0 ] )
Val [ " 141","021","003","000 " ]
-- | apply the same predicate @p@ to each of exactly @n@ elements:
--   @'ParaN' n p@ is 'Para' over @p@ repeated @n@ times.
data ParaN (n :: Nat) p deriving Show
instance ( x ~ [a]
         , P (ParaImpl (LenT (RepeatT n p)) (RepeatT n p)) x
         , GetLen (RepeatT n p)
         ) => P (ParaN n p) x where
  type PP (ParaN n p) x = PP (Para (RepeatT n p)) x
  eval _ = eval (Proxy @(Para (RepeatT n p)))
| creates a promoted list of predicates and then evaluates them into a list . see PP instance for ' [ k ]
> > > pz @(Repeat 4 " abc " ) ( )
> > > pl @(Repeat 4 " xy " ) 3
Present [ " xy","xy","xy","xy " ] ( ' [ " xy","xy","xy","xy " ] ( ' " xy " ) | 3 )
-- | creates a promoted list with @p@ repeated @n@ times and evaluates it
--   (see the @P '[k]@ instance).
data Repeat (n :: Nat) p deriving Show
instance P (RepeatT n p) a => P (Repeat n p) a where
  type PP (Repeat n p) a = PP (RepeatT n p) a
  eval _ = eval (Proxy @(RepeatT n p))
same as @DoN n p = = FoldN n p Id@ but more efficient
> > > pz @(DoN 4 ( I d < > " | " ) ) " abc "
> > > pz @(DoN 4 ( I d < > " | " < > I d ) ) " abc "
> > > pl @(DoN 4 ( I d + 4 ) ) 1
Present 17 ( ( > > ) 17 | { 13 + 4 = 17 } )
> > > pl @(DoN 4 ( I d + 7 ) ) 3
Present 31 ( ( > > ) 31 | { 24 + 7 = 31 } )
> > > pl @(DoN 4 " xy " ) 3
-- | compose @p@ with itself @n@ times ('Do' over @p@ repeated @n@ times);
--   equivalent to @FoldN n p Id@ but more efficient.
data DoN (n :: Nat) p deriving Show
type DoNT (n :: Nat) p = Do (RepeatT n p)
instance P (DoNT n p) a => P (DoN n p) a where
  type PP (DoN n p) a = PP (DoNT n p) a
  eval _ = eval (Proxy @(DoNT n p))
|
a9517f09d1acdc941dff572af00061ae2c118c4603a60b24390e25ff65f4613c | anuragsoni/shuttle | client.mli | open! Core
open! Async
module Address : sig
type t [@@deriving sexp, equal, compare, hash]
include Comparable.S with type t := t
include Hashable.S with type t := t
val of_host_and_port : Host_and_port.t -> t
val of_unix_domain_socket : Filename.t -> t
end
module Ssl_options : sig
type t [@@deriving sexp_of]
(** Ssl options that should be used when using a client over an encrypted connection.
    This can be used either when sending a {{!Shuttle_http.Client.Oneshot.call} one-shot
    request}, or when creating a client that supports keep-alive. If hostname is
    provided it'll be used for validating that the hostname in the peer's ssl
    certificate matches the hostname requested by the client. *)
val create
: ?version:Async_ssl.Version.t
-> ?options:Async_ssl.Opt.t list
-> ?name:string
-> ?hostname:string
-> ?allowed_ciphers:[ `Only of string list | `Openssl_default | `Secure ]
-> ?ca_file:string
-> ?ca_path:string
-> ?crt_file:string
-> ?key_file:string
-> ?verify_modes:Async_ssl.Verify_mode.t list
-> ?session:Async_ssl.Ssl.Session.t
-> ?verify_certificate:(Async_ssl.Ssl.Connection.t -> unit Or_error.t)
-> unit
-> t
end
(** HTTP/1.1 client that supports keep-alives. A client entity can be created once with an
{{!Shuttle_http.Client.Address.t} address} and re-used for multiple requests. The
client is closed either when a user explicitly {{!Shuttle_http.Client.close} closes}
it, or if there is an exception when performing a HTTP request using the client.
It is the responsiblity of the user to check that a http call raised an exception and
avoid using a connection once an exception is seen in a call. *)
type t [@@deriving sexp_of]
(** Initiate a TCP connection targeting the user provided Address and perform SSL
    handshake if needed. If an interrupt deferred is provided the underlying socket is
    closed when it resolves. If address is a host + port pair the client will
    automatically populate the Host HTTP header for outgoing calls, and ensure that SNI
    and hostname validation is configured if using an SSL connection. *)
val create
: ?interrupt:unit Deferred.t
-> ?connect_timeout:Time.Span.t
-> ?ssl:Ssl_options.t
-> Address.t
-> t Deferred.Or_error.t
(** [Remote_connection_closed] is raised if an EOF is reached before the
    full response has been read. *)
exception Remote_connection_closed
(** [Request_aborted] is raised if attempting to enqueue a request within a closed http
client. *)
exception Request_aborted
(** [call] Attempts to perform a HTTP request using the user provided client. If the
response contains a "Connection: close" header or if there's an exception when
performing the call the client will be closed and should not be used for any future
calls. If performing multiple calls on a client, users should ensure to only wait on a
response promise if all previous responses have been fully read. *)
val call : t -> Request.t -> Response.t Deferred.t
(** [is_closed] returns if the client has been closed. *)
val is_closed : t -> bool
(** [closed] returns a deferred that's resolved when the http client is closed. *)
val closed : t -> unit Deferred.t
(** [close] initiates shutdown for an http client. Any request that's currently in-flight
    will be attempted, and any pending requests will fail with the
    {{!Shuttle_http.Client.Request_aborted} exception}. *)
val close : t -> unit Deferred.t
module Oneshot : sig
  (** [call] performs a one-shot http client call to the user provided connection target.
      If ssl options are provided the client will attempt to set up a SSL connection. If
      ssl options contain a hostname then the client will perform hostname verification to
      ensure the hostname on the peer's ssl certificate matches the hostname provided by
      the caller. To disable this check or to customize how the ssl certificate is
      validated users can provide their own implementation of [verify_certificate] when
      creating the {{!Shuttle_http.Client.Ssl_options.t} ssl} options. *)
val call
: ?interrupt:unit Deferred.t
-> ?connect_timeout:Time.Span.t
-> ?ssl:Ssl_options.t
-> Address.t
-> Request.t
-> Response.t Deferred.t
end
(** Persistent clients, not to be confused with HTTP/1.1 persistent connections are
durable clients that maintain a connection to a service and eagerly and repeatedly
reconnect if the underlying socket connection is lost. *)
module Persistent : sig
type t [@@deriving sexp_of]
  (** Create a new persistent http connection. Random state is forwarded to
      {{!Async_kernel.Persistent_connection_kernel} async} and is used to randomize how
      long to wait between re-connection attempts. A user provided callback is used to
      retrieve the address to connect to. Users can use this to potentially maintain a
      pool of service addresses to target, and decide to use a new target address if the
      underlying tcp connection is closed. *)
val create
: ?random_state:[ `Non_random | `State of Random.State.t ]
-> ?retry_delay:(unit -> Time_ns.Span.t)
-> ?time_source:Time_source.t
-> ?ssl:Ssl_options.t
-> server_name:string
-> (unit -> Address.t Deferred.Or_error.t)
-> t
(** [closed] returns a deferred that's resolved when the http client is closed. *)
val closed : t -> unit Deferred.t
(** [is_closed] returns if the client has been closed. *)
val is_closed : t -> bool
(** [close] tears down the persistent connection. The deferred returned will resolve
once the underlying http connection is closed. *)
val close : t -> unit Deferred.t
  (** [call] attempts to perform a HTTP request using the user provided client. If the
      underlying http connection has closed between two calls, and the user hasn't called
      {{!Shuttle_http.Client.Persistent.close} close} on the persistent connection, this
      function will initiate a new http connection and then perform the http client call. *)
val call : t -> Request.t -> Response.t Deferred.t
end
| null | https://raw.githubusercontent.com/anuragsoni/shuttle/4b6b477cc9cc811ad717e9ac663b4c4e7346f4b6/http11/src/client.mli | ocaml | * HTTP/1.1 client that supports keep-alives. A client entity can be created once with an
{{!Shuttle_http.Client.Address.t} address} and re-used for multiple requests. The
client is closed either when a user explicitly {{!Shuttle_http.Client.close} closes}
it, or if there is an exception when performing a HTTP request using the client.
It is the responsiblity of the user to check that a http call raised an exception and
avoid using a connection once an exception is seen in a call.
* [Request_aborted] is raised if attempting to enqueue a request within a closed http
client.
* [call] Attempts to perform a HTTP request using the user provided client. If the
response contains a "Connection: close" header or if there's an exception when
performing the call the client will be closed and should not be used for any future
calls. If performing multiple calls on a client, users should ensure to only wait on a
response promise if all previous responses have been fully read.
* [is_closed] returns if the client has been closed.
* [closed] returns a deferred that's resolved when the http client is closed.
* Persistent clients, not to be confused with HTTP/1.1 persistent connections are
durable clients that maintain a connection to a service and eagerly and repeatedly
reconnect if the underlying socket connection is lost.
* [closed] returns a deferred that's resolved when the http client is closed.
* [is_closed] returns if the client has been closed.
* [close] tears down the persistent connection. The deferred returned will resolve
once the underlying http connection is closed. | open! Core
open! Async
module Address : sig
type t [@@deriving sexp, equal, compare, hash]
include Comparable.S with type t := t
include Hashable.S with type t := t
val of_host_and_port : Host_and_port.t -> t
val of_unix_domain_socket : Filename.t -> t
end
module Ssl_options : sig
type t [@@deriving sexp_of]
* ssl options that should be used when using a client over an encrypted connection .
This can be used either when sending a { { ! Shuttle_http.Client.Oneshot.call } one - shot
request } , or when creating a client that supports keep - alive . If hostname is
provided it 'll be used for validating that the hostname in the peer 's ssl
certificate matches the hostname requested by the client .
This can be used either when sending a {{!Shuttle_http.Client.Oneshot.call} one-shot
request}, or when creating a client that supports keep-alive. If hostname is
provided it'll be used for validating that the hostname in the peer's ssl
certificate matches the hostname requested by the client. *)
val create
: ?version:Async_ssl.Version.t
-> ?options:Async_ssl.Opt.t list
-> ?name:string
-> ?hostname:string
-> ?allowed_ciphers:[ `Only of string list | `Openssl_default | `Secure ]
-> ?ca_file:string
-> ?ca_path:string
-> ?crt_file:string
-> ?key_file:string
-> ?verify_modes:Async_ssl.Verify_mode.t list
-> ?session:Async_ssl.Ssl.Session.t
-> ?verify_certificate:(Async_ssl.Ssl.Connection.t -> unit Or_error.t)
-> unit
-> t
end
type t [@@deriving sexp_of]
* Initiate a TCP connection targeting the user provided Address and perform SSL
handshake if needed . If an interrup deferred is provided the underlying socket is
closed when it resolves . If address is a host + port pair the client will
automatically populate the Host HTTP header for outgoing calls , and ensure that SNI
and hostname validation is configured if using an SSL connection .
handshake if needed. If an interrup deferred is provided the underlying socket is
closed when it resolves. If address is a host + port pair the client will
automatically populate the Host HTTP header for outgoing calls, and ensure that SNI
and hostname validation is configured if using an SSL connection. *)
val create
: ?interrupt:unit Deferred.t
-> ?connect_timeout:Time.Span.t
-> ?ssl:Ssl_options.t
-> Address.t
-> t Deferred.Or_error.t
* [ Remote_connection_closed ] is raised if attempting if an EOF is reached before the
full response has been read .
full response has been read. *)
exception Remote_connection_closed
exception Request_aborted
val call : t -> Request.t -> Response.t Deferred.t
val is_closed : t -> bool
val closed : t -> unit Deferred.t
* [ close ] initiates shutdown for an http client . Any request that 's currently in - flight
will be attempt to run , and any pending requests will fail with
{ { : . Client . Request_aborted } exception } .
will be attempt to run, and any pending requests will fail with
{{:Shuttle.Client.Request_aborted} exception}. *)
val close : t -> unit Deferred.t
module Oneshot : sig
* [ call ] Performs a one - shot http client call to the user provided connection target .
If ssl options are provided the client will attempt to setup a SSL connection . If
ssl options contain a hostname then the client will perform hostname verification to
ensure the hostnames on the peer 's ssl certificate matches the hostname provided by
the caller . To disable this check or to customize how the ssl certificate is
validated users can provide their own implementation of [ verify_certificate ] when
creating the { { ! Shuttle_http . Client . Ssl_options.t } ssl } options .
If ssl options are provided the client will attempt to setup a SSL connection. If
ssl options contain a hostname then the client will perform hostname verification to
ensure the hostnames on the peer's ssl certificate matches the hostname provided by
the caller. To disable this check or to customize how the ssl certificate is
validated users can provide their own implementation of [verify_certificate] when
creating the {{!Shuttle_http.Client.Ssl_options.t} ssl} options. *)
val call
: ?interrupt:unit Deferred.t
-> ?connect_timeout:Time.Span.t
-> ?ssl:Ssl_options.t
-> Address.t
-> Request.t
-> Response.t Deferred.t
end
module Persistent : sig
type t [@@deriving sexp_of]
* Create a new persistent http connection . Random state is forwarded to
{ { : Async_kernel . } async } and is used to randomize how
long to wait between re - connection attempts . A user provided callback is used to
retrieve the address to connect to . Users can use this to potentially maintain a
pool of service address to target , and decide to use a new target address if the
underlying tcp connection is closed .
{{:Async_kernel.Persistent_connection_kernel} async} and is used to randomize how
long to wait between re-connection attempts. A user provided callback is used to
retrieve the address to connect to. Users can use this to potentially maintain a
pool of service address to target, and decide to use a new target address if the
underlying tcp connection is closed. *)
val create
: ?random_state:[ `Non_random | `State of Random.State.t ]
-> ?retry_delay:(unit -> Time_ns.Span.t)
-> ?time_source:Time_source.t
-> ?ssl:Ssl_options.t
-> server_name:string
-> (unit -> Address.t Deferred.Or_error.t)
-> t
val closed : t -> unit Deferred.t
val is_closed : t -> bool
val close : t -> unit Deferred.t
* [ call ] Attempts to perform a HTTP request using the user provided client . If the
underlying http connection has closed between two calls , and the user has n't called
{ { ! Shuttle_http.Client.Persistent.close } close } on the persistent connection , this
function will initiate a new http connection and then perform the http client call .
underlying http connection has closed between two calls, and the user hasn't called
{{!Shuttle_http.Client.Persistent.close} close} on the persistent connection, this
function will initiate a new http connection and then perform the http client call. *)
val call : t -> Request.t -> Response.t Deferred.t
end
|
615c326488e17aff07e2295689c7cf16ce4a93534429933419365c7c0c4454d7 | dparis/gen-phzr | camera.cljs | (ns phzr.camera
(:require [phzr.impl.utils.core :refer [clj->phaser phaser->clj]]
[phzr.impl.extend :as ex]
[cljsjs.phaser])
(:refer-clojure :exclude [update]))
(defn ->Camera
  "A Camera is your view into the game world. It has a position and size and renders only those objects within its field of view.
  The game automatically creates a single Stage sized camera on boot. Move the camera around the world with Phaser.Camera.x/y
  Parameters:
    * game (Phaser.Game) - Game reference to the currently running game.
    * id (number) - Not being used at the moment, will be when Phaser supports multiple camera
    * x (number) - Position of the camera on the X axis
    * y (number) - Position of the camera on the Y axis
    * width (number) - The width of the view rectangle
    * height (number) - The height of the view rectangle"
  ([game id x y width height]
   ;; Arguments are converted to JS values with clj->phaser; the raw
   ;; Phaser.Camera JS object is returned (no phaser->clj conversion here).
   (js/Phaser.Camera. (clj->phaser game)
                      (clj->phaser id)
                      (clj->phaser x)
                      (clj->phaser y)
                      (clj->phaser width)
                      (clj->phaser height))))
(defn const
  "Look up a Phaser.Camera constant by keyword.
  Returns the JS constant value, or nil when the keyword is not in
  the camera-constants map."
  [k]
  ;; camera-constants maps keywords to property names on js/Phaser.Camera
  (when-let [cn (get phzr.impl.accessors.camera/camera-constants k)]
    (aget js/Phaser.Camera cn)))
(defn check-bounds
  "Method called to ensure the camera doesn't venture outside of the game world."
  ([camera]
   ;; delegate to Phaser's Camera#checkBounds; result converted via phaser->clj
   (phaser->clj
    (.checkBounds camera))))
(defn focus-on
  "Move the camera focus on a display object instantly.
  Parameters:
    * camera (Phaser.Camera) - Targeted instance for method
    * display-object (any) - The display object to focus the camera on. Must have visible x/y properties."
  ([camera display-object]
   ;; display-object is converted to a JS value before the interop call
   (phaser->clj
    (.focusOn camera
              (clj->phaser display-object)))))
(defn focus-on-xy
  "Move the camera focus on a location instantly.
  Parameters:
    * camera (Phaser.Camera) - Targeted instance for method
    * x (number) - X position.
    * y (number) - Y position."
  ([camera x y]
   ;; convert both coordinates up front, then make the interop call
   (let [js-x (clj->phaser x)
         js-y (clj->phaser y)]
     (phaser->clj (.focusOnXY camera js-x js-y)))))
(defn follow
  "Tell the camera which sprite to follow.
  If you find you're getting a slight 'jitter' effect when following a Sprite it's probably to do with sub-pixel rendering of the Sprite position.
  This can be disabled by setting `game.renderer.renderSession.roundPixels = true` to force full pixel rendering.
  Parameters:
    * camera (Phaser.Camera) - Targeted instance for method
    * target (Phaser.Sprite | Phaser.Image | Phaser.Text) - The object you want the camera to track. Set to null to not follow anything.
    * style (number) {optional} - Leverage one of the existing 'deadzone' presets. If you use a custom deadzone, ignore this parameter and manually specify the deadzone after calling follow()."
  ;; two arities: without and with the optional deadzone style preset
  ([camera target]
   (phaser->clj
    (.follow camera
             (clj->phaser target))))
  ([camera target style]
   (phaser->clj
    (.follow camera
             (clj->phaser target)
             (clj->phaser style)))))
(defn pre-update
  "Camera preUpdate. Sets the total view counter to zero."
  ([camera]
   ;; delegate to Phaser's Camera#preUpdate
   (phaser->clj
    (.preUpdate camera))))
(defn reset
  "Resets the camera back to 0,0 and un-follows any object it may have been tracking."
  ([camera]
   ;; delegate to Phaser's Camera#reset
   (phaser->clj
    (.reset camera))))
(defn set-bounds-to-world
  "Update the Camera bounds to match the game world."
  ([camera]
   ;; delegate to Phaser's Camera#setBoundsToWorld
   (phaser->clj
    (.setBoundsToWorld camera))))
(defn set-position
  "A helper function to set both the X and Y properties of the camera at once
  without having to use game.camera.x and game.camera.y.
  Parameters:
    * camera (Phaser.Camera) - Targeted instance for method
    * x (number) - X position.
    * y (number) - Y position."
  ([camera x y]
   ;; both coordinates are converted to JS values before the interop call
   (phaser->clj
    (.setPosition camera
                  (clj->phaser x)
                  (clj->phaser y)))))
(defn set-size
  "Sets the size of the view rectangle given the width and height in parameters.
  Parameters:
    * camera (Phaser.Camera) - Targeted instance for method
    * width (number) - The desired width.
    * height (number) - The desired height."
  ([camera width height]
   ;; dimensions are converted to JS values before the interop call
   (phaser->clj
    (.setSize camera
              (clj->phaser width)
              (clj->phaser height)))))
(defn unfollow
  "Sets the Camera follow target to null, stopping it from following an object if it's doing so."
  ([camera]
   ;; delegate to Phaser's Camera#unfollow
   (phaser->clj
    (.unfollow camera))))
(defn update
"Update focusing and scrolling."
([camera]
(phaser->clj
(.update camera)))) | null | https://raw.githubusercontent.com/dparis/gen-phzr/e4c7b272e225ac343718dc15fc84f5f0dce68023/out/camera.cljs | clojure | (ns phzr.camera
(:require [phzr.impl.utils.core :refer [clj->phaser phaser->clj]]
[phzr.impl.extend :as ex]
[cljsjs.phaser])
(:refer-clojure :exclude [update]))
(defn ->Camera
"A Camera is your view into the game world. It has a position and size and renders only those objects within its field of view.
The game automatically creates a single Stage sized camera on boot. Move the camera around the world with Phaser.Camera.x/y
Parameters:
* game (Phaser.Game) - Game reference to the currently running game.
* id (number) - Not being used at the moment, will be when Phaser supports multiple camera
* x (number) - Position of the camera on the X axis
* y (number) - Position of the camera on the Y axis
* width (number) - The width of the view rectangle
* height (number) - The height of the view rectangle"
([game id x y width height]
(js/Phaser.Camera. (clj->phaser game)
(clj->phaser id)
(clj->phaser x)
(clj->phaser y)
(clj->phaser width)
(clj->phaser height))))
(defn const
[k]
(when-let [cn (get phzr.impl.accessors.camera/camera-constants k)]
(aget js/Phaser.Camera cn)))
(defn check-bounds
"Method called to ensure the camera doesn't venture outside of the game world."
([camera]
(phaser->clj
(.checkBounds camera))))
(defn focus-on
"Move the camera focus on a display object instantly.
Parameters:
* camera (Phaser.Camera) - Targeted instance for method
* display-object (any) - The display object to focus the camera on. Must have visible x/y properties."
([camera display-object]
(phaser->clj
(.focusOn camera
(clj->phaser display-object)))))
(defn focus-on-xy
"Move the camera focus on a location instantly.
Parameters:
* camera (Phaser.Camera) - Targeted instance for method
* x (number) - X position.
* y (number) - Y position."
([camera x y]
(phaser->clj
(.focusOnXY camera
(clj->phaser x)
(clj->phaser y)))))
(defn follow
"Tell the camera which sprite to follow.
If you find you're getting a slight 'jitter' effect when following a Sprite it's probably to do with sub-pixel rendering of the Sprite position.
This can be disabled by setting `game.renderer.renderSession.roundPixels = true` to force full pixel rendering.
Parameters:
* camera (Phaser.Camera) - Targeted instance for method
* target (Phaser.Sprite | Phaser.Image | Phaser.Text) - The object you want the camera to track. Set to null to not follow anything.
* style (number) {optional} - Leverage one of the existing 'deadzone' presets. If you use a custom deadzone, ignore this parameter and manually specify the deadzone after calling follow()."
([camera target]
(phaser->clj
(.follow camera
(clj->phaser target))))
([camera target style]
(phaser->clj
(.follow camera
(clj->phaser target)
(clj->phaser style)))))
(defn pre-update
"Camera preUpdate. Sets the total view counter to zero."
([camera]
(phaser->clj
(.preUpdate camera))))
(defn reset
"Resets the camera back to 0,0 and un-follows any object it may have been tracking."
([camera]
(phaser->clj
(.reset camera))))
(defn set-bounds-to-world
"Update the Camera bounds to match the game world."
([camera]
(phaser->clj
(.setBoundsToWorld camera))))
(defn set-position
"A helper function to set both the X and Y properties of the camera at once
without having to use game.camera.x and game.camera.y.
Parameters:
* camera (Phaser.Camera) - Targeted instance for method
* x (number) - X position.
* y (number) - Y position."
([camera x y]
(phaser->clj
(.setPosition camera
(clj->phaser x)
(clj->phaser y)))))
(defn set-size
"Sets the size of the view rectangle given the width and height in parameters.
Parameters:
* camera (Phaser.Camera) - Targeted instance for method
* width (number) - The desired width.
* height (number) - The desired height."
([camera width height]
(phaser->clj
(.setSize camera
(clj->phaser width)
(clj->phaser height)))))
(defn unfollow
"Sets the Camera follow target to null, stopping it from following an object if it's doing so."
([camera]
(phaser->clj
(.unfollow camera))))
(defn update
"Update focusing and scrolling."
([camera]
(phaser->clj
(.update camera)))) | |
9fdcca722bdbfbab25025b1ca172d0e915ad6839dcf6c043283d88b320cdd483 | fractalide/racket2nix | dump-catalogs.rkt | #lang racket
(require net/url-string)
(require pkg/lib)
(require (prefix-in pkg-private: pkg/private/params))
(require (only-in "racket2nix.rkt" pretty-write-sorted-hash))
(command-line
#:args catalogs
(pkg-private:current-pkg-catalogs (map string->url catalogs))
(pretty-write-sorted-hash (get-all-pkg-details-from-catalogs)))
| null | https://raw.githubusercontent.com/fractalide/racket2nix/c245240574b9a4c8b23e9f476c7f085522c907ba/nix/dump-catalogs.rkt | racket | #lang racket
(require net/url-string)
(require pkg/lib)
(require (prefix-in pkg-private: pkg/private/params))
(require (only-in "racket2nix.rkt" pretty-write-sorted-hash))
(command-line
#:args catalogs
(pkg-private:current-pkg-catalogs (map string->url catalogs))
(pretty-write-sorted-hash (get-all-pkg-details-from-catalogs)))
| |
c3f985a67a59f3da5ac5c72ee044f39539d485bb441397428b5668e6e52aa591 | mpickering/ghcide-reflex | Documentation.hs | Copyright ( c ) 2019 The DAML Authors . All rights reserved .
SPDX - License - Identifier : Apache-2.0
# LANGUAGE CPP #
#include "ghc-api-version.h"
module Development.IDE.Spans.Documentation (
getDocumentation
, getDocumentationTryGhc
) where
import Control.Monad
import Data.List.Extra
import qualified Data.Map as M
import Data.Maybe
import qualified Data.Text as T
import Development.IDE.GHC.Compat
import Development.IDE.GHC.Error
import Development.IDE.Spans.Common
import FastString
import SrcLoc
getDocumentationTryGhc
:: GhcMonad m
=> [ParsedModule]
-> Name
-> m SpanDoc
getDocs goes through the GHCi codepaths which cause problems on ghc - lib .
See for more details .
#if MIN_GHC_API_VERSION(8,6,0) && !defined(GHC_LIB)
getDocumentationTryGhc sources name = do
res <- catchSrcErrors "docs" $ getDocs name
case res of
Right (Right (Just docs, _)) -> return $ SpanDocString docs
_ -> return $ SpanDocText $ getDocumentation sources name
#else
getDocumentationTryGhc sources name = do
return $ SpanDocText $ getDocumentation sources name
#endif
getDocumentation
:: [ParsedModule] -- ^ All of the possible modules it could be defined in.
-> Name -- ^ The name you want documentation for.
-> [T.Text]
-- This finds any documentation between the name you want
-- documentation for and the one before it. This is only an
-- approximately correct algorithm and there are easily constructed
-- cases where it will be wrong (if so then usually slightly but there
-- may be edge cases where it is very wrong).
TODO : Build a version of GHC exactprint to extract this information
-- more accurately.
getDocumentation sources targetName = fromMaybe [] $ do
-- Find the module the target is defined in.
targetNameSpan <- realSpan $ nameSrcSpan targetName
tc <-
find ((==) (Just $ srcSpanFile targetNameSpan) . annotationFileName)
$ reverse sources -- TODO : Is reversing the list here really neccessary?
-- Top level names bound by the module
let bs = [ n | let L _ HsModule{hsmodDecls} = pm_parsed_source tc
, L _ (ValD hsbind) <- hsmodDecls
, Just n <- [name_of_bind hsbind]
]
-- Sort the names' source spans.
let sortedSpans = sortedNameSpans bs
-- Now go ahead and extract the docs.
let docs = ann tc
nameInd <- elemIndex targetNameSpan sortedSpans
let prevNameSpan =
if nameInd >= 1
then sortedSpans !! (nameInd - 1)
else zeroSpan $ srcSpanFile targetNameSpan
-- Annoyingly "-- |" documentation isn't annotated with a location,
-- so you have to pull it out from the elements.
pure
$ docHeaders
$ filter (\(L target _) -> isBetween target prevNameSpan targetNameSpan)
$ mapMaybe (\(L l v) -> L <$> realSpan l <*> pure v)
$ join
$ M.elems
docs
where
-- Get the name bound by a binding. We only concern ourselves with
@FunBind@ ( which covers functions and variables ) .
name_of_bind :: HsBind GhcPs -> Maybe (Located RdrName)
name_of_bind FunBind {fun_id} = Just fun_id
name_of_bind _ = Nothing
-- Get source spans from names, discard unhelpful spans, remove
-- duplicates and sort.
sortedNameSpans :: [Located RdrName] -> [RealSrcSpan]
sortedNameSpans ls = nubSort (mapMaybe (realSpan . getLoc) ls)
isBetween target before after = before <= target && target <= after
ann = snd . pm_annotations
annotationFileName :: ParsedModule -> Maybe FastString
annotationFileName = fmap srcSpanFile . listToMaybe . realSpans . ann
realSpans :: M.Map SrcSpan [Located a] -> [RealSrcSpan]
realSpans =
mapMaybe (realSpan . getLoc)
. join
. M.elems
-- | Shows this part of the documentation
docHeaders :: [RealLocated AnnotationComment]
-> [T.Text]
docHeaders = mapMaybe (\(L _ x) -> wrk x)
where
wrk = \case
-- When `Opt_Haddock` is enabled.
AnnDocCommentNext s -> Just $ T.pack s
-- When `Opt_KeepRawTokenStream` enabled.
AnnLineComment s -> if "-- |" `isPrefixOf` s
then Just $ T.pack s
else Nothing
_ -> Nothing
| null | https://raw.githubusercontent.com/mpickering/ghcide-reflex/67ddebf0c1d4728e850057b9bd7c6852c0d96350/src/Development/IDE/Spans/Documentation.hs | haskell | ^ All of the possible modules it could be defined in.
^ The name you want documentation for.
This finds any documentation between the name you want
documentation for and the one before it. This is only an
approximately correct algorithm and there are easily constructed
cases where it will be wrong (if so then usually slightly but there
may be edge cases where it is very wrong).
more accurately.
Find the module the target is defined in.
TODO : Is reversing the list here really neccessary?
Top level names bound by the module
Sort the names' source spans.
Now go ahead and extract the docs.
Annoyingly "-- |" documentation isn't annotated with a location,
so you have to pull it out from the elements.
Get the name bound by a binding. We only concern ourselves with
Get source spans from names, discard unhelpful spans, remove
duplicates and sort.
| Shows this part of the documentation
When `Opt_Haddock` is enabled.
When `Opt_KeepRawTokenStream` enabled. | Copyright ( c ) 2019 The DAML Authors . All rights reserved .
SPDX - License - Identifier : Apache-2.0
# LANGUAGE CPP #
#include "ghc-api-version.h"
module Development.IDE.Spans.Documentation (
getDocumentation
, getDocumentationTryGhc
) where
import Control.Monad
import Data.List.Extra
import qualified Data.Map as M
import Data.Maybe
import qualified Data.Text as T
import Development.IDE.GHC.Compat
import Development.IDE.GHC.Error
import Development.IDE.Spans.Common
import FastString
import SrcLoc
getDocumentationTryGhc
:: GhcMonad m
=> [ParsedModule]
-> Name
-> m SpanDoc
getDocs goes through the GHCi codepaths which cause problems on ghc - lib .
See for more details .
#if MIN_GHC_API_VERSION(8,6,0) && !defined(GHC_LIB)
getDocumentationTryGhc sources name = do
res <- catchSrcErrors "docs" $ getDocs name
case res of
Right (Right (Just docs, _)) -> return $ SpanDocString docs
_ -> return $ SpanDocText $ getDocumentation sources name
#else
getDocumentationTryGhc sources name = do
return $ SpanDocText $ getDocumentation sources name
#endif
getDocumentation
-> [T.Text]
TODO : Build a version of GHC exactprint to extract this information
getDocumentation sources targetName = fromMaybe [] $ do
targetNameSpan <- realSpan $ nameSrcSpan targetName
tc <-
find ((==) (Just $ srcSpanFile targetNameSpan) . annotationFileName)
let bs = [ n | let L _ HsModule{hsmodDecls} = pm_parsed_source tc
, L _ (ValD hsbind) <- hsmodDecls
, Just n <- [name_of_bind hsbind]
]
let sortedSpans = sortedNameSpans bs
let docs = ann tc
nameInd <- elemIndex targetNameSpan sortedSpans
let prevNameSpan =
if nameInd >= 1
then sortedSpans !! (nameInd - 1)
else zeroSpan $ srcSpanFile targetNameSpan
pure
$ docHeaders
$ filter (\(L target _) -> isBetween target prevNameSpan targetNameSpan)
$ mapMaybe (\(L l v) -> L <$> realSpan l <*> pure v)
$ join
$ M.elems
docs
where
@FunBind@ ( which covers functions and variables ) .
name_of_bind :: HsBind GhcPs -> Maybe (Located RdrName)
name_of_bind FunBind {fun_id} = Just fun_id
name_of_bind _ = Nothing
sortedNameSpans :: [Located RdrName] -> [RealSrcSpan]
sortedNameSpans ls = nubSort (mapMaybe (realSpan . getLoc) ls)
isBetween target before after = before <= target && target <= after
ann = snd . pm_annotations
annotationFileName :: ParsedModule -> Maybe FastString
annotationFileName = fmap srcSpanFile . listToMaybe . realSpans . ann
realSpans :: M.Map SrcSpan [Located a] -> [RealSrcSpan]
realSpans =
mapMaybe (realSpan . getLoc)
. join
. M.elems
docHeaders :: [RealLocated AnnotationComment]
-> [T.Text]
docHeaders = mapMaybe (\(L _ x) -> wrk x)
where
wrk = \case
AnnDocCommentNext s -> Just $ T.pack s
AnnLineComment s -> if "-- |" `isPrefixOf` s
then Just $ T.pack s
else Nothing
_ -> Nothing
|
1c0b9f81f6411587e5a102c4c48e5467785d91dc8239865356c9cffb2565bc12 | mirage/ocaml-vchan | vchan_xen.ml |
* Copyright ( c ) 2014 Citrix Systems Inc
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2014 Citrix Systems Inc
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
include Vchan.Endpoint.Make(Events_xen)(Memory_xen)(Vchan.Xenstore.Make(Xen_os.Xs))
| null | https://raw.githubusercontent.com/mirage/ocaml-vchan/cb116c1d79b97f1271189924ddbee73fa5a49456/xen/vchan_xen.ml | ocaml |
* Copyright ( c ) 2014 Citrix Systems Inc
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2014 Citrix Systems Inc
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
include Vchan.Endpoint.Make(Events_xen)(Memory_xen)(Vchan.Xenstore.Make(Xen_os.Xs))
| |
dba128873dd967977f7f4bc62c2845903bd9703aa144306d8436572b28226bd7 | gerritjvv/kafka-fast | cmd_consume.clj | (ns
^{:doc "Consume command "}
kafka-clj.apputils.cmd-consume
(:require [kafka-clj.apputils.util :as app-util]
[kafka-clj.consumer.node :as node]))
(comment
(def)
(def node (create-node! consumer-conf ["ping"]))
(read-msg! node))
(defn consume [topic brokers redis-host]
(let [brokers' (app-util/format-brokers (clojure.string/split brokers #"[,;]"))
consumer-conf {:bootstrap-brokers brokers' :redis-conf {:host redis-host :max-active 5 :timeout 1000 :group-name (str "test-" (System/currentTimeMillis))} :conf {:use-earliest true :consumer-reporting true}}
connector (node/create-node! consumer-conf [topic])
k 100000
last-seen-ts (atom (System/currentTimeMillis))
counter (atom 0)]
(while (node/read-msg! connector)
(let [i (swap! counter inc)]
(when (zero? (rem i k))
(let [ts (- (System/currentTimeMillis) @last-seen-ts)]
(swap! last-seen-ts (constantly (System/currentTimeMillis)))
(println (.getName (Thread/currentThread))
"Read " i " messages " ts "ms Rate " (int (/ k (/ ts 1000))) "p/s")))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;; public
(defn consume-data [topic brokers redis-host]
(consume topic brokers redis-host)) | null | https://raw.githubusercontent.com/gerritjvv/kafka-fast/fd149d8744c8100b2a8f4d09f1a251812e7baf6a/kafka-clj/src/kafka_clj/apputils/cmd_consume.clj | clojure |
public | (ns
^{:doc "Consume command "}
kafka-clj.apputils.cmd-consume
(:require [kafka-clj.apputils.util :as app-util]
[kafka-clj.consumer.node :as node]))
(comment
(def)
(def node (create-node! consumer-conf ["ping"]))
(read-msg! node))
(defn consume [topic brokers redis-host]
(let [brokers' (app-util/format-brokers (clojure.string/split brokers #"[,;]"))
consumer-conf {:bootstrap-brokers brokers' :redis-conf {:host redis-host :max-active 5 :timeout 1000 :group-name (str "test-" (System/currentTimeMillis))} :conf {:use-earliest true :consumer-reporting true}}
connector (node/create-node! consumer-conf [topic])
k 100000
last-seen-ts (atom (System/currentTimeMillis))
counter (atom 0)]
(while (node/read-msg! connector)
(let [i (swap! counter inc)]
(when (zero? (rem i k))
(let [ts (- (System/currentTimeMillis) @last-seen-ts)]
(swap! last-seen-ts (constantly (System/currentTimeMillis)))
(println (.getName (Thread/currentThread))
"Read " i " messages " ts "ms Rate " (int (/ k (/ ts 1000))) "p/s")))))))
(defn consume-data [topic brokers redis-host]
(consume topic brokers redis-host)) |
a2f5c7b86e2b00754b6cc4e1a14821a7f74934998452827d78f4069f10209573 | aws-beam/aws-erlang | aws_forecast.erl | %% WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
See -beam/aws-codegen for more details .
@doc Provides APIs for creating and managing Amazon Forecast resources .
-module(aws_forecast).
-export([create_auto_predictor/2,
create_auto_predictor/3,
create_dataset/2,
create_dataset/3,
create_dataset_group/2,
create_dataset_group/3,
create_dataset_import_job/2,
create_dataset_import_job/3,
create_explainability/2,
create_explainability/3,
create_explainability_export/2,
create_explainability_export/3,
create_forecast/2,
create_forecast/3,
create_forecast_export_job/2,
create_forecast_export_job/3,
create_monitor/2,
create_monitor/3,
create_predictor/2,
create_predictor/3,
create_predictor_backtest_export_job/2,
create_predictor_backtest_export_job/3,
create_what_if_analysis/2,
create_what_if_analysis/3,
create_what_if_forecast/2,
create_what_if_forecast/3,
create_what_if_forecast_export/2,
create_what_if_forecast_export/3,
delete_dataset/2,
delete_dataset/3,
delete_dataset_group/2,
delete_dataset_group/3,
delete_dataset_import_job/2,
delete_dataset_import_job/3,
delete_explainability/2,
delete_explainability/3,
delete_explainability_export/2,
delete_explainability_export/3,
delete_forecast/2,
delete_forecast/3,
delete_forecast_export_job/2,
delete_forecast_export_job/3,
delete_monitor/2,
delete_monitor/3,
delete_predictor/2,
delete_predictor/3,
delete_predictor_backtest_export_job/2,
delete_predictor_backtest_export_job/3,
delete_resource_tree/2,
delete_resource_tree/3,
delete_what_if_analysis/2,
delete_what_if_analysis/3,
delete_what_if_forecast/2,
delete_what_if_forecast/3,
delete_what_if_forecast_export/2,
delete_what_if_forecast_export/3,
describe_auto_predictor/2,
describe_auto_predictor/3,
describe_dataset/2,
describe_dataset/3,
describe_dataset_group/2,
describe_dataset_group/3,
describe_dataset_import_job/2,
describe_dataset_import_job/3,
describe_explainability/2,
describe_explainability/3,
describe_explainability_export/2,
describe_explainability_export/3,
describe_forecast/2,
describe_forecast/3,
describe_forecast_export_job/2,
describe_forecast_export_job/3,
describe_monitor/2,
describe_monitor/3,
describe_predictor/2,
describe_predictor/3,
describe_predictor_backtest_export_job/2,
describe_predictor_backtest_export_job/3,
describe_what_if_analysis/2,
describe_what_if_analysis/3,
describe_what_if_forecast/2,
describe_what_if_forecast/3,
describe_what_if_forecast_export/2,
describe_what_if_forecast_export/3,
get_accuracy_metrics/2,
get_accuracy_metrics/3,
list_dataset_groups/2,
list_dataset_groups/3,
list_dataset_import_jobs/2,
list_dataset_import_jobs/3,
list_datasets/2,
list_datasets/3,
list_explainabilities/2,
list_explainabilities/3,
list_explainability_exports/2,
list_explainability_exports/3,
list_forecast_export_jobs/2,
list_forecast_export_jobs/3,
list_forecasts/2,
list_forecasts/3,
list_monitor_evaluations/2,
list_monitor_evaluations/3,
list_monitors/2,
list_monitors/3,
list_predictor_backtest_export_jobs/2,
list_predictor_backtest_export_jobs/3,
list_predictors/2,
list_predictors/3,
list_tags_for_resource/2,
list_tags_for_resource/3,
list_what_if_analyses/2,
list_what_if_analyses/3,
list_what_if_forecast_exports/2,
list_what_if_forecast_exports/3,
list_what_if_forecasts/2,
list_what_if_forecasts/3,
resume_resource/2,
resume_resource/3,
stop_resource/2,
stop_resource/3,
tag_resource/2,
tag_resource/3,
untag_resource/2,
untag_resource/3,
update_dataset_group/2,
update_dataset_group/3]).
-include_lib("hackney/include/hackney_lib.hrl").
%%====================================================================
%% API
%%====================================================================
%% @doc Creates an Amazon Forecast predictor.
%%
%% Amazon Forecast creates predictors with AutoPredictor, which involves
%% applying the optimal combination of algorithms to each time series in
%% your datasets. You can use `CreateAutoPredictor' to create new
%% predictors or upgrade/retrain existing predictors.
%%
%% When creating a new predictor, the following parameters are required:
%%
%% <ul> <li> `PredictorName' - A unique name for the predictor.
%%
%% </li> <li> `DatasetGroupArn' - The ARN of the dataset group used to
%% train the predictor.
%%
%% </li> <li> `ForecastFrequency' - The granularity of your forecasts
%% (hourly, daily, weekly, etc).
%%
%% </li> <li> `ForecastHorizon' - The number of time-steps that the model
%% predicts (also called the prediction length).
%%
%% </li> </ul> When creating a new predictor, do not specify a value for
%% `ReferencePredictorArn'.
%%
%% When upgrading or retraining a predictor, only specify values for
%% `PredictorName' and `ReferencePredictorArn' (the ARN of the predictor
%% to retrain or upgrade).
create_auto_predictor(Client, Input) when is_map(Input), is_map(Client) ->
    create_auto_predictor(Client, Input, []).

create_auto_predictor(Client, Input, Opts) when is_map(Input), is_map(Client), is_list(Opts) ->
    request(Client, <<"CreateAutoPredictor">>, Input, Opts).
%% @doc Creates an Amazon Forecast dataset.
%%
%% The information about the dataset that you provide helps Forecast
%% understand how to consume the data for model training. This includes:
%%
%% <ul> <li> `DataFrequency' - How frequently your historical time-series
%% data is collected.
%%
%% </li> <li> `Domain' and `DatasetType' - Each dataset has an associated
%% dataset domain and a type within the domain. Amazon Forecast provides a
%% list of predefined domains and types within each domain; for each
%% unique combination, Forecast requires your data to include a minimum
%% set of predefined fields.
%%
%% </li> <li> `Schema' - A schema specifies the fields in the dataset,
%% including the field name and data type.
%%
%% </li> </ul> After creating a dataset, import your training data into it
%% and add the dataset to a dataset group; you then use the dataset group
%% to create a predictor. For more information, see Importing datasets.
%%
%% To get a list of all your datasets, use the `ListDatasets' operation.
%%
%% The `Status' of a dataset must be `ACTIVE' before you can import
%% training data. Use the `DescribeDataset' operation to get the status.
create_dataset(Client, Input) when is_map(Input), is_map(Client) ->
    create_dataset(Client, Input, []).

create_dataset(Client, Input, Opts) when is_map(Input), is_map(Client), is_list(Opts) ->
    request(Client, <<"CreateDataset">>, Input, Opts).
%% @doc Creates a dataset group, which holds a collection of related
%% datasets.
%%
%% You can add datasets to the dataset group when you create it, or later
%% by using the `UpdateDatasetGroup' operation. After creating a dataset
%% group and adding datasets, you use the dataset group when you create a
%% predictor. For more information, see Dataset groups.
%%
%% To get a list of all your dataset groups, use the `ListDatasetGroups'
%% operation.
%%
%% The `Status' of a dataset group must be `ACTIVE' before you can use it
%% to create a predictor. To get the status, use the
%% `DescribeDatasetGroup' operation.
create_dataset_group(Client, Input) when is_map(Input), is_map(Client) ->
    create_dataset_group(Client, Input, []).

create_dataset_group(Client, Input, Opts) when is_map(Input), is_map(Client), is_list(Opts) ->
    request(Client, <<"CreateDatasetGroup">>, Input, Opts).
%% @doc Imports your training data to an Amazon Forecast dataset.
%%
%% You provide the location of your training data in an Amazon Simple
%% Storage Service (Amazon S3) bucket and the Amazon Resource Name (ARN)
%% of the dataset that you want to import the data to.
%%
%% You must specify a `DataSource' object that includes an Identity and
%% Access Management (IAM) role that Amazon Forecast can assume to access
%% the data, as Amazon Forecast makes a copy of your data and processes it
%% in an internal Amazon Web Services system. For more information, see
%% Set up permissions.
%%
%% The training data must be in CSV or Parquet format, with a comma (,)
%% delimiter. You can specify the path to a specific file, to the S3
%% bucket, or to a folder in the S3 bucket; for the latter two cases,
%% Amazon Forecast imports all files up to the limit of 10,000 files.
%%
%% Because dataset imports are not aggregated, your most recent dataset
%% import is the one used when training a predictor or generating a
%% forecast. Make sure it contains all of the data you want to model off
%% of, and not just the new data collected since the previous import.
%%
%% To get a list of all your dataset import jobs, filtered by specified
%% criteria, use the `ListDatasetImportJobs' operation.
create_dataset_import_job(Client, Input) when is_map(Input), is_map(Client) ->
    create_dataset_import_job(Client, Input, []).

create_dataset_import_job(Client, Input, Opts) when is_map(Input), is_map(Client), is_list(Opts) ->
    request(Client, <<"CreateDatasetImportJob">>, Input, Opts).
%% @doc Creates an Amazon Forecast Explainability.
%%
%% Explainability is only available for Forecasts and Predictors generated
%% from an AutoPredictor (`CreateAutoPredictor'). It helps you better
%% understand how the attributes in your datasets impact forecast: Amazon
%% Forecast uses a metric called Impact scores to quantify the relative
%% impact of each attribute and determine whether it increases or
%% decreases forecast values.
%%
%% To enable Forecast Explainability, your predictor must include at least
%% one of the following: related time series, item metadata, or additional
%% datasets like Holidays and the Weather Index.
%%
%% `CreateExplainability' accepts either a Predictor ARN or Forecast ARN:
%%
%% <ul> <li> Predictor ARN - provides aggregated Impact scores for all
%% time series and time points in your datasets. Set both
%% `TimePointGranularity' and `TimeSeriesGranularity' to "ALL", and do not
%% specify `DataSource', `Schema', `StartDateTime' or `EndDateTime'. You
%% can only have one Explainability resource per predictor; if you already
%% enabled `ExplainPredictor' in `CreateAutoPredictor', that predictor
%% already has an Explainability resource.
%%
%% </li> <li> Forecast ARN - provides Impact scores for specific time
%% series and time points (a maximum of 50 time series and 500 time
%% points). Set `TimePointGranularity' and `TimeSeriesGranularity' to
%% either "ALL" or "SPECIFIC". If `TimeSeriesGranularity' is "SPECIFIC",
%% also provide `DataSource' (the S3 location of the CSV file specifying
%% your time series) and `Schema' (the attributes and attribute types
%% listed in the Data Source). If `TimePointGranularity' is "SPECIFIC",
%% also provide `StartDateTime' and `EndDateTime' (the first and last
%% timestamp in the range of time points).
%%
%% </li> </ul> Required in both cases: `ExplainabilityName' (a unique name
%% for the Explainability) and `ResourceArn'.
create_explainability(Client, Input) when is_map(Input), is_map(Client) ->
    create_explainability(Client, Input, []).

create_explainability(Client, Input, Opts) when is_map(Input), is_map(Client), is_list(Opts) ->
    request(Client, <<"CreateExplainability">>, Input, Opts).
%% @doc Exports an Explainability resource created by the
%% `CreateExplainability' operation.
%%
%% Exported files are written to an Amazon Simple Storage Service (Amazon
%% S3) bucket. You must specify a `DataDestination' object that includes
%% an Amazon S3 bucket and an Identity and Access Management (IAM) role
%% that Amazon Forecast can assume to access the bucket. For more
%% information, see `aws-forecast-iam-roles'.
%%
%% The `Status' of the export job must be `ACTIVE' before you can access
%% the export in your Amazon S3 bucket. To get the status, use the
%% `DescribeExplainabilityExport' operation.
create_explainability_export(Client, Input) when is_map(Input), is_map(Client) ->
    create_explainability_export(Client, Input, []).

create_explainability_export(Client, Input, Opts) when is_map(Input), is_map(Client), is_list(Opts) ->
    request(Client, <<"CreateExplainabilityExport">>, Input, Opts).
%% @doc Creates a forecast for each item in the `TARGET_TIME_SERIES'
%% dataset that was used to train the predictor (this is known as
%% inference).
%%
%% To retrieve the forecast for a single item at low latency, use the
%% query operation. To export the complete forecast into your Amazon
%% Simple Storage Service (Amazon S3) bucket, use the
%% `CreateForecastExportJob' operation.
%%
%% The range of the forecast is determined by the `ForecastHorizon' value
%% specified in the `CreatePredictor' request; when you query a forecast,
%% you can request a specific date range within the forecast. The
%% forecasts generated by Amazon Forecast are in the same time zone as the
%% dataset that was used to create the predictor.
%%
%% To get a list of all your forecasts, use the `ListForecasts' operation.
%% For more information, see `howitworks-forecast'.
%%
%% The `Status' of the forecast must be `ACTIVE' before you can query or
%% export the forecast. Use the `DescribeForecast' operation to get the
%% status.
%%
%% By default, a forecast includes predictions for every item (`item_id')
%% in the dataset group that was used to train the predictor. However, you
%% can use the `TimeSeriesSelector' object to generate a forecast on a
%% subset of time series; forecast creation is skipped for any specified
%% time series that are not in the input dataset, and the forecast export
%% file will not contain them or their forecasted values.
create_forecast(Client, Input) when is_map(Input), is_map(Client) ->
    create_forecast(Client, Input, []).

create_forecast(Client, Input, Opts) when is_map(Input), is_map(Client), is_list(Opts) ->
    request(Client, <<"CreateForecast">>, Input, Opts).
%% @doc Exports a forecast created by the `CreateForecast' operation to
%% your Amazon Simple Storage Service (Amazon S3) bucket.
%%
%% The forecast file name will match the following conventions:
%%
%% <ForecastExportJobName>_<ExportTimestamp>_<PartNumber>
%%
%% where the ExportTimestamp component is in Java SimpleDateFormat
%% (yyyy-MM-ddTHH-mm-ssZ).
%%
%% You must specify a `DataDestination' object that includes an Identity
%% and Access Management (IAM) role that Amazon Forecast can assume to
%% access the Amazon S3 bucket. For more information, see
%% `aws-forecast-iam-roles'.
%%
%% To get a list of all your forecast export jobs, use the
%% `ListForecastExportJobs' operation. For more information, see
%% `howitworks-forecast'.
%%
%% The `Status' of the forecast export job must be `ACTIVE' before you can
%% access the forecast in your Amazon S3 bucket. To get the status, use
%% the `DescribeForecastExportJob' operation.
create_forecast_export_job(Client, Input) when is_map(Input), is_map(Client) ->
    create_forecast_export_job(Client, Input, []).

create_forecast_export_job(Client, Input, Opts) when is_map(Input), is_map(Client), is_list(Opts) ->
    request(Client, <<"CreateForecastExportJob">>, Input, Opts).
%% @doc Creates a predictor monitor resource for an existing auto
%% predictor.
%%
%% Predictor monitoring allows you to see how your predictor's performance
%% changes over time. For more information, see Predictor Monitoring.
create_monitor(Client, Input) when is_map(Input), is_map(Client) ->
    create_monitor(Client, Input, []).

create_monitor(Client, Input, Opts) when is_map(Input), is_map(Client), is_list(Opts) ->
    request(Client, <<"CreateMonitor">>, Input, Opts).
%% @doc This operation creates a legacy predictor that does not include all
%% the predictor functionalities provided by Amazon Forecast.
%%
%% To create a predictor that is compatible with all aspects of Forecast, use
%% `CreateAutoPredictor'.
%%
%% Creates an Amazon Forecast predictor.
%%
%% In the request, provide a dataset group and either specify an algorithm or
%% let Amazon Forecast choose an algorithm for you using AutoML. If you
%% specify an algorithm, you also can override algorithm-specific
%% hyperparameters.
%%
%% Amazon Forecast uses the algorithm to train a predictor using the latest
%% version of the datasets in the specified dataset group. You can then
%% generate a forecast using the `CreateForecast' operation.
%%
%% To see the evaluation metrics, use the `GetAccuracyMetrics' operation.
%%
%% You can specify a featurization configuration to fill and aggregate the
%% data fields in the `TARGET_TIME_SERIES' dataset to improve model
%% training. For more information, see `FeaturizationConfig'.
%%
%% For RELATED_TIME_SERIES datasets, `CreatePredictor' verifies that the
%% `DataFrequency' specified when the dataset was created matches the
%% `ForecastFrequency'. TARGET_TIME_SERIES datasets don't have this
%% restriction. Amazon Forecast also verifies the delimiter and timestamp
%% format. For more information, see `howitworks-datasets-groups'.
%%
%% By default, predictors are trained and evaluated at the 0.1 (P10), 0.5
%% (P50), and 0.9 (P90) quantiles. You can choose custom forecast types to
%% train and evaluate your predictor by setting the `ForecastTypes'.
%%
%% AutoML
%%
%% If you want Amazon Forecast to evaluate each algorithm and choose the one
%% that minimizes the `objective function', set `PerformAutoML' to
%% `true'. The `objective function' is defined as the mean of the
%% weighted losses over the forecast types. By default, these are the p10,
%% p50, and p90 quantile losses. For more information, see
%% `EvaluationResult'.
%%
%% When AutoML is enabled, the following properties are disallowed:
%%
%% <ul> <li> `AlgorithmArn'
%%
%% </li> <li> `HPOConfig'
%%
%% </li> <li> `PerformHPO'
%%
%% </li> <li> `TrainingParameters'
%%
%% </li> </ul> To get a list of all of your predictors, use the
%% `ListPredictors' operation.
%%
%% Before you can use the predictor to create a forecast, the `Status' of
%% the predictor must be `ACTIVE', signifying that training has
%% completed. To get the status, use the `DescribePredictor' operation.
create_predictor(Client, Input)
when is_map(Client), is_map(Input) ->
create_predictor(Client, Input, []).
create_predictor(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreatePredictor">>, Input, Options).
%% @doc Exports backtest forecasts and accuracy metrics generated by the
%% `CreateAutoPredictor' or `CreatePredictor' operations.
%%
%% Two folders containing CSV or Parquet files are exported to your specified
%% S3 bucket.
%%
%% The export file names will match the following conventions:
%%
%% `<ExportJobName>_<ExportTimestamp>_<PartNumber>.csv'
%%
%% The &lt;ExportTimestamp&gt; component is in Java SimpleDate format
%% (yyyy-MM-ddTHH-mm-ssZ).
%%
%% You must specify a `DataDestination' object that includes an Amazon S3
%% bucket and an Identity and Access Management (IAM) role that Amazon
%% Forecast can assume to access the Amazon S3 bucket. For more information,
%% see `aws-forecast-iam-roles'.
%%
%% The `Status' of the export job must be `ACTIVE' before you can
%% access the export in your Amazon S3 bucket. To get the status, use the
%% `DescribePredictorBacktestExportJob' operation.
create_predictor_backtest_export_job(Client, Input)
when is_map(Client), is_map(Input) ->
create_predictor_backtest_export_job(Client, Input, []).
create_predictor_backtest_export_job(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreatePredictorBacktestExportJob">>, Input, Options).
%% @doc What-if analysis is a scenario modeling technique where you make a
%% hypothetical change to a time series and compare the forecasts generated
%% by these changes against the baseline, unchanged time series.
%%
%% It is important to remember that the purpose of a what-if analysis is to
%% understand how a forecast can change given different modifications to the
%% baseline time series.
%%
%% For example, imagine you are a clothing retailer who is considering an end
%% of season sale to clear space for new styles. After creating a baseline
%% forecast, you can use a what-if analysis to investigate how different
%% sales tactics might affect your goals.
%%
%% You could create a scenario where everything is given a 25% markdown, and
%% another where everything is given a fixed dollar markdown. You could
%% create a scenario where the sale lasts for one week and another where the
%% sale lasts for one month. With a what-if analysis, you can compare many
%% different scenarios against each other.
%%
%% Note that a what-if analysis is meant to display what the forecasting
%% model has learned and how it will behave in the scenarios that you are
%% evaluating. Do not blindly use the results of the what-if analysis to make
%% business decisions. For instance, forecasts might not be accurate for
%% novel scenarios where there is no reference available to determine whether
%% a forecast is good.
%%
%% The `TimeSeriesSelector' object defines the items that you want in the
%% what-if analysis.
create_what_if_analysis(Client, Input)
when is_map(Client), is_map(Input) ->
create_what_if_analysis(Client, Input, []).
create_what_if_analysis(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateWhatIfAnalysis">>, Input, Options).
%% @doc A what-if forecast is a forecast created from a modified version of
%% the baseline forecast.
%%
%% Every what-if forecast incorporates either a replacement dataset or a set
%% of transformations applied to the original dataset.
create_what_if_forecast(Client, Input) when is_map(Client), is_map(Input) ->
    create_what_if_forecast(Client, Input, []).
create_what_if_forecast(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"CreateWhatIfForecast">>, Input, Opts).
%% @doc Exports a forecast created by the `CreateWhatIfForecast'
%% operation to your Amazon Simple Storage Service (Amazon S3) bucket.
%%
%% The forecast file name will match the following conventions:
%%
%% `<ForecastExportJobName>_<ExportTimestamp>_<PartNumber>'
%%
%% The &lt;ExportTimestamp&gt; component is in Java SimpleDateFormat
%% (yyyy-MM-ddTHH-mm-ssZ).
%%
%% You must specify a `DataDestination' object that includes an Identity
%% and Access Management (IAM) role that Amazon Forecast can assume to access
%% the Amazon S3 bucket. For more information, see
%% `aws-forecast-iam-roles'.
%%
%% For more information, see `howitworks-forecast'.
%%
%% To get a list of all your what-if forecast export jobs, use the
%% `ListWhatIfForecastExports' operation.
%%
%% The `Status' of the forecast export job must be `ACTIVE' before
%% you can access the forecast in your Amazon S3 bucket. To get the status,
%% use the `DescribeWhatIfForecastExport' operation.
create_what_if_forecast_export(Client, Input)
when is_map(Client), is_map(Input) ->
create_what_if_forecast_export(Client, Input, []).
create_what_if_forecast_export(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateWhatIfForecastExport">>, Input, Options).
%% @doc Deletes an Amazon Forecast dataset that was created using the
%% CreateDataset operation.
%%
%% You can only delete datasets that have a status of `ACTIVE' or
%% `CREATE_FAILED'. To get the status use the DescribeDataset operation.
%%
%% Forecast does not automatically update any dataset groups that contain the
%% deleted dataset. In order to update the dataset group, use the
%% UpdateDatasetGroup operation, omitting the deleted dataset's ARN.
delete_dataset(Client, Input)
when is_map(Client), is_map(Input) ->
delete_dataset(Client, Input, []).
delete_dataset(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DeleteDataset">>, Input, Options).
%% @doc Deletes a dataset group created using the CreateDatasetGroup
%% operation.
%%
%% You can only delete dataset groups that have a status of `ACTIVE',
%% `CREATE_FAILED', or `UPDATE_FAILED'. To get the status, use the
%% DescribeDatasetGroup operation.
%%
%% This operation deletes only the dataset group, not the datasets in the
%% group.
delete_dataset_group(Client, Input)
when is_map(Client), is_map(Input) ->
delete_dataset_group(Client, Input, []).
delete_dataset_group(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DeleteDatasetGroup">>, Input, Options).
%% @doc Deletes a dataset import job created using the CreateDatasetImportJob
%% operation.
%%
%% Only dataset import jobs whose status is `ACTIVE' or
%% `CREATE_FAILED' can be deleted. Use the DescribeDatasetImportJob
%% operation to check the status.
delete_dataset_import_job(Client, Input) when is_map(Client), is_map(Input) ->
    delete_dataset_import_job(Client, Input, []).
delete_dataset_import_job(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"DeleteDatasetImportJob">>, Input, Opts).
%% @doc Deletes an Explainability resource.
%%
%% Only resources whose status is `ACTIVE' or `CREATE_FAILED' can
%% be deleted. Use the `DescribeExplainability' operation to check the
%% status.
delete_explainability(Client, Input) when is_map(Client), is_map(Input) ->
    delete_explainability(Client, Input, []).
delete_explainability(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"DeleteExplainability">>, Input, Opts).
%% @doc Deletes an Explainability export.
delete_explainability_export(Client, Input) when is_map(Client), is_map(Input) ->
    delete_explainability_export(Client, Input, []).
delete_explainability_export(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"DeleteExplainabilityExport">>, Input, Opts).
%% @doc Deletes a forecast created using the `CreateForecast' operation.
%%
%% Only forecasts whose status is `ACTIVE' or `CREATE_FAILED' can be
%% deleted. Use the `DescribeForecast' operation to check the status.
%%
%% A forecast cannot be deleted while it is being exported. Once a forecast
%% is deleted, it can no longer be queried.
delete_forecast(Client, Input) when is_map(Client), is_map(Input) ->
    delete_forecast(Client, Input, []).
delete_forecast(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"DeleteForecast">>, Input, Opts).
%% @doc Deletes a forecast export job created using the
%% `CreateForecastExportJob' operation.
%%
%% Only export jobs whose status is `ACTIVE' or `CREATE_FAILED' can
%% be deleted. Use the `DescribeForecastExportJob' operation to check the
%% status.
delete_forecast_export_job(Client, Input) when is_map(Client), is_map(Input) ->
    delete_forecast_export_job(Client, Input, []).
delete_forecast_export_job(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"DeleteForecastExportJob">>, Input, Opts).
%% @doc Deletes a monitor resource.
%%
%% You can only delete a monitor resource with a status of `ACTIVE',
%% `ACTIVE_STOPPED', `CREATE_FAILED', or `CREATE_STOPPED'.
delete_monitor(Client, Input)
when is_map(Client), is_map(Input) ->
delete_monitor(Client, Input, []).
delete_monitor(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DeleteMonitor">>, Input, Options).
%% @doc Deletes a predictor created using the `DescribePredictor' or
%% `CreatePredictor' operations.
%%
%% You can delete only predictors that have a status of `ACTIVE' or
%% `CREATE_FAILED'. To get the status, use the `DescribePredictor'
%% operation.
delete_predictor(Client, Input)
when is_map(Client), is_map(Input) ->
delete_predictor(Client, Input, []).
delete_predictor(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DeletePredictor">>, Input, Options).
%% @doc Deletes a predictor backtest export job.
delete_predictor_backtest_export_job(Client, Input) when is_map(Client), is_map(Input) ->
    delete_predictor_backtest_export_job(Client, Input, []).
delete_predictor_backtest_export_job(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"DeletePredictorBacktestExportJob">>, Input, Opts).
%% @doc Deletes an entire resource tree.
%%
%% This operation will delete the parent resource and its child resources.
%%
%% Child resources are resources that were created from another resource. For
%% example, when a forecast is generated from a predictor, the forecast is
%% the child resource and the predictor is the parent resource.
%%
%% Amazon Forecast resources possess the following parent-child resource
%% hierarchies:
%%
%% <ul> <li> Dataset: dataset import jobs
%%
%% </li> <li> Dataset Group: predictors, predictor backtest export jobs,
%% forecasts, forecast export jobs
%%
%% </li> <li> Predictor: predictor backtest export jobs, forecasts, forecast
%% export jobs
%%
%% </li> <li> Forecast: forecast export jobs
%%
%% </li> </ul> `DeleteResourceTree' will only delete Amazon Forecast
%% resources, and will not delete datasets or exported files stored in Amazon
%% S3.
delete_resource_tree(Client, Input)
when is_map(Client), is_map(Input) ->
delete_resource_tree(Client, Input, []).
delete_resource_tree(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DeleteResourceTree">>, Input, Options).
%% @doc Deletes a what-if analysis created using the
%% `CreateWhatIfAnalysis' operation.
%%
%% You can delete only what-if analyses that have a status of `ACTIVE' or
%% `CREATE_FAILED'. To get the status, use the
%% `DescribeWhatIfAnalysis' operation.
%%
%% You can't delete a what-if analysis while any of its forecasts are
%% being exported.
delete_what_if_analysis(Client, Input)
when is_map(Client), is_map(Input) ->
delete_what_if_analysis(Client, Input, []).
delete_what_if_analysis(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DeleteWhatIfAnalysis">>, Input, Options).
%% @doc Deletes a what-if forecast created using the
%% `CreateWhatIfForecast' operation.
%%
%% Only what-if forecasts whose status is `ACTIVE' or
%% `CREATE_FAILED' can be deleted. Use the `DescribeWhatIfForecast'
%% operation to check the status.
%%
%% A what-if forecast cannot be deleted while it is being exported. Once a
%% what-if forecast is deleted, you can no longer query the what-if analysis.
delete_what_if_forecast(Client, Input) when is_map(Client), is_map(Input) ->
    delete_what_if_forecast(Client, Input, []).
delete_what_if_forecast(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"DeleteWhatIfForecast">>, Input, Opts).
%% @doc Deletes a what-if forecast export created using the
%% `CreateWhatIfForecastExport' operation.
%%
%% You can delete only what-if forecast exports that have a status of
%% `ACTIVE' or `CREATE_FAILED'. To get the status, use the
%% `DescribeWhatIfForecastExport' operation.
delete_what_if_forecast_export(Client, Input)
when is_map(Client), is_map(Input) ->
delete_what_if_forecast_export(Client, Input, []).
delete_what_if_forecast_export(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DeleteWhatIfForecastExport">>, Input, Options).
%% @doc Describes a predictor created using the CreateAutoPredictor
%% operation.
describe_auto_predictor(Client, Input) when is_map(Client), is_map(Input) ->
    describe_auto_predictor(Client, Input, []).
describe_auto_predictor(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"DescribeAutoPredictor">>, Input, Opts).
%% @doc Describes an Amazon Forecast dataset created using the CreateDataset
%% operation.
%%
%% In addition to listing the parameters specified in the `CreateDataset'
%% request, this operation includes the following dataset properties:
%%
%% <ul> <li> `CreationTime'
%%
%% </li> <li> `LastModificationTime'
%%
%% </li> <li> `Status'
%%
%% </li> </ul>
describe_dataset(Client, Input)
when is_map(Client), is_map(Input) ->
describe_dataset(Client, Input, []).
describe_dataset(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DescribeDataset">>, Input, Options).
%% @doc Describes a dataset group created using the CreateDatasetGroup
%% operation.
%%
%% In addition to listing the parameters provided in the
%% `CreateDatasetGroup' request, this operation includes the following
%% properties:
%%
%% <ul> <li> `DatasetArns' - The datasets belonging to the group.
%%
%% </li> <li> `CreationTime'
%%
%% </li> <li> `LastModificationTime'
%%
%% </li> <li> `Status'
%%
%% </li> </ul>
describe_dataset_group(Client, Input)
when is_map(Client), is_map(Input) ->
describe_dataset_group(Client, Input, []).
describe_dataset_group(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DescribeDatasetGroup">>, Input, Options).
%% @doc Describes a dataset import job created using the
%% CreateDatasetImportJob operation.
%%
%% In addition to listing the parameters provided in the
%% `CreateDatasetImportJob' request, this operation includes the
%% following properties:
%%
%% <ul> <li> `CreationTime'
%%
%% </li> <li> `LastModificationTime'
%%
%% </li> <li> `DataSize'
%%
%% </li> <li> `FieldStatistics'
%%
%% </li> <li> `Status'
%%
%% </li> <li> `Message' - If an error occurred, information about the
%% error.
%%
%% </li> </ul>
describe_dataset_import_job(Client, Input)
when is_map(Client), is_map(Input) ->
describe_dataset_import_job(Client, Input, []).
describe_dataset_import_job(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DescribeDatasetImportJob">>, Input, Options).
%% @doc Describes an Explainability resource created using the
%% `CreateExplainability' operation.
describe_explainability(Client, Input)
when is_map(Client), is_map(Input) ->
describe_explainability(Client, Input, []).
describe_explainability(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DescribeExplainability">>, Input, Options).
%% @doc Describes an Explainability export created using the
%% `CreateExplainabilityExport' operation.
describe_explainability_export(Client, Input) when is_map(Client), is_map(Input) ->
    describe_explainability_export(Client, Input, []).
describe_explainability_export(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"DescribeExplainabilityExport">>, Input, Opts).
%% @doc Describes a forecast created using the `CreateForecast'
%% operation.
%%
%% In addition to listing the properties provided in the `CreateForecast'
%% request, this operation lists the following properties:
%%
%% <ul> <li> `DatasetGroupArn' - The dataset group that provided the
%% training data.
%%
%% </li> <li> `CreationTime'
%%
%% </li> <li> `LastModificationTime'
%%
%% </li> <li> `Status'
%%
%% </li> <li> `Message' - If an error occurred, information about the
%% error.
%%
%% </li> </ul>
describe_forecast(Client, Input)
when is_map(Client), is_map(Input) ->
describe_forecast(Client, Input, []).
describe_forecast(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DescribeForecast">>, Input, Options).
%% @doc Describes a forecast export job created using the
%% `CreateForecastExportJob' operation.
%%
%% In addition to listing the properties provided by the user in the
%% `CreateForecastExportJob' request, this operation lists the following
%% properties:
%%
%% <ul> <li> `CreationTime'
%%
%% </li> <li> `LastModificationTime'
%%
%% </li> <li> `Status'
%%
%% </li> <li> `Message' - If an error occurred, information about the
%% error.
%%
%% </li> </ul>
describe_forecast_export_job(Client, Input)
when is_map(Client), is_map(Input) ->
describe_forecast_export_job(Client, Input, []).
describe_forecast_export_job(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DescribeForecastExportJob">>, Input, Options).
%% @doc Describes a monitor resource.
%%
%% In addition to listing the properties provided in the `CreateMonitor'
%% request, this operation lists the following properties:
%%
%% <ul> <li> `Baseline'
%%
%% </li> <li> `CreationTime'
%%
%% </li> <li> `LastEvaluationTime'
%%
%% </li> <li> `LastEvaluationState'
%%
%% </li> <li> `LastModificationTime'
%%
%% </li> <li> `Message'
%%
%% </li> <li> `Status'
%%
%% </li> </ul>
describe_monitor(Client, Input)
when is_map(Client), is_map(Input) ->
describe_monitor(Client, Input, []).
describe_monitor(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DescribeMonitor">>, Input, Options).
%% @doc This operation is only valid for legacy predictors created with
%% CreatePredictor.
%%
%% If you are not using a legacy predictor, use `DescribeAutoPredictor'.
%%
%% Describes a predictor created using the `CreatePredictor' operation.
%%
%% In addition to listing the properties provided in the
%% `CreatePredictor' request, this operation lists the following
%% properties:
%%
%% <ul> <li> `DatasetImportJobArns' - The dataset import jobs used to
%% import training data.
%%
%% </li> <li> `AutoMLAlgorithmArns' - If AutoML is performed, the
%% algorithms that were evaluated.
%%
%% </li> <li> `CreationTime'
%%
%% </li> <li> `LastModificationTime'
%%
%% </li> <li> `Status'
%%
%% </li> <li> `Message' - If an error occurred, information about the
%% error.
%%
%% </li> </ul>
describe_predictor(Client, Input)
when is_map(Client), is_map(Input) ->
describe_predictor(Client, Input, []).
describe_predictor(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DescribePredictor">>, Input, Options).
%% @doc Describes a predictor backtest export job created using the
%% `CreatePredictorBacktestExportJob' operation.
%%
%% In addition to listing the properties provided by the user in the
%% `CreatePredictorBacktestExportJob' request, this operation lists the
%% following properties:
%%
%% <ul> <li> `CreationTime'
%%
%% </li> <li> `LastModificationTime'
%%
%% </li> <li> `Status'
%%
%% </li> <li> `Message' (if an error occurred)
%%
%% </li> </ul>
describe_predictor_backtest_export_job(Client, Input)
when is_map(Client), is_map(Input) ->
describe_predictor_backtest_export_job(Client, Input, []).
describe_predictor_backtest_export_job(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DescribePredictorBacktestExportJob">>, Input, Options).
%% @doc Describes the what-if analysis created using the
%% `CreateWhatIfAnalysis' operation.
%%
%% In addition to listing the properties provided in the
%% `CreateWhatIfAnalysis' request, this operation lists the following
%% properties:
%%
%% <ul> <li> `CreationTime'
%%
%% </li> <li> `LastModificationTime'
%%
%% </li> <li> `Message' - If an error occurred, information about the
%% error.
%%
%% </li> <li> `Status'
%%
%% </li> </ul>
describe_what_if_analysis(Client, Input)
when is_map(Client), is_map(Input) ->
describe_what_if_analysis(Client, Input, []).
describe_what_if_analysis(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DescribeWhatIfAnalysis">>, Input, Options).
%% @doc Describes the what-if forecast created using the
%% `CreateWhatIfForecast' operation.
%%
%% In addition to listing the properties provided in the
%% `CreateWhatIfForecast' request, this operation lists the following
%% properties:
%%
%% <ul> <li> `CreationTime'
%%
%% </li> <li> `LastModificationTime'
%%
%% </li> <li> `Message' - If an error occurred, information about the
%% error.
%%
%% </li> <li> `Status'
%%
%% </li> </ul>
describe_what_if_forecast(Client, Input)
when is_map(Client), is_map(Input) ->
describe_what_if_forecast(Client, Input, []).
describe_what_if_forecast(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DescribeWhatIfForecast">>, Input, Options).
%% @doc Describes the what-if forecast export created using the
%% `CreateWhatIfForecastExport' operation.
%%
%% In addition to listing the properties provided in the
%% `CreateWhatIfForecastExport' request, this operation lists the
%% following properties:
%%
%% <ul> <li> `CreationTime'
%%
%% </li> <li> `LastModificationTime'
%%
%% </li> <li> `Message' - If an error occurred, information about the
%% error.
%%
%% </li> <li> `Status'
%%
%% </li> </ul>
describe_what_if_forecast_export(Client, Input)
when is_map(Client), is_map(Input) ->
describe_what_if_forecast_export(Client, Input, []).
describe_what_if_forecast_export(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DescribeWhatIfForecastExport">>, Input, Options).
%% @doc Provides metrics on the accuracy of the models that were trained by
%% the `CreatePredictor' operation.
%%
%% Use metrics to see how well the model performed and to decide whether to
%% use the predictor to generate a forecast. For more information, see
%% Predictor Metrics.
%%
%% This operation generates metrics for each backtest window that was
%% evaluated. The number of backtest windows (`NumberOfBacktestWindows')
%% is specified using the `EvaluationParameters' object, which is
%% optionally included in the `CreatePredictor' request. If
%% `NumberOfBacktestWindows' isn't specified, the number defaults to
%% one.
%%
%% The parameters of the `filling' method determine which items
%% contribute to the metrics. If you want all items to contribute, specify
%% `zero'. If you want only those items that have complete data in the
%% range being evaluated to contribute, specify `nan'. For more
%% information, see `FeaturizationMethod'.
%%
%% Before you can get accuracy metrics, the `Status' of the predictor
%% must be `ACTIVE', signifying that training has completed. To get the
%% status, use the `DescribePredictor' operation.
get_accuracy_metrics(Client, Input)
when is_map(Client), is_map(Input) ->
get_accuracy_metrics(Client, Input, []).
get_accuracy_metrics(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"GetAccuracyMetrics">>, Input, Options).
%% @doc Returns a list of dataset groups created using the CreateDatasetGroup
%% operation.
%%
%% For each dataset group, this operation returns a summary of its
%% properties, including its Amazon Resource Name (ARN). You can retrieve the
%% complete set of properties by using the dataset group ARN with the
%% DescribeDatasetGroup operation.
list_dataset_groups(Client, Input)
when is_map(Client), is_map(Input) ->
list_dataset_groups(Client, Input, []).
list_dataset_groups(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ListDatasetGroups">>, Input, Options).
%% @doc Returns a list of dataset import jobs created using the
%% CreateDatasetImportJob operation.
%%
%% For each import job, this operation returns a summary of its properties,
%% including its Amazon Resource Name (ARN). You can retrieve the complete
%% set of properties by using the ARN with the DescribeDatasetImportJob
%% operation. You can filter the list by providing an array of Filter
%% objects.
list_dataset_import_jobs(Client, Input)
when is_map(Client), is_map(Input) ->
list_dataset_import_jobs(Client, Input, []).
list_dataset_import_jobs(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ListDatasetImportJobs">>, Input, Options).
%% @doc Returns a list of datasets created using the CreateDataset operation.
%%
%% For each dataset, a summary of its properties, including its Amazon
%% Resource Name (ARN), is returned. To retrieve the complete set of
%% properties, use the ARN with the DescribeDataset operation.
list_datasets(Client, Input)
when is_map(Client), is_map(Input) ->
list_datasets(Client, Input, []).
list_datasets(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ListDatasets">>, Input, Options).
%% @doc Returns a list of Explainability resources created using the
%% `CreateExplainability' operation.
%%
%% This operation returns a summary for each Explainability. You can filter
%% the list using an array of `Filter' objects.
%%
%% To retrieve the complete set of properties for a particular Explainability
%% resource, use the ARN with the `DescribeExplainability' operation.
list_explainabilities(Client, Input)
when is_map(Client), is_map(Input) ->
list_explainabilities(Client, Input, []).
list_explainabilities(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ListExplainabilities">>, Input, Options).
%% @doc Returns a list of Explainability exports created using the
%% `CreateExplainabilityExport' operation.
%%
%% This operation returns a summary for each Explainability export. You can
%% filter the list using an array of `Filter' objects.
%%
%% To retrieve the complete set of properties for a particular Explainability
%% export, use the ARN with the `DescribeExplainability' operation.
list_explainability_exports(Client, Input)
when is_map(Client), is_map(Input) ->
list_explainability_exports(Client, Input, []).
list_explainability_exports(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ListExplainabilityExports">>, Input, Options).
%% @doc Returns a list of forecast export jobs created using the
%% `CreateForecastExportJob' operation.
%%
%% For each forecast export job, this operation returns a summary of its
%% properties, including its Amazon Resource Name (ARN). To retrieve the
%% complete set of properties, use the ARN with the
%% `DescribeForecastExportJob' operation. You can filter the list using
%% an array of `Filter' objects.
list_forecast_export_jobs(Client, Input)
when is_map(Client), is_map(Input) ->
list_forecast_export_jobs(Client, Input, []).
list_forecast_export_jobs(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ListForecastExportJobs">>, Input, Options).
%% @doc Returns a list of forecasts created using the `CreateForecast'
%% operation.
%%
%% For each forecast, this operation returns a summary of its properties,
%% including its Amazon Resource Name (ARN). To retrieve the complete set of
%% properties, specify the ARN with the `DescribeForecast' operation. You
%% can filter the list using an array of `Filter' objects.
list_forecasts(Client, Input)
when is_map(Client), is_map(Input) ->
list_forecasts(Client, Input, []).
list_forecasts(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ListForecasts">>, Input, Options).
%% @doc Returns a list of the monitoring evaluation results and predictor
%% events collected by the monitor resource during different windows of
%% time.
%%
%% For information about monitoring see `predictor-monitoring'. For more
%% information about retrieving monitoring results see Viewing Monitoring
%% Results.
list_monitor_evaluations(Client, Input) when is_map(Client), is_map(Input) ->
    list_monitor_evaluations(Client, Input, []).
list_monitor_evaluations(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"ListMonitorEvaluations">>, Input, Opts).
%% @doc Returns a list of monitors created with the `CreateMonitor'
%% operation and `CreateAutoPredictor' operation.
%%
%% For each monitor resource, this operation returns a summary of its
%% properties, including its Amazon Resource Name (ARN). You can retrieve a
%% complete set of properties of a monitor resource by specifying the
%% monitor's ARN in the `DescribeMonitor' operation.
list_monitors(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_monitors(Client, Input, []).

list_monitors(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListMonitors">>, Input, Options).
%% @doc Returns a list of predictor backtest export jobs created using the
%% `CreatePredictorBacktestExportJob' operation.
%%
%% This operation returns a summary for each backtest export job. You can
%% filter the list using an array of `Filter' objects.
%%
%% To retrieve the complete set of properties for a particular backtest
%% export job, use the ARN with the `DescribePredictorBacktestExportJob'
%% operation.
list_predictor_backtest_export_jobs(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_predictor_backtest_export_jobs(Client, Input, []).

list_predictor_backtest_export_jobs(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListPredictorBacktestExportJobs">>, Input, Options).
%% @doc Returns a list of predictors created using the
%% `CreateAutoPredictor' or `CreatePredictor' operations.
%%
%% For each predictor, this operation returns a summary of its properties,
%% including its Amazon Resource Name (ARN).
%%
%% You can retrieve the complete set of properties by using the ARN with the
%% `DescribeAutoPredictor' and `DescribePredictor' operations. You
%% can filter the list using an array of `Filter' objects.
list_predictors(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_predictors(Client, Input, []).

list_predictors(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListPredictors">>, Input, Options).
%% @doc Lists the tags for an Amazon Forecast resource.
list_tags_for_resource(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_tags_for_resource(Client, Input, []).

list_tags_for_resource(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListTagsForResource">>, Input, Options).
%% @doc Returns a list of what-if analyses created using the
%% `CreateWhatIfAnalysis' operation.
%%
%% For each what-if analysis, this operation returns a summary of its
%% properties, including its Amazon Resource Name (ARN). You can retrieve the
%% complete set of properties by using the what-if analysis ARN with the
%% `DescribeWhatIfAnalysis' operation.
list_what_if_analyses(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_what_if_analyses(Client, Input, []).

list_what_if_analyses(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListWhatIfAnalyses">>, Input, Options).
%% @doc Returns a list of what-if forecast exports created using the
%% `CreateWhatIfForecastExport' operation.
%%
%% For each what-if forecast export, this operation returns a summary of its
%% properties, including its Amazon Resource Name (ARN). You can retrieve the
%% complete set of properties by using the what-if forecast export ARN with
%% the `DescribeWhatIfForecastExport' operation.
list_what_if_forecast_exports(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_what_if_forecast_exports(Client, Input, []).

list_what_if_forecast_exports(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListWhatIfForecastExports">>, Input, Options).
%% @doc Returns a list of what-if forecasts created using the
%% `CreateWhatIfForecast' operation.
%%
%% For each what-if forecast, this operation returns a summary of its
%% properties, including its Amazon Resource Name (ARN). You can retrieve the
%% complete set of properties by using the what-if forecast ARN with the
%% `DescribeWhatIfForecast' operation.
list_what_if_forecasts(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_what_if_forecasts(Client, Input, []).

list_what_if_forecasts(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListWhatIfForecasts">>, Input, Options).
%% @doc Restarts a monitor resource that was previously stopped.
resume_resource(Client, Input)
  when is_map(Client), is_map(Input) ->
    resume_resource(Client, Input, []).

resume_resource(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"ResumeResource">>,
    request(Client, Action, Input, Options).
%% @doc Stops a resource.
%%
%% The resource undergoes the following states: `CREATE_STOPPING' and
%% `CREATE_STOPPED'. You cannot resume a resource once it has been
%% stopped.
%%
%% This operation can be applied to the following resources (and their
%% corresponding child resources):
%%
%% <ul> <li> Dataset Import Job
%%
%% </li> <li> Predictor Job
%%
%% </li> <li> Forecast Job
%%
%% </li> <li> Forecast Export Job
%%
%% </li> <li> Predictor Backtest Export Job
%%
%% </li> <li> Explainability Job
%%
%% </li> <li> Explainability Export Job
%%
%% </li> </ul>
stop_resource(Client, Input)
  when is_map(Client), is_map(Input) ->
    stop_resource(Client, Input, []).

stop_resource(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"StopResource">>, Input, Options).
%% @doc Attaches the supplied tags to the resource identified by
%% `resourceArn'.
%%
%% Tags already present on the resource but absent from the request
%% parameters are left untouched. Deleting the resource also deletes every
%% tag attached to it.
tag_resource(Client, Input)
  when is_map(Client), is_map(Input) ->
    tag_resource(Client, Input, []).

tag_resource(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"TagResource">>,
    request(Client, Action, Input, Options).
%% @doc Removes the given tags from a resource.
untag_resource(Client, Input)
  when is_map(Client), is_map(Input) ->
    untag_resource(Client, Input, []).

untag_resource(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"UntagResource">>,
    request(Client, Action, Input, Options).
%% @doc Replaces the datasets in a dataset group with the specified datasets.
%%
%% The `Status' of the dataset group must be `ACTIVE' before you can
%% use the dataset group to create a predictor. Use the DescribeDatasetGroup
%% operation to get the status.
update_dataset_group(Client, Input)
  when is_map(Client), is_map(Input) ->
    update_dataset_group(Client, Input, []).

update_dataset_group(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"UpdateDatasetGroup">>, Input, Options).
%%====================================================================
%% Internal functions
%%====================================================================

%% Wraps do_request/4 in a zero-arity closure so that
%% aws_request:request/2 can apply its retry policy (taken from Options)
%% around the actual HTTP call.
-spec request(aws_client:aws_client(), binary(), map(), list()) ->
    {ok, Result, {integer(), list(), hackney:client()}} |
    {error, Error, {integer(), list(), hackney:client()}} |
    {error, term()} when
    Result :: map() | undefined,
    Error :: map().
request(Client, Action, Input, Options) ->
    RequestFun = fun() -> do_request(Client, Action, Input, Options) end,
    aws_request:request(RequestFun, Options).
%% Build, sign and send one JSON-1.1 POST request for the given Action,
%% then normalize the hackney response via handle_response/1.
do_request(Client, Action, Input, Options) ->
    ServiceClient = Client#{service => <<"forecast">>},
    Host = build_host(<<"forecast">>, ServiceClient),
    URL = build_url(Host, ServiceClient),
    Payload = jsx:encode(Input),
    Headers = [{<<"Host">>, Host},
               {<<"Content-Type">>, <<"application/x-amz-json-1.1">>},
               {<<"X-Amz-Target">>, <<"AmazonForecast.", Action/binary>>}],
    SignedHeaders = aws_request:sign_request(ServiceClient, <<"POST">>, URL, Headers, Payload),
    handle_response(hackney:request(post, URL, SignedHeaders, Payload, Options)).
%% Normalize a hackney response into this module's result triples.
%%
%% A 200 with an empty body maps to `undefined' (some operations return
%% no payload); any other status is decoded as a JSON error document.
handle_response({ok, 200, ResponseHeaders, Client}) ->
    case hackney:body(Client) of
        {ok, <<>>} ->
            {ok, undefined, {200, ResponseHeaders, Client}};
        {ok, Body} ->
            Result = jsx:decode(Body),
            {ok, Result, {200, ResponseHeaders, Client}}
    end;
handle_response({ok, StatusCode, ResponseHeaders, Client}) ->
    {ok, Body} = hackney:body(Client),
    %% Guard against an empty error body (e.g. a bare 5xx from a proxy):
    %% jsx:decode(<<>>) raises badarg, which would turn a service error
    %% into a crash instead of an {error, ...} return.
    Error = case Body of
                <<>> -> #{};
                _ -> jsx:decode(Body)
            end,
    {error, Error, {StatusCode, ResponseHeaders, Client}};
handle_response({error, Reason}) ->
    %% Transport-level failure (connect, TLS, timeout) reported by hackney.
    {error, Reason}.
%% Resolve the hostname to contact for the service.
%%
%% Clause order is significant: a `local' region with an explicit
%% endpoint wins; a bare `local' region falls back to localhost;
%% otherwise the host is `<endpoint-prefix>.<region>.<endpoint>'.
build_host(_EndpointPrefix, #{region := <<"local">>, endpoint := Endpoint}) ->
    Endpoint;
build_host(_EndpointPrefix, #{region := <<"local">>}) ->
    <<"localhost">>;
build_host(EndpointPrefix, #{region := Region, endpoint := Endpoint}) ->
    aws_util:binary_join([EndpointPrefix, Region, Endpoint], <<".">>).
%% Assemble the full request URL, `<proto>://<host>:<port>/', for the
%% signed POST.
build_url(Host, Client) ->
    Parts = [aws_client:proto(Client), <<"://">>, Host, <<":">>,
             aws_client:port(Client), <<"/">>],
    aws_util:binary_join(Parts, <<"">>).
| null | https://raw.githubusercontent.com/aws-beam/aws-erlang/699287cee7dfc9dc8c08ced5f090dcc192c9cba8/src/aws_forecast.erl | erlang | WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
====================================================================
API
====================================================================
@doc Creates an Amazon Forecast predictor.
applying the optimal combination of algorithms to each time series in your
datasets. You can use `CreateAutoPredictor' to create new predictors
or upgrade/retrain existing predictors.
Creating new predictors
The following parameters are required when creating a new predictor:
<ul> <li> `PredictorName' - A unique name for the predictor.
train the predictor.
</li> <li> `ForecastFrequency' - The granularity of your forecasts
predicts. The forecast horizon is also called the prediction length.
</li> </ul> When creating a new predictor, do not specify a value for
`ReferencePredictorArn'.
Upgrading and retraining predictors
The following parameters are required when retraining or upgrading a
predictor:
<ul> <li> `PredictorName' - A unique name for the predictor.
retrain or upgrade.
</li> </ul> When upgrading or retraining a predictor, only specify values
for the `ReferencePredictorArn' and `PredictorName'.
@doc Creates an Amazon Forecast dataset.
The information about the dataset that you provide helps Forecast
understand how to consume the data for model training. This includes the
following:
<ul> <li> `DataFrequency' - How frequently your historical time-series
data is collected.
provides a list of predefined domains and types within each domain. For
requires your data to include a minimum set of predefined fields.
</li> <li> `Schema' - A schema specifies the fields in the dataset,
including the field name and data type.
</li> </ul> After creating a dataset, you import your training data into
it and add the dataset to a dataset group. You use the dataset group to
create a predictor. For more information, see Importing datasets.
repository.
The `Status' of a dataset must be `ACTIVE' before you can import
training data. Use the DescribeDataset operation to get the status.
@doc Creates a dataset group, which holds a collection of related
datasets.
You can add datasets to the dataset group when you create the dataset
group, or later by using the UpdateDatasetGroup operation.
After creating a dataset group and adding datasets, you use the dataset
groups.
To get a list of all your datasets groups, use the ListDatasetGroups
operation.
The `Status' of a dataset group must be `ACTIVE' before you can
use the dataset group to create a predictor. To get the status, use the
dataset that you want to import the data to.
permissions.
a comma (,).
You can specify the path to a specific file, the S3 bucket, or to a folder
Because dataset imports are not aggregated, your most recent dataset
import is the one that is used when training a predictor or generating a
forecast. Make sure that your most recent dataset import contains all of
the data you want to model off of, and not just the new data collected
since the previous import.
To get a list of all your dataset import jobs, filtered by specified
criteria, use the ListDatasetImportJobs operation.
@doc Explainability is only available for Forecasts and Predictors
Creates an Amazon Forecast Explainability.
Explainability helps you better understand how the attributes in your
scores to quantify the relative impact of each attribute and determine
whether they increase or decrease forecast values.
To enable Forecast Explainability, your predictor must include at least
one of the following: related time series, item metadata, or additional
receive aggregated Impact scores for all time series and time points in
your datasets, provide a Predictor ARN. To receive Impact scores for
specific time series and time points, provide a Forecast ARN.
predictor already has an Explainability resource.
The following parameters are required when providing a Predictor ARN:
</li> <li> `TimePointGranularity' - Must be set to “ALL”.
</li> <li> `TimeSeriesGranularity' - Must be set to “ALL”.
</li> </ul> Do not specify a value for the following parameters:
<ul> <li> `DataSource' - Only valid when TimeSeriesGranularity is
“SPECIFIC”.
</li> <li> `Schema' - Only valid when TimeSeriesGranularity is
“SPECIFIC”.
</li> <li> `StartDateTime' - Only valid when TimePointGranularity is
“SPECIFIC”.
</li> <li> `EndDateTime' - Only valid when TimePointGranularity is
“SPECIFIC”.
The following parameters are required when providing a Predictor ARN:
</li> <li> `TimePointGranularity' - Either “ALL” or “SPECIFIC”.
</li> <li> `TimeSeriesGranularity' - Either “ALL” or “SPECIFIC”.
</li> </ul> If you set TimeSeriesGranularity to “SPECIFIC”, you must also
provide the following:
<ul> <li> `DataSource' - The S3 location of the CSV file specifying
your time series.
</li> <li> `Schema' - The Schema defines the attributes and attribute
types listed in the Data Source.
</li> </ul> If you set TimePointGranularity to “SPECIFIC”, you must also
provide the following:
points.
</li> <li> `EndDateTime' - The last timestamp in the range of time
points.
</li> </ul>
@doc Exports an Explainability resource created by the
S3) bucket.
see `aws-forecast-iam-roles'.
The `Status' of the export job must be `ACTIVE' before you can
@doc Creates a forecast for each item in the `TARGET_TIME_SERIES'
dataset that was used to train the predictor.
This is known as inference. To retrieve the forecast for a single item at
low latency, use the operation. To export the complete forecast into your
`CreateForecastExportJob' operation.
query a forecast, you can request a specific date range within the
forecast.
operation.
the dataset that was used to create the predictor.
For more information, see `howitworks-forecast'.
The `Status' of the forecast must be `ACTIVE' before you can query
or export the forecast. Use the `DescribeForecast' operation to get
the status.
By default, a forecast includes predictions for every item (`item_id')
in the dataset group that was used to train the predictor. However, you
subset of time series. Forecast creation is skipped for any time series
that you specify that are not in the input dataset. The forecast export
file will not contain these time series or their forecasted values.
@doc Exports a forecast created by the `CreateForecast' operation to
The forecast file name will match the following conventions:
<ForecastExportJobName>_<ExportTimestamp>_<PartNumber>
(yyyy-MM-ddTHH-mm-ssZ).
You must specify a `DataDestination' object that includes an Identity
`aws-forecast-iam-roles'.
For more information, see `howitworks-forecast'.
To get a list of all your forecast export jobs, use the
`ListForecastExportJobs' operation.
The `Status' of the forecast export job must be `ACTIVE' before
use the `DescribeForecastExportJob' operation.
@doc Creates a predictor monitor resource for an existing auto predictor.
Predictor monitoring allows you to see how your predictor's
performance changes over time. For more information, see Predictor
Monitoring.
@doc This operation creates a legacy predictor that does not include all
To create a predictor that is compatible with all aspects of Forecast, use
`CreateAutoPredictor'.
Creates an Amazon Forecast predictor.
In the request, provide a dataset group and either specify an algorithm or
specify an algorithm, you also can override algorithm-specific
hyperparameters.
version of the datasets in the specified dataset group. You can then
generate a forecast using the `CreateForecast' operation.
You can specify a featurization configuration to fill and aggregate the
data fields in the `TARGET_TIME_SERIES' dataset to improve model
`DataFrequency' specified when the dataset was created matches the
format. For more information, see `howitworks-datasets-groups'.
train and evaluate your predictor by setting the `ForecastTypes'.
that minimizes the `objective function', set `PerformAutoML' to
`true'. The `objective function' is defined as the mean of the
</li> <li> `PerformHPO'
</li> </ul> To get a list of all of your predictors, use the
Before you can use the predictor to create a forecast, the `Status' of
the predictor must be `ACTIVE', signifying that training has
@doc Exports backtest forecasts and accuracy metrics generated by the
S3 bucket.
The export file names will match the following conventions:
`<ExportJobName>_<ExportTimestamp>_<PartNumber>.csv'
(yyyy-MM-ddTHH-mm-ssZ).
see `aws-forecast-iam-roles'.
The `Status' of the export job must be `ACTIVE' before you can
`DescribePredictorBacktestExportJob' operation.
@doc What-if analysis is a scenario modeling technique where you make a
hypothetical change to a time series and compare the forecasts generated
by these changes against the baseline, unchanged time series.
It is important to remember that the purpose of a what-if analysis is to
understand how a forecast can change given different modifications to the
baseline time series.
For example, imagine you are a clothing retailer who is considering an end
forecast, you can use a what-if analysis to investigate how different
sales tactics might affect your goals.
markdown , and
another where everything is given a fixed dollar markdown. You could
different scenarios against each other.
Note that a what-if analysis is meant to display what the forecasting
model has learned and how it will behave in the scenarios that you are
evaluating. Do not blindly use the results of the what-if analysis to make
business decisions. For instance, forecasts might not be accurate for
novel scenarios where there is no reference available to determine whether
a forecast is good.
The `TimeSeriesSelector' object defines the items that you want in the
what-if analysis.
@doc A what-if forecast is a forecast that is created from a modified
version of the baseline forecast.
Each what-if forecast incorporates either a replacement dataset or a set
of transformations to the original dataset.
@doc Exports a forecast created by the `CreateWhatIfForecast'
The forecast file name will match the following conventions:
`≈<ForecastExportJobName>_<ExportTimestamp>_<PartNumber>'
(yyyy-MM-ddTHH-mm-ssZ).
You must specify a `DataDestination' object that includes an Identity
`aws-forecast-iam-roles'.
For more information, see `howitworks-forecast'.
To get a list of all your what-if forecast export jobs, use the
`ListWhatIfForecastExports' operation.
The `Status' of the forecast export job must be `ACTIVE' before
You can only delete datasets that have a status of `ACTIVE' or
`CREATE_FAILED'. To get the status use the DescribeDataset operation.
Forecast does not automatically update any dataset groups that contain the
deleted dataset. In order to update the dataset group, use the
@doc Deletes a dataset group created using the CreateDatasetGroup
operation.
You can only delete dataset groups that have a status of `ACTIVE',
`CREATE_FAILED', or `UPDATE_FAILED'. To get the status, use the
This operation deletes only the dataset group, not the datasets in the
group.
@doc Deletes a dataset import job created using the CreateDatasetImportJob
operation.
You can delete only dataset import jobs that have a status of `ACTIVE'
or `CREATE_FAILED'. To get the status, use the
DescribeDatasetImportJob operation.
@doc Deletes an Explainability resource.
You can delete only predictor that have a status of `ACTIVE' or
`CREATE_FAILED'. To get the status, use the
`DescribeExplainability' operation.
@doc Deletes an Explainability export.
@doc Deletes a forecast created using the `CreateForecast' operation.
You can delete only forecasts that have a status of `ACTIVE' or
`CREATE_FAILED'. To get the status, use the `DescribeForecast'
operation.
You can't delete a forecast while it is being exported. After a
forecast is deleted, you can no longer query the forecast.
@doc Deletes a forecast export job created using the
`CreateForecastExportJob' operation.
You can delete only export jobs that have a status of `ACTIVE' or
`CREATE_FAILED'. To get the status, use the
`DescribeForecastExportJob' operation.
@doc Deletes a monitor resource.
You can only delete a monitor resource with a status of `ACTIVE',
You can delete only predictor that have a status of `ACTIVE' or
operation.
@doc Deletes a predictor backtest export job.
@doc Deletes an entire resource tree.
This operation will delete the parent resource and its child resources.
Child resources are resources that were created from another resource. For
example, when a forecast is generated from a predictor, the forecast is
the child resource and the predictor is the parent resource.
hierarchies:
<ul> <li> Dataset: dataset import jobs
forecasts, forecast export jobs
</li> <li> Predictor: predictor backtest export jobs, forecasts, forecast
export jobs
</li> <li> Forecast: forecast export jobs
S3.
@doc Deletes a what-if analysis created using the
`CreateWhatIfAnalysis' operation.
You can delete only what-if analyses that have a status of `ACTIVE' or
`CREATE_FAILED'. To get the status, use the
You can't delete a what-if analysis while any of its forecasts are
being exported.
@doc Deletes a what-if forecast created using the
`CreateWhatIfForecast' operation.
You can delete only what-if forecasts that have a status of `ACTIVE'
or `CREATE_FAILED'. To get the status, use the
`DescribeWhatIfForecast' operation.
You can't delete a what-if forecast while it is being exported. After
a what-if forecast is deleted, you can no longer query the what-if
analysis.
@doc Deletes a what-if forecast export created using the
`CreateWhatIfForecastExport' operation.
You can delete only what-if forecast exports that have a status of
`ACTIVE' or `CREATE_FAILED'. To get the status, use the
@doc Describes a predictor created using the CreateAutoPredictor
operation.
operation.
In addition to listing the parameters specified in the `CreateDataset'
request, this operation includes the following dataset properties:
</li> <li> `LastModificationTime'
</li> <li> `Status'
</li> </ul>
@doc Describes a dataset group created using the CreateDatasetGroup
operation.
In addition to listing the parameters provided in the
`CreateDatasetGroup' request, this operation includes the following
properties:
</li> <li> `LastModificationTime'
</li> <li> `Status'
</li> </ul>
@doc Describes a dataset import job created using the
CreateDatasetImportJob operation.
In addition to listing the parameters provided in the
`CreateDatasetImportJob' request, this operation includes the
following properties:
</li> <li> `LastModificationTime'
</li> <li> `FieldStatistics'
</li> <li> `Status'
</li> <li> `Message' - If an error occurred, information about the
error.
</li> </ul>
@doc Describes an Explainability resource created using the
@doc Describes an Explainability export created using the
`CreateExplainabilityExport' operation.
@doc Describes a forecast created using the `CreateForecast'
operation.
In addition to listing the properties provided in the `CreateForecast'
request, this operation lists the following properties:
<ul> <li> `DatasetGroupArn' - The dataset group that provided the
training data.
</li> <li> `LastModificationTime'
</li> <li> `Status'
</li> <li> `Message' - If an error occurred, information about the
error.
</li> </ul>
@doc Describes a forecast export job created using the
`CreateForecastExportJob' operation.
In addition to listing the properties provided by the user in the
`CreateForecastExportJob' request, this operation lists the following
properties:
</li> <li> `LastModificationTime'
</li> <li> `Status'
</li> <li> `Message' - If an error occurred, information about the
error.
</li> </ul>
@doc Describes a monitor resource.
request, this operation lists the following properties:
<ul> <li> `Baseline'
</li> <li> `LastEvaluationTime'
</li> <li> `LastEvaluationState'
</li> <li> `LastModificationTime'
</li> <li> `Message'
</li> <li> `Status'
</li> </ul>
@doc This operation is only valid for legacy predictors created with
If you are not using a legacy predictor, use `DescribeAutoPredictor'.
In addition to listing the properties provided in the
properties:
<ul> <li> `DatasetImportJobArns' - The dataset import jobs used to
import training data.
</li> <li> `AutoMLAlgorithmArns' - If AutoML is performed, the
algorithms that were evaluated.
</li> <li> `LastModificationTime'
</li> <li> `Status'
</li> <li> `Message' - If an error occurred, information about the
error.
</li> </ul>
@doc Describes a predictor backtest export job created using the
`CreatePredictorBacktestExportJob' operation.
In addition to listing the properties provided by the user in the
`CreatePredictorBacktestExportJob' request, this operation lists the
following properties:
</li> <li> `LastModificationTime'
</li> <li> `Status'
</li> <li> `Message' (if an error occurred)
</li> </ul>
@doc Describes the what-if analysis created using the
`CreateWhatIfAnalysis' operation.
In addition to listing the properties provided in the
`CreateWhatIfAnalysis' request, this operation lists the following
properties:
</li> <li> `LastModificationTime'
</li> <li> `Message' - If an error occurred, information about the
error.
</li> <li> `Status'
</li> </ul>
@doc Describes the what-if forecast created using the
`CreateWhatIfForecast' operation.
In addition to listing the properties provided in the
`CreateWhatIfForecast' request, this operation lists the following
properties:
</li> <li> `LastModificationTime'
</li> <li> `Message' - If an error occurred, information about the
error.
</li> <li> `Status'
</li> </ul>
@doc Describes the what-if forecast export created using the
`CreateWhatIfForecastExport' operation.
In addition to listing the properties provided in the
`CreateWhatIfForecastExport' request, this operation lists the
following properties:
</li> <li> `LastModificationTime'
</li> <li> `Message' - If an error occurred, information about the
error.
</li> <li> `Status'
</li> </ul>
@doc Provides metrics on the accuracy of the models that were trained by
Use metrics to see how well the model performed and to decide whether to
use the predictor to generate a forecast. For more information, see
Predictor Metrics.
This operation generates metrics for each backtest window that was
evaluated. The number of backtest windows (`NumberOfBacktestWindows')
`NumberOfBacktestWindows' isn't specified, the number defaults to
one.
The parameters of the `filling' method determine which items
contribute to the metrics. If you want all items to contribute, specify
range being evaluated to contribute, specify `nan'. For more
information, see `FeaturizationMethod'.
Before you can get accuracy metrics, the `Status' of the predictor
must be `ACTIVE', signifying that training has completed. To get the
@doc Returns a list of dataset groups created using the CreateDatasetGroup
operation.
For each dataset group, this operation returns a summary of its
@doc Returns a list of dataset import jobs created using the
CreateDatasetImportJob operation.
For each import job, this operation returns a summary of its properties,
operation. You can filter the list by providing an array of Filter
objects.
@doc Returns a list of datasets created using the CreateDataset operation.
the list using an array of `Filter' objects.
`CreateExplainabilityExport' operation.
This operation returns a summary for each Explainability export. You can
filter the list using an array of `Filter' objects.
@doc Returns a list of forecast export jobs created using the
`CreateForecastExportJob' operation.
For each forecast export job, this operation returns a summary of its
`DescribeForecastExportJob' operation. You can filter the list using
an array of `Filter' objects.
@doc Returns a list of forecasts created using the `CreateForecast'
operation.
For each forecast, this operation returns a summary of its properties,
can filter the list using an array of `Filter' objects.
@doc Returns a list of the monitoring evaluation results and predictor
events collected by the monitor resource during different windows of time.
For information about monitoring see `predictor-monitoring'. For more
information about retrieving monitoring results see Viewing Monitoring
Results.
operation and `CreateAutoPredictor' operation.
For each monitor resource, this operation returns of a summary of its
complete set of properties of a monitor resource by specify the
@doc Returns a list of predictor backtest export jobs created using the
`CreatePredictorBacktestExportJob' operation.
This operation returns a summary for each backtest export job. You can
filter the list using an array of `Filter' objects.
To retrieve the complete set of properties for a particular backtest
operation.
@doc Returns a list of predictors created using the
For each predictor, this operation returns a summary of its properties,
`DescribeAutoPredictor' and `DescribePredictor' operations. You
can filter the list using an array of `Filter' objects.
@doc Returns a list of what-if analyses created using the
`CreateWhatIfAnalysis' operation.
For each what-if analysis, this operation returns a summary of its
@doc Returns a list of what-if forecast exports created using the
`CreateWhatIfForecastExport' operation.
For each what-if forecast export, this operation returns a summary of its
@doc Returns a list of what-if forecasts created using the
`CreateWhatIfForecast' operation.
For each what-if forecast, this operation returns a summary of its
`DescribeWhatIfForecast' operation.
@doc Resumes a stopped monitor resource.
@doc Stops a resource.
The resource undergoes the following states: `CREATE_STOPPING' and
stopped.
This operation can be applied to the following resources (and their
corresponding child resources):
<ul> <li> Dataset Import Job
</li> <li> Predictor Job
</li> <li> Forecast Job
</li> <li> Forecast Export Job
</li> <li> Predictor Backtest Export Job
</li> <li> Explainability Job
</li> <li> Explainability Export Job
</li> </ul>
@doc Associates the specified tags to a resource with the specified
`resourceArn'.
If existing tags on a resource are not specified in the request
parameters, they are not changed. When a resource is deleted, the tags
associated with that resource are also deleted.
@doc Deletes the specified tags from a resource.
@doc Replaces the datasets in a dataset group with the specified datasets.
The `Status' of the dataset group must be `ACTIVE' before you can
operation to get the status.
====================================================================
==================================================================== | See -beam/aws-codegen for more details .
@doc Provides APIs for creating and managing Amazon Forecast resources .
-module(aws_forecast).
-export([create_auto_predictor/2,
create_auto_predictor/3,
create_dataset/2,
create_dataset/3,
create_dataset_group/2,
create_dataset_group/3,
create_dataset_import_job/2,
create_dataset_import_job/3,
create_explainability/2,
create_explainability/3,
create_explainability_export/2,
create_explainability_export/3,
create_forecast/2,
create_forecast/3,
create_forecast_export_job/2,
create_forecast_export_job/3,
create_monitor/2,
create_monitor/3,
create_predictor/2,
create_predictor/3,
create_predictor_backtest_export_job/2,
create_predictor_backtest_export_job/3,
create_what_if_analysis/2,
create_what_if_analysis/3,
create_what_if_forecast/2,
create_what_if_forecast/3,
create_what_if_forecast_export/2,
create_what_if_forecast_export/3,
delete_dataset/2,
delete_dataset/3,
delete_dataset_group/2,
delete_dataset_group/3,
delete_dataset_import_job/2,
delete_dataset_import_job/3,
delete_explainability/2,
delete_explainability/3,
delete_explainability_export/2,
delete_explainability_export/3,
delete_forecast/2,
delete_forecast/3,
delete_forecast_export_job/2,
delete_forecast_export_job/3,
delete_monitor/2,
delete_monitor/3,
delete_predictor/2,
delete_predictor/3,
delete_predictor_backtest_export_job/2,
delete_predictor_backtest_export_job/3,
delete_resource_tree/2,
delete_resource_tree/3,
delete_what_if_analysis/2,
delete_what_if_analysis/3,
delete_what_if_forecast/2,
delete_what_if_forecast/3,
delete_what_if_forecast_export/2,
delete_what_if_forecast_export/3,
describe_auto_predictor/2,
describe_auto_predictor/3,
describe_dataset/2,
describe_dataset/3,
describe_dataset_group/2,
describe_dataset_group/3,
describe_dataset_import_job/2,
describe_dataset_import_job/3,
describe_explainability/2,
describe_explainability/3,
describe_explainability_export/2,
describe_explainability_export/3,
describe_forecast/2,
describe_forecast/3,
describe_forecast_export_job/2,
describe_forecast_export_job/3,
describe_monitor/2,
describe_monitor/3,
describe_predictor/2,
describe_predictor/3,
describe_predictor_backtest_export_job/2,
describe_predictor_backtest_export_job/3,
describe_what_if_analysis/2,
describe_what_if_analysis/3,
describe_what_if_forecast/2,
describe_what_if_forecast/3,
describe_what_if_forecast_export/2,
describe_what_if_forecast_export/3,
get_accuracy_metrics/2,
get_accuracy_metrics/3,
list_dataset_groups/2,
list_dataset_groups/3,
list_dataset_import_jobs/2,
list_dataset_import_jobs/3,
list_datasets/2,
list_datasets/3,
list_explainabilities/2,
list_explainabilities/3,
list_explainability_exports/2,
list_explainability_exports/3,
list_forecast_export_jobs/2,
list_forecast_export_jobs/3,
list_forecasts/2,
list_forecasts/3,
list_monitor_evaluations/2,
list_monitor_evaluations/3,
list_monitors/2,
list_monitors/3,
list_predictor_backtest_export_jobs/2,
list_predictor_backtest_export_jobs/3,
list_predictors/2,
list_predictors/3,
list_tags_for_resource/2,
list_tags_for_resource/3,
list_what_if_analyses/2,
list_what_if_analyses/3,
list_what_if_forecast_exports/2,
list_what_if_forecast_exports/3,
list_what_if_forecasts/2,
list_what_if_forecasts/3,
resume_resource/2,
resume_resource/3,
stop_resource/2,
stop_resource/3,
tag_resource/2,
tag_resource/3,
untag_resource/2,
untag_resource/3,
update_dataset_group/2,
update_dataset_group/3]).
-include_lib("hackney/include/hackney_lib.hrl").
Amazon Forecast creates predictors with AutoPredictor , which involves
< /li > < li > ` DatasetGroupArn ' - The ARN of the dataset group used to
( hourly , daily , weekly , etc ) .
< /li > < li > ` ForecastHorizon ' - The number of time - steps that the model
< /li > < li > ` ReferencePredictorArn ' - The ARN of the predictor to
%% @doc Invokes the Amazon Forecast `CreateAutoPredictor' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
create_auto_predictor(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_auto_predictor(Client, Input, []).

create_auto_predictor(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreateAutoPredictor">>, Input, Options).
< /li > < li > ` Domain ' and ` DatasetType ' - Each dataset has an
associated dataset domain and a type within the domain . Amazon Forecast
each unique dataset domain and type within the domain , Amazon Forecast
To get a list of all your datasets , use the ListDatasets operation .
For example Forecast datasets , see the Amazon Forecast Sample GitHub
%% @doc Invokes the Amazon Forecast `CreateDataset' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
create_dataset(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_dataset(Client, Input, []).

create_dataset(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreateDataset">>, Input, Options).
group when you create a predictor . For more information , see Dataset
DescribeDatasetGroup operation .
%% @doc Invokes the Amazon Forecast `CreateDatasetGroup' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
create_dataset_group(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_dataset_group(Client, Input, []).

create_dataset_group(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreateDatasetGroup">>, Input, Options).
@doc Imports your training data to an Amazon Forecast dataset .
You provide the location of your training data in an Amazon Simple Storage
Service ( Amazon S3 ) bucket and the Amazon Resource Name ( ARN ) of the
You must specify a DataSource object that includes an Identity and Access
Management ( IAM ) role that Amazon Forecast can assume to access the data ,
as Amazon Forecast makes a copy of your data and processes it in an
internal Amazon Web Services system . For more information , see Set up
The training data must be in CSV or Parquet format . The delimiter must be
in the S3 bucket . For the latter two cases , Amazon Forecast imports all
files up to the limit of 10,000 files .
%% @doc Invokes the Amazon Forecast `CreateDatasetImportJob' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
create_dataset_import_job(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_dataset_import_job(Client, Input, []).

create_dataset_import_job(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreateDatasetImportJob">>, Input, Options).
generated from an AutoPredictor ( ` CreateAutoPredictor ' )
datasets impact forecast . Amazon Forecast uses a metric called Impact
datasets like Holidays and the Weather Index .
CreateExplainability accepts either a Predictor ARN or Forecast ARN . To
CreateExplainability with a Predictor ARN
You can only have one Explainability resource per predictor . If you
already enabled ` ExplainPredictor ' in ` CreateAutoPredictor ' , that
< ul > < li > ` ExplainabilityName ' - A unique name for the Explainability .
< /li > < li > ` ResourceArn ' - The Arn of the predictor .
< /li > < /ul > CreateExplainability with a Forecast ARN
You can specify a maximum of 50 time series and 500 time points .
< ul > < li > ` ExplainabilityName ' - A unique name for the Explainability .
< /li > < li > ` ResourceArn ' - The Arn of the forecast .
< ul > < li > ` StartDateTime ' - The first timestamp in the range of time
%% @doc Invokes the Amazon Forecast `CreateExplainability' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
create_explainability(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_explainability(Client, Input, []).

create_explainability(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreateExplainability">>, Input, Options).
` CreateExplainability ' operation .
Exported files are exported to an Amazon Simple Storage Service ( Amazon
You must specify a ` DataDestination ' object that includes an Amazon S3
bucket and an Identity and Access Management ( IAM ) role that Amazon
Forecast can assume to access the Amazon S3 bucket . For more information ,
access the export in your Amazon S3 bucket . To get the status , use the
` DescribeExplainabilityExport ' operation .
%% @doc Invokes the Amazon Forecast `CreateExplainabilityExport' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
create_explainability_export(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_explainability_export(Client, Input, []).

create_explainability_export(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreateExplainabilityExport">>, Input, Options).
Amazon Simple Storage Service ( Amazon S3 ) bucket , use the
The range of the forecast is determined by the ` ForecastHorizon '
value , which you specify in the ` CreatePredictor ' request . When you
To get a list of all your forecasts , use the ` ListForecasts '
The forecasts generated by Amazon Forecast are in the same time zone as
can use the ` TimeSeriesSelector ' object to generate a forecast on a
%% @doc Invokes the Amazon Forecast `CreateForecast' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
create_forecast(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_forecast(Client, Input, []).

create_forecast(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreateForecast">>, Input, Options).
your Amazon Simple Storage Service ( Amazon S3 ) bucket .
where the <ExportTimestamp> component is in Java SimpleDateFormat
and Access Management ( IAM ) role that Amazon Forecast can assume to access
the Amazon S3 bucket . For more information , see
you can access the forecast in your Amazon S3 bucket . To get the status ,
%% @doc Invokes the Amazon Forecast `CreateForecastExportJob' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
create_forecast_export_job(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_forecast_export_job(Client, Input, []).

create_forecast_export_job(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreateForecastExportJob">>, Input, Options).

%% @doc Invokes the Amazon Forecast `CreateMonitor' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
create_monitor(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_monitor(Client, Input, []).

create_monitor(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreateMonitor">>, Input, Options).
the predictor functionalities provided by Amazon Forecast .
let Amazon Forecast choose an algorithm for you using AutoML . If you
Amazon Forecast uses the algorithm to train a predictor using the latest
To see the evaluation metrics , use the ` GetAccuracyMetrics ' operation .
training . For more information , see ` FeaturizationConfig ' .
For RELATED_TIME_SERIES datasets , ` CreatePredictor ' verifies that the
` ForecastFrequency ' . TARGET_TIME_SERIES datasets do n't have this
restriction . Amazon Forecast also verifies the delimiter and timestamp
By default , predictors are trained and evaluated at the 0.1 ( P10 ) , 0.5
( P50 ) , and 0.9 ( P90 ) quantiles . You can choose custom forecast types to
AutoML
If you want Amazon Forecast to evaluate each algorithm and choose the one
weighted losses over the forecast types . By default , these are the p10 ,
p50 , and p90 quantile losses . For more information , see
` ' .
When AutoML is enabled , the following properties are disallowed :
< ul > < li > ` AlgorithmArn '
< /li > < li > ` HPOConfig '
< /li > < li > ` TrainingParameters '
` ListPredictors ' operation .
completed . To get the status , use the ` DescribePredictor ' operation .
%% @doc Invokes the Amazon Forecast `CreatePredictor' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
create_predictor(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_predictor(Client, Input, []).

create_predictor(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreatePredictor">>, Input, Options).
` CreateAutoPredictor ' or ` CreatePredictor ' operations .
Two folders containing CSV or Parquet files are exported to your specified
The <ExportTimestamp> component is in Java SimpleDateFormat
You must specify a ` DataDestination ' object that includes an Amazon S3
bucket and an Identity and Access Management ( IAM ) role that Amazon
Forecast can assume to access the Amazon S3 bucket . For more information ,
access the export in your Amazon S3 bucket . To get the status , use the
%% @doc Invokes the Amazon Forecast `CreatePredictorBacktestExportJob' API
%% action; the 2-arity form defaults the request `Options' to `[]'.
create_predictor_backtest_export_job(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_predictor_backtest_export_job(Client, Input, []).

create_predictor_backtest_export_job(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreatePredictorBacktestExportJob">>, Input, Options).
of season sale to clear space for new styles . After creating a baseline
create a scenario where the sale lasts for one week and another where the
sale lasts for one month . With a what - if analysis , you can compare many
%% @doc Invokes the Amazon Forecast `CreateWhatIfAnalysis' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
create_what_if_analysis(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_what_if_analysis(Client, Input, []).

create_what_if_analysis(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreateWhatIfAnalysis">>, Input, Options).

%% @doc Invokes the Amazon Forecast `CreateWhatIfForecast' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
create_what_if_forecast(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_what_if_forecast(Client, Input, []).

create_what_if_forecast(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreateWhatIfForecast">>, Input, Options).
operation to your Amazon Simple Storage Service ( Amazon S3 ) bucket .
The <ExportTimestamp> component is in Java SimpleDateFormat
and Access Management ( IAM ) role that Amazon Forecast can assume to access
the Amazon S3 bucket . For more information , see
you can access the forecast in your Amazon S3 bucket . To get the status ,
use the ` DescribeWhatIfForecastExport ' operation .
%% @doc Invokes the Amazon Forecast `CreateWhatIfForecastExport' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
create_what_if_forecast_export(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_what_if_forecast_export(Client, Input, []).

create_what_if_forecast_export(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"CreateWhatIfForecastExport">>, Input, Options).
@doc Deletes an Amazon Forecast dataset that was created using the
CreateDataset operation .
UpdateDatasetGroup operation , omitting the deleted dataset 's ARN .
%% @doc Invokes the Amazon Forecast `DeleteDataset' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
delete_dataset(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_dataset(Client, Input, []).

delete_dataset(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteDataset">>, Input, Options).
DescribeDatasetGroup operation .
%% @doc Invokes the Amazon Forecast `DeleteDatasetGroup' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
delete_dataset_group(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_dataset_group(Client, Input, []).

delete_dataset_group(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteDatasetGroup">>, Input, Options).

%% @doc Invokes the Amazon Forecast `DeleteDatasetImportJob' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
delete_dataset_import_job(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_dataset_import_job(Client, Input, []).

delete_dataset_import_job(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteDatasetImportJob">>, Input, Options).

%% @doc Invokes the Amazon Forecast `DeleteExplainability' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
delete_explainability(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_explainability(Client, Input, []).

delete_explainability(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteExplainability">>, Input, Options).

%% @doc Invokes the Amazon Forecast `DeleteExplainabilityExport' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
delete_explainability_export(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_explainability_export(Client, Input, []).

delete_explainability_export(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteExplainabilityExport">>, Input, Options).

%% @doc Invokes the Amazon Forecast `DeleteForecast' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
delete_forecast(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_forecast(Client, Input, []).

delete_forecast(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteForecast">>, Input, Options).

%% @doc Invokes the Amazon Forecast `DeleteForecastExportJob' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
delete_forecast_export_job(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_forecast_export_job(Client, Input, []).

delete_forecast_export_job(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteForecastExportJob">>, Input, Options).
` ACTIVE_STOPPED ' , ` CREATE_FAILED ' , or ` CREATE_STOPPED ' .
%% @doc Invokes the Amazon Forecast `DeleteMonitor' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
delete_monitor(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_monitor(Client, Input, []).

delete_monitor(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteMonitor">>, Input, Options).
@doc Deletes a predictor created using the ` DescribePredictor ' or
` CreatePredictor ' operations .
` CREATE_FAILED ' . To get the status , use the ` DescribePredictor '
%% @doc Invokes the Amazon Forecast `DeletePredictor' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
delete_predictor(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_predictor(Client, Input, []).

delete_predictor(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeletePredictor">>, Input, Options).

%% @doc Invokes the Amazon Forecast `DeletePredictorBacktestExportJob' API
%% action; the 2-arity form defaults the request `Options' to `[]'.
delete_predictor_backtest_export_job(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_predictor_backtest_export_job(Client, Input, []).

delete_predictor_backtest_export_job(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeletePredictorBacktestExportJob">>, Input, Options).
Amazon Forecast resources possess the following parent - child resource
< /li > < li > Dataset Group : predictors , predictor backtest export jobs ,
< /li > < /ul > ` DeleteResourceTree ' will only delete Amazon Forecast
resources , and will not delete datasets or exported files stored in Amazon
%% @doc Invokes the Amazon Forecast `DeleteResourceTree' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
delete_resource_tree(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_resource_tree(Client, Input, []).

delete_resource_tree(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteResourceTree">>, Input, Options).
` DescribeWhatIfAnalysis ' operation .
%% @doc Invokes the Amazon Forecast `DeleteWhatIfAnalysis' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
delete_what_if_analysis(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_what_if_analysis(Client, Input, []).

delete_what_if_analysis(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteWhatIfAnalysis">>, Input, Options).

%% @doc Invokes the Amazon Forecast `DeleteWhatIfForecast' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
delete_what_if_forecast(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_what_if_forecast(Client, Input, []).

delete_what_if_forecast(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteWhatIfForecast">>, Input, Options).
` DescribeWhatIfForecastExport ' operation .
%% @doc Invokes the Amazon Forecast `DeleteWhatIfForecastExport' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
delete_what_if_forecast_export(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_what_if_forecast_export(Client, Input, []).

delete_what_if_forecast_export(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DeleteWhatIfForecastExport">>, Input, Options).

%% @doc Invokes the Amazon Forecast `DescribeAutoPredictor' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
describe_auto_predictor(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_auto_predictor(Client, Input, []).

describe_auto_predictor(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeAutoPredictor">>, Input, Options).
@doc Describes an Amazon Forecast dataset created using the CreateDataset
< ul > < li > ` CreationTime '
%% @doc Invokes the Amazon Forecast `DescribeDataset' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
describe_dataset(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_dataset(Client, Input, []).

describe_dataset(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeDataset">>, Input, Options).
< ul > < li > ` DatasetArns ' - The datasets belonging to the group .
< /li > < li > ` CreationTime '
%% @doc Invokes the Amazon Forecast `DescribeDatasetGroup' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
describe_dataset_group(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_dataset_group(Client, Input, []).

describe_dataset_group(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeDatasetGroup">>, Input, Options).
< ul > < li > ` CreationTime '
< /li > < li > ` DataSize '
%% @doc Invokes the Amazon Forecast `DescribeDatasetImportJob' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
describe_dataset_import_job(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_dataset_import_job(Client, Input, []).

describe_dataset_import_job(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeDatasetImportJob">>, Input, Options).
` CreateExplainability ' operation .
%% @doc Invokes the Amazon Forecast `DescribeExplainability' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
describe_explainability(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_explainability(Client, Input, []).

describe_explainability(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeExplainability">>, Input, Options).

%% @doc Invokes the Amazon Forecast `DescribeExplainabilityExport' API
%% action; the 2-arity form defaults the request `Options' to `[]'.
describe_explainability_export(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_explainability_export(Client, Input, []).

describe_explainability_export(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeExplainabilityExport">>, Input, Options).
< /li > < li > ` CreationTime '
%% @doc Invokes the Amazon Forecast `DescribeForecast' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
describe_forecast(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_forecast(Client, Input, []).

describe_forecast(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeForecast">>, Input, Options).
< ul > < li > ` CreationTime '
%% @doc Invokes the Amazon Forecast `DescribeForecastExportJob' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
describe_forecast_export_job(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_forecast_export_job(Client, Input, []).

describe_forecast_export_job(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeForecastExportJob">>, Input, Options).
In addition to listing the properties provided in the ` CreateMonitor '
< /li > < li > ` CreationTime '
%% @doc Invokes the Amazon Forecast `DescribeMonitor' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
describe_monitor(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_monitor(Client, Input, []).

describe_monitor(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeMonitor">>, Input, Options).
CreatePredictor .
Describes a predictor created using the ` CreatePredictor ' operation .
` CreatePredictor ' request , this operation lists the following
< /li > < li > ` CreationTime '
%% @doc Invokes the Amazon Forecast `DescribePredictor' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
describe_predictor(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_predictor(Client, Input, []).

describe_predictor(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribePredictor">>, Input, Options).
< ul > < li > ` CreationTime '
%% @doc Invokes the Amazon Forecast `DescribePredictorBacktestExportJob' API
%% action; the 2-arity form defaults the request `Options' to `[]'.
describe_predictor_backtest_export_job(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_predictor_backtest_export_job(Client, Input, []).

describe_predictor_backtest_export_job(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribePredictorBacktestExportJob">>, Input, Options).
< ul > < li > ` CreationTime '
%% @doc Invokes the Amazon Forecast `DescribeWhatIfAnalysis' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
describe_what_if_analysis(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_what_if_analysis(Client, Input, []).

describe_what_if_analysis(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeWhatIfAnalysis">>, Input, Options).
< ul > < li > ` CreationTime '
%% @doc Invokes the Amazon Forecast `DescribeWhatIfForecast' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
describe_what_if_forecast(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_what_if_forecast(Client, Input, []).

describe_what_if_forecast(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeWhatIfForecast">>, Input, Options).
< ul > < li > ` CreationTime '
%% @doc Invokes the Amazon Forecast `DescribeWhatIfForecastExport' API
%% action; the 2-arity form defaults the request `Options' to `[]'.
describe_what_if_forecast_export(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_what_if_forecast_export(Client, Input, []).

describe_what_if_forecast_export(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"DescribeWhatIfForecastExport">>, Input, Options).
the ` CreatePredictor ' operation .
is specified using the ` EvaluationParameters ' object , which is
optionally included in the ` CreatePredictor ' request . If
` zero ' . If you want only those items that have complete data in the
status , use the ` DescribePredictor ' operation .
%% @doc Invokes the Amazon Forecast `GetAccuracyMetrics' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
get_accuracy_metrics(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_accuracy_metrics(Client, Input, []).

get_accuracy_metrics(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"GetAccuracyMetrics">>, Input, Options).
properties , including its Amazon Resource Name ( ARN ) . You can retrieve the
complete set of properties by using the dataset group ARN with the
DescribeDatasetGroup operation .
%% @doc Invokes the Amazon Forecast `ListDatasetGroups' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
list_dataset_groups(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_dataset_groups(Client, Input, []).

list_dataset_groups(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListDatasetGroups">>, Input, Options).
including its Amazon Resource Name ( ARN ) . You can retrieve the complete
set of properties by using the ARN with the DescribeDatasetImportJob
%% @doc Invokes the Amazon Forecast `ListDatasetImportJobs' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
list_dataset_import_jobs(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_dataset_import_jobs(Client, Input, []).

list_dataset_import_jobs(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListDatasetImportJobs">>, Input, Options).
For each dataset , a summary of its properties , including its Amazon
Resource Name ( ARN ) , is returned . To retrieve the complete set of
properties , use the ARN with the DescribeDataset operation .
%% @doc Invokes the Amazon Forecast `ListDatasets' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
list_datasets(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_datasets(Client, Input, []).

list_datasets(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListDatasets">>, Input, Options).
@doc Returns a list of Explainability resources created using the
` CreateExplainability ' operation .
This operation returns a summary for each Explainability . You can filter
To retrieve the complete set of properties for a particular Explainability
resource , use the ARN with the ` DescribeExplainability ' operation .
%% @doc Invokes the Amazon Forecast `ListExplainabilities' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
list_explainabilities(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_explainabilities(Client, Input, []).

list_explainabilities(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListExplainabilities">>, Input, Options).
@doc Returns a list of Explainability exports created using the
To retrieve the complete set of properties for a particular Explainability
export , use the ARN with the ` DescribeExplainability ' operation .
%% @doc Invokes the Amazon Forecast `ListExplainabilityExports' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
list_explainability_exports(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_explainability_exports(Client, Input, []).

list_explainability_exports(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListExplainabilityExports">>, Input, Options).
properties , including its Amazon Resource Name ( ARN ) . To retrieve the
complete set of properties , use the ARN with the
%% @doc Invokes the Amazon Forecast `ListForecastExportJobs' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
list_forecast_export_jobs(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_forecast_export_jobs(Client, Input, []).

list_forecast_export_jobs(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListForecastExportJobs">>, Input, Options).
including its Amazon Resource Name ( ARN ) . To retrieve the complete set of
properties , specify the ARN with the ` DescribeForecast ' operation . You
%% @doc Invokes the Amazon Forecast `ListForecasts' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
list_forecasts(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_forecasts(Client, Input, []).

list_forecasts(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListForecasts">>, Input, Options).

%% @doc Invokes the Amazon Forecast `ListMonitorEvaluations' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
list_monitor_evaluations(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_monitor_evaluations(Client, Input, []).

list_monitor_evaluations(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListMonitorEvaluations">>, Input, Options).
@doc Returns a list of monitors created with the ` CreateMonitor '
properties , including its Amazon Resource Name ( ARN ) . You can retrieve a
monitor 's ARN in the ` DescribeMonitor ' operation .
%% @doc Invokes the Amazon Forecast `ListMonitors' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
list_monitors(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_monitors(Client, Input, []).

list_monitors(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListMonitors">>, Input, Options).
export job , use the ARN with the ` DescribePredictorBacktestExportJob '
%% @doc Invokes the Amazon Forecast `ListPredictorBacktestExportJobs' API
%% action; the 2-arity form defaults the request `Options' to `[]'.
list_predictor_backtest_export_jobs(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_predictor_backtest_export_jobs(Client, Input, []).

list_predictor_backtest_export_jobs(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListPredictorBacktestExportJobs">>, Input, Options).
` CreateAutoPredictor ' or ` CreatePredictor ' operations .
including its Amazon Resource Name ( ARN ) .
You can retrieve the complete set of properties by using the ARN with the
%% @doc Invokes the Amazon Forecast `ListPredictors' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
list_predictors(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_predictors(Client, Input, []).

list_predictors(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListPredictors">>, Input, Options).
@doc Lists the tags for an Amazon Forecast resource .
%% @doc Invokes the Amazon Forecast `ListTagsForResource' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
list_tags_for_resource(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_tags_for_resource(Client, Input, []).

list_tags_for_resource(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListTagsForResource">>, Input, Options).
properties , including its Amazon Resource Name ( ARN ) . You can retrieve the
complete set of properties by using the what - if analysis ARN with the
` DescribeWhatIfAnalysis ' operation .
%% @doc Invokes the Amazon Forecast `ListWhatIfAnalyses' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
list_what_if_analyses(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_what_if_analyses(Client, Input, []).

list_what_if_analyses(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListWhatIfAnalyses">>, Input, Options).
properties , including its Amazon Resource Name ( ARN ) . You can retrieve the
complete set of properties by using the what - if forecast export ARN with
the ` DescribeWhatIfForecastExport ' operation .
%% @doc Invokes the Amazon Forecast `ListWhatIfForecastExports' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
list_what_if_forecast_exports(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_what_if_forecast_exports(Client, Input, []).

list_what_if_forecast_exports(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListWhatIfForecastExports">>, Input, Options).
properties , including its Amazon Resource Name ( ARN ) . You can retrieve the
complete set of properties by using the what - if forecast ARN with the
%% @doc Invokes the Amazon Forecast `ListWhatIfForecasts' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
list_what_if_forecasts(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_what_if_forecasts(Client, Input, []).

list_what_if_forecasts(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListWhatIfForecasts">>, Input, Options).

%% @doc Invokes the Amazon Forecast `ResumeResource' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
resume_resource(Client, Input)
  when is_map(Client), is_map(Input) ->
    resume_resource(Client, Input, []).

resume_resource(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ResumeResource">>, Input, Options).
` CREATE_STOPPED ' . You can not resume a resource once it has been
%% @doc Invokes the Amazon Forecast `StopResource' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
stop_resource(Client, Input)
  when is_map(Client), is_map(Input) ->
    stop_resource(Client, Input, []).

stop_resource(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"StopResource">>, Input, Options).

%% @doc Invokes the Amazon Forecast `TagResource' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
tag_resource(Client, Input)
  when is_map(Client), is_map(Input) ->
    tag_resource(Client, Input, []).

tag_resource(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"TagResource">>, Input, Options).

%% @doc Invokes the Amazon Forecast `UntagResource' API action;
%% the 2-arity form defaults the request `Options' to `[]'.
untag_resource(Client, Input)
  when is_map(Client), is_map(Input) ->
    untag_resource(Client, Input, []).

untag_resource(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"UntagResource">>, Input, Options).
use the dataset group to create a predictor . Use the DescribeDatasetGroup
update_dataset_group(Client, Input)
when is_map(Client), is_map(Input) ->
update_dataset_group(Client, Input, []).
update_dataset_group(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"UpdateDatasetGroup">>, Input, Options).
Internal functions
-spec request(aws_client:aws_client(), binary(), map(), list()) ->
{ok, Result, {integer(), list(), hackney:client()}} |
{error, Error, {integer(), list(), hackney:client()}} |
{error, term()} when
Result :: map() | undefined,
Error :: map().
request(Client, Action, Input, Options) ->
RequestFun = fun() -> do_request(Client, Action, Input, Options) end,
aws_request:request(RequestFun, Options).
do_request(Client, Action, Input0, Options) ->
Client1 = Client#{service => <<"forecast">>},
Host = build_host(<<"forecast">>, Client1),
URL = build_url(Host, Client1),
Headers = [
{<<"Host">>, Host},
{<<"Content-Type">>, <<"application/x-amz-json-1.1">>},
{<<"X-Amz-Target">>, <<"AmazonForecast.", Action/binary>>}
],
Input = Input0,
Payload = jsx:encode(Input),
SignedHeaders = aws_request:sign_request(Client1, <<"POST">>, URL, Headers, Payload),
Response = hackney:request(post, URL, SignedHeaders, Payload, Options),
handle_response(Response).
handle_response({ok, 200, ResponseHeaders, Client}) ->
case hackney:body(Client) of
{ok, <<>>} ->
{ok, undefined, {200, ResponseHeaders, Client}};
{ok, Body} ->
Result = jsx:decode(Body),
{ok, Result, {200, ResponseHeaders, Client}}
end;
handle_response({ok, StatusCode, ResponseHeaders, Client}) ->
{ok, Body} = hackney:body(Client),
Error = jsx:decode(Body),
{error, Error, {StatusCode, ResponseHeaders, Client}};
handle_response({error, Reason}) ->
{error, Reason}.
build_host(_EndpointPrefix, #{region := <<"local">>, endpoint := Endpoint}) ->
Endpoint;
build_host(_EndpointPrefix, #{region := <<"local">>}) ->
<<"localhost">>;
build_host(EndpointPrefix, #{region := Region, endpoint := Endpoint}) ->
aws_util:binary_join([EndpointPrefix, Region, Endpoint], <<".">>).
build_url(Host, Client) ->
Proto = aws_client:proto(Client),
Port = aws_client:port(Client),
aws_util:binary_join([Proto, <<"://">>, Host, <<":">>, Port, <<"/">>], <<"">>).
|
bf128bfeb84b974dc513d874f899343785cf54addfc8f2f044d6c276d48f5428 | mumuki/mulang | Interpreter.hs | # LANGUAGE ScopedTypeVariables #
module Language.Mulang.Interpreter (
defaultContext,
dereference,
dereference',
eval,
eval',
evalExpr,
evalRaising,
nullRef,
ExecutionContext(..),
Value(..)
) where
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.List (find, intercalate, genericLength)
import Control.Monad (forM, (>=>))
import Control.Monad.State.Class
import Control.Monad.Loops
import Control.Monad.State.Strict
import Control.Monad.Cont
import Data.Fixed (mod')
import qualified Language.Mulang.Ast as M
import qualified Language.Mulang.Ast.Operator as O
import Language.Mulang.Ast.Operator (opposite)
import Language.Mulang.Interpreter.Internals
eval' :: ExecutionContext -> Executable Reference -> IO (Reference, ExecutionContext)
eval' ctx ref = runStateT (runContT ref return) ctx
eval :: ExecutionContext -> M.Expression -> IO (Reference, ExecutionContext)
eval ctx expr = eval' ctx (evalExpr expr)
evalRaising :: ExecutionContext -> M.Expression -> Executable (Reference, Maybe Reference)
evalRaising context expr = do
resultRef <- callCC $ \raiseCallback -> do
put (context { currentRaiseCallback = raiseCallback })
evalExpr expr
return nullRef
lastException <- gets currentException
return (resultRef, lastException)
evalExpressionsWith :: [M.Expression] -> ([Value] -> Executable Reference) -> Executable Reference
evalExpressionsWith expressions f = do
params <- forM expressions evalExprValue
f params
evalExpressionsWith' :: [M.Expression] -> ([(Reference, Value)] -> Executable Reference) -> Executable Reference
evalExpressionsWith' expressions f = do
refs <- forM expressions evalExpr
values <- forM refs dereference
f $ zip refs values
evalExprValue :: M.Expression -> Executable Value
evalExprValue = evalExpr >=> dereference
evalExpr :: M.Expression -> Executable Reference
evalExpr (M.Sequence expressions) = last <$> forM expressions evalExpr
evalExpr (M.Lambda params body) = do
executionFrames <- gets scopes
createRef $
MuFunction executionFrames [M.Equation params (M.UnguardedBody body)]
evalExpr (M.Subroutine name body) = do
executionFrames <- gets scopes
let function = MuFunction executionFrames body
ref <- createRef function
unless (null name) (setLocalVariable name ref) -- if function has no name we avoid registering it
return ref
evalExpr (M.Print expression) = do
parameter <- evalExprValue expression
liftIO $ print parameter
return nullRef
evalExpr (M.Assert negated (M.Truth expression)) =
evalExpressionsWith [expression] f
where f [MuBool result]
| result /= negated = return nullRef
| otherwise = raiseString $ "Expected " ++ (show . not $ negated) ++ " but got: " ++ show result
evalExpr (M.Assert negated (M.Equality expected actual)) =
evalExpressionsWith [expected, actual] f
where f [v1, v2]
| muEquals v1 v2 /= negated = return nullRef
| otherwise = raiseString $ "Expected " ++ show v1 ++ " but got: " ++ show v2
evalExpr (M.Application (M.Primitive O.GreaterOrEqualThan) expressions) = evalBinaryNumeric expressions (>=) createBool
evalExpr (M.Application (M.Primitive O.Modulo) expressions) = evalBinaryNumeric expressions (mod') createNumber
evalExpr (M.Application (M.Primitive O.GreaterThan) expressions) = evalBinaryNumeric expressions (>) createBool
evalExpr (M.Application (M.Primitive O.Or) expressions) = evalBinaryBoolean expressions (||)
evalExpr (M.Application (M.Primitive O.And) expressions) = evalBinaryBoolean expressions (&&)
evalExpr (M.Application (M.Primitive O.Negation) expressions) =
evalExpressionsWith expressions f
where f [MuBool b] = createBool $ not b
f params = raiseTypeError "expected one boolean" params
evalExpr (M.Application (M.Primitive O.Push) expressions) =
evalExpressionsWith' expressions f
where f [(lr, MuList xs), (vr, _)] = updateRef lr (MuList (xs ++ [vr])) >> return vr
f params = raiseTypeError "{Push} expected a list" (map snd params)
evalExpr (M.Application (M.Primitive O.Size) expressions) =
evalExpressionsWith expressions f
where f [MuList xs] = createNumber $ genericLength xs
f [MuString s] = createNumber $ genericLength s
f params = raiseTypeError "{Size} expected a list or string" params
evalExpr (M.Application (M.Primitive O.GetAt) expressions) =
evalExpressionsWith expressions f
where f [MuObject m, MuString s] | Just ref <- Map.lookup s m = return ref
| otherwise = raiseString ("key error: " ++ s)
f params = raiseTypeError "expected an object" params
evalExpr (M.Application (M.Primitive O.SetAt) expressions) =
evalExpressionsWith' expressions f
where f [(or, MuObject _), (_, MuString s), (vr, _)] = modifyRef or (setObjectAt s vr) >> return vr
f params = raiseTypeError "expected an object" (map snd params)
evalExpr (M.Application (M.Primitive O.Multiply) expressions) = evalBinaryNumeric expressions (*) createNumber
evalExpr (M.Application (M.Primitive O.Like) expressions) = do
params <- forM expressions evalExpr
let [r1, r2] = params
muValuesEqual r1 r2
evalExpr (M.Application (M.Primitive ) expressions) = do
evalExpr $ M.Application (M.Primitive O.Negation) [M.Application (M.Primitive (opposite op)) expressions]
evalExpr (M.Application (M.Primitive O.LessOrEqualThan) expressions) = evalBinaryNumeric expressions (<=) createBool
evalExpr (M.Application (M.Primitive O.LessThan) expressions) = evalBinaryNumeric expressions (<) createBool
evalExpr (M.Application (M.Primitive O.Plus) expressions) = evalBinaryNumeric expressions (+) createNumber
evalExpr (M.Application (M.Primitive O.Minus) expressions) = evalBinaryNumeric expressions (-) createNumber
evalExpr (M.MuList expressions) = do
refs <- forM expressions evalExpr
createRef $ MuList refs
evalExpr (M.MuDict expression) = evalObject expression
evalExpr (M.MuObject expression) = evalObject expression
evalExpr (M.Object name expression) = evalExpr (M.Variable name (M.MuObject expression))
evalExpr (M.FieldAssignment e1 k e2) = evalExpr (M.Application (M.Primitive O.SetAt) [e1, M.MuString k, e2])
evalExpr (M.FieldReference expression k) = evalExpr (M.Application (M.Primitive O.GetAt) [expression, M.MuString k])
evalExpr (M.New klass expressions) = do
(MuFunction locals ([M.SimpleEquation params body])) <- evalExprValue klass
objReference <- createObject Map.empty
thisContext <- createObject $ Map.singleton "this" objReference
paramsContext <- evalParams params expressions
runFunction (thisContext:paramsContext:locals) body
return objReference
evalExpr (M.Application function expressions) = do
(MuFunction locals ([M.SimpleEquation params body])) <- evalExprValue function
paramsContext <- evalParams params expressions
returnValue <- runFunction (paramsContext:locals) (body)
return returnValue
evalExpr (M.If cond thenBranch elseBranch) = do
v <- evalCondition cond
if v then evalExpr thenBranch else evalExpr elseBranch
evalExpr (M.MuNumber n) = createNumber n
evalExpr (M.MuNil) = return nullRef
evalExpr (M.MuBool b) = createBool b
evalExpr (M.MuString s) = createRef $ MuString s
evalExpr (M.Return e) = do
ref <- evalExpr e
currentReturn <- gets (currentReturnCallback)
currentReturn ref
Unreachable
evalExpr (M.Variable name expr) = do
r <- evalExpr expr
setLocalVariable name r
return r
evalExpr (M.While cond expr) = do
whileM (evalCondition cond) (evalExpr expr)
return nullRef
evalExpr (M.For [M.Generator (M.LValuePattern name) iterable] body) = do
(MuList elementRefs) <- evalExprValue iterable
forM elementRefs (\r -> do
setLocalVariable name r
evalExpr body)
return nullRef
evalExpr (M.ForLoop beforeExpr cond afterExpr expr) = do
evalExpr beforeExpr
whileM (evalCondition cond) $ do
evalExpr expr
evalExpr afterExpr
return nullRef
evalExpr (M.Assignment name expr) = do
valueRef <- evalExpr expr
frameRef <- findFrameForName' name
case frameRef of
Just ref -> modifyRef ref (setObjectAt name valueRef)
Nothing -> setLocalVariable name valueRef
return valueRef
evalExpr (M.Try expr [( M.VariablePattern exName, catchExpr)] finallyExpr) = do
context <- get
(resultRef, lastException) <- evalRaising context expr
modify' (\c ->
c { currentReturnCallback = currentReturnCallback context
, currentRaiseCallback = currentRaiseCallback context
, currentException = Nothing
})
case lastException of
Nothing -> return resultRef
Just ref -> do
setLocalVariable exName ref
evalExpr catchExpr
evalExpr finallyExpr
evalExpr (M.Raise expr) = raiseInternal =<< evalExpr expr
evalExpr (M.Reference name) = findReferenceForName name
evalExpr (M.None) = return nullRef
evalExpr e = raiseString $ "Unkown expression: " ++ show e
evalObject :: M.Expression -> Executable Reference
evalObject M.None = createObject (Map.empty)
evalObject (M.Sequence es) = do
arrowRefs <- forM es evalArrow
createObject $ Map.fromList arrowRefs
evalObject e = do
(s, vRef) <- evalArrow e
createObject $ Map.singleton s vRef
evalArrow :: M.Expression -> Executable (String, Reference)
evalArrow (M.LValue n v) = evalArrow (M.Arrow (M.MuString n) v)
evalArrow (M.Arrow k v) = do
(MuString s) <- evalExprValue k
vRef <- evalExpr v
return (s, vRef)
evalArrow e = raiseString ("malformed object arrow: " ++ show e)
TODO make this evaluation non strict on both parameters
evalBinaryBoolean :: [M.Expression] -> (Bool -> Bool -> Bool) -> Executable Reference
evalBinaryBoolean expressions op = evalExpressionsWith expressions f
where f [MuBool b1, MuBool b2] = createBool $ op b1 b2
f params = raiseTypeError "expected two booleans" params
evalBinaryNumeric :: [M.Expression] -> (Double -> Double -> a) -> (a -> Executable Reference) -> Executable Reference
evalBinaryNumeric expressions op pack = evalExpressionsWith expressions f
where f [MuNumber n1, MuNumber n2] = pack $ op n1 n2
f params = raiseTypeError "expected two numbers" params
evalCondition :: M.Expression -> Executable Bool
evalCondition cond = evalExprValue cond >>= muBool
where
muBool (MuBool value) = return value
muBool v = raiseTypeError "expected boolean" [v]
evalParams :: [M.Pattern] -> [M.Expression] -> Executable Reference
evalParams params arguments = do
evaluatedParams <- forM arguments evalExpr
let localsAfterParameters = Map.fromList $ zip (getParamNames params) (evaluatedParams ++ repeat nullRef)
createObject localsAfterParameters
raiseInternal :: Reference -> Executable b
raiseInternal exceptionRef = do
raiseCallback <- gets currentRaiseCallback
modify' (\c -> c {currentException = Just exceptionRef})
raiseCallback exceptionRef
raiseString "Unreachable" -- the callback above should never allow this to execute
raiseString :: String -> Executable a
raiseString s = do
raiseInternal =<< (createRef $ MuString s)
raiseTypeError :: String -> [Value] ->Executable a
raiseTypeError message values = raiseString $ "Type error: " ++ message ++ " but got " ++ (intercalate ", " . map debug $ values)
muValuesEqual r1 r2
| r1 == r2 = createRef $ MuBool True
| otherwise = do
v1 <- dereference r1
v2 <- dereference r2
createBool $ muEquals v1 v2
muEquals (MuBool b1) (MuBool b2) = b1 == b2
muEquals (MuNumber n1) (MuNumber n2) = n1 == n2
muEquals (MuString s1) (MuString s2) = s1 == s2
muEquals MuNull MuNull = True
muEquals _ _ = False
getParamNames :: [M.Pattern] -> [String]
getParamNames = fmap getParamName
where
getParamName (M.LValuePattern n) = n
getParamName other = error $ "Unsupported pattern " ++ (show other)
runFunction :: [Reference] -> M.Expression -> Executable Reference
runFunction functionEnv body = do
context <- get
returnValue <- callCC $ \(returnCallback) -> do
put (context { scopes = functionEnv, currentReturnCallback = returnCallback })
evalExpr body
return nullRef
modify' (\c -> c { scopes = scopes context
, currentReturnCallback = currentReturnCallback context
, currentRaiseCallback = currentRaiseCallback context
, currentException = currentException context
})
return returnValue
findFrameForName :: String -> Executable Reference
findFrameForName name = do
maybe (raiseString $ "Reference not found for name '" ++ name ++ "'") return
=<< findFrameForName' name
findFrameForName' :: String -> Executable (Maybe Reference)
findFrameForName' name = do
framesRefs <- gets scopes
frames :: [(Reference, Map String Reference)] <- forM framesRefs $ \ref -> do
dereference ref >>= \value -> case value of
(MuObject context) -> return (ref, context)
v -> error $ "Finding '" ++ name ++ "' the frame I got a non object " ++ show v
return $ fmap fst . find (Map.member name . snd) $ frames
findReferenceForName :: String -> Executable Reference
findReferenceForName name = do
ref <- findFrameForName name
(MuObject context) <- dereference ref
return $ context Map.! name
nullRef = Reference 0
createBool = createRef . MuBool
createNumber = createRef . MuNumber
createObject = createRef . MuObject
setLocalVariable :: String -> Reference -> Executable ()
setLocalVariable name ref = do
frame <- currentFrame
modifyRef frame (setObjectAt name ref)
where
currentFrame :: Executable Reference
currentFrame = gets (head . scopes)
setObjectAt :: String -> Reference -> Value -> Value
setObjectAt k r (MuObject map) = MuObject $ Map.insert k r map
setObjectAt k _r v = error $ "Tried adding " ++ k ++ " to a non object: " ++ show v | null | https://raw.githubusercontent.com/mumuki/mulang/aee5d3d8aba787698862ca0deb6ad54623baaa13/src/Language/Mulang/Interpreter.hs | haskell | if function has no name we avoid registering it
the callback above should never allow this to execute | # LANGUAGE ScopedTypeVariables #
module Language.Mulang.Interpreter (
defaultContext,
dereference,
dereference',
eval,
eval',
evalExpr,
evalRaising,
nullRef,
ExecutionContext(..),
Value(..)
) where
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.List (find, intercalate, genericLength)
import Control.Monad (forM, (>=>))
import Control.Monad.State.Class
import Control.Monad.Loops
import Control.Monad.State.Strict
import Control.Monad.Cont
import Data.Fixed (mod')
import qualified Language.Mulang.Ast as M
import qualified Language.Mulang.Ast.Operator as O
import Language.Mulang.Ast.Operator (opposite)
import Language.Mulang.Interpreter.Internals
eval' :: ExecutionContext -> Executable Reference -> IO (Reference, ExecutionContext)
eval' ctx ref = runStateT (runContT ref return) ctx
eval :: ExecutionContext -> M.Expression -> IO (Reference, ExecutionContext)
eval ctx expr = eval' ctx (evalExpr expr)
evalRaising :: ExecutionContext -> M.Expression -> Executable (Reference, Maybe Reference)
evalRaising context expr = do
resultRef <- callCC $ \raiseCallback -> do
put (context { currentRaiseCallback = raiseCallback })
evalExpr expr
return nullRef
lastException <- gets currentException
return (resultRef, lastException)
evalExpressionsWith :: [M.Expression] -> ([Value] -> Executable Reference) -> Executable Reference
evalExpressionsWith expressions f = do
params <- forM expressions evalExprValue
f params
evalExpressionsWith' :: [M.Expression] -> ([(Reference, Value)] -> Executable Reference) -> Executable Reference
evalExpressionsWith' expressions f = do
refs <- forM expressions evalExpr
values <- forM refs dereference
f $ zip refs values
evalExprValue :: M.Expression -> Executable Value
evalExprValue = evalExpr >=> dereference
evalExpr :: M.Expression -> Executable Reference
evalExpr (M.Sequence expressions) = last <$> forM expressions evalExpr
evalExpr (M.Lambda params body) = do
executionFrames <- gets scopes
createRef $
MuFunction executionFrames [M.Equation params (M.UnguardedBody body)]
evalExpr (M.Subroutine name body) = do
executionFrames <- gets scopes
let function = MuFunction executionFrames body
ref <- createRef function
return ref
evalExpr (M.Print expression) = do
parameter <- evalExprValue expression
liftIO $ print parameter
return nullRef
evalExpr (M.Assert negated (M.Truth expression)) =
evalExpressionsWith [expression] f
where f [MuBool result]
| result /= negated = return nullRef
| otherwise = raiseString $ "Expected " ++ (show . not $ negated) ++ " but got: " ++ show result
evalExpr (M.Assert negated (M.Equality expected actual)) =
evalExpressionsWith [expected, actual] f
where f [v1, v2]
| muEquals v1 v2 /= negated = return nullRef
| otherwise = raiseString $ "Expected " ++ show v1 ++ " but got: " ++ show v2
evalExpr (M.Application (M.Primitive O.GreaterOrEqualThan) expressions) = evalBinaryNumeric expressions (>=) createBool
evalExpr (M.Application (M.Primitive O.Modulo) expressions) = evalBinaryNumeric expressions (mod') createNumber
evalExpr (M.Application (M.Primitive O.GreaterThan) expressions) = evalBinaryNumeric expressions (>) createBool
evalExpr (M.Application (M.Primitive O.Or) expressions) = evalBinaryBoolean expressions (||)
evalExpr (M.Application (M.Primitive O.And) expressions) = evalBinaryBoolean expressions (&&)
evalExpr (M.Application (M.Primitive O.Negation) expressions) =
evalExpressionsWith expressions f
where f [MuBool b] = createBool $ not b
f params = raiseTypeError "expected one boolean" params
evalExpr (M.Application (M.Primitive O.Push) expressions) =
evalExpressionsWith' expressions f
where f [(lr, MuList xs), (vr, _)] = updateRef lr (MuList (xs ++ [vr])) >> return vr
f params = raiseTypeError "{Push} expected a list" (map snd params)
evalExpr (M.Application (M.Primitive O.Size) expressions) =
evalExpressionsWith expressions f
where f [MuList xs] = createNumber $ genericLength xs
f [MuString s] = createNumber $ genericLength s
f params = raiseTypeError "{Size} expected a list or string" params
evalExpr (M.Application (M.Primitive O.GetAt) expressions) =
evalExpressionsWith expressions f
where f [MuObject m, MuString s] | Just ref <- Map.lookup s m = return ref
| otherwise = raiseString ("key error: " ++ s)
f params = raiseTypeError "expected an object" params
evalExpr (M.Application (M.Primitive O.SetAt) expressions) =
evalExpressionsWith' expressions f
where f [(or, MuObject _), (_, MuString s), (vr, _)] = modifyRef or (setObjectAt s vr) >> return vr
f params = raiseTypeError "expected an object" (map snd params)
evalExpr (M.Application (M.Primitive O.Multiply) expressions) = evalBinaryNumeric expressions (*) createNumber
evalExpr (M.Application (M.Primitive O.Like) expressions) = do
params <- forM expressions evalExpr
let [r1, r2] = params
muValuesEqual r1 r2
evalExpr (M.Application (M.Primitive ) expressions) = do
evalExpr $ M.Application (M.Primitive O.Negation) [M.Application (M.Primitive (opposite op)) expressions]
evalExpr (M.Application (M.Primitive O.LessOrEqualThan) expressions) = evalBinaryNumeric expressions (<=) createBool
evalExpr (M.Application (M.Primitive O.LessThan) expressions) = evalBinaryNumeric expressions (<) createBool
evalExpr (M.Application (M.Primitive O.Plus) expressions) = evalBinaryNumeric expressions (+) createNumber
evalExpr (M.Application (M.Primitive O.Minus) expressions) = evalBinaryNumeric expressions (-) createNumber
evalExpr (M.MuList expressions) = do
refs <- forM expressions evalExpr
createRef $ MuList refs
evalExpr (M.MuDict expression) = evalObject expression
evalExpr (M.MuObject expression) = evalObject expression
evalExpr (M.Object name expression) = evalExpr (M.Variable name (M.MuObject expression))
evalExpr (M.FieldAssignment e1 k e2) = evalExpr (M.Application (M.Primitive O.SetAt) [e1, M.MuString k, e2])
evalExpr (M.FieldReference expression k) = evalExpr (M.Application (M.Primitive O.GetAt) [expression, M.MuString k])
evalExpr (M.New klass expressions) = do
(MuFunction locals ([M.SimpleEquation params body])) <- evalExprValue klass
objReference <- createObject Map.empty
thisContext <- createObject $ Map.singleton "this" objReference
paramsContext <- evalParams params expressions
runFunction (thisContext:paramsContext:locals) body
return objReference
evalExpr (M.Application function expressions) = do
(MuFunction locals ([M.SimpleEquation params body])) <- evalExprValue function
paramsContext <- evalParams params expressions
returnValue <- runFunction (paramsContext:locals) (body)
return returnValue
evalExpr (M.If cond thenBranch elseBranch) = do
v <- evalCondition cond
if v then evalExpr thenBranch else evalExpr elseBranch
evalExpr (M.MuNumber n) = createNumber n
evalExpr (M.MuNil) = return nullRef
evalExpr (M.MuBool b) = createBool b
evalExpr (M.MuString s) = createRef $ MuString s
evalExpr (M.Return e) = do
ref <- evalExpr e
currentReturn <- gets (currentReturnCallback)
currentReturn ref
Unreachable
evalExpr (M.Variable name expr) = do
r <- evalExpr expr
setLocalVariable name r
return r
evalExpr (M.While cond expr) = do
whileM (evalCondition cond) (evalExpr expr)
return nullRef
evalExpr (M.For [M.Generator (M.LValuePattern name) iterable] body) = do
(MuList elementRefs) <- evalExprValue iterable
forM elementRefs (\r -> do
setLocalVariable name r
evalExpr body)
return nullRef
evalExpr (M.ForLoop beforeExpr cond afterExpr expr) = do
evalExpr beforeExpr
whileM (evalCondition cond) $ do
evalExpr expr
evalExpr afterExpr
return nullRef
evalExpr (M.Assignment name expr) = do
valueRef <- evalExpr expr
frameRef <- findFrameForName' name
case frameRef of
Just ref -> modifyRef ref (setObjectAt name valueRef)
Nothing -> setLocalVariable name valueRef
return valueRef
evalExpr (M.Try expr [( M.VariablePattern exName, catchExpr)] finallyExpr) = do
context <- get
(resultRef, lastException) <- evalRaising context expr
modify' (\c ->
c { currentReturnCallback = currentReturnCallback context
, currentRaiseCallback = currentRaiseCallback context
, currentException = Nothing
})
case lastException of
Nothing -> return resultRef
Just ref -> do
setLocalVariable exName ref
evalExpr catchExpr
evalExpr finallyExpr
evalExpr (M.Raise expr) = raiseInternal =<< evalExpr expr
evalExpr (M.Reference name) = findReferenceForName name
evalExpr (M.None) = return nullRef
evalExpr e = raiseString $ "Unkown expression: " ++ show e
evalObject :: M.Expression -> Executable Reference
evalObject M.None = createObject (Map.empty)
evalObject (M.Sequence es) = do
arrowRefs <- forM es evalArrow
createObject $ Map.fromList arrowRefs
evalObject e = do
(s, vRef) <- evalArrow e
createObject $ Map.singleton s vRef
evalArrow :: M.Expression -> Executable (String, Reference)
evalArrow (M.LValue n v) = evalArrow (M.Arrow (M.MuString n) v)
evalArrow (M.Arrow k v) = do
(MuString s) <- evalExprValue k
vRef <- evalExpr v
return (s, vRef)
evalArrow e = raiseString ("malformed object arrow: " ++ show e)
TODO make this evaluation non strict on both parameters
evalBinaryBoolean :: [M.Expression] -> (Bool -> Bool -> Bool) -> Executable Reference
evalBinaryBoolean expressions op = evalExpressionsWith expressions f
where f [MuBool b1, MuBool b2] = createBool $ op b1 b2
f params = raiseTypeError "expected two booleans" params
evalBinaryNumeric :: [M.Expression] -> (Double -> Double -> a) -> (a -> Executable Reference) -> Executable Reference
evalBinaryNumeric expressions op pack = evalExpressionsWith expressions f
where f [MuNumber n1, MuNumber n2] = pack $ op n1 n2
f params = raiseTypeError "expected two numbers" params
evalCondition :: M.Expression -> Executable Bool
evalCondition cond = evalExprValue cond >>= muBool
where
muBool (MuBool value) = return value
muBool v = raiseTypeError "expected boolean" [v]
evalParams :: [M.Pattern] -> [M.Expression] -> Executable Reference
evalParams params arguments = do
evaluatedParams <- forM arguments evalExpr
let localsAfterParameters = Map.fromList $ zip (getParamNames params) (evaluatedParams ++ repeat nullRef)
createObject localsAfterParameters
raiseInternal :: Reference -> Executable b
raiseInternal exceptionRef = do
raiseCallback <- gets currentRaiseCallback
modify' (\c -> c {currentException = Just exceptionRef})
raiseCallback exceptionRef
raiseString :: String -> Executable a
raiseString s = do
raiseInternal =<< (createRef $ MuString s)
raiseTypeError :: String -> [Value] ->Executable a
raiseTypeError message values = raiseString $ "Type error: " ++ message ++ " but got " ++ (intercalate ", " . map debug $ values)
muValuesEqual r1 r2
| r1 == r2 = createRef $ MuBool True
| otherwise = do
v1 <- dereference r1
v2 <- dereference r2
createBool $ muEquals v1 v2
muEquals (MuBool b1) (MuBool b2) = b1 == b2
muEquals (MuNumber n1) (MuNumber n2) = n1 == n2
muEquals (MuString s1) (MuString s2) = s1 == s2
muEquals MuNull MuNull = True
muEquals _ _ = False
getParamNames :: [M.Pattern] -> [String]
getParamNames = fmap getParamName
where
getParamName (M.LValuePattern n) = n
getParamName other = error $ "Unsupported pattern " ++ (show other)
runFunction :: [Reference] -> M.Expression -> Executable Reference
runFunction functionEnv body = do
context <- get
returnValue <- callCC $ \(returnCallback) -> do
put (context { scopes = functionEnv, currentReturnCallback = returnCallback })
evalExpr body
return nullRef
modify' (\c -> c { scopes = scopes context
, currentReturnCallback = currentReturnCallback context
, currentRaiseCallback = currentRaiseCallback context
, currentException = currentException context
})
return returnValue
findFrameForName :: String -> Executable Reference
findFrameForName name = do
maybe (raiseString $ "Reference not found for name '" ++ name ++ "'") return
=<< findFrameForName' name
findFrameForName' :: String -> Executable (Maybe Reference)
findFrameForName' name = do
framesRefs <- gets scopes
frames :: [(Reference, Map String Reference)] <- forM framesRefs $ \ref -> do
dereference ref >>= \value -> case value of
(MuObject context) -> return (ref, context)
v -> error $ "Finding '" ++ name ++ "' the frame I got a non object " ++ show v
return $ fmap fst . find (Map.member name . snd) $ frames
findReferenceForName :: String -> Executable Reference
findReferenceForName name = do
ref <- findFrameForName name
(MuObject context) <- dereference ref
return $ context Map.! name
nullRef = Reference 0
createBool = createRef . MuBool
createNumber = createRef . MuNumber
createObject = createRef . MuObject
setLocalVariable :: String -> Reference -> Executable ()
setLocalVariable name ref = do
frame <- currentFrame
modifyRef frame (setObjectAt name ref)
where
currentFrame :: Executable Reference
currentFrame = gets (head . scopes)
setObjectAt :: String -> Reference -> Value -> Value
setObjectAt k r (MuObject map) = MuObject $ Map.insert k r map
setObjectAt k _r v = error $ "Tried adding " ++ k ++ " to a non object: " ++ show v |
f276434a25fe89c8fa9335327c6a20357711baccd9ad688e52b38db448734c5d | argp/bap | batInt.mli |
* BatInt - Extended operations on integers
* Copyright ( C ) 2008
* 2008
*
* This library is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation ; either
* version 2.1 of the License , or ( at your option ) any later version ,
* with the special exception on linking described in file LICENSE .
*
* This library is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
* BatInt - Extended operations on integers
* Copyright (C) 2008 Gabriel Scherer
* 2008 David Teller
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version,
* with the special exception on linking described in file LICENSE.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
*
Operations on integers .
This module provides operations on the type [ int ] of
integers . Values of this type may be either 31 bits on 32 - bit
processors or 63 bits on 64 - bit processors . All arithmetic
operations over [ int ] are taken modulo 2{^number of bits } .
This module implements { ! Number . Numeric } ,
{ ! Number . Bounded } , { ! Number . Discrete } .
@author @author
@documents Int
Operations on integers.
This module provides operations on the type [int] of
integers. Values of this type may be either 31 bits on 32-bit
processors or 63 bits on 64-bit processors. All arithmetic
operations over [int] are taken modulo 2{^number of bits}.
This module implements {!Number.Numeric},
{!Number.Bounded}, {!Number.Discrete}.
@author Gabriel Scherer
@author David Teller
@documents Int
*)
type t = int
(** An alias for the type of integers. *)
val zero : int
(** The integer [0]. *)
val one : int
* The integer [ 1 ] .
val minus_one : int
(** The integer [-1]. *)
external neg : int -> int = "%negint"
(** Unary negation. *)
external add : int -> int -> int = "%addint"
(** Addition. *)
external ( + ) : int -> int -> int = "%addint"
(** Addition. *)
external sub : int -> int -> int = "%subint"
(** Subtraction. *)
external ( - ) : int -> int -> int = "%subint"
(** Subtraction. *)
external mul : int -> int -> int = "%mulint"
(** Multiplication. *)
external ( * ) : int -> int -> int = "%mulint"
(** Multiplication. *)
external div : int -> int -> int = "%divint"
* Integer division .
This division rounds the real quotient of
its arguments towards zero , as specified for { ! Pervasives.(/ ) } .
@raise Division_by_zero if the second argument is zero .
This division rounds the real quotient of
its arguments towards zero, as specified for {!Pervasives.(/)}.
@raise Division_by_zero if the second argument is zero. *)
external ( / ) : int -> int -> int = "%divint"
* Integer division . This division rounds the real quotient of
its arguments towards zero , as specified for { ! Pervasives.(/ ) } .
@raise Division_by_zero if the second argument is zero .
its arguments towards zero, as specified for {!Pervasives.(/)}.
@raise Division_by_zero if the second argument is zero. *)
external rem : int -> int -> int = "%modint"
* Integer remainder . If [ y ] is not zero , the result
of [ Int.rem x y ] satisfies the following property :
[ x = ( Int.mul ( Int.div x y ) y ) ( Int.rem x y ) ] .
@raise Division_by_zero if the second argument is zero .
of [Int.rem x y] satisfies the following property:
[x = Int.add (Int.mul (Int.div x y) y) (Int.rem x y)].
@raise Division_by_zero if the second argument is zero. *)
external modulo : int -> int -> int = "%modint"
(** [modulo a b] computes the remainder of the integer
division of [a] by [b]. This is defined only if [b <> 0].
The result of [modulo a b] is a number [m] between
[0] and [abs ( b - 1 )] if [a >= 0] or between [~- ( abs ( b - 1 ) ) ]
if [a < 0] and such that [a * k + (abs b) = m],
for some [k]. *)
val pow : int -> int -> int
(** [pow a b] computes a{^b}.
@raise Invalid_argument when [b] is negative. *)
val ( ** ) : int -> int -> int
(** [a ** b] computes a{^b}*)
val ( <> ) : int -> int -> bool
val ( > ) : int -> int -> bool
val ( < ) : int -> int -> bool
val ( >= ) : int -> int -> bool
val ( <= ) : int -> int -> bool
val ( = ) : int -> int -> bool
val min_num : int
(** The smallest representable integer, -2{^30} or -2{^62}. *)
val max_num : int
* The greatest representable integer , which is either 2{^30}-1 or 2{^62}-1 .
external succ: int -> int = "%succint"
* Successor . [ Int.succ x ] is [ x Int.one ] .
external pred: int -> int = "%predint"
* Predecessor . [ Int.pred x ] is [ Int.sub x Int.one ] .
val abs : int -> int
(** Return the absolute value of its argument, except when the argument is
[min_num]. In that case, [abs min_num = min_num]. *)
external of_float : float -> int = "%intoffloat"
(** Convert the given floating-point number to integer integer,
discarding the fractional part (truncate towards 0).
The result of the conversion is undefined if, after truncation,
the number is outside the range \[{!Int.min_int}, {!Int.max_int}\]. *)
external to_float : int -> float = "%floatofint"
(** Convert the given integer to a floating-point number. *)
val of_string : string -> int
* Convert the given string to an integer
The string is read in decimal ( by default ) or in hexadecimal ,
octal or binary if the string begins with [ 0x ] , [ 0o ] or [ 0b ]
respectively .
@raise Invalid_argument if the given string is not
a valid representation of an integer , or if the integer represented
exceeds the range of integers representable in type [ int ] .
The string is read in decimal (by default) or in hexadecimal,
octal or binary if the string begins with [0x], [0o] or [0b]
respectively.
@raise Invalid_argument if the given string is not
a valid representation of an integer, or if the integer represented
exceeds the range of integers representable in type [int]. *)
val to_string : int -> string
(** Return the string representation of its argument, in signed decimal. *)
* The minimum of two integers . Faster than the polymorphic [ min ] from the
standard library .
standard library. *)
val min : int -> int -> int
* The maximum of two integers . Faster than the polymorphic [ min ] from the
standard library .
standard library. *)
val max : int -> int -> int
val mid : int -> int -> int
(** Midpoint function; [mid a b] returns [floor((a+b)/2)], but done
correctly to compensate for numeric overflows. The result is an
integer that lies between [a] and [b] and is as equidistant from
both as possible. *)
* Returns the number of 1 bits set in the binary representation of
the number . Maybe has problems with negative numbers
the number. Maybe has problems with negative numbers *)
val popcount : int -> int
(**/**)
val popcount_sparse : int -> int
(**/**)
val operations : int BatNumber.numeric
val ( -- ) : t -> t -> t BatEnum.t
* Enumerate an interval .
[ 5 -- 10 ] is the enumeration 5,6,7,8,9,10 .
[ 10 -- 5 ] is the empty enumeration
[5 -- 10] is the enumeration 5,6,7,8,9,10.
[10 -- 5] is the empty enumeration*)
val ( --- ) : t -> t -> t BatEnum.t
* Enumerate an interval .
[ 5 --- 10 ] is the enumeration 5,6,7,8,9,10 .
[ 10 --- 5 ] is the enumeration 10,9,8,7,6,5 .
[5 --- 10] is the enumeration 5,6,7,8,9,10.
[10 --- 5] is the enumeration 10,9,8,7,6,5.*)
external of_int : int -> int = "%identity"
external to_int : int -> int = "%identity"
* { 6 Submodules regrouping all infix operations }
module Infix : BatNumber.Infix with type bat__infix_t = t
module Compare : BatNumber.Compare with type bat__compare_t = t
(** {6 Boilerplate code}*)
* { 7 Printing }
val print: 'a BatInnerIO.output -> int -> unit
(** prints as decimal string *)
val print_hex: 'a BatInnerIO.output -> int -> unit
(** prints as hex string *)
: ' a BatInnerIO.output - > t - > unit
( * * prints as binary string
(** prints as binary string *) *)
(** {7 Compare} *)
val compare: t -> t -> int
(** The comparison function for integers, with the same specification as
{!Pervasives.compare}. Along with the type [t], this function [compare]
allows the module [Int] to be passed as argument to the functors
{!Set.Make} and {!Map.Make}. *)
val equal : t -> t -> bool
(** Equality function for integers, useful for {!HashedType}. *)
val ord : t -> t -> BatOrd.order
*
Safe operations on integers .
This module provides operations on the type [ int ] of
integers . Values of this type may be either 31 bits on 32 - bit
processors or 63 bits on 64 - bit processors . Operations which
overflow raise exception { ! Number . Overflow } .
This module implements { ! Number . Numeric } ,
{ ! Number . Bounded } , { ! Number . Discrete } .
{ b Important note } Untested .
Safe operations on integers.
This module provides operations on the type [int] of
integers. Values of this type may be either 31 bits on 32-bit
processors or 63 bits on 64-bit processors. Operations which
overflow raise exception {!Number.Overflow}.
This module implements {!Number.Numeric},
{!Number.Bounded}, {!Number.Discrete}.
{b Important note} Untested.
*)
module Safe_int : sig
type t = int
(** An alias for the type of integers. *)
val zero : t
(** The integer [0]. *)
val one : t
* The integer [ 1 ] .
val minus_one : t
(** The integer [-1]. *)
val neg : t -> t
(** Unary negation. *)
val add : t -> t -> t
(** Addition. *)
val ( + ) : t -> t -> t
(** Addition. *)
val sub : t -> t -> t
(** Substraction. *)
val ( - ) : t -> t -> t
(** Substraction. *)
val mul : t -> t -> t
(** Multiplication. *)
val ( * ) : t -> t -> t
(** Multiplication. *)
external div : t -> t -> t = "%divint"
* Integer division .
This division rounds the real quotient of
its arguments towards zero , as specified for { ! Pervasives.(/ ) } .
@raise Division_by_zero if the second argument is zero .
This division rounds the real quotient of
its arguments towards zero, as specified for {!Pervasives.(/)}.
@raise Division_by_zero if the second argument is zero. *)
external ( / ) : t -> t -> t = "%divint"
* Integer division . This division rounds the real quotient of
its arguments towards zero , as specified for { ! Pervasives.(/ ) } .
@raise Division_by_zero if the second argument is zero .
its arguments towards zero, as specified for {!Pervasives.(/)}.
@raise Division_by_zero if the second argument is zero. *)
external rem : t -> t -> t = "%modint"
* Integer remainder . If [ y ] is not zero , the result
of [ Int.rem x y ] satisfies the following property :
[ x = ( Int.mul ( Int.div x y ) y ) ( Int.rem x y ) ] .
@raise Division_by_zero if the second argument is zero .
of [Int.rem x y] satisfies the following property:
[x = Int.add (Int.mul (Int.div x y) y) (Int.rem x y)].
@raise Division_by_zero if the second argument is zero. *)
external modulo : t -> t -> t = "%modint"
(** [modulo a b] computes the remainder of the integer
division of [a] by [b]. This is defined only if [b <> 0].
The result of [modulo a b] is a number [m] between
[0] and [abs ( b - 1 )] if [a >= 0] or between [~- ( abs ( b - 1 ) ) ]
if [a < 0] and such that [a * k + (abs b) = m],
for some [k]. *)
val pow : t -> t -> t
(** [pow a b] computes a{^b}.
@raise Invalid_argument when [b] is negative. *)
val ( ** ) : t -> t -> t
(** [a ** b] computes a{^b}*)
val ( <> ) : t -> t -> bool
* : [ a < > b ] is true if and only if [ a ] and [ b ] have
different values .
different values. *)
val ( > ) : t -> t -> bool
* : [ a > b ] is true if and only if [ a ] is strictly greater than [ b ] .
val ( < ) : t -> t -> bool
* : [ a < b ] is true if and only if [ a ] is strictly smaller than [ b ] .
val ( >= ) : t -> t -> bool
* : [ a > = b ] is true if and only if [ a ] is greater or equal to [ b ] .
val ( <= ) : t -> t -> bool
* : [ a < = b ] is true if and only if [ a ] is smaller or equalto [ b ] .
val ( = ) : t -> t -> bool
* : [ a = b ] if and only if [ a ] and [ b ] have the same value .
val max_num : t
* The greatest representable integer , which is either 2{^30}-1 or 2{^62}-1 .
val min_num : t
* The smallest representable integer , -2{^30 } or 2{^62 } .
val succ: t -> t
(** Successor. [succ x] is [add x one]. *)
val pred: t -> t
(** Predecessor. [pred x] is [sub x one]. *)
val abs : t -> t
(** Return the absolute value of its argument. *)
external of_float : float -> t = "%intoffloat"
(** Convert the given floating-point number to integer,
discarding the fractional part (truncate towards 0).
The result of the conversion is undefined if, after truncation,
the number is outside the range \[{!Int.min_int}, {!Int.max_int}\]. *)
external to_float : t -> float = "%floatofint"
(** Convert the given integer to a floating-point number. *)
val of_string : string -> t
* Convert the given string to an integer
The string is read in decimal ( by default ) or in hexadecimal ,
octal or binary if the string begins with [ 0x ] , [ 0o ] or [ 0b ]
respectively .
@raise Invalid_argument if the given string is not
a valid representation of an integer , or if the integer represented
exceeds the range of integers representable in type [ int ] .
The string is read in decimal (by default) or in hexadecimal,
octal or binary if the string begins with [0x], [0o] or [0b]
respectively.
@raise Invalid_argument if the given string is not
a valid representation of an integer, or if the integer represented
exceeds the range of integers representable in type [int]. *)
val to_string : t -> string
(** Return the string representation of its argument, in signed decimal. *)
val operations : t BatNumber.numeric
external of_int : int -> t = "%identity"
external to_int : t -> int = "%identity"
* { 6 Submodules regrouping all infix operations on safe integers }
module Infix : BatNumber.Infix with type bat__infix_t = t
module Compare : BatNumber.Compare with type bat__compare_t = t
(** {6 Boilerplate code}*)
val print: 'a BatInnerIO.output -> t -> unit
val compare : t -> t -> int
(** The comparison function for integers, with the same specification as
{!Pervasives.compare}. Along with the type [t], this function [compare]
allows the module [Int] to be passed as argument to the functors
{!Set.Make} and {!Map.Make}. *)
val equal : t -> t -> bool
(** Equality function for integers, useful for {!HashedType}. *)
val ord : t -> t -> BatOrd.order
end
| null | https://raw.githubusercontent.com/argp/bap/2f60a35e822200a1ec50eea3a947a322b45da363/batteries/src/batInt.mli | ocaml | * An alias for the type of integers.
* The integer [0].
* The integer [-1].
* Unary negation.
* Addition.
* Addition.
* Subtraction.
* Subtraction.
* Multiplication.
* Multiplication.
* [modulo a b] computes the remainder of the integer
division of [a] by [b]. This is defined only if [b <> 0].
The result of [modulo a b] is a number [m] between
[0] and [abs ( b - 1 )] if [a >= 0] or between [~- ( abs ( b - 1 ) ) ]
if [a < 0] and such that [a * k + (abs b) = m],
for some [k].
* [pow a b] computes a{^b}.
@raise Invalid_argument when [b] is negative.
* [a ** b] computes a{^b}
* The smallest representable integer, -2{^30} or -2{^62}.
* Return the absolute value of its argument, except when the argument is
[min_num]. In that case, [abs min_num = min_num].
* Convert the given floating-point number to integer integer,
discarding the fractional part (truncate towards 0).
The result of the conversion is undefined if, after truncation,
the number is outside the range \[{!Int.min_int}, {!Int.max_int}\].
* Convert the given integer to a floating-point number.
* Return the string representation of its argument, in signed decimal.
* Midpoint function; [mid a b] returns [floor((a+b)/2)], but done
correctly to compensate for numeric overflows. The result is an
integer that lies between [a] and [b] and is as equidistant from
both as possible.
*/*
*/*
* {6 Boilerplate code}
* prints as decimal string
* prints as hex string
* prints as binary string
* {7 Compare}
* The comparison function for integers, with the same specification as
{!Pervasives.compare}. Along with the type [t], this function [compare]
allows the module [Int] to be passed as argument to the functors
{!Set.Make} and {!Map.Make}.
* Equality function for integers, useful for {!HashedType}.
* An alias for the type of integers.
* The integer [0].
* The integer [-1].
* Unary negation.
* Addition.
* Addition.
* Substraction.
* Substraction.
* Multiplication.
* Multiplication.
* [modulo a b] computes the remainder of the integer
division of [a] by [b]. This is defined only if [b <> 0].
The result of [modulo a b] is a number [m] between
[0] and [abs ( b - 1 )] if [a >= 0] or between [~- ( abs ( b - 1 ) ) ]
if [a < 0] and such that [a * k + (abs b) = m],
for some [k].
* [pow a b] computes a{^b}.
@raise Invalid_argument when [b] is negative.
* [a ** b] computes a{^b}
* Successor. [succ x] is [add x one].
* Predecessor. [pred x] is [sub x one].
* Return the absolute value of its argument.
* Convert the given floating-point number to integer,
discarding the fractional part (truncate towards 0).
The result of the conversion is undefined if, after truncation,
the number is outside the range \[{!Int.min_int}, {!Int.max_int}\].
* Convert the given integer to a floating-point number.
* Return the string representation of its argument, in signed decimal.
* {6 Boilerplate code}
* The comparison function for integers, with the same specification as
{!Pervasives.compare}. Along with the type [t], this function [compare]
allows the module [Int] to be passed as argument to the functors
{!Set.Make} and {!Map.Make}.
* Equality function for integers, useful for {!HashedType}. |
* BatInt - Extended operations on integers
* Copyright ( C ) 2008
* 2008
*
* This library is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation ; either
* version 2.1 of the License , or ( at your option ) any later version ,
* with the special exception on linking described in file LICENSE .
*
* This library is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
* BatInt - Extended operations on integers
* Copyright (C) 2008 Gabriel Scherer
* 2008 David Teller
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version,
* with the special exception on linking described in file LICENSE.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
*
Operations on integers .
This module provides operations on the type [ int ] of
integers . Values of this type may be either 31 bits on 32 - bit
processors or 63 bits on 64 - bit processors . All arithmetic
operations over [ int ] are taken modulo 2{^number of bits } .
This module implements { ! Number . Numeric } ,
{ ! Number . Bounded } , { ! Number . Discrete } .
@author @author
@documents Int
Operations on integers.
This module provides operations on the type [int] of
integers. Values of this type may be either 31 bits on 32-bit
processors or 63 bits on 64-bit processors. All arithmetic
operations over [int] are taken modulo 2{^number of bits}.
This module implements {!Number.Numeric},
{!Number.Bounded}, {!Number.Discrete}.
@author Gabriel Scherer
@author David Teller
@documents Int
*)
type t = int
val zero : int
val one : int
* The integer [ 1 ] .
val minus_one : int
external neg : int -> int = "%negint"
external add : int -> int -> int = "%addint"
external ( + ) : int -> int -> int = "%addint"
external sub : int -> int -> int = "%subint"
external ( - ) : int -> int -> int = "%subint"
external mul : int -> int -> int = "%mulint"
external ( * ) : int -> int -> int = "%mulint"
external div : int -> int -> int = "%divint"
* Integer division .
This division rounds the real quotient of
its arguments towards zero , as specified for { ! Pervasives.(/ ) } .
@raise Division_by_zero if the second argument is zero .
This division rounds the real quotient of
its arguments towards zero, as specified for {!Pervasives.(/)}.
@raise Division_by_zero if the second argument is zero. *)
external ( / ) : int -> int -> int = "%divint"
* Integer division . This division rounds the real quotient of
its arguments towards zero , as specified for { ! Pervasives.(/ ) } .
@raise Division_by_zero if the second argument is zero .
its arguments towards zero, as specified for {!Pervasives.(/)}.
@raise Division_by_zero if the second argument is zero. *)
external rem : int -> int -> int = "%modint"
* Integer remainder . If [ y ] is not zero , the result
of [ Int.rem x y ] satisfies the following property :
[ x = ( Int.mul ( Int.div x y ) y ) ( Int.rem x y ) ] .
@raise Division_by_zero if the second argument is zero .
of [Int.rem x y] satisfies the following property:
[x = Int.add (Int.mul (Int.div x y) y) (Int.rem x y)].
@raise Division_by_zero if the second argument is zero. *)
external modulo : int -> int -> int = "%modint"
val pow : int -> int -> int
val ( ** ) : int -> int -> int
val ( <> ) : int -> int -> bool
val ( > ) : int -> int -> bool
val ( < ) : int -> int -> bool
val ( >= ) : int -> int -> bool
val ( <= ) : int -> int -> bool
val ( = ) : int -> int -> bool
val min_num : int
val max_num : int
* The greatest representable integer , which is either 2{^30}-1 or 2{^62}-1 .
external succ: int -> int = "%succint"
* Successor . [ Int.succ x ] is [ x Int.one ] .
external pred: int -> int = "%predint"
* Predecessor . [ Int.pred x ] is [ Int.sub x Int.one ] .
val abs : int -> int
external of_float : float -> int = "%intoffloat"
external to_float : int -> float = "%floatofint"
val of_string : string -> int
* Convert the given string to an integer
The string is read in decimal ( by default ) or in hexadecimal ,
octal or binary if the string begins with [ 0x ] , [ 0o ] or [ 0b ]
respectively .
@raise Invalid_argument if the given string is not
a valid representation of an integer , or if the integer represented
exceeds the range of integers representable in type [ int ] .
The string is read in decimal (by default) or in hexadecimal,
octal or binary if the string begins with [0x], [0o] or [0b]
respectively.
@raise Invalid_argument if the given string is not
a valid representation of an integer, or if the integer represented
exceeds the range of integers representable in type [int]. *)
val to_string : int -> string
* The minimum of two integers . Faster than the polymorphic [ min ] from the
standard library .
standard library. *)
val min : int -> int -> int
* The maximum of two integers . Faster than the polymorphic [ min ] from the
standard library .
standard library. *)
val max : int -> int -> int
val mid : int -> int -> int
* Returns the number of 1 bits set in the binary representation of
the number . Maybe has problems with negative numbers
the number. Maybe has problems with negative numbers *)
val popcount : int -> int
val popcount_sparse : int -> int
val operations : int BatNumber.numeric
val ( -- ) : t -> t -> t BatEnum.t
* Enumerate an interval .
[ 5 -- 10 ] is the enumeration 5,6,7,8,9,10 .
[ 10 -- 5 ] is the empty enumeration
[5 -- 10] is the enumeration 5,6,7,8,9,10.
[10 -- 5] is the empty enumeration*)
val ( --- ) : t -> t -> t BatEnum.t
* Enumerate an interval .
[ 5 --- 10 ] is the enumeration 5,6,7,8,9,10 .
[ 10 --- 5 ] is the enumeration 10,9,8,7,6,5 .
[5 --- 10] is the enumeration 5,6,7,8,9,10.
[10 --- 5] is the enumeration 10,9,8,7,6,5.*)
external of_int : int -> int = "%identity"
external to_int : int -> int = "%identity"
* { 6 Submodules regrouping all infix operations }
module Infix : BatNumber.Infix with type bat__infix_t = t
module Compare : BatNumber.Compare with type bat__compare_t = t
* { 7 Printing }
val print: 'a BatInnerIO.output -> int -> unit
val print_hex: 'a BatInnerIO.output -> int -> unit
: ' a BatInnerIO.output - > t - > unit
( * * prints as binary string
val compare: t -> t -> int
val equal : t -> t -> bool
val ord : t -> t -> BatOrd.order
*
Safe operations on integers .
This module provides operations on the type [ int ] of
integers . Values of this type may be either 31 bits on 32 - bit
processors or 63 bits on 64 - bit processors . Operations which
overflow raise exception { ! Number . Overflow } .
This module implements { ! Number . Numeric } ,
{ ! Number . Bounded } , { ! Number . Discrete } .
{ b Important note } Untested .
Safe operations on integers.
This module provides operations on the type [int] of
integers. Values of this type may be either 31 bits on 32-bit
processors or 63 bits on 64-bit processors. Operations which
overflow raise exception {!Number.Overflow}.
This module implements {!Number.Numeric},
{!Number.Bounded}, {!Number.Discrete}.
{b Important note} Untested.
*)
module Safe_int : sig
type t = int
val zero : t
val one : t
* The integer [ 1 ] .
val minus_one : t
val neg : t -> t
val add : t -> t -> t
val ( + ) : t -> t -> t
val sub : t -> t -> t
val ( - ) : t -> t -> t
val mul : t -> t -> t
val ( * ) : t -> t -> t
external div : t -> t -> t = "%divint"
* Integer division .
This division rounds the real quotient of
its arguments towards zero , as specified for { ! Pervasives.(/ ) } .
@raise Division_by_zero if the second argument is zero .
This division rounds the real quotient of
its arguments towards zero, as specified for {!Pervasives.(/)}.
@raise Division_by_zero if the second argument is zero. *)
external ( / ) : t -> t -> t = "%divint"
* Integer division . This division rounds the real quotient of
its arguments towards zero , as specified for { ! Pervasives.(/ ) } .
@raise Division_by_zero if the second argument is zero .
its arguments towards zero, as specified for {!Pervasives.(/)}.
@raise Division_by_zero if the second argument is zero. *)
external rem : t -> t -> t = "%modint"
* Integer remainder . If [ y ] is not zero , the result
of [ Int.rem x y ] satisfies the following property :
[ x = ( Int.mul ( Int.div x y ) y ) ( Int.rem x y ) ] .
@raise Division_by_zero if the second argument is zero .
of [Int.rem x y] satisfies the following property:
[x = Int.add (Int.mul (Int.div x y) y) (Int.rem x y)].
@raise Division_by_zero if the second argument is zero. *)
external modulo : t -> t -> t = "%modint"
val pow : t -> t -> t
val ( ** ) : t -> t -> t
val ( <> ) : t -> t -> bool
* : [ a < > b ] is true if and only if [ a ] and [ b ] have
different values .
different values. *)
val ( > ) : t -> t -> bool
* : [ a > b ] is true if and only if [ a ] is strictly greater than [ b ] .
val ( < ) : t -> t -> bool
* : [ a < b ] is true if and only if [ a ] is strictly smaller than [ b ] .
val ( >= ) : t -> t -> bool
* : [ a > = b ] is true if and only if [ a ] is greater or equal to [ b ] .
val ( <= ) : t -> t -> bool
* : [ a < = b ] is true if and only if [ a ] is smaller or equalto [ b ] .
val ( = ) : t -> t -> bool
* : [ a = b ] if and only if [ a ] and [ b ] have the same value .
val max_num : t
* The greatest representable integer , which is either 2{^30}-1 or 2{^62}-1 .
val min_num : t
* The smallest representable integer , -2{^30 } or 2{^62 } .
val succ: t -> t
val pred: t -> t
val abs : t -> t
external of_float : float -> t = "%intoffloat"
external to_float : t -> float = "%floatofint"
val of_string : string -> t
* Convert the given string to an integer
The string is read in decimal ( by default ) or in hexadecimal ,
octal or binary if the string begins with [ 0x ] , [ 0o ] or [ 0b ]
respectively .
@raise Invalid_argument if the given string is not
a valid representation of an integer , or if the integer represented
exceeds the range of integers representable in type [ int ] .
The string is read in decimal (by default) or in hexadecimal,
octal or binary if the string begins with [0x], [0o] or [0b]
respectively.
@raise Invalid_argument if the given string is not
a valid representation of an integer, or if the integer represented
exceeds the range of integers representable in type [int]. *)
val to_string : t -> string
val operations : t BatNumber.numeric
external of_int : int -> t = "%identity"
external to_int : t -> int = "%identity"
* { 6 Submodules regrouping all infix operations on safe integers }
module Infix : BatNumber.Infix with type bat__infix_t = t
module Compare : BatNumber.Compare with type bat__compare_t = t
val print: 'a BatInnerIO.output -> t -> unit
val compare : t -> t -> int
val equal : t -> t -> bool
val ord : t -> t -> BatOrd.order
end
|
7cde7ceee91c4d4a51d545d8c6e3d3af7875770b7d8b299ce8c43aad4ac4bb57 | MaartenFaddegon/Hoed | Interpreter.hs | module Interpreter(obey) where
import Syntax
import Behaviour
import Value
type Env = [(Name,Value)]
obey :: Command -> Trace Value
obey p = fst (run p [])
look :: Name -> Env -> Value
look x s = maybe Wrong id (lookup x s)
update :: [Name] -> [Value] -> Env -> Env
update xs vs s = zip xs vs ++ filter (\(y,_)->y `notElem` xs) s
run :: Command -> Env -> (Trace Value, Env)
run Skip s = (End, s)
run (xs := es) s = (End, update xs (map (`eval` s) es) s)
run (p :-> q) s = let (outp, sp) = run p s
(outq, sq) = run q sp
in (outp +++ outq, sq)
run (If e p q) s = case eval e s of
Log True -> run p s
Log False -> run q s
_ -> (Crash, s)
run (While e p) s = case eval e s of
Log True -> let (outp,sp) = run p s
(outw,sw) = run (While e p) sp
in (outp +++ Step outw, sw)
Log False -> (End, s)
_ -> (Crash, s)
run (Print e) s = (eval e s :> End, s)
eval :: Expr -> Env -> Value
eval (Var x) s = look x s
eval (Val v) s = v
eval (Uno op a) s = uno op (eval a s)
eval (Duo op a b) s = duo op (eval a s) (eval b s)
| null | https://raw.githubusercontent.com/MaartenFaddegon/Hoed/8769d69e309928aab439b22bc3f3dbf5452acc77/examples/afp02Exercises/Compiler/parassign/Interpreter.hs | haskell | module Interpreter(obey) where
import Syntax
import Behaviour
import Value
type Env = [(Name,Value)]
obey :: Command -> Trace Value
obey p = fst (run p [])
look :: Name -> Env -> Value
look x s = maybe Wrong id (lookup x s)
update :: [Name] -> [Value] -> Env -> Env
update xs vs s = zip xs vs ++ filter (\(y,_)->y `notElem` xs) s
run :: Command -> Env -> (Trace Value, Env)
run Skip s = (End, s)
run (xs := es) s = (End, update xs (map (`eval` s) es) s)
run (p :-> q) s = let (outp, sp) = run p s
(outq, sq) = run q sp
in (outp +++ outq, sq)
run (If e p q) s = case eval e s of
Log True -> run p s
Log False -> run q s
_ -> (Crash, s)
run (While e p) s = case eval e s of
Log True -> let (outp,sp) = run p s
(outw,sw) = run (While e p) sp
in (outp +++ Step outw, sw)
Log False -> (End, s)
_ -> (Crash, s)
run (Print e) s = (eval e s :> End, s)
eval :: Expr -> Env -> Value
eval (Var x) s = look x s
eval (Val v) s = v
eval (Uno op a) s = uno op (eval a s)
eval (Duo op a b) s = duo op (eval a s) (eval b s)
| |
e19fb96e96d6e9ed9ca70e413e4f71e9c7e51bce40e269157e3f0c3acb140e31 | bartavelle/language-puppet | SizeSpec.hs | # LANGUAGE OverloadedLists #
module Interpreter.Function.SizeSpec (spec, main) where
import Helpers
main :: IO ()
main = hspec spec
evalArgs :: InterpreterMonad PValue -> Either PrettyError Scientific
evalArgs = dummyEval
>=> \pv -> case pv of
PNumber s -> return s
_ -> Left ("Expected a string, not " <> PrettyError (pretty pv))
spec :: Spec
spec = withStdlibFunction "size" $ \sizeFunc -> do
let evalArgs' = evalArgs . sizeFunc
let check args res = case evalArgs' args of
Left rr -> expectationFailure (show rr)
Right res' -> res' `shouldBe` res
checkError args ins = case evalArgs' args of
Left rr -> show rr `shouldContain` ins
Right r -> expectationFailure ("Should have errored, received this instead: " <> show r)
it "should error with no arguments" (checkError [] "a single argument")
it "should error with numerical arguments" (checkError [PNumber 1] "size(): Expects ")
it "should error with boolean arguments" (checkError [PBoolean True] "size(): Expects ")
-- Not conformant:
-- it "should error with numerical arguments" (checkError ["1"] "size(): Expects ")
it "should handle arrays" $ do
check [PArray []] 0
check [PArray ["a"]] 1
check [PArray ["one","two","three"]] 3
check [PArray ["one","two","three","four"]] 4
it "should handle hashes" $ do
check [PHash []] 0
check [PHash [("1","2")]] 1
check [PHash [("1","2"),("3","4")]] 2
it "should handle strings" $ do
check [""] 0
check ["a"] 1
check ["ab"] 2
check ["abcd"] 4
| null | https://raw.githubusercontent.com/bartavelle/language-puppet/6af7458e094440816c8b9b7b387050612e87a70f/tests/Interpreter/Function/SizeSpec.hs | haskell | Not conformant:
it "should error with numerical arguments" (checkError ["1"] "size(): Expects ") | # LANGUAGE OverloadedLists #
module Interpreter.Function.SizeSpec (spec, main) where
import Helpers
main :: IO ()
main = hspec spec
evalArgs :: InterpreterMonad PValue -> Either PrettyError Scientific
evalArgs = dummyEval
>=> \pv -> case pv of
PNumber s -> return s
_ -> Left ("Expected a string, not " <> PrettyError (pretty pv))
spec :: Spec
spec = withStdlibFunction "size" $ \sizeFunc -> do
let evalArgs' = evalArgs . sizeFunc
let check args res = case evalArgs' args of
Left rr -> expectationFailure (show rr)
Right res' -> res' `shouldBe` res
checkError args ins = case evalArgs' args of
Left rr -> show rr `shouldContain` ins
Right r -> expectationFailure ("Should have errored, received this instead: " <> show r)
it "should error with no arguments" (checkError [] "a single argument")
it "should error with numerical arguments" (checkError [PNumber 1] "size(): Expects ")
it "should error with boolean arguments" (checkError [PBoolean True] "size(): Expects ")
it "should handle arrays" $ do
check [PArray []] 0
check [PArray ["a"]] 1
check [PArray ["one","two","three"]] 3
check [PArray ["one","two","three","four"]] 4
it "should handle hashes" $ do
check [PHash []] 0
check [PHash [("1","2")]] 1
check [PHash [("1","2"),("3","4")]] 2
it "should handle strings" $ do
check [""] 0
check ["a"] 1
check ["ab"] 2
check ["abcd"] 4
|
890be39aa0df159b4869df40618ed7b145342d551ddd644690c32a20d930fb9e | archaelus/erms | erms_inject_mo.erl | %%%-------------------------------------------------------------------
@copyright Catalyst IT Ltd ( )
%%%
@author nt < >
%% @version {@vsn}, {@date} {@time}
@doc YAWS appmod for MO submission
%% @end
%%%-------------------------------------------------------------------
-module(erms_inject_mo).
-include_lib("logging.hrl").
-include_lib("mnesia_model.hrl").
-include_lib("yaws_api.hrl").
-import(yaws_api, [queryvar/2]).
%% API
-export([out/1]).
%%====================================================================
%% API
%%====================================================================
%%--------------------------------------------------------------------
out(Arg) when is_record(Arg, arg) ->
case (Arg#arg.req)#http_request.method of
'HEAD' -> [{status, 200},{ehtml,[]}];
_ -> out(Arg, find_login(Arg))
end.
out(_Arg, bad_login) ->
[{status, 403},
{ehtml,
{html, [],
[{head, [],
[{title, [], "Inject MO"}]},
{body, [],
{pre, [],
"Bad Login."}}]}}];
out(Arg, {auth, Shortcode, Connection}) ->
{Status, _Message} = find_mo(Shortcode, Connection, Arg),
[{status, Status},
{ehtml, []}].
%%====================================================================
Internal functions
%%====================================================================
find_mo(S,C,A) ->
case queryvar(A, "message") of
{ok, Value} ->
find_mo(S,C,
queryvar(A, "from"),
queryvar(A, "to"),
{ok, Value});
undefined ->
find_mo(S,C,
queryvar(A, "from"),
queryvar(A, "to"),
queryvar(A, "text"))
end.
find_mo(Shortcode,Connection,
{ok, From}, {ok, To}, {ok, Text}) ->
Msg = erms_msg:msg(From, To, Text),
Id = erms_msg_queue:queue(Connection, Shortcode, Msg, mo),
{200, erms_uuid:to_list(Id)};
find_mo(_,_,undefined,_,_) ->
{400, "Missing from number"};
find_mo(_,_,_,undefined,_) ->
{400, "Missing to number"};
find_mo(_,_,_,_,undefined) ->
{400, "Missing message body"}.
find_login(Arg) ->
User = queryvar(Arg, "user"),
Pass = queryvar(Arg, "pass"),
case gregexp:groups(Arg#arg.appmod_prepath, "/\\(.*\\)/") of
{match, [Login]} when User /= undefined, Pass /= undefined ->
lookup_login(Login, User, Pass);
{match, [Login]} ->
case string:tokens(Arg#arg.pathinfo,"/") of
[PathUser,PathPass] ->
lookup_login(Login, {ok, PathUser}, {ok, PathPass});
_Else ->
bad_login
end;
_ ->
bad_login
end.
lookup_login(_Login, {ok, User}, {ok, Pass}) ->
erms_auth:authorized(yaws,User,Pass);
lookup_login(_, _, _) ->
bad_login.
| null | https://raw.githubusercontent.com/archaelus/erms/5dbe5e79516a16e461e7a2a345dd80fbf92ef6fa/src/erms_inject_mo.erl | erlang | -------------------------------------------------------------------
@version {@vsn}, {@date} {@time}
@end
-------------------------------------------------------------------
API
====================================================================
API
====================================================================
--------------------------------------------------------------------
====================================================================
==================================================================== | @copyright Catalyst IT Ltd ( )
@author nt < >
@doc YAWS appmod for MO submission
-module(erms_inject_mo).
-include_lib("logging.hrl").
-include_lib("mnesia_model.hrl").
-include_lib("yaws_api.hrl").
-import(yaws_api, [queryvar/2]).
-export([out/1]).
out(Arg) when is_record(Arg, arg) ->
case (Arg#arg.req)#http_request.method of
'HEAD' -> [{status, 200},{ehtml,[]}];
_ -> out(Arg, find_login(Arg))
end.
out(_Arg, bad_login) ->
[{status, 403},
{ehtml,
{html, [],
[{head, [],
[{title, [], "Inject MO"}]},
{body, [],
{pre, [],
"Bad Login."}}]}}];
out(Arg, {auth, Shortcode, Connection}) ->
{Status, _Message} = find_mo(Shortcode, Connection, Arg),
[{status, Status},
{ehtml, []}].
Internal functions
find_mo(S,C,A) ->
case queryvar(A, "message") of
{ok, Value} ->
find_mo(S,C,
queryvar(A, "from"),
queryvar(A, "to"),
{ok, Value});
undefined ->
find_mo(S,C,
queryvar(A, "from"),
queryvar(A, "to"),
queryvar(A, "text"))
end.
find_mo(Shortcode,Connection,
{ok, From}, {ok, To}, {ok, Text}) ->
Msg = erms_msg:msg(From, To, Text),
Id = erms_msg_queue:queue(Connection, Shortcode, Msg, mo),
{200, erms_uuid:to_list(Id)};
find_mo(_,_,undefined,_,_) ->
{400, "Missing from number"};
find_mo(_,_,_,undefined,_) ->
{400, "Missing to number"};
find_mo(_,_,_,_,undefined) ->
{400, "Missing message body"}.
find_login(Arg) ->
User = queryvar(Arg, "user"),
Pass = queryvar(Arg, "pass"),
case gregexp:groups(Arg#arg.appmod_prepath, "/\\(.*\\)/") of
{match, [Login]} when User /= undefined, Pass /= undefined ->
lookup_login(Login, User, Pass);
{match, [Login]} ->
case string:tokens(Arg#arg.pathinfo,"/") of
[PathUser,PathPass] ->
lookup_login(Login, {ok, PathUser}, {ok, PathPass});
_Else ->
bad_login
end;
_ ->
bad_login
end.
lookup_login(_Login, {ok, User}, {ok, Pass}) ->
erms_auth:authorized(yaws,User,Pass);
lookup_login(_, _, _) ->
bad_login.
|
462f0802d61bf76b305c68984d2fddebf05b5f762b34078d181b860ba686ed5d | ghc/testsuite | TcCoercibleFailSafe.hs | # LANGUAGE RoleAnnotations , RankNTypes , ScopedTypeVariables , Safe #
import GHC.Prim (coerce, Coercible)
import Data.Ord (Down)
newtype Age = Age Int deriving Show
foo1 :: (Down Age -> Down Int)
foo1 = coerce
main = return ()
| null | https://raw.githubusercontent.com/ghc/testsuite/998a816ae89c4fd573f4abd7c6abb346cf7ee9af/tests/typecheck/should_fail/TcCoercibleFailSafe.hs | haskell | # LANGUAGE RoleAnnotations , RankNTypes , ScopedTypeVariables , Safe #
import GHC.Prim (coerce, Coercible)
import Data.Ord (Down)
newtype Age = Age Int deriving Show
foo1 :: (Down Age -> Down Int)
foo1 = coerce
main = return ()
| |
2128dfbfb9ebe6e618ec9b70d8fb74cdf8107a8c870cfcf27ec5114c4a39466e | ekmett/sparse | AppleBlas.hs |
module AppleBlas(blasMMult) where
import Foreign hiding (unsafePerformIO)
import Foreign.C.Types
import Unsafe.Coerce
import Prelude hiding (replicate)
import Data . Storable
import System.IO.Unsafe
import Data.Vector.Storable.Mutable
import GHC.Ptr (castPtr)
import Numerics.Simple.Util
foreign import ccall unsafe "cblas.c simple_dgemm"
dgemm :: Ptr CDouble -> Ptr CDouble -> Ptr CDouble -> CInt -> IO ()
saphWrapper :: (Ptr CDouble -> Ptr CDouble -> Ptr CDouble -> CInt -> IO ())-> ( Ptr Double -> Ptr Double -> Ptr Double -> Int -> IO ())
saphWrapper f = (\c a b n -> f (castPtr c ) (castPtr a) (castPtr c ) (CInt $ fromIntegral n ))
this is always safe / ok because CDouble is a newtyped Double , and CInt is a newtyped Int
dgemm_wrapped :: Ptr Double -> Ptr Double -> Ptr Double -> Int -> IO ()
dgemm_wrapped = saphWrapper dgemm
blasMMult :: IOVector Double -> IOVector Double -> IOVector Double -> Int -> IO ()
blasMMult aVect bVect cVect n =
unsafeWith aVect $! \aPtr ->
unsafeWith bVect $! \bPtr ->
unsafeWith cVect $! \cPtr ->
dgemm_wrapped aPtr bPtr cPtr n | null | https://raw.githubusercontent.com/ekmett/sparse/8de744d4c1f3520b84b0e841f7b24178f9a1ea2f/benchmarks/AppleBlas.hs | haskell |
module AppleBlas(blasMMult) where
import Foreign hiding (unsafePerformIO)
import Foreign.C.Types
import Unsafe.Coerce
import Prelude hiding (replicate)
import Data . Storable
import System.IO.Unsafe
import Data.Vector.Storable.Mutable
import GHC.Ptr (castPtr)
import Numerics.Simple.Util
foreign import ccall unsafe "cblas.c simple_dgemm"
dgemm :: Ptr CDouble -> Ptr CDouble -> Ptr CDouble -> CInt -> IO ()
saphWrapper :: (Ptr CDouble -> Ptr CDouble -> Ptr CDouble -> CInt -> IO ())-> ( Ptr Double -> Ptr Double -> Ptr Double -> Int -> IO ())
saphWrapper f = (\c a b n -> f (castPtr c ) (castPtr a) (castPtr c ) (CInt $ fromIntegral n ))
this is always safe / ok because CDouble is a newtyped Double , and CInt is a newtyped Int
dgemm_wrapped :: Ptr Double -> Ptr Double -> Ptr Double -> Int -> IO ()
dgemm_wrapped = saphWrapper dgemm
blasMMult :: IOVector Double -> IOVector Double -> IOVector Double -> Int -> IO ()
blasMMult aVect bVect cVect n =
unsafeWith aVect $! \aPtr ->
unsafeWith bVect $! \bPtr ->
unsafeWith cVect $! \cPtr ->
dgemm_wrapped aPtr bPtr cPtr n | |
97c4d033eaaca2f79c6ed921530569a6c2d1f9484d4588c48f2b3cb9ce691034 | robert-strandh/Cluffer | edit-protocol-implementation.lisp | (cl:in-package #:cluffer-standard-line)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
Method on CURSOR - ATTACHED - P.
(defmethod cluffer:cursor-attached-p ((cursor cursor))
(not (null (line cursor))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Methods on ITEM-COUNT.
(defmethod cluffer:item-count ((line open-line))
(- (length (contents line)) (- (gap-end line) (gap-start line))))
(defmethod cluffer:item-count ((line closed-line))
(length (contents line)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Methods on ITEMS.
When the items of an open line are asked for , we first close the
;;; line. While this way of doing it might seem wasteful, it probably
;;; is not that bad. When the items are asked for, the reason is
;;; probably that those items are going to be displayed or used to
;;; drive a parser, or something else that will imply some significant
;;; work for each item. So even if the line is repeatedly opened (to
;;; edit) and closed (to display), it probably does not matter much.
;;; A slight improvement could be to leave the line open and return a
;;; freshly allocated vector with the items in it.
(defmethod cluffer:items ((line open-line) &key (start 0) (end nil))
(close-line line)
(cluffer:items line :start start :end end))
;;; When all the items are asked for, we do not allocate a fresh
;;; vector. This means that client code is not allowed to mutate the
;;; return value of this function
(defmethod cluffer:items ((line closed-line) &key (start 0) (end nil))
(if (and (= start 0) (null end))
(contents line)
(subseq (contents line) start end)))
(defun close-line (line)
(let* ((item-count (cluffer:item-count line))
(contents (contents line))
(new-contents (make-array item-count)))
(replace new-contents contents
:start1 0 :start2 0 :end2 (gap-start line))
(replace new-contents contents
:start1 (gap-start line) :start2 (gap-end line))
(change-class line 'closed-line
:contents new-contents)
nil))
(defun open-line (line)
(let* ((contents (contents line))
(item-count (length contents))
(new-length (max 32 item-count))
(new-contents (make-array new-length)))
(replace new-contents contents
:start1 (- new-length item-count) :start2 0)
(change-class line 'open-line
:contents new-contents
:gap-start 0
:gap-end (- new-length item-count))
nil))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Detaching and attaching a cursor.
(defmethod cluffer:attach-cursor
((cursor cursor)
(line line)
&optional
(position 0))
(push cursor (cursors line))
(setf (line cursor) line)
(setf (cluffer:cursor-position cursor) position)
nil)
(defmethod cluffer:detach-cursor ((cursor cursor))
(setf (cursors (line cursor))
(remove cursor (cursors (line cursor))))
(setf (line cursor) nil))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Methods on INSERT-ITEM-AT-POSITION.
Helper function to capture commonalities between the two methods .
;;; LINE is always an open line.
(defun insert-item-at-position (line item position)
(let ((contents (contents line)))
(cond ((= (gap-start line) (gap-end line))
(let* ((new-length (* 2 (length contents)))
(diff (- new-length (length contents)))
(new-contents (make-array new-length)))
(replace new-contents contents
:start2 0 :start1 0 :end2 position)
(replace new-contents contents
:start2 position :start1 (+ position diff))
(setf (gap-start line) position)
(setf (gap-end line) (+ position diff))
(setf (contents line) new-contents)))
((< position (gap-start line))
(decf (gap-end line) (- (gap-start line) position))
(replace contents contents
:start2 position :end2 (gap-start line)
:start1 (gap-end line))
(setf (gap-start line) position))
((> position (gap-start line))
(replace contents contents
:start2 (gap-end line)
:start1 (gap-start line) :end1 position)
(incf (gap-end line) (- position (gap-start line)))
(setf (gap-start line) position))
(t
nil))
(setf (aref (contents line) (gap-start line)) item)
(incf (gap-start line))
(loop for cursor in (cursors line)
do (when (or (> (cluffer:cursor-position cursor) position)
(and (= (cluffer:cursor-position cursor) position)
(typep cursor 'right-sticky-cursor)))
(incf (cluffer:cursor-position cursor)))))
nil)
(defmethod cluffer:insert-item-at-position ((line closed-line) item position)
(open-line line)
(insert-item-at-position line item position))
(defmethod cluffer:insert-item-at-position ((line open-line) item position)
(insert-item-at-position line item position))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Methods on DELETE-ITEM-AT-POSITION.
Helper function to capture commonalities between the two methods .
;;; LINE is always an open line.
(defun delete-item-at-position (line position)
(let ((contents (contents line)))
(cond ((< position (gap-start line))
(decf (gap-end line) (- (gap-start line) position))
(replace contents contents
:start2 position :end2 (gap-start line)
:start1 (gap-end line))
(setf (gap-start line) position))
((> position (gap-start line))
(replace contents contents
:start2 (gap-end line)
:start1 (gap-start line) :end1 position)
(incf (gap-end line) (- position (gap-start line)))
(setf (gap-start line) position))
(t
nil))
for the GC
(incf (gap-end line))
(when (and (> (length contents) 32)
(> (- (gap-end line) (gap-start line))
(* 3/4 (length contents))))
(let* ((new-length (floor (length contents) 2))
(diff (- (length contents) new-length))
(new-contents (make-array new-length)))
(replace new-contents contents
:start2 0 :start1 0 :end2 (gap-start line))
(replace new-contents contents
:start2 (gap-end line) :start1 (- (gap-end line) diff))
(decf (gap-end line) diff)
(setf (contents line) new-contents)))
(loop for cursor in (cursors line)
do (when (> (cluffer:cursor-position cursor) position)
(decf (cluffer:cursor-position cursor)))))
nil)
(defmethod cluffer:delete-item-at-position ((line closed-line) position)
(open-line line)
(delete-item-at-position line position))
(defmethod cluffer:delete-item-at-position ((line open-line) position)
(delete-item-at-position line position))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Method on ITEM-AT-POSITION.
;;; No need to open the line.
(defmethod cluffer:item-at-position ((line closed-line) position)
(aref (contents line) position))
(defmethod cluffer:item-at-position ((line open-line) position)
(aref (contents line)
(if (< position (gap-start line))
position
(+ position (- (gap-end line) (gap-start line))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Methods on CLUFFER-INTERNAL:LINE-SPLIT-LINE.
(defmethod cluffer-internal:line-split-line ((line open-line) position)
(close-line line)
(cluffer-internal:line-split-line line position))
(defmethod cluffer-internal:line-split-line ((line closed-line) position)
(let* ((contents (contents line))
(new-contents (subseq contents position))
(new-line (make-instance 'closed-line
:cursors '()
:contents new-contents)))
(setf (contents line)
(subseq contents 0 position))
(setf (cursors new-line)
(loop for cursor in (cursors line)
when (or (and (typep cursor 'right-sticky-cursor)
(>= (cluffer:cursor-position cursor) position))
(and (typep cursor 'left-sticky-cursor)
(> (cluffer:cursor-position cursor) position)))
collect cursor))
(loop for cursor in (cursors new-line)
do (setf (line cursor) new-line)
(decf (cluffer:cursor-position cursor) position))
(setf (cursors line)
(set-difference (cursors line) (cursors new-line)))
new-line))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Methods on CLUFFER-INTERNAL:LINE-JOIN-LINE.
(defmethod cluffer-internal:line-join-line ((line1 open-line) line2)
(close-line line1)
(cluffer-internal:line-join-line line1 line2))
(defmethod cluffer-internal:line-join-line (line1 (line2 open-line))
(close-line line2)
(cluffer-internal:line-join-line line1 line2))
(defmethod cluffer-internal:line-join-line
((line1 closed-line) (line2 closed-line))
(loop with length = (length (contents line1))
initially
(setf (contents line1)
(concatenate 'vector (contents line1) (contents line2)))
for cursor in (cursors line2)
do (setf (line cursor) line1)
(incf (cluffer:cursor-position cursor) length)
(push cursor (cursors line1)))
nil)
| null | https://raw.githubusercontent.com/robert-strandh/Cluffer/5b612e66cd85b69de4feeae2fffea00c7e184e31/Standard-line/edit-protocol-implementation.lisp | lisp |
Methods on ITEM-COUNT.
Methods on ITEMS.
line. While this way of doing it might seem wasteful, it probably
is not that bad. When the items are asked for, the reason is
probably that those items are going to be displayed or used to
drive a parser, or something else that will imply some significant
work for each item. So even if the line is repeatedly opened (to
edit) and closed (to display), it probably does not matter much.
A slight improvement could be to leave the line open and return a
freshly allocated vector with the items in it.
When all the items are asked for, we do not allocate a fresh
vector. This means that client code is not allowed to mutate the
return value of this function
Detaching and attaching a cursor.
Methods on INSERT-ITEM-AT-POSITION.
LINE is always an open line.
Methods on DELETE-ITEM-AT-POSITION.
LINE is always an open line.
Method on ITEM-AT-POSITION.
No need to open the line.
Methods on CLUFFER-INTERNAL:LINE-SPLIT-LINE.
Methods on CLUFFER-INTERNAL:LINE-JOIN-LINE. | (cl:in-package #:cluffer-standard-line)
Method on CURSOR - ATTACHED - P.
(defmethod cluffer:cursor-attached-p ((cursor cursor))
(not (null (line cursor))))
(defmethod cluffer:item-count ((line open-line))
(- (length (contents line)) (- (gap-end line) (gap-start line))))
(defmethod cluffer:item-count ((line closed-line))
(length (contents line)))
When the items of an open line are asked for , we first close the
(defmethod cluffer:items ((line open-line) &key (start 0) (end nil))
(close-line line)
(cluffer:items line :start start :end end))
(defmethod cluffer:items ((line closed-line) &key (start 0) (end nil))
(if (and (= start 0) (null end))
(contents line)
(subseq (contents line) start end)))
(defun close-line (line)
(let* ((item-count (cluffer:item-count line))
(contents (contents line))
(new-contents (make-array item-count)))
(replace new-contents contents
:start1 0 :start2 0 :end2 (gap-start line))
(replace new-contents contents
:start1 (gap-start line) :start2 (gap-end line))
(change-class line 'closed-line
:contents new-contents)
nil))
(defun open-line (line)
(let* ((contents (contents line))
(item-count (length contents))
(new-length (max 32 item-count))
(new-contents (make-array new-length)))
(replace new-contents contents
:start1 (- new-length item-count) :start2 0)
(change-class line 'open-line
:contents new-contents
:gap-start 0
:gap-end (- new-length item-count))
nil))
(defmethod cluffer:attach-cursor
((cursor cursor)
(line line)
&optional
(position 0))
(push cursor (cursors line))
(setf (line cursor) line)
(setf (cluffer:cursor-position cursor) position)
nil)
(defmethod cluffer:detach-cursor ((cursor cursor))
(setf (cursors (line cursor))
(remove cursor (cursors (line cursor))))
(setf (line cursor) nil))
Helper function to capture commonalities between the two methods .
(defun insert-item-at-position (line item position)
(let ((contents (contents line)))
(cond ((= (gap-start line) (gap-end line))
(let* ((new-length (* 2 (length contents)))
(diff (- new-length (length contents)))
(new-contents (make-array new-length)))
(replace new-contents contents
:start2 0 :start1 0 :end2 position)
(replace new-contents contents
:start2 position :start1 (+ position diff))
(setf (gap-start line) position)
(setf (gap-end line) (+ position diff))
(setf (contents line) new-contents)))
((< position (gap-start line))
(decf (gap-end line) (- (gap-start line) position))
(replace contents contents
:start2 position :end2 (gap-start line)
:start1 (gap-end line))
(setf (gap-start line) position))
((> position (gap-start line))
(replace contents contents
:start2 (gap-end line)
:start1 (gap-start line) :end1 position)
(incf (gap-end line) (- position (gap-start line)))
(setf (gap-start line) position))
(t
nil))
(setf (aref (contents line) (gap-start line)) item)
(incf (gap-start line))
(loop for cursor in (cursors line)
do (when (or (> (cluffer:cursor-position cursor) position)
(and (= (cluffer:cursor-position cursor) position)
(typep cursor 'right-sticky-cursor)))
(incf (cluffer:cursor-position cursor)))))
nil)
(defmethod cluffer:insert-item-at-position ((line closed-line) item position)
(open-line line)
(insert-item-at-position line item position))
(defmethod cluffer:insert-item-at-position ((line open-line) item position)
(insert-item-at-position line item position))
Helper function to capture commonalities between the two methods .
(defun delete-item-at-position (line position)
(let ((contents (contents line)))
(cond ((< position (gap-start line))
(decf (gap-end line) (- (gap-start line) position))
(replace contents contents
:start2 position :end2 (gap-start line)
:start1 (gap-end line))
(setf (gap-start line) position))
((> position (gap-start line))
(replace contents contents
:start2 (gap-end line)
:start1 (gap-start line) :end1 position)
(incf (gap-end line) (- position (gap-start line)))
(setf (gap-start line) position))
(t
nil))
for the GC
(incf (gap-end line))
(when (and (> (length contents) 32)
(> (- (gap-end line) (gap-start line))
(* 3/4 (length contents))))
(let* ((new-length (floor (length contents) 2))
(diff (- (length contents) new-length))
(new-contents (make-array new-length)))
(replace new-contents contents
:start2 0 :start1 0 :end2 (gap-start line))
(replace new-contents contents
:start2 (gap-end line) :start1 (- (gap-end line) diff))
(decf (gap-end line) diff)
(setf (contents line) new-contents)))
(loop for cursor in (cursors line)
do (when (> (cluffer:cursor-position cursor) position)
(decf (cluffer:cursor-position cursor)))))
nil)
(defmethod cluffer:delete-item-at-position ((line closed-line) position)
(open-line line)
(delete-item-at-position line position))
(defmethod cluffer:delete-item-at-position ((line open-line) position)
(delete-item-at-position line position))
(defmethod cluffer:item-at-position ((line closed-line) position)
(aref (contents line) position))
(defmethod cluffer:item-at-position ((line open-line) position)
(aref (contents line)
(if (< position (gap-start line))
position
(+ position (- (gap-end line) (gap-start line))))))
(defmethod cluffer-internal:line-split-line ((line open-line) position)
(close-line line)
(cluffer-internal:line-split-line line position))
(defmethod cluffer-internal:line-split-line ((line closed-line) position)
(let* ((contents (contents line))
(new-contents (subseq contents position))
(new-line (make-instance 'closed-line
:cursors '()
:contents new-contents)))
(setf (contents line)
(subseq contents 0 position))
(setf (cursors new-line)
(loop for cursor in (cursors line)
when (or (and (typep cursor 'right-sticky-cursor)
(>= (cluffer:cursor-position cursor) position))
(and (typep cursor 'left-sticky-cursor)
(> (cluffer:cursor-position cursor) position)))
collect cursor))
(loop for cursor in (cursors new-line)
do (setf (line cursor) new-line)
(decf (cluffer:cursor-position cursor) position))
(setf (cursors line)
(set-difference (cursors line) (cursors new-line)))
new-line))
(defmethod cluffer-internal:line-join-line ((line1 open-line) line2)
(close-line line1)
(cluffer-internal:line-join-line line1 line2))
(defmethod cluffer-internal:line-join-line (line1 (line2 open-line))
(close-line line2)
(cluffer-internal:line-join-line line1 line2))
(defmethod cluffer-internal:line-join-line
((line1 closed-line) (line2 closed-line))
(loop with length = (length (contents line1))
initially
(setf (contents line1)
(concatenate 'vector (contents line1) (contents line2)))
for cursor in (cursors line2)
do (setf (line cursor) line1)
(incf (cluffer:cursor-position cursor) length)
(push cursor (cursors line1)))
nil)
|
64ee058d9780f164aea86ade340da5ce9264ab27f65364051470777eb3cfdf16 | startalkIM/ejabberd | mod_http_upload_quota.erl | %%%----------------------------------------------------------------------
%%% File : mod_http_upload_quota.erl
Author : < >
%%% Purpose : Quota management for HTTP File Upload (XEP-0363)
Created : 15 Oct 2015 by < >
%%%
%%%
ejabberd , Copyright ( C ) 2015 - 2016 ProcessOne
%%%
%%% This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation ; either version 2 of the
%%% License, or (at your option) any later version.
%%%
%%% This program is distributed in the hope that it will be useful,
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
%%% General Public License for more details.
%%%
You should have received a copy of the GNU General Public License along
with this program ; if not , write to the Free Software Foundation , Inc. ,
51 Franklin Street , Fifth Floor , Boston , USA .
%%%
%%%----------------------------------------------------------------------
-module(mod_http_upload_quota).
-author('').
-define(GEN_SERVER, gen_server).
-define(PROCNAME, ?MODULE).
-define(TIMEOUT, timer:hours(24)).
-define(INITIAL_TIMEOUT, timer:minutes(10)).
-define(FORMAT(Error), file:format_error(Error)).
-behaviour(?GEN_SERVER).
-behaviour(gen_mod).
%% gen_mod/supervisor callbacks.
-export([start_link/3,
start/2,
stop/1,
depends/2,
mod_opt_type/1]).
%% gen_server callbacks.
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
%% ejabberd_hooks callback.
-export([handle_slot_request/5]).
-include("jlib.hrl").
-include("logger.hrl").
-include_lib("kernel/include/file.hrl").
-record(state,
{server_host :: binary(),
access_soft_quota :: atom(),
access_hard_quota :: atom(),
max_days :: pos_integer() | infinity,
docroot :: binary(),
disk_usage = #{} :: map(),
timers :: [timer:tref()]}).
-type state() :: #state{}.
%%--------------------------------------------------------------------
%% gen_mod/supervisor callbacks.
%%--------------------------------------------------------------------
-spec start_link(binary(), atom(), gen_mod:opts())
-> {ok, pid()} | ignore | {error, _}.
start_link(ServerHost, Proc, Opts) ->
?GEN_SERVER:start_link({local, Proc}, ?MODULE, {ServerHost, Opts}, []).
-spec start(binary(), gen_mod:opts()) -> {ok, _} | {ok, _, _} | {error, _}.
start(ServerHost, Opts) ->
Proc = mod_http_upload:get_proc_name(ServerHost, ?PROCNAME),
Spec = {Proc,
{?MODULE, start_link, [ServerHost, Proc, Opts]},
permanent,
3000,
worker,
[?MODULE]},
supervisor:start_child(ejabberd_sup, Spec).
-spec stop(binary()) -> ok.
stop(ServerHost) ->
Proc = mod_http_upload:get_proc_name(ServerHost, ?PROCNAME),
supervisor:terminate_child(ejabberd_sup, Proc),
supervisor:delete_child(ejabberd_sup, Proc).
-spec mod_opt_type(atom()) -> fun((term()) -> term()) | [atom()].
mod_opt_type(access_soft_quota) ->
fun acl:shaper_rules_validator/1;
mod_opt_type(access_hard_quota) ->
fun acl:shaper_rules_validator/1;
mod_opt_type(max_days) ->
fun(I) when is_integer(I), I > 0 -> I;
(infinity) -> infinity
end;
mod_opt_type(_) ->
[access_soft_quota, access_hard_quota, max_days].
-spec depends(binary(), gen_mod:opts()) -> [{module(), hard | soft}].
depends(_Host, _Opts) ->
[{mod_http_upload, hard}].
%%--------------------------------------------------------------------
%% gen_server callbacks.
%%--------------------------------------------------------------------
-spec init({binary(), gen_mod:opts()}) -> {ok, state()}.
init({ServerHost, Opts}) ->
process_flag(trap_exit, true),
AccessSoftQuota = gen_mod:get_opt(access_soft_quota, Opts,
fun acl:shaper_rules_validator/1,
soft_upload_quota),
AccessHardQuota = gen_mod:get_opt(access_hard_quota, Opts,
fun acl:shaper_rules_validator/1,
hard_upload_quota),
MaxDays = gen_mod:get_opt(max_days, Opts,
fun(I) when is_integer(I), I > 0 -> I;
(infinity) -> infinity
end,
infinity),
DocRoot1 = gen_mod:get_module_opt(ServerHost, mod_http_upload, docroot,
fun iolist_to_binary/1,
<<"@HOME@/upload">>),
DocRoot2 = mod_http_upload:expand_home(str:strip(DocRoot1, right, $/)),
DocRoot3 = mod_http_upload:expand_host(DocRoot2, ServerHost),
Timers = if MaxDays == infinity -> [];
true ->
{ok, T1} = timer:send_after(?INITIAL_TIMEOUT, sweep),
{ok, T2} = timer:send_interval(?TIMEOUT, sweep),
[T1, T2]
end,
ejabberd_hooks:add(http_upload_slot_request, ServerHost, ?MODULE,
handle_slot_request, 50),
{ok, #state{server_host = ServerHost,
access_soft_quota = AccessSoftQuota,
access_hard_quota = AccessHardQuota,
max_days = MaxDays,
docroot = DocRoot3,
timers = Timers}}.
%% @doc gen_server call callback.  This module exposes no synchronous API,
%% so any call is unexpected: log it and keep running.
-spec handle_call(_, {pid(), _}, state()) -> {noreply, state()}.
handle_call(Request, From, State) ->
    ?ERROR_MSG("Got unexpected request from ~p: ~p", [From, Request]),
    {noreply, State}.

%% @doc Asynchronous handler for a forwarded upload-slot request.
%% Resolves the hard and soft quota for the requesting JID (configured in
%% MiB, converted to bytes here) and delegates to enforce_quota/5, which
%% may delete the user's oldest uploads to make room.  The resulting
%% per-user usage is cached in #state.disk_usage so the next request for
%% the same {User, Server} can take the fast path.
-spec handle_cast(_, state()) -> {noreply, state()}.
handle_cast({handle_slot_request, #jid{user = U, server = S} = JID, Path, Size},
            #state{server_host = ServerHost,
                   access_soft_quota = AccessSoftQuota,
                   access_hard_quota = AccessHardQuota,
                   disk_usage = DiskUsage} = State) ->
    %% A non-positive/non-integer rule result means "no quota configured".
    HardQuota = case acl:match_rule(ServerHost, AccessHardQuota, JID) of
                    Hard when is_integer(Hard), Hard > 0 ->
                        Hard * 1024 * 1024;
                    _ ->
                        0
                end,
    SoftQuota = case acl:match_rule(ServerHost, AccessSoftQuota, JID) of
                    Soft when is_integer(Soft), Soft > 0 ->
                        Soft * 1024 * 1024;
                    _ ->
                        0
                end,
    %% Cached usage from a previous request, if any.
    OldSize = case maps:find({U, S}, DiskUsage) of
                  {ok, Value} ->
                      Value;
                  error ->
                      undefined
              end,
    NewSize = case {HardQuota, SoftQuota} of
                  {0, 0} ->
                      %% No limits configured for this user: nothing to do.
                      ?DEBUG("No quota specified for ~s",
                             [jid:to_string(JID)]),
                      undefined;
                  {0, _} ->
                      %% Only a soft quota: use it as both limits.
                      ?WARNING_MSG("No hard quota specified for ~s",
                                   [jid:to_string(JID)]),
                      enforce_quota(Path, Size, OldSize, SoftQuota, SoftQuota);
                  {_, 0} ->
                      %% Only a hard quota: use it as both limits.
                      ?WARNING_MSG("No soft quota specified for ~s",
                                   [jid:to_string(JID)]),
                      enforce_quota(Path, Size, OldSize, HardQuota, HardQuota);
                  _ when SoftQuota > HardQuota ->
                      %% Misconfigured (soft above hard): fall back to soft.
                      ?WARNING_MSG("Bad quota for ~s (soft: ~p, hard: ~p)",
                                   [jid:to_string(JID),
                                    SoftQuota, HardQuota]),
                      enforce_quota(Path, Size, OldSize, SoftQuota, SoftQuota);
                  _ ->
                      ?DEBUG("Enforcing quota for ~s",
                             [jid:to_string(JID)]),
                      enforce_quota(Path, Size, OldSize, SoftQuota, HardQuota)
              end,
    NewDiskUsage = if is_integer(NewSize) ->
                          maps:put({U, S}, NewSize, DiskUsage);
                      true ->
                          DiskUsage
                   end,
    {noreply, State#state{disk_usage = NewDiskUsage}};
handle_cast(Request, State) ->
    ?ERROR_MSG("Got unexpected request: ~p", [Request]),
    {noreply, State}.

%% @doc Periodic 'sweep' tick (scheduled in init/1 when max_days is set):
%% walk every per-user directory under the docroot and delete files older
%% than max_days.  Any other message is logged and ignored.
-spec handle_info(_, state()) -> {noreply, state()}.
handle_info(sweep, #state{server_host = ServerHost,
                          docroot = DocRoot,
                          max_days = MaxDays} = State)
  when is_integer(MaxDays), MaxDays > 0 ->
    ?DEBUG("Got 'sweep' message for ~s", [ServerHost]),
    case file:list_dir(DocRoot) of
        {ok, Entries} ->
            %% Cut-off in seconds since the epoch, matching the posix
            %% mtimes returned by gather_file_info/1.
            BackThen = secs_since_epoch() - (MaxDays * 86400),
            DocRootS = binary_to_list(DocRoot),
            PathNames = lists:map(fun(Entry) ->
                                          DocRootS ++ "/" ++ Entry
                                  end, Entries),
            UserDirs = lists:filter(fun filelib:is_dir/1, PathNames),
            lists:foreach(fun(UserDir) ->
                                  delete_old_files(UserDir, BackThen)
                          end, UserDirs);
        {error, Error} ->
            ?ERROR_MSG("Cannot open document root ~s: ~s",
                       [DocRoot, ?FORMAT(Error)])
    end,
    {noreply, State};
handle_info(Info, State) ->
    ?ERROR_MSG("Got unexpected info: ~p", [Info]),
    {noreply, State}.

%% @doc gen_server terminate: unregister the slot-request hook and cancel
%% the sweep timers started in init/1.
-spec terminate(normal | shutdown | {shutdown, _} | _, state()) -> ok.
terminate(Reason, #state{server_host = ServerHost, timers = Timers}) ->
    ?DEBUG("Stopping upload quota process for ~s: ~p", [ServerHost, Reason]),
    ejabberd_hooks:delete(http_upload_slot_request, ServerHost, ?MODULE,
                          handle_slot_request, 50),
    lists:foreach(fun timer:cancel/1, Timers).

%% @doc Hot code upgrade: the state record is carried over unchanged.
-spec code_change({down, _} | _, state(), _) -> {ok, state()}.
code_change(_OldVsn, #state{server_host = ServerHost} = State, _Extra) ->
    ?DEBUG("Updating upload quota process for ~s", [ServerHost]),
    {ok, State}.
%%--------------------------------------------------------------------
%% ejabberd_hooks callback.
%%--------------------------------------------------------------------
%% @doc http_upload_slot_request hook (priority 50).
%% When the request is already allowed, forward it asynchronously to the
%% per-host quota process and keep the 'allow' verdict — quota enforcement
%% happens in the background and never blocks the slot response.  Any
%% other accumulator value is passed through unchanged.
-spec handle_slot_request(term(), jid(), binary(), non_neg_integer(), binary())
      -> term().
handle_slot_request(allow, #jid{lserver = ServerHost} = JID, Path, Size,
                    _Lang) ->
    Proc = mod_http_upload:get_proc_name(ServerHost, ?PROCNAME),
    ?GEN_SERVER:cast(Proc, {handle_slot_request, JID, Path, Size}),
    allow;
handle_slot_request(Acc, _JID, _Path, _Size, _Lang) -> Acc.
%%--------------------------------------------------------------------
Internal functions .
%%--------------------------------------------------------------------
%% @doc Make room for an upload of SlotSize bytes in UserDir and return
%% the user's new disk usage in bytes.
%% Fast path: when a cached usage is known and the upload still fits the
%% hard limit (MaxSize), no filesystem access happens at all.
%% Slow path: scan the directory, sort the files newest-first and keep the
%% newest ones whose cumulative size plus the slot stays within MinSize
%% (bound from the soft quota by the caller); everything older becomes a
%% deletion candidate.  Candidates are only actually removed when the true
%% total would exceed MaxSize (the hard limit).
-spec enforce_quota(file:filename_all(), non_neg_integer(),
                    non_neg_integer() | undefined, non_neg_integer(),
                    non_neg_integer())
      -> non_neg_integer().
enforce_quota(_UserDir, SlotSize, OldSize, _MinSize, MaxSize)
  when is_integer(OldSize), OldSize + SlotSize =< MaxSize ->
    OldSize + SlotSize;
enforce_quota(UserDir, SlotSize, _OldSize, MinSize, MaxSize) ->
    Files = lists:sort(fun({_PathA, _SizeA, TimeA}, {_PathB, _SizeB, TimeB}) ->
                               TimeA > TimeB
                       end, gather_file_info(UserDir)),
    %% While no file has been marked for deletion, the 2nd and 3rd
    %% accumulator elements are equal — which is what the {[], AccSize,
    %% AccSize} heads match on.  OldSize ends up as the real total,
    %% NewSize as the size of the files we would keep.
    {DelFiles, OldSize, NewSize} =
        lists:foldl(fun({_Path, Size, _Time}, {[], AccSize, AccSize})
                          when AccSize + Size + SlotSize =< MinSize ->
                            {[], AccSize + Size, AccSize + Size};
                       ({Path, Size, _Time}, {[], AccSize, AccSize}) ->
                            {[Path], AccSize + Size, AccSize};
                       ({Path, Size, _Time}, {AccFiles, AccSize, NewSize}) ->
                            {[Path | AccFiles], AccSize + Size, NewSize}
                    end, {[], 0, 0}, Files),
    if OldSize + SlotSize > MaxSize ->
           lists:foreach(fun del_file_and_dir/1, DelFiles),
           file:del_dir(UserDir), % In case it's empty, now.
           NewSize + SlotSize;
       true ->
           OldSize + SlotSize
    end.

%% @doc Delete every file below UserDir whose posix mtime is older than
%% CutOff (seconds since the epoch), then try to remove the directory
%% itself (file:del_dir only succeeds if it became empty).
-spec delete_old_files(file:filename_all(), integer()) -> ok.
delete_old_files(UserDir, CutOff) ->
    FileInfo = gather_file_info(UserDir),
    case [Path || {Path, _Size, Time} <- FileInfo, Time < CutOff] of
        [] ->
            ok;
        OldFiles ->
            lists:foreach(fun del_file_and_dir/1, OldFiles),
            file:del_dir(UserDir) % In case it's empty, now.
    end.

%% @doc Recursively collect {Path, Size, MTime} for every regular file
%% below Dir.  MTime is a posix timestamp (seconds since the epoch).
%% Non-regular entries are skipped; a missing directory yields [].
-spec gather_file_info(file:filename_all())
      -> [{binary(), non_neg_integer(), non_neg_integer()}].
gather_file_info(Dir) when is_binary(Dir) ->
    gather_file_info(binary_to_list(Dir));
gather_file_info(Dir) ->
    case file:list_dir(Dir) of
        {ok, Entries} ->
            lists:foldl(fun(Entry, Acc) ->
                                Path = Dir ++ "/" ++ Entry,
                                case file:read_file_info(Path,
                                                         [{time, posix}]) of
                                    {ok, #file_info{type = directory}} ->
                                        gather_file_info(Path) ++ Acc;
                                    {ok, #file_info{type = regular,
                                                    mtime = Time,
                                                    size = Size}} ->
                                        [{Path, Size, Time} | Acc];
                                    {ok, _Info} ->
                                        ?DEBUG("Won't stat(2) non-regular file ~s",
                                               [Path]),
                                        Acc;
                                    {error, Error} ->
                                        ?ERROR_MSG("Cannot stat(2) ~s: ~s",
                                                   [Path, ?FORMAT(Error)]),
                                        Acc
                                end
                        end, [], Entries);
        {error, enoent} ->
            ?DEBUG("Directory ~s doesn't exist", [Dir]),
            [];
        {error, Error} ->
            ?ERROR_MSG("Cannot open directory ~s: ~s", [Dir, ?FORMAT(Error)]),
            []
    end.

%% @doc Delete a single file, then opportunistically remove its parent
%% directory (only succeeds when empty; failure is logged at debug level
%% and ignored).
-spec del_file_and_dir(file:name_all()) -> ok.
del_file_and_dir(File) ->
    case file:delete(File) of
        ok ->
            ?INFO_MSG("Removed ~s", [File]),
            Dir = filename:dirname(File),
            case file:del_dir(Dir) of
                ok ->
                    ?DEBUG("Removed ~s", [Dir]);
                {error, Error} ->
                    ?DEBUG("Cannot remove ~s: ~s", [Dir, ?FORMAT(Error)])
            end;
        {error, Error} ->
            ?WARNING_MSG("Cannot remove ~s: ~s", [File, ?FORMAT(Error)])
    end.

%% @doc Wall-clock time as whole seconds since the Unix epoch.
-spec secs_since_epoch() -> non_neg_integer().
secs_since_epoch() ->
    {MegaSecs, Secs, _MicroSecs} = os:timestamp(),
    MegaSecs * 1000000 + Secs.
| null | https://raw.githubusercontent.com/startalkIM/ejabberd/718d86cd2f5681099fad14dab5f2541ddc612c8b/src/mod_http_upload_quota.erl | erlang | ----------------------------------------------------------------------
File : mod_http_upload_quota.erl
Purpose : Quota management for HTTP File Upload (XEP-0363)
This program is free software; you can redistribute it and/or
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
----------------------------------------------------------------------
gen_mod/supervisor callbacks.
gen_server callbacks.
ejabberd_hooks callback.
--------------------------------------------------------------------
gen_mod/supervisor callbacks.
--------------------------------------------------------------------
--------------------------------------------------------------------
gen_server callbacks.
--------------------------------------------------------------------
--------------------------------------------------------------------
ejabberd_hooks callback.
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
In case it's empty, now.
In case it's empty, now. | Author : < >
Created : 15 Oct 2015 by < >
ejabberd , Copyright ( C ) 2015 - 2016 ProcessOne
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation ; either version 2 of the
You should have received a copy of the GNU General Public License along
with this program ; if not , write to the Free Software Foundation , Inc. ,
51 Franklin Street , Fifth Floor , Boston , USA .
%% Per-user disk quota and file-expiry companion to mod_http_upload
%% (XEP-0363 HTTP File Upload).
-module(mod_http_upload_quota).
-author('').

%% Process naming / scheduling constants.
-define(GEN_SERVER, gen_server).
-define(PROCNAME, ?MODULE).
%% Period between expiry sweeps, and the delay before the first one.
-define(TIMEOUT, timer:hours(24)).
-define(INITIAL_TIMEOUT, timer:minutes(10)).
%% Render a posix error atom as a human-readable string.
-define(FORMAT(Error), file:format_error(Error)).

-behaviour(?GEN_SERVER).
-behaviour(gen_mod).

%% gen_mod/supervisor API.
-export([start_link/3,
         start/2,
         stop/1,
         depends/2,
         mod_opt_type/1]).
%% gen_server callbacks.
-export([init/1,
         handle_call/3,
         handle_cast/2,
         handle_info/2,
         terminate/2,
         code_change/3]).
%% ejabberd_hooks callback.
-export([handle_slot_request/5]).

-include("jlib.hrl").
-include("logger.hrl").
-include_lib("kernel/include/file.hrl").

-record(state,
        {server_host :: binary(),              % served XMPP domain
         access_soft_quota :: atom(),          % access rule: soft quota (MiB)
         access_hard_quota :: atom(),          % access rule: hard quota (MiB)
         max_days :: pos_integer() | infinity, % file expiry age for sweeps
         docroot :: binary(),                  % resolved upload directory
         disk_usage = #{} :: map(),            % {User, Server} -> bytes used
         timers :: [timer:tref()]}).           % sweep timers, cancelled in terminate/2
-type state() :: #state{}.
%% @doc Start the quota worker registered under the per-host name built by
%% mod_http_upload:get_proc_name/2.
-spec start_link(binary(), atom(), gen_mod:opts())
      -> {ok, pid()} | ignore | {error, _}.
start_link(ServerHost, Proc, Opts) ->
    ?GEN_SERVER:start_link({local, Proc}, ?MODULE, {ServerHost, Opts}, []).

%% @doc gen_mod start callback: attach the quota worker to ejabberd_sup.
-spec start(binary(), gen_mod:opts()) -> {ok, _} | {ok, _, _} | {error, _}.
start(ServerHost, Opts) ->
    Proc = mod_http_upload:get_proc_name(ServerHost, ?PROCNAME),
    Spec = {Proc,
            {?MODULE, start_link, [ServerHost, Proc, Opts]},
            permanent,
            3000,
            worker,
            [?MODULE]},
    supervisor:start_child(ejabberd_sup, Spec).

%% @doc gen_mod stop callback: stop and remove the supervised worker.
-spec stop(binary()) -> ok.
stop(ServerHost) ->
    Proc = mod_http_upload:get_proc_name(ServerHost, ?PROCNAME),
    supervisor:terminate_child(ejabberd_sup, Proc),
    supervisor:delete_child(ejabberd_sup, Proc).

%% @doc Option validators; the catch-all clause lists the known options.
-spec mod_opt_type(atom()) -> fun((term()) -> term()) | [atom()].
mod_opt_type(access_soft_quota) ->
    fun acl:shaper_rules_validator/1;
mod_opt_type(access_hard_quota) ->
    fun acl:shaper_rules_validator/1;
mod_opt_type(max_days) ->
    fun(I) when is_integer(I), I > 0 -> I;
       (infinity) -> infinity
    end;
mod_opt_type(_) ->
    [access_soft_quota, access_hard_quota, max_days].

%% @doc This module requires mod_http_upload on the same host.
-spec depends(binary(), gen_mod:opts()) -> [{module(), hard | soft}].
depends(_Host, _Opts) ->
    [{mod_http_upload, hard}].
%% @doc gen_server init callback.
%% Reads the quota options, resolves the upload docroot the same way
%% mod_http_upload does (expanding @HOME@ and the host placeholder), and —
%% when max_days is bounded — schedules a first sweep after
%% ?INITIAL_TIMEOUT plus a periodic sweep every ?TIMEOUT.  Also registers
%% the http_upload_slot_request hook so slot requests reach this process.
-spec init({binary(), gen_mod:opts()}) -> {ok, state()}.
init({ServerHost, Opts}) ->
    process_flag(trap_exit, true),
    %% Access rules resolving to a per-user quota (interpreted as MiB).
    AccessSoftQuota = gen_mod:get_opt(access_soft_quota, Opts,
                                      fun acl:shaper_rules_validator/1,
                                      soft_upload_quota),
    AccessHardQuota = gen_mod:get_opt(access_hard_quota, Opts,
                                      fun acl:shaper_rules_validator/1,
                                      hard_upload_quota),
    MaxDays = gen_mod:get_opt(max_days, Opts,
                              fun(I) when is_integer(I), I > 0 -> I;
                                 (infinity) -> infinity
                              end,
                              infinity),
    %% Mirror mod_http_upload's docroot resolution so both modules agree
    %% on where the uploaded files live.
    DocRoot1 = gen_mod:get_module_opt(ServerHost, mod_http_upload, docroot,
                                      fun iolist_to_binary/1,
                                      <<"@HOME@/upload">>),
    DocRoot2 = mod_http_upload:expand_home(str:strip(DocRoot1, right, $/)),
    DocRoot3 = mod_http_upload:expand_host(DocRoot2, ServerHost),
    Timers = if MaxDays == infinity -> [];
                true ->
                     {ok, T1} = timer:send_after(?INITIAL_TIMEOUT, sweep),
                     {ok, T2} = timer:send_interval(?TIMEOUT, sweep),
                     [T1, T2]
             end,
    ejabberd_hooks:add(http_upload_slot_request, ServerHost, ?MODULE,
                       handle_slot_request, 50),
    {ok, #state{server_host = ServerHost,
                access_soft_quota = AccessSoftQuota,
                access_hard_quota = AccessHardQuota,
                max_days = MaxDays,
                docroot = DocRoot3,
                timers = Timers}}.
%% @doc gen_server call callback.  This module exposes no synchronous API,
%% so any call is unexpected: log it and keep running.
-spec handle_call(_, {pid(), _}, state()) -> {noreply, state()}.
handle_call(Request, From, State) ->
    ?ERROR_MSG("Got unexpected request from ~p: ~p", [From, Request]),
    {noreply, State}.

%% @doc Asynchronous handler for a forwarded upload-slot request.
%% Resolves the hard and soft quota for the requesting JID (configured in
%% MiB, converted to bytes here) and delegates to enforce_quota/5, which
%% may delete the user's oldest uploads to make room.  The resulting
%% per-user usage is cached in #state.disk_usage so the next request for
%% the same {User, Server} can take the fast path.
-spec handle_cast(_, state()) -> {noreply, state()}.
handle_cast({handle_slot_request, #jid{user = U, server = S} = JID, Path, Size},
            #state{server_host = ServerHost,
                   access_soft_quota = AccessSoftQuota,
                   access_hard_quota = AccessHardQuota,
                   disk_usage = DiskUsage} = State) ->
    %% A non-positive/non-integer rule result means "no quota configured".
    HardQuota = case acl:match_rule(ServerHost, AccessHardQuota, JID) of
                    Hard when is_integer(Hard), Hard > 0 ->
                        Hard * 1024 * 1024;
                    _ ->
                        0
                end,
    SoftQuota = case acl:match_rule(ServerHost, AccessSoftQuota, JID) of
                    Soft when is_integer(Soft), Soft > 0 ->
                        Soft * 1024 * 1024;
                    _ ->
                        0
                end,
    %% Cached usage from a previous request, if any.
    OldSize = case maps:find({U, S}, DiskUsage) of
                  {ok, Value} ->
                      Value;
                  error ->
                      undefined
              end,
    NewSize = case {HardQuota, SoftQuota} of
                  {0, 0} ->
                      %% No limits configured for this user: nothing to do.
                      ?DEBUG("No quota specified for ~s",
                             [jid:to_string(JID)]),
                      undefined;
                  {0, _} ->
                      %% Only a soft quota: use it as both limits.
                      ?WARNING_MSG("No hard quota specified for ~s",
                                   [jid:to_string(JID)]),
                      enforce_quota(Path, Size, OldSize, SoftQuota, SoftQuota);
                  {_, 0} ->
                      %% Only a hard quota: use it as both limits.
                      ?WARNING_MSG("No soft quota specified for ~s",
                                   [jid:to_string(JID)]),
                      enforce_quota(Path, Size, OldSize, HardQuota, HardQuota);
                  _ when SoftQuota > HardQuota ->
                      %% Misconfigured (soft above hard): fall back to soft.
                      ?WARNING_MSG("Bad quota for ~s (soft: ~p, hard: ~p)",
                                   [jid:to_string(JID),
                                    SoftQuota, HardQuota]),
                      enforce_quota(Path, Size, OldSize, SoftQuota, SoftQuota);
                  _ ->
                      ?DEBUG("Enforcing quota for ~s",
                             [jid:to_string(JID)]),
                      enforce_quota(Path, Size, OldSize, SoftQuota, HardQuota)
              end,
    NewDiskUsage = if is_integer(NewSize) ->
                          maps:put({U, S}, NewSize, DiskUsage);
                      true ->
                          DiskUsage
                   end,
    {noreply, State#state{disk_usage = NewDiskUsage}};
handle_cast(Request, State) ->
    ?ERROR_MSG("Got unexpected request: ~p", [Request]),
    {noreply, State}.

%% @doc Periodic 'sweep' tick (scheduled in init/1 when max_days is set):
%% walk every per-user directory under the docroot and delete files older
%% than max_days.  Any other message is logged and ignored.
-spec handle_info(_, state()) -> {noreply, state()}.
handle_info(sweep, #state{server_host = ServerHost,
                          docroot = DocRoot,
                          max_days = MaxDays} = State)
  when is_integer(MaxDays), MaxDays > 0 ->
    ?DEBUG("Got 'sweep' message for ~s", [ServerHost]),
    case file:list_dir(DocRoot) of
        {ok, Entries} ->
            %% Cut-off in seconds since the epoch, matching the posix
            %% mtimes returned by gather_file_info/1.
            BackThen = secs_since_epoch() - (MaxDays * 86400),
            DocRootS = binary_to_list(DocRoot),
            PathNames = lists:map(fun(Entry) ->
                                          DocRootS ++ "/" ++ Entry
                                  end, Entries),
            UserDirs = lists:filter(fun filelib:is_dir/1, PathNames),
            lists:foreach(fun(UserDir) ->
                                  delete_old_files(UserDir, BackThen)
                          end, UserDirs);
        {error, Error} ->
            ?ERROR_MSG("Cannot open document root ~s: ~s",
                       [DocRoot, ?FORMAT(Error)])
    end,
    {noreply, State};
handle_info(Info, State) ->
    ?ERROR_MSG("Got unexpected info: ~p", [Info]),
    {noreply, State}.

%% @doc gen_server terminate: unregister the slot-request hook and cancel
%% the sweep timers started in init/1.
-spec terminate(normal | shutdown | {shutdown, _} | _, state()) -> ok.
terminate(Reason, #state{server_host = ServerHost, timers = Timers}) ->
    ?DEBUG("Stopping upload quota process for ~s: ~p", [ServerHost, Reason]),
    ejabberd_hooks:delete(http_upload_slot_request, ServerHost, ?MODULE,
                          handle_slot_request, 50),
    lists:foreach(fun timer:cancel/1, Timers).

%% @doc Hot code upgrade: the state record is carried over unchanged.
-spec code_change({down, _} | _, state(), _) -> {ok, state()}.
code_change(_OldVsn, #state{server_host = ServerHost} = State, _Extra) ->
    ?DEBUG("Updating upload quota process for ~s", [ServerHost]),
    {ok, State}.

%% @doc http_upload_slot_request hook (priority 50).
%% When the request is already allowed, forward it asynchronously to the
%% per-host quota process and keep the 'allow' verdict — quota enforcement
%% never blocks the slot response.  Any other accumulator is passed
%% through unchanged.
-spec handle_slot_request(term(), jid(), binary(), non_neg_integer(), binary())
      -> term().
handle_slot_request(allow, #jid{lserver = ServerHost} = JID, Path, Size,
                    _Lang) ->
    Proc = mod_http_upload:get_proc_name(ServerHost, ?PROCNAME),
    ?GEN_SERVER:cast(Proc, {handle_slot_request, JID, Path, Size}),
    allow;
handle_slot_request(Acc, _JID, _Path, _Size, _Lang) -> Acc.
Internal functions .
%% @doc Make room for an upload of SlotSize bytes in UserDir and return
%% the user's new disk usage in bytes.
%% Fast path: when a cached usage is known and the upload still fits the
%% hard limit (MaxSize), no filesystem access happens at all.
%% Slow path: scan the directory, sort the files newest-first and keep the
%% newest ones whose cumulative size plus the slot stays within MinSize;
%% everything older becomes a deletion candidate.  Candidates are only
%% actually removed when the true total would exceed MaxSize.
-spec enforce_quota(file:filename_all(), non_neg_integer(),
                    non_neg_integer() | undefined, non_neg_integer(),
                    non_neg_integer())
      -> non_neg_integer().
enforce_quota(_UserDir, SlotSize, OldSize, _MinSize, MaxSize)
  when is_integer(OldSize), OldSize + SlotSize =< MaxSize ->
    OldSize + SlotSize;
enforce_quota(UserDir, SlotSize, _OldSize, MinSize, MaxSize) ->
    Files = lists:sort(fun({_PathA, _SizeA, TimeA}, {_PathB, _SizeB, TimeB}) ->
                               TimeA > TimeB
                       end, gather_file_info(UserDir)),
    %% While no file has been marked for deletion, the 2nd and 3rd
    %% accumulator elements are equal — which is what the {[], AccSize,
    %% AccSize} heads match on.  OldSize ends up as the real total,
    %% NewSize as the size of the files we keep.
    {DelFiles, OldSize, NewSize} =
        lists:foldl(fun({_Path, Size, _Time}, {[], AccSize, AccSize})
                          when AccSize + Size + SlotSize =< MinSize ->
                            {[], AccSize + Size, AccSize + Size};
                       ({Path, Size, _Time}, {[], AccSize, AccSize}) ->
                            {[Path], AccSize + Size, AccSize};
                       ({Path, Size, _Time}, {AccFiles, AccSize, NewSize}) ->
                            {[Path | AccFiles], AccSize + Size, NewSize}
                    end, {[], 0, 0}, Files),
    if OldSize + SlotSize > MaxSize ->
           lists:foreach(fun del_file_and_dir/1, DelFiles),
           %% Restored statement (dropped in this copy, cf. the intact
           %% version above): the user dir may have become empty — try to
           %% remove it; without this line the dangling comma before the
           %% next expression is a syntax error.
           file:del_dir(UserDir), % In case it's empty, now.
           NewSize + SlotSize;
       true ->
           OldSize + SlotSize
    end.
%% @doc Delete every file below UserDir whose posix mtime is older than
%% CutOff (seconds since the epoch), then try to remove the directory
%% itself (file:del_dir only succeeds if it became empty).
-spec delete_old_files(file:filename_all(), integer()) -> ok.
delete_old_files(UserDir, CutOff) ->
    FileInfo = gather_file_info(UserDir),
    case [Path || {Path, _Size, Time} <- FileInfo, Time < CutOff] of
        [] ->
            ok;
        OldFiles ->
            lists:foreach(fun del_file_and_dir/1, OldFiles),
            %% Restored statement (dropped in this copy, cf. the intact
            %% version above); without it the trailing comma before 'end'
            %% is a syntax error and the empty dir is never removed.
            file:del_dir(UserDir) % In case it's empty, now.
    end.
%% @doc Recursively collect {Path, Size, MTime} for every regular file
%% below Dir.  MTime is a posix timestamp (seconds since the epoch).
%% Non-regular entries are skipped; a missing directory yields [].
-spec gather_file_info(file:filename_all())
      -> [{binary(), non_neg_integer(), non_neg_integer()}].
gather_file_info(Dir) when is_binary(Dir) ->
    gather_file_info(binary_to_list(Dir));
gather_file_info(Dir) ->
    case file:list_dir(Dir) of
        {ok, Entries} ->
            lists:foldl(fun(Entry, Acc) ->
                                Path = Dir ++ "/" ++ Entry,
                                case file:read_file_info(Path,
                                                         [{time, posix}]) of
                                    {ok, #file_info{type = directory}} ->
                                        gather_file_info(Path) ++ Acc;
                                    {ok, #file_info{type = regular,
                                                    mtime = Time,
                                                    size = Size}} ->
                                        [{Path, Size, Time} | Acc];
                                    {ok, _Info} ->
                                        ?DEBUG("Won't stat(2) non-regular file ~s",
                                               [Path]),
                                        Acc;
                                    {error, Error} ->
                                        ?ERROR_MSG("Cannot stat(2) ~s: ~s",
                                                   [Path, ?FORMAT(Error)]),
                                        Acc
                                end
                        end, [], Entries);
        {error, enoent} ->
            ?DEBUG("Directory ~s doesn't exist", [Dir]),
            [];
        {error, Error} ->
            ?ERROR_MSG("Cannot open directory ~s: ~s", [Dir, ?FORMAT(Error)]),
            []
    end.

%% @doc Delete a single file, then opportunistically remove its parent
%% directory (only succeeds when empty; failure is logged at debug level
%% and ignored).
-spec del_file_and_dir(file:name_all()) -> ok.
del_file_and_dir(File) ->
    case file:delete(File) of
        ok ->
            ?INFO_MSG("Removed ~s", [File]),
            Dir = filename:dirname(File),
            case file:del_dir(Dir) of
                ok ->
                    ?DEBUG("Removed ~s", [Dir]);
                {error, Error} ->
                    ?DEBUG("Cannot remove ~s: ~s", [Dir, ?FORMAT(Error)])
            end;
        {error, Error} ->
            ?WARNING_MSG("Cannot remove ~s: ~s", [File, ?FORMAT(Error)])
    end.

%% @doc Wall-clock time as whole seconds since the Unix epoch.
-spec secs_since_epoch() -> non_neg_integer().
secs_since_epoch() ->
    {MegaSecs, Secs, _MicroSecs} = os:timestamp(),
    MegaSecs * 1000000 + Secs.
|
20a5c322dc8916d481d0324d90c202bebebdb8e1c80de5b8cf62a7c33879c8de | nedap/speced.def | def_with_doc.cljc | (ns nedap.speced.def.impl.def-with-doc
(:require
[clojure.core.protocols]
#?(:cljs [cljs.repl])
#?(:clj [clojure.spec.alpha :as spec] :cljs [cljs.spec.alpha :as spec])
[nedap.utils.spec.api :refer [check!]])
#?(:cljs (:require-macros [nedap.speced.def.impl.def-with-doc]))
#?(:clj (:import (java.io Writer))))
;; Wrapper record whose sole purpose is to datafy/print as the plain
;; docstring it carries.
(defrecord Docstring [docstring]
  clojure.core.protocols/Datafiable
  (datafy [_]
    docstring))

;; On the JVM, print a Docstring as its raw text instead of a record map.
#?(:clj
   (defmethod print-method Docstring
     [this ^Writer writer]
     (let [^String docstring (-> this :docstring)]
       (-> writer (.write docstring)))))

;; Registers a spec under `spec-name` together with its docstring.
;; Side effects: swap!s the docstring into `doc-registry` (keyed by the
;; spec keyword) and, wrapped in a Docstring record, into
;; `symbol-doc-registry` (keyed by the corresponding symbol), then emits
;; the platform-appropriate `spec/def`.  On the JVM the registry
;; arguments are check!-ed to be resolvable symbols.
#?(:clj
   (defmacro def-with-doc
     [spec-name docstring spec doc-registry symbol-doc-registry]
     {:pre [(check! qualified-keyword? spec-name
                    string? docstring
                    some? spec)]}
     (when (-> &env :ns nil?)
       (check! (spec/and symbol?
                         resolve) doc-registry
               (spec/and symbol?
                         resolve) symbol-doc-registry))
     (list 'do
           (list `swap! doc-registry `assoc spec-name docstring)
           (list `swap! symbol-doc-registry `assoc (list 'quote (symbol spec-name)) (list `map->Docstring {:docstring docstring}))
           (list (if (-> &env :ns some?)
                   'cljs.spec.alpha/def
                   'clojure.spec.alpha/def)
                 spec-name
                 spec))))
| null | https://raw.githubusercontent.com/nedap/speced.def/55053e53e749f77753294f3ee8d4639470840f8c/src/nedap/speced/def/impl/def_with_doc.cljc | clojure | (ns nedap.speced.def.impl.def-with-doc
(:require
[clojure.core.protocols]
#?(:cljs [cljs.repl])
#?(:clj [clojure.spec.alpha :as spec] :cljs [cljs.spec.alpha :as spec])
[nedap.utils.spec.api :refer [check!]])
#?(:cljs (:require-macros [nedap.speced.def.impl.def-with-doc]))
#?(:clj (:import (java.io Writer))))
;; Wrapper record whose sole purpose is to datafy/print as the plain
;; docstring it carries.
(defrecord Docstring [docstring]
  clojure.core.protocols/Datafiable
  (datafy [_]
    docstring))

;; On the JVM, print a Docstring as its raw text instead of a record map.
#?(:clj
   (defmethod print-method Docstring
     [this ^Writer writer]
     (let [^String docstring (-> this :docstring)]
       (-> writer (.write docstring)))))

;; Registers a spec under `spec-name` together with its docstring.
;; Side effects: swap!s the docstring into `doc-registry` (keyed by the
;; spec keyword) and, wrapped in a Docstring record, into
;; `symbol-doc-registry` (keyed by the corresponding symbol), then emits
;; the platform-appropriate `spec/def`.  On the JVM the registry
;; arguments are check!-ed to be resolvable symbols.
#?(:clj
   (defmacro def-with-doc
     [spec-name docstring spec doc-registry symbol-doc-registry]
     {:pre [(check! qualified-keyword? spec-name
                    string? docstring
                    some? spec)]}
     (when (-> &env :ns nil?)
       (check! (spec/and symbol?
                         resolve) doc-registry
               (spec/and symbol?
                         resolve) symbol-doc-registry))
     (list 'do
           (list `swap! doc-registry `assoc spec-name docstring)
           (list `swap! symbol-doc-registry `assoc (list 'quote (symbol spec-name)) (list `map->Docstring {:docstring docstring}))
           (list (if (-> &env :ns some?)
                   'cljs.spec.alpha/def
                   'clojure.spec.alpha/def)
                 spec-name
                 spec))))
| |
fcfd8a907e867f469b94d3ddd503b20ba049b1ba01065f3b0b29052a97459142 | tiensonqin/lymchat | pgpass.clj | (ns api.pg.pgpass
"Logic for matching passwords ~/.pgpass passwords to db specs."
(:require [clojure.java.io :as io]
[clojure.string :as str]))
(defn parse-pgpass-line
  "Parse one .pgpass line of the form hostname:port:database:username:password
   into a map {:pg-hostname \"*\" ...}."
  [s]
  (zipmap
   [:pg-hostname :pg-port :pg-database :pg-username :pg-password]
   ;; Limit the split to 5 fields: an unlimited split both truncates a
   ;; password containing ':' and drops a trailing empty password field.
   ;; NOTE(review): libpq additionally honours backslash-escaped ':' —
   ;; not handled here (and not handled before either).
   (str/split s #":" 5)))
(defn read-pgpass
  "Find ~/.pgpass, read it and parse lines into maps.
   Returns nil when the file does not exist; otherwise a fully realized
   seq of field maps (doall, so the reader can be closed safely)."
  []
  (let [homedir (io/file (System/getProperty "user.home"))
        passfile (io/file homedir ".pgpass")]
    (when (.isFile passfile)
      (with-open [r (io/reader passfile)]
        (->> r
             line-seq
             (map parse-pgpass-line)
             doall)))))
(defn pgpass-matches?
  "Predicate/extractor for (filter (partial pgpass-matches? spec) pgpass-lines):
   returns the line's password when the line matches the db spec, else nil.
   A nil :host/:port in the spec matches the defaults \"localhost\"/\"5432\";
   \"*\" in any .pgpass field matches anything."
  [{:keys [host port dbname user]}
   {:keys [pg-hostname pg-port pg-database pg-username pg-password]}]
  (let [host-ok? (or (= pg-hostname "*")
                     (= pg-hostname host)
                     (and (= pg-hostname "localhost") (nil? host)))
        port-ok? (or (= pg-port "*")
                     (= pg-port port)
                     (and (= pg-port "5432") (nil? port)))
        db-ok?   (or (= pg-database "*") (= pg-database dbname))
        user-ok? (or (= pg-username "*") (= pg-username user))]
    (when (and host-ok? port-ok? db-ok? user-ok?)
      pg-password)))
(defn pgpass-lookup
  "Look up password from ~/.pgpass based on db spec {:host ... :port ... :dbname ... :user ...}.
   Returns the password of the first matching line, or nil when ~/.pgpass
   is absent or nothing matches."
  [spec]
  (when-let [match (first (filter (partial pgpass-matches? spec) (read-pgpass)))]
    (:pg-password match)))
| null | https://raw.githubusercontent.com/tiensonqin/lymchat/824026607d30c12bc50afb06f677d1fa95ff1f2f/api/src/api/pg/pgpass.clj | clojure | (ns api.pg.pgpass
"Logic for matching passwords ~/.pgpass passwords to db specs."
(:require [clojure.java.io :as io]
[clojure.string :as str]))
(defn parse-pgpass-line
"The .pgpass files has lines of format: hostname:port:database:username:password
Return a map of fields {:pg-hostname \"*\" ...}"
[s]
(zipmap
[:pg-hostname :pg-port :pg-database :pg-username :pg-password]
(str/split s #":")))
(defn read-pgpass
"Find ~/.pgpass, read it and parse lines into maps"
[]
(let [homedir (io/file (System/getProperty "user.home"))
passfile (io/file homedir ".pgpass")]
(when (.isFile passfile)
(with-open [r (io/reader passfile)]
(->> r
line-seq
(map parse-pgpass-line)
doall)))))
(defn pgpass-matches?
"(filter (partial pgpass-matches? spec) pgpass-lines)"
[{:keys [host port dbname user]} {:keys [pg-hostname pg-port pg-database pg-username pg-password]}]
(when
(and
(or (= pg-hostname "*") (= pg-hostname host) (and (= pg-hostname "localhost") (nil? host)))
(or (= pg-port "*") (= pg-port port) (and (= pg-port "5432") (nil? port)))
(or (= pg-database "*") (= pg-database dbname))
(or (= pg-username "*") (= pg-username user)))
pg-password))
(defn pgpass-lookup
"Look up password from ~/.pgpass based on db spec {:host ... :port ... :dbname ... :user ...}"
[spec]
(when-let [match (first (filter (partial pgpass-matches? spec) (read-pgpass)))]
(:pg-password match)))
| |
cc2e1e0b8689a1128edac01affa66a74fbbf9752d8025b58d9f2ea9d892d7524 | marcin-rzeznicki/stackcollapse-ghc | Format.hs | stackcollapse - ghc - fold GHC prof files into flamegraph input
Copyright ( C ) 2020
This program is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with this program . If not , see < / > .
Copyright (C) 2020 Marcin Rzeźnicki
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see </>.
-}
{-# LANGUAGE OverloadedStrings #-}
module Format
( MayFail
, ColumnList
, Format
, Inherited(..)
, readSrc
, readCostCentre
, readText
, readInteger
, readDouble) where
import Trace
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as Char8
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8')
import Data.Either.Extra (maybeToEither)
import Control.Arrow (left)
import Text.Read (readMaybe)
-- | Computations that may fail with a textual error message.
type MayFail = Either String

-- | Raw byte-string columns of one input line.
type ColumnList = [ByteString]

-- | A line parser: turns the columns into a (Trace, Inherited) pair,
-- or fails with a message.
type Format = ColumnList -> MayFail (Trace, Inherited)

-- | Inherited time/allocation figures of an entry (as Doubles).
data Inherited =
  Inherited { inheritedTime :: Double, inheritedAlloc :: Double }
-- | Decode a UTF-8 source-location column into a 'Src', mapping the
-- three special placeholder strings to their dedicated constructors.
readSrc :: ByteString -> MayFail Src
readSrc bs = mkSrc <$> readText bs
  where
    mkSrc text = case T.unpack text of
      "<built-in>"         -> BuiltIn
      "<entire-module>"    -> EntireModule
      "<no location info>" -> NoLocationInfo
      _                    -> Location text
-- | Decode a cost-centre column: "MAIN" and "CAF" are special, a
-- "CAF:..." prefix marks a named CAF, anything else is an SCC.
readCostCentre :: ByteString -> MayFail CostCentre
readCostCentre = fmap mkCC . readText
  where
    mkCC "MAIN" = Main
    mkCC "CAF" = CAF Nothing
    mkCC text
      | "CAF:" `T.isPrefixOf` text = CAF (Just text)
      | otherwise = SCC text

-- | Strictly decode a UTF-8 column, turning the decoding exception into
-- the textual error of 'MayFail'.
readText :: ByteString -> MayFail Text
readText = left show . decodeUtf8'

-- | Render a column for error messages, tolerating malformed UTF-8.
showText :: ByteString -> String
showText = either (const "<malformed UTF-8>") T.unpack . readText

-- | Parse a column that must consist solely of an integer.
readInteger :: ByteString -> MayFail Integer
readInteger chars = case Char8.readInteger chars of
  Just (i, chars')
    | Char8.null chars' -> Right i   -- whole column consumed
    | otherwise -> _error
  Nothing -> _error
  where
    _error = Left $ "expected integer in place of '" ++ showText chars ++ "' "

-- | Parse a column as a Double via 'readMaybe'.
readDouble :: ByteString -> MayFail Double
readDouble chars = maybeToEither _error $ readMaybe $ Char8.unpack chars
  where
    _error = "expected double in place of '" ++ showText chars ++ "' "
| null | https://raw.githubusercontent.com/marcin-rzeznicki/stackcollapse-ghc/6ff9b8d526dbeccb68aab02bec7cd7b2e53515ef/src/Format.hs | haskell | # LANGUAGE OverloadedStrings # | stackcollapse - ghc - fold GHC prof files into flamegraph input
Copyright ( C ) 2020
This program is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with this program . If not , see < / > .
Copyright (C) 2020 Marcin Rzeźnicki
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see </>.
-}
module Format
( MayFail
, ColumnList
, Format
, Inherited(..)
, readSrc
, readCostCentre
, readText
, readInteger
, readDouble) where
import Trace
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as Char8
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8')
import Data.Either.Extra (maybeToEither)
import Control.Arrow (left)
import Text.Read (readMaybe)
-- | Computations that may fail with a textual error message.
type MayFail = Either String

-- | Raw byte-string columns of one input line.
type ColumnList = [ByteString]

-- | A line parser: turns the columns into a (Trace, Inherited) pair,
-- or fails with a message.
type Format = ColumnList -> MayFail (Trace, Inherited)

-- | Inherited time/allocation figures of an entry (as Doubles).
data Inherited =
  Inherited { inheritedTime :: Double, inheritedAlloc :: Double }
-- | Decode a UTF-8 source-location column into a 'Src', mapping the
-- three special placeholder strings to their dedicated constructors.
readSrc :: ByteString -> MayFail Src
readSrc bs = mkSrc <$> readText bs
  where
    mkSrc text = case T.unpack text of
      "<built-in>"         -> BuiltIn
      "<entire-module>"    -> EntireModule
      "<no location info>" -> NoLocationInfo
      _                    -> Location text
-- | Decode a cost-centre column: "MAIN" and "CAF" are special, a
-- "CAF:..." prefix marks a named CAF, anything else is an SCC.
readCostCentre :: ByteString -> MayFail CostCentre
readCostCentre = fmap mkCC . readText
  where
    mkCC "MAIN" = Main
    mkCC "CAF" = CAF Nothing
    mkCC text
      | "CAF:" `T.isPrefixOf` text = CAF (Just text)
      | otherwise = SCC text

-- | Strictly decode a UTF-8 column, turning the decoding exception into
-- the textual error of 'MayFail'.
readText :: ByteString -> MayFail Text
readText = left show . decodeUtf8'

-- | Render a column for error messages, tolerating malformed UTF-8.
showText :: ByteString -> String
showText = either (const "<malformed UTF-8>") T.unpack . readText

-- | Parse a column that must consist solely of an integer.
readInteger :: ByteString -> MayFail Integer
readInteger chars = case Char8.readInteger chars of
  Just (i, chars')
    | Char8.null chars' -> Right i   -- whole column consumed
    | otherwise -> _error
  Nothing -> _error
  where
    _error = Left $ "expected integer in place of '" ++ showText chars ++ "' "

-- | Parse a column as a Double via 'readMaybe'.
readDouble :: ByteString -> MayFail Double
readDouble chars = maybeToEither _error $ readMaybe $ Char8.unpack chars
  where
    _error = "expected double in place of '" ++ showText chars ++ "' "
|
aeb69433cf81f0cf4755c351833b8a447c8958daa935bf5db3070d74be1f6967 | OCamlPro/ocaml-benchs | sequence_cps.ml | type ('s,'a) unfolder =
{unfold :
'r.
's
-> on_done:'r
-> on_skip:('s -> 'r)
-> on_yield:('s -> 'a -> 'r)
-> 'r}
(* A sequence: existentially hidden state ['s] paired with its unfolder. *)
type _ t =
  | Sequence : ('s * ('s,'a) unfolder) -> 'a t

(* Map [f] over every yielded element; done/skip steps are forwarded
   untouched. *)
let map (Sequence(s,{unfold})) ~f =
  Sequence(s, {unfold =
    fun s ~on_done ~on_skip ~on_yield ->
      let on_yield s a = on_yield s (f a) in
      unfold s ~on_done ~on_skip ~on_yield})

(* Keep only elements satisfying [f]; a rejected element becomes a skip
   step so the consumer keeps driving the sequence. *)
let filter (Sequence(s,{unfold})) ~f =
  Sequence(s, {unfold =
    fun s ~on_done ~on_skip ~on_yield ->
      let on_yield s a =
        if f a
        then on_yield s a
        else on_skip s in
      unfold s ~on_done ~on_skip ~on_yield})

(* Fold, recursive variant: each step either terminates with the
   accumulator, skips, or combines the yielded element via [f]. *)
let fold_1 (Sequence(s,{unfold})) ~init ~f =
  let rec loop s v =
    unfold s ~on_done:v ~on_skip:(fun s -> loop s v)
      ~on_yield:(fun s a -> loop s (f v a))
  in
  loop s init

(* Fold, imperative variant: drives the unfolder in a while-loop over
   mutable state/accumulator refs; each continuation returns a bool that
   tells the loop whether to keep going. *)
let fold_2 (Sequence(s,{unfold})) ~init ~f =
  let s_ref = ref s in
  let v_ref = ref init in
  while begin
    unfold
      !s_ref
      ~on_done:false
      ~on_skip:(fun s -> s_ref:=s; true)
      ~on_yield:
        (fun s a ->
          s_ref := s;
          v_ref := f !v_ref a;
          true)
  end do () done;
  !v_ref

(* Alias selecting the imperative variant as the default fold. *)
let fold = fold_2

(* Pipeline operator; shadows the standard (|>) for the rest of the file
   (presumably for compatibility with older compilers — confirm). *)
let (|>) x f = f x
| null | https://raw.githubusercontent.com/OCamlPro/ocaml-benchs/98047e112574e6bf55137dd8058f227a9f40281b/sequence/sequence_cps.ml | ocaml | type ('s,'a) unfolder =
{unfold :
'r.
's
-> on_done:'r
-> on_skip:('s -> 'r)
-> on_yield:('s -> 'a -> 'r)
-> 'r}
(* A sequence: existentially hidden state ['s] paired with its unfolder. *)
type _ t =
  | Sequence : ('s * ('s,'a) unfolder) -> 'a t
(* Map [f] over every yielded element; done/skip steps are forwarded
   untouched. *)
let map (Sequence (state, {unfold})) ~f =
  let unfold' st ~on_done ~on_skip ~on_yield =
    unfold st ~on_done ~on_skip ~on_yield:(fun st' x -> on_yield st' (f x))
  in
  Sequence (state, {unfold = unfold'})
(* Keep only elements satisfying [f]; a rejected element becomes a skip
   step so the consumer keeps driving the sequence. *)
let filter (Sequence (state, {unfold})) ~f =
  let unfold' st ~on_done ~on_skip ~on_yield =
    let yield_if_kept st' x =
      if f x then on_yield st' x else on_skip st'
    in
    unfold st ~on_done ~on_skip ~on_yield:yield_if_kept
  in
  Sequence (state, {unfold = unfold'})
(* Fold, recursive variant: each step either terminates with the
   accumulator, skips, or combines the yielded element via [f]. *)
let fold_1 (Sequence(s,{unfold})) ~init ~f =
  let rec loop s v =
    unfold s ~on_done:v ~on_skip:(fun s -> loop s v)
      ~on_yield:(fun s a -> loop s (f v a))
  in
  loop s init

(* Fold, imperative variant: drives the unfolder in a while-loop over
   mutable state/accumulator refs; each continuation returns a bool that
   tells the loop whether to keep going. *)
let fold_2 (Sequence(s,{unfold})) ~init ~f =
  let s_ref = ref s in
  let v_ref = ref init in
  while begin
    unfold
      !s_ref
      ~on_done:false
      ~on_skip:(fun s -> s_ref:=s; true)
      ~on_yield:
        (fun s a ->
          s_ref := s;
          v_ref := f !v_ref a;
          true)
  end do () done;
  !v_ref

(* Alias selecting the imperative variant as the default fold. *)
let fold = fold_2

(* Pipeline operator; shadows the standard (|>) for the rest of the file
   (presumably for compatibility with older compilers — confirm). *)
let (|>) x f = f x
| |
44f393c80aebc21a9c0d30db0b3f67ea0be8c5ace40d31aaba0ab0f2733c7733 | plumatic/grab-bag | core.clj | (ns kinesis.core
"Shared utils between publisher and client, such as message framing / schemas."
(:use plumbing.core)
(:require
[plumbing.serialize :as serialize])
(:import
[java.nio ByteBuffer]))
(set! *warn-on-reflection* true)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Encoding and decoding messages into records.
(defn- prepend-timestamp [^bytes b]
(let [bb (ByteBuffer/allocate (+ 8 (alength b)))]
(.putLong bb (millis))
(.put bb b)
(.array bb)))
(defn record-encoder
"Create an encoder function that can be passed Clojure messages, and periodically emits
sequences of byte arrays corresponding to Kinesis records. Each byte array will have up
to 50k of compressed and +default+-serialized messages, plus a timestamp to track
latency of kinesis processing."
[]
(comp #(mapv prepend-timestamp %)
(serialize/serialized-stream-packer
50k kinesis limit minus 8 - byte timestamp
serialize/+default-uncompressed+)))
(defn decode-record
"Decode a byte array from 'record-encoder' into a timestamp and sequence of messages."
[^bytes m]
(let [bb (ByteBuffer/wrap m)
date (.getLong bb)]
{:messages (serialize/serialized-unpack (serialize/get-bytes bb))
:date date}))
(defn env-stream
"Construct an env'd kinesis stream name from a base name and environment."
[env stream]
(str stream "-" (name env)))
(set! *warn-on-reflection* false)
| null | https://raw.githubusercontent.com/plumatic/grab-bag/a15e943322fbbf6f00790ce5614ba6f90de1a9b5/lib/kinesis/src/kinesis/core.clj | clojure |
Encoding and decoding messages into records. | (ns kinesis.core
"Shared utils between publisher and client, such as message framing / schemas."
(:use plumbing.core)
(:require
[plumbing.serialize :as serialize])
(:import
[java.nio ByteBuffer]))
(set! *warn-on-reflection* true)
(defn- prepend-timestamp [^bytes b]
(let [bb (ByteBuffer/allocate (+ 8 (alength b)))]
(.putLong bb (millis))
(.put bb b)
(.array bb)))
(defn record-encoder
"Create an encoder function that can be passed Clojure messages, and periodically emits
sequences of byte arrays corresponding to Kinesis records. Each byte array will have up
to 50k of compressed and +default+-serialized messages, plus a timestamp to track
latency of kinesis processing."
[]
(comp #(mapv prepend-timestamp %)
(serialize/serialized-stream-packer
50k kinesis limit minus 8 - byte timestamp
serialize/+default-uncompressed+)))
(defn decode-record
"Decode a byte array from 'record-encoder' into a timestamp and sequence of messages."
[^bytes m]
(let [bb (ByteBuffer/wrap m)
date (.getLong bb)]
{:messages (serialize/serialized-unpack (serialize/get-bytes bb))
:date date}))
(defn env-stream
"Construct an env'd kinesis stream name from a base name and environment."
[env stream]
(str stream "-" (name env)))
(set! *warn-on-reflection* false)
|
9c887e92b83f7ad069b18523800726895fef439492ddd24db031d7e72b82977c | phoe-trash/gateway | protocol.lisp | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;; GATEWAY
" phoe " Herda 2016
gateway.lisp
(in-package #:gateway)
(defaccessors sender recipient date-of contents
id name player avatar gender species colors shard
id name messages personas shard
dimensions x-dimension y-dimension
id username password email personas connection
id name world-map jewel personas chats lock
sexp)
(defgeneric find-persona (name))
(defgeneric location (object))
(defgeneric find-messages (chat &key sender recipient after-date before-date contents))
(defgeneric delete-message (message chat))
(defgeneric add-persona (persona chat))
(defgeneric delete-persona (persona chat))
(defgeneric make-password (passphrase))
(defgeneric password-matches-p (password passphrase))
(defgeneric object-at (world-map x y))
(defgeneric resize (world-map up left down right &key initial-element))
(defgeneric find-player (&key id username email))
(defgeneric output (object connection)) ;; done
(defgeneric input (connection &key safe-p)) ;; done <3
(defgeneric kill (connection)) ;; done
;; chat.lisp
(defprotocol id (chat)) ;; done
(defprotocol name (chat)) ;; done
(defprotocol messages (chat)) ;; done
(defprotocol personas (chat)) ;; done
(defprotocol shard (chat)) ;; done
(defprotocol send-message (message chat)) ;; done
(defprotocol find-messages
(chat &key sender recipient after-date before-date contents)) ;; done
(defprotocol delete-message (message chat)) ;; done
(defprotocol add-persona (persona chat)) ;; done
(defprotocol delete-persona (persona chat)) ;; done
world-map.lisp
(defprotocol dimensions (world-map)) ;; done
(defprotocol x-dimension (world-map)) ;; done
(defprotocol y-dimension (world-map)) ;; done
(defprotocol object-at (world-map x y)) ;; done
(defprotocol resize (world-map up left down right &key initial-element)) ;; done
;; player.lisp
(defprotocol id (player)) ;; done
(defprotocol username (player)) ;; done
(defprotocol password (player)) ;; done
(defprotocol email (player)) ;; done
(defprotocol personas (player)) ;; done
(defprotocol connection (player)) ;; done
(defprotocol send-message (message player)) ;; done
(defprotocol find-player (&key id username email)) ;; done
;;;; server protocol
;; shard.lisp
(defprotocol id (shard))
(defprotocol name (shard))
(defprotocol world-map (shard))
(defprotocol jewel (shard))
(defprotocol personas (shard))
(defprotocol chats (shard))
(defprotocol lock (shard))
;; connection.lisp
(defprotocol output (object connection)) ;; done
(defprotocol input (connection &key safe-p)) ;; done <3
(defprotocol kill (connection)) ;; done
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; SEXPABLE protocol
(defprotocol sexp (object))
;; MESSAGABLE protocol
(defprotocol send-message (message recipient))
;; MESSAGE protocol
(defclass message () ()
(:documentation "Must be SEXPABLE and IMMUTABLE.
Constructor arguments:
:SENDER - the sender of the message.
:RECIPIENT - the recipient of the message.
:DATE - object of type DATE.
:CONTENTS - contents of the message (a STRING).
"))
(defprotocol sender (message))
(defprotocol recipient (message))
(defprotocol date (message))
(defprotocol contents (message))
;; PASSWORD protocol
(defclass password () ()
(:documentation "Must be IMMUTABLE.
Constructor arguments:
:PASSPHRASE - a passphrase.
"))
(defprotocol password-matches-p (password passphrase))
;; PLAYER protocol
(defclass player () ()
(:documentation "Must be SEXPABLE and MESSAGABLE.
Constructor arguments:
:NAME - a STRING.
:EMAIL (optional) - a STRING.
:PASSWORD - a PASSWORD or a passphrase (a STRING).
"))
(defprotocol name (player))
(defprotocol email (player))
(defprotocol personas (player))
;; PERSONA protocol
(defclass persona () ()
(:documentation "Must be SEXPABLE and MESSAGABLE.
Constructor arguments:
:NAME - a STRING.
:PLAYER - a PLAYER.
:CHAT - a CHAT.
"))
(defprotocol name (persona))
(defprotocol player (persona))
(defprotocol chat (persona))
(defprotocol find-persona (name))
;; DATE protocol
(defclass date () ()
(:documentation "Must be SEXPABLE and IMMUTABLE.
Constructor arguments:
:TIMESTAMP (optional) - a timestamp, to be declared later.
"))
(defprotocol read-date (string))
(defprotocol print-object (date stream))
(defprotocol date= (date-1 date-2 &key fuzziness))
(defprotocol date< (date-1 date-2 &key fuzziness))
(defprotocol date> (date-1 date-2 &key fuzziness))
;; CHAT protocol
(defclass chat () ()
(:documentation "Must be SEXPABLE and MESSAGABLE.
Constructor arguments:
:NAME - a STRING.
"))
(defprotocol name (chat))
(defprotocol messages (chat))
(defprotocol personas (chat))
| null | https://raw.githubusercontent.com/phoe-trash/gateway/a8d579ccbafcaee8678caf59d365ec2eab0b1a7e/_old/__old/old/old_/protocol.lisp | lisp |
GATEWAY
done
done <3
done
chat.lisp
done
done
done
done
done
done
done
done
done
done
done
done
done
done
done
player.lisp
done
done
done
done
done
done
done
done
server protocol
shard.lisp
connection.lisp
done
done <3
done
SEXPABLE protocol
MESSAGABLE protocol
MESSAGE protocol
PASSWORD protocol
PLAYER protocol
PERSONA protocol
DATE protocol
CHAT protocol | " phoe " Herda 2016
gateway.lisp
(in-package #:gateway)
(defaccessors sender recipient date-of contents
id name player avatar gender species colors shard
id name messages personas shard
dimensions x-dimension y-dimension
id username password email personas connection
id name world-map jewel personas chats lock
sexp)
(defgeneric find-persona (name))
(defgeneric location (object))
(defgeneric find-messages (chat &key sender recipient after-date before-date contents))
(defgeneric delete-message (message chat))
(defgeneric add-persona (persona chat))
(defgeneric delete-persona (persona chat))
(defgeneric make-password (passphrase))
(defgeneric password-matches-p (password passphrase))
(defgeneric object-at (world-map x y))
(defgeneric resize (world-map up left down right &key initial-element))
(defgeneric find-player (&key id username email))
(defprotocol find-messages
world-map.lisp
(defprotocol id (shard))
(defprotocol name (shard))
(defprotocol world-map (shard))
(defprotocol jewel (shard))
(defprotocol personas (shard))
(defprotocol chats (shard))
(defprotocol lock (shard))
(defprotocol sexp (object))
(defprotocol send-message (message recipient))
(defclass message () ()
(:documentation "Must be SEXPABLE and IMMUTABLE.
Constructor arguments:
:SENDER - the sender of the message.
:RECIPIENT - the recipient of the message.
:DATE - object of type DATE.
:CONTENTS - contents of the message (a STRING).
"))
(defprotocol sender (message))
(defprotocol recipient (message))
(defprotocol date (message))
(defprotocol contents (message))
(defclass password () ()
(:documentation "Must be IMMUTABLE.
Constructor arguments:
:PASSPHRASE - a passphrase.
"))
(defprotocol password-matches-p (password passphrase))
(defclass player () ()
(:documentation "Must be SEXPABLE and MESSAGABLE.
Constructor arguments:
:NAME - a STRING.
:EMAIL (optional) - a STRING.
:PASSWORD - a PASSWORD or a passphrase (a STRING).
"))
(defprotocol name (player))
(defprotocol email (player))
(defprotocol personas (player))
(defclass persona () ()
(:documentation "Must be SEXPABLE and MESSAGABLE.
Constructor arguments:
:NAME - a STRING.
:PLAYER - a PLAYER.
:CHAT - a CHAT.
"))
(defprotocol name (persona))
(defprotocol player (persona))
(defprotocol chat (persona))
(defprotocol find-persona (name))
(defclass date () ()
(:documentation "Must be SEXPABLE and IMMUTABLE.
Constructor arguments:
:TIMESTAMP (optional) - a timestamp, to be declared later.
"))
(defprotocol read-date (string))
(defprotocol print-object (date stream))
(defprotocol date= (date-1 date-2 &key fuzziness))
(defprotocol date< (date-1 date-2 &key fuzziness))
(defprotocol date> (date-1 date-2 &key fuzziness))
(defclass chat () ()
(:documentation "Must be SEXPABLE and MESSAGABLE.
Constructor arguments:
:NAME - a STRING.
"))
(defprotocol name (chat))
(defprotocol messages (chat))
(defprotocol personas (chat))
|
6144479aa75526c34693d7d1bbd0ddc8fbe75310951866b5ae5f65ad95059a6b | dwayne/haskell-programming | DetermineTheType.hs | # LANGUAGE NoMonomorphismRestriction #
-- Toggle to see how the type of example changes
module DetermineTheType where
-- simple example
example = 1
| null | https://raw.githubusercontent.com/dwayne/haskell-programming/d08679e76cfd39985fa2ee3cd89d55c9aedfb531/ch5/DetermineTheType.hs | haskell | Toggle to see how the type of example changes
simple example | # LANGUAGE NoMonomorphismRestriction #
module DetermineTheType where
example = 1
|
9dc4fe355b11263f1037e9358549f7ce0c67136630cf1bb0e88796df77081194 | 8c6794b6/guile-tjit | command.scm | Repl commands
Copyright ( C ) 2001 , 2009 , 2010 , 2011 , 2012 , 2013 Free Software Foundation , Inc.
;; This library is free software; you can redistribute it and/or
;; modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 3 of the License , or ( at your option ) any later version .
;;
;; This library is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;; Lesser General Public License for more details.
;;
You should have received a copy of the GNU Lesser General Public
;; License along with this library; if not, write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA
02110 - 1301 USA
;;; Code:
(define-module (system repl command)
#:use-module (system base syntax)
#:use-module (system base pmatch)
#:use-module (system base compile)
#:use-module (system repl common)
#:use-module (system repl debug)
#:use-module (system vm disassembler)
#:use-module (system vm loader)
#:use-module (system vm program)
#:use-module (system vm trap-state)
#:use-module (system vm vm)
#:autoload (system base language) (lookup-language language-reader)
#:autoload (system vm trace) (call-with-trace)
#:use-module (ice-9 format)
#:use-module (ice-9 session)
#:use-module (ice-9 documentation)
#:use-module (ice-9 and-let-star)
#:use-module (ice-9 rdelim)
#:use-module (ice-9 control)
#:use-module ((ice-9 pretty-print) #:select ((pretty-print . pp)))
#:use-module ((system vm inspect) #:select ((inspect . %inspect)))
#:use-module (rnrs bytevectors)
#:use-module (statprof)
#:export (meta-command define-meta-command))
;;;
Meta command interface
;;;
(define *command-table*
'((help (help h) (show) (apropos a) (describe d))
(module (module m) (import use) (load l) (reload re) (binding b) (in))
(language (language L))
(compile (compile c) (compile-file cc)
(expand exp) (optimize opt)
(disassemble x) (disassemble-file xx))
(profile (time t) (profile pr) (trace tr))
(debug (backtrace bt) (up) (down) (frame fr)
(procedure proc) (locals) (error-message error)
(break br bp) (break-at-source break-at bs)
(step s) (step-instruction si)
(next n) (next-instruction ni)
(finish)
(tracepoint tp)
(traps) (delete del) (disable) (enable)
(registers regs))
(inspect (inspect i) (pretty-print pp))
(system (gc) (statistics stat) (option o)
(quit q continue cont))))
(define *show-table*
'((show (warranty w) (copying c) (version v))))
(define (group-name g) (car g))
(define (group-commands g) (cdr g))
(define *command-infos* (make-hash-table))
(define (command-name c) (car c))
(define (command-abbrevs c) (cdr c))
(define (command-info c) (hashq-ref *command-infos* (command-name c)))
(define (command-procedure c) (command-info-procedure (command-info c)))
(define (command-doc c) (procedure-documentation (command-procedure c)))
(define (make-command-info proc arguments-reader)
(cons proc arguments-reader))
(define (command-info-procedure info)
(car info))
(define (command-info-arguments-reader info)
(cdr info))
(define (command-usage c)
(let ((doc (command-doc c)))
(substring doc 0 (string-index doc #\newline))))
(define (command-summary c)
(let* ((doc (command-doc c))
(start (1+ (string-index doc #\newline))))
(cond ((string-index doc #\newline start)
=> (lambda (end) (substring doc start end)))
(else (substring doc start)))))
(define (lookup-group name)
(assq name *command-table*))
(define* (lookup-command key #:optional (table *command-table*))
(let loop ((groups table) (commands '()))
(cond ((and (null? groups) (null? commands)) #f)
((null? commands)
(loop (cdr groups) (cdar groups)))
((memq key (car commands)) (car commands))
(else (loop groups (cdr commands))))))
(define* (display-group group #:optional (abbrev? #t))
(format #t "~:(~A~) Commands~:[~; [abbrev]~]:~2%" (group-name group) abbrev?)
(for-each (lambda (c)
(display-summary (command-usage c)
(if abbrev? (command-abbrevs c) '())
(command-summary c)))
(group-commands group))
(newline))
(define (display-command command)
(display "Usage: ")
(display (command-doc command))
(newline))
(define (display-summary usage abbrevs summary)
(let* ((usage-len (string-length usage))
(abbrevs (if (pair? abbrevs)
(format #f "[,~A~{ ,~A~}]" (car abbrevs) (cdr abbrevs))
""))
(abbrevs-len (string-length abbrevs)))
(format #t " ,~A~A~A - ~A\n"
usage
(cond
((> abbrevs-len 32)
(error "abbrevs too long" abbrevs))
((> (+ usage-len abbrevs-len) 32)
(format #f "~%~v_" (+ 2 (- 32 abbrevs-len))))
(else
(format #f "~v_" (- 32 abbrevs-len usage-len))))
abbrevs
summary)))
(define (read-command repl)
(catch #t
(lambda () (read))
(lambda (key . args)
(pmatch args
((,subr ,msg ,args . ,rest)
(format #t "Throw to key `~a' while reading command:\n" key)
(display-error #f (current-output-port) subr msg args rest))
(else
(format #t "Throw to key `~a' with args `~s' while reading command.\n"
key args)))
(force-output)
*unspecified*)))
(define (read-command-arguments c repl)
((command-info-arguments-reader (command-info c)) repl))
(define (meta-command repl)
(let ((command (read-command repl)))
(cond
((eq? command *unspecified*)) ; read error, already signalled; pass.
((not (symbol? command))
(format #t "Meta-command not a symbol: ~s~%" command))
((lookup-command command)
=> (lambda (c)
(and=> (read-command-arguments c repl)
(lambda (args) (apply (command-procedure c) repl args)))))
(else
(format #t "Unknown meta command: ~A~%" command)))))
(define (add-meta-command! name category proc argument-reader)
(hashq-set! *command-infos* name (make-command-info proc argument-reader))
(if category
(let ((entry (assq category *command-table*)))
(if entry
(set-cdr! entry (append (cdr entry) (list (list name))))
(set! *command-table*
(append *command-table*
(list (list category (list name)))))))))
(define-syntax define-meta-command
(syntax-rules ()
((_ ((name category) repl (expression0 ...) . datums) docstring b0 b1 ...)
(add-meta-command!
'name
'category
(lambda* (repl expression0 ... . datums)
docstring
b0 b1 ...)
(lambda (repl)
(define (handle-read-error form-name key args)
(pmatch args
((,subr ,msg ,args . ,rest)
(format #t "Throw to key `~a' while reading ~@[argument `~A' of ~]command `~A':\n"
key form-name 'name)
(display-error #f (current-output-port) subr msg args rest))
(else
(format #t "Throw to key `~a' with args `~s' while reading ~@[ argument `~A' of ~]command `~A'.\n"
key args form-name 'name)))
(abort))
(% (let* ((expression0
(catch #t
(lambda ()
(repl-reader
""
(lambda* (#:optional (port (current-input-port)))
((language-reader (repl-language repl))
port (current-module)))))
(lambda (k . args)
(handle-read-error 'expression0 k args))))
...)
(append
(list expression0 ...)
(catch #t
(lambda ()
(let ((port (open-input-string (read-line))))
(let lp ((out '()))
(let ((x (read port)))
(if (eof-object? x)
(reverse out)
(lp (cons x out)))))))
(lambda (k . args)
(handle-read-error #f k args)))))
(lambda (k) #f))))) ; the abort handler
((_ ((name category) repl . datums) docstring b0 b1 ...)
(define-meta-command ((name category) repl () . datums)
docstring b0 b1 ...))
((_ (name repl (expression0 ...) . datums) docstring b0 b1 ...)
(define-meta-command ((name #f) repl (expression0 ...) . datums)
docstring b0 b1 ...))
((_ (name repl . datums) docstring b0 b1 ...)
(define-meta-command ((name #f) repl () . datums)
docstring b0 b1 ...))))
;;;
;;; Help commands
;;;
(define-meta-command (help repl . args)
"help [all | GROUP | [-c] COMMAND]
Show help.
With one argument, tries to look up the argument as a group name, giving
help on that group if successful. Otherwise tries to look up the
argument as a command, giving help on the command.
If there is a command whose name is also a group name, use the ,help
-c COMMAND form to give help on the command instead of the group.
Without any argument, a list of help commands and command groups
are displayed."
(pmatch args
(()
(display-group (lookup-group 'help))
(display "Command Groups:\n\n")
(display-summary "help all" #f "List all commands")
(for-each (lambda (g)
(let* ((name (symbol->string (group-name g)))
(usage (string-append "help " name))
(header (string-append "List " name " commands")))
(display-summary usage #f header)))
(cdr *command-table*))
(newline)
(display
"Type `,help -c COMMAND' to show documentation of a particular command.")
(newline))
((all)
(for-each display-group *command-table*))
((,group) (guard (lookup-group group))
(display-group (lookup-group group)))
((,command) (guard (lookup-command command))
(display-command (lookup-command command)))
((-c ,command) (guard (lookup-command command))
(display-command (lookup-command command)))
((,command)
(format #t "Unknown command or group: ~A~%" command))
((-c ,command)
(format #t "Unknown command: ~A~%" command))
(else
(format #t "Bad arguments: ~A~%" args))))
(define-meta-command (show repl . args)
"show [TOPIC]
Gives information about Guile.
With one argument, tries to show a particular piece of information;
currently supported topics are `warranty' (or `w'), `copying' (or `c'),
and `version' (or `v').
Without any argument, a list of topics is displayed."
(pmatch args
(()
(display-group (car *show-table*) #f)
(newline))
((,topic) (guard (lookup-command topic *show-table*))
((command-procedure (lookup-command topic *show-table*)) repl))
((,command)
(format #t "Unknown topic: ~A~%" command))
(else
(format #t "Bad arguments: ~A~%" args))))
;;; `warranty', `copying' and `version' are "hidden" meta-commands, only
;;; accessible via `show'. They have an entry in *command-infos* but not
;;; in *command-table*.
(define-meta-command (warranty repl)
"show warranty
Details on the lack of warranty."
(display *warranty*)
(newline))
(define-meta-command (copying repl)
"show copying
Show the LGPLv3."
(display *copying*)
(newline))
(define-meta-command (version repl)
"show version
Version information."
(display *version*)
(newline))
(define-meta-command (apropos repl regexp)
"apropos REGEXP
Find bindings/modules/packages."
(apropos (->string regexp)))
(define-meta-command (describe repl (form))
"describe OBJ
Show description/documentation."
(display
(object-documentation
(let ((input (repl-parse repl form)))
(if (symbol? input)
(module-ref (current-module) input)
(repl-eval repl input)))))
(newline))
(define-meta-command (option repl . args)
"option [NAME] [EXP]
List/show/set options."
(pmatch args
(()
(for-each (lambda (spec)
(format #t " ~A~24t~A\n" (car spec) (cadr spec)))
(repl-options repl)))
((,name)
(display (repl-option-ref repl name))
(newline))
((,name ,exp)
;; Would be nice to evaluate in the current language, but the REPL
;; option parser doesn't permit that, currently.
(repl-option-set! repl name (eval exp (current-module))))))
(define-meta-command (quit repl)
"quit
Quit this session."
(throw 'quit))
;;;
;;; Module commands
;;;
(define-meta-command (module repl . args)
"module [MODULE]
Change modules / Show current module."
(pmatch args
(() (puts (module-name (current-module))))
((,mod-name) (guard (list? mod-name))
(set-current-module (resolve-module mod-name)))
(,mod-name (set-current-module (resolve-module mod-name)))))
(define-meta-command (import repl . args)
"import [MODULE ...]
Import modules / List those imported."
(let ()
(define (use name)
(let ((mod (resolve-interface name)))
(if mod
(module-use! (current-module) mod)
(format #t "No such module: ~A~%" name))))
(if (null? args)
(for-each puts (map module-name (module-uses (current-module))))
(for-each use args))))
(define-meta-command (load repl file)
"load FILE
Load a file in the current module."
(load (->string file)))
(define-meta-command (reload repl . args)
"reload [MODULE]
Reload the given module, or the current module if none was given."
(pmatch args
(() (reload-module (current-module)))
((,mod-name) (guard (list? mod-name))
(reload-module (resolve-module mod-name)))
(,mod-name (reload-module (resolve-module mod-name)))))
(define-meta-command (binding repl)
"binding
List current bindings."
(module-for-each (lambda (k v) (format #t "~23A ~A\n" k v))
(current-module)))
(define-meta-command (in repl module command-or-expression . args)
"in MODULE COMMAND-OR-EXPRESSION
Evaluate an expression or command in the context of module."
(let ((m (resolve-module module #:ensure #f)))
(if m
(pmatch command-or-expression
(('unquote ,command) (guard (lookup-command command))
(save-module-excursion
(lambda ()
(set-current-module m)
(apply (command-procedure (lookup-command command)) repl args))))
(,expression
(guard (null? args))
(repl-print repl (eval expression m)))
(else
(format #t "Invalid arguments to `in': expected a single expression or a command.\n")))
(format #t "No such module: ~s\n" module))))
;;;
Language commands
;;;
(define-meta-command (language repl name)
"language LANGUAGE
Change languages."
(let ((lang (lookup-language name))
(cur (repl-language repl)))
(format #t "Happy hacking with ~a! To switch back, type `,L ~a'.\n"
(language-title lang) (language-name cur))
(current-language lang)
(set! (repl-language repl) lang)))
;;;
;;; Compile commands
;;;
(define (load-image x)
(let ((thunk (load-thunk-from-memory x)))
(find-mapped-elf-image (program-code thunk))))
(define-meta-command (compile repl (form))
"compile EXP
Generate compiled code."
(let ((x (repl-compile repl (repl-parse repl form))))
(cond ((bytevector? x) (disassemble-image (load-image x)))
(else (repl-print repl x)))))
(define-meta-command (compile-file repl file . opts)
"compile-file FILE
Compile a file."
(compile-file (->string file) #:opts opts))
(define-meta-command (expand repl (form))
"expand EXP
Expand any macros in a form."
(let ((x (repl-expand repl (repl-parse repl form))))
(run-hook before-print-hook x)
(pp x)))
(define-meta-command (optimize repl (form))
"optimize EXP
Run the optimizer on a piece of code and print the result."
(let ((x (repl-optimize repl (repl-parse repl form))))
(run-hook before-print-hook x)
(pp x)))
(define-meta-command (disassemble repl (form))
"disassemble EXP
Disassemble a compiled procedure."
(let ((obj (repl-eval repl (repl-parse repl form))))
(cond
((program? obj)
(disassemble-program obj))
((bytevector? obj)
(disassemble-image (load-image obj)))
(else
(format #t
"Argument to ,disassemble not a procedure or a bytevector: ~a~%"
obj)))))
(define-meta-command (disassemble-file repl file)
"disassemble-file FILE
Disassemble a file."
(disassemble-file (->string file)))
;;;
;;; Profile commands
;;;
(define-meta-command (time repl (form))
"time EXP
Time execution."
(let* ((gc-start (gc-run-time))
(real-start (get-internal-real-time))
(run-start (get-internal-run-time))
(result (repl-eval repl (repl-parse repl form)))
(run-end (get-internal-run-time))
(real-end (get-internal-real-time))
(gc-end (gc-run-time)))
(define (diff start end)
(/ (- end start) 1.0 internal-time-units-per-second))
(repl-print repl result)
(format #t ";; ~,6Fs real time, ~,6Fs run time. ~,6Fs spent in GC.\n"
(diff real-start real-end)
(diff run-start run-end)
(diff gc-start gc-end))
result))
(define-meta-command (profile repl (form) . opts)
"profile EXP
Profile execution."
FIXME opts
(apply statprof
(repl-prepare-eval-thunk repl (repl-parse repl form))
opts))
(define-meta-command (trace repl (form) . opts)
"trace EXP
Trace execution."
;; FIXME: doc options, or somehow deal with them better
(apply call-with-trace
(repl-prepare-eval-thunk repl (repl-parse repl form))
(cons* #:width (terminal-width) opts)))
;;;
;;; Debug commands
;;;
(define-syntax define-stack-command
(lambda (x)
(syntax-case x ()
((_ (name repl . args) docstring body body* ...)
#`(define-meta-command (name repl . args)
docstring
(let ((debug (repl-debug repl)))
(if debug
(letrec-syntax
((#,(datum->syntax #'repl 'frames)
(identifier-syntax (debug-frames debug)))
(#,(datum->syntax #'repl 'message)
(identifier-syntax (debug-error-message debug)))
(#,(datum->syntax #'repl 'index)
(identifier-syntax
(id (debug-index debug))
((set! id exp) (set! (debug-index debug) exp))))
(#,(datum->syntax #'repl 'cur)
(identifier-syntax
(vector-ref #,(datum->syntax #'repl 'frames)
#,(datum->syntax #'repl 'index)))))
body body* ...)
(format #t "Nothing to debug.~%"))))))))
(define-stack-command (backtrace repl #:optional count
#:key (width (terminal-width)) full?)
"backtrace [COUNT] [#:width W] [#:full? F]
Print a backtrace.
Print a backtrace of all stack frames, or innermost COUNT frames.
If COUNT is negative, the last COUNT frames will be shown."
(print-frames frames
#:count count
#:width width
#:full? full?))
(define-stack-command (up repl #:optional (count 1))
"up [COUNT]
Select a calling stack frame.
Select and print stack frames that called this one.
An argument says how many frames up to go."
(cond
((or (not (integer? count)) (<= count 0))
(format #t "Invalid argument to `up': expected a positive integer for COUNT.~%"))
((>= (+ count index) (vector-length frames))
(cond
((= index (1- (vector-length frames)))
(format #t "Already at outermost frame.\n"))
(else
(set! index (1- (vector-length frames)))
(print-frame cur #:index index))))
(else
(set! index (+ count index))
(print-frame cur #:index index))))
(define-stack-command (down repl #:optional (count 1))
"down [COUNT]
Select a called stack frame.
Select and print stack frames called by this one.
An argument says how many frames down to go."
(cond
((or (not (integer? count)) (<= count 0))
(format #t "Invalid argument to `down': expected a positive integer for COUNT.~%"))
((< (- index count) 0)
(cond
((zero? index)
(format #t "Already at innermost frame.\n"))
(else
(set! index 0)
(print-frame cur #:index index))))
(else
(set! index (- index count))
(print-frame cur #:index index))))
(define-stack-command (frame repl #:optional idx)
"frame [IDX]
Show a frame.
Show the selected frame.
With an argument, select a frame by index, then show it."
(cond
(idx
(cond
((or (not (integer? idx)) (< idx 0))
(format #t "Invalid argument to `frame': expected a non-negative integer for IDX.~%"))
((< idx (vector-length frames))
(set! index idx)
(print-frame cur #:index index))
(else
(format #t "No such frame.~%"))))
(else (print-frame cur #:index index))))
(define-stack-command (locals repl #:key (width (terminal-width)))
"locals
Show local variables.
Show locally-bound variables in the selected frame."
(print-locals cur #:width width))
(define-stack-command (error-message repl)
"error-message
Show error message.
Display the message associated with the error that started the current
debugging REPL."
(format #t "~a~%" (if (string? message) message "No error message")))
(define-meta-command (break repl (form))
"break PROCEDURE
Break on calls to PROCEDURE.
Starts a recursive prompt when PROCEDURE is called."
(let ((proc (repl-eval repl (repl-parse repl form))))
(if (not (procedure? proc))
(error "Not a procedure: ~a" proc)
(let ((idx (add-trap-at-procedure-call! proc)))
(format #t "Trap ~a: ~a.~%" idx (trap-name idx))))))
(define-meta-command (break-at-source repl file line)
"break-at-source FILE LINE
Break when control reaches the given source location.
Starts a recursive prompt when control reaches line LINE of file FILE.
Note that the given source location must be inside a procedure."
(let ((file (if (symbol? file) (symbol->string file) file)))
(let ((idx (add-trap-at-source-location! file line)))
(format #t "Trap ~a: ~a.~%" idx (trap-name idx)))))
(define (repl-pop-continuation-resumer repl msg)
;; Capture the dynamic environment with this prompt thing. The result
;; is a procedure that takes a frame and number of values returned.
(% (call-with-values
(lambda ()
(abort
(lambda (k)
;; Call frame->stack-vector before reinstating the
;; continuation, so that we catch the %stacks fluid at
;; the time of capture.
(lambda (frame . values)
(k frame
(frame->stack-vector
(frame-previous frame))
values)))))
(lambda (from stack values)
(format #t "~a~%" msg)
(if (null? values)
(format #t "No return values.~%")
(begin
(format #t "Return values:~%")
(for-each (lambda (x) (repl-print repl x)) values)))
((module-ref (resolve-interface '(system repl repl)) 'start-repl)
#:debug (make-debug stack 0 msg))))))
(define-stack-command (finish repl)
"finish
Run until the current frame finishes.
Resume execution, breaking when the current frame finishes."
(let ((handler (repl-pop-continuation-resumer
repl (format #f "Return from ~a" cur))))
(add-ephemeral-trap-at-frame-finish! cur handler)
(throw 'quit)))
(define (repl-next-resumer msg)
;; Capture the dynamic environment with this prompt thing. The
;; result is a procedure that takes a frame.
(% (let ((stack (abort
(lambda (k)
;; Call frame->stack-vector before reinstating the
;; continuation, so that we catch the %stacks fluid
;; at the time of capture.
(lambda (frame)
(k (frame->stack-vector frame)))))))
(format #t "~a~%" msg)
((module-ref (resolve-interface '(system repl repl)) 'start-repl)
#:debug (make-debug stack 0 msg)))))
(define-stack-command (step repl)
"step
Step until control reaches a different source location.
Step until control reaches a different source location."
(let ((msg (format #f "Step into ~a" cur)))
(add-ephemeral-stepping-trap! cur (repl-next-resumer msg)
#:into? #t #:instruction? #f)
(throw 'quit)))
(define-stack-command (step-instruction repl)
"step-instruction
Step until control reaches a different instruction.
Step until control reaches a different VM instruction."
(let ((msg (format #f "Step into ~a" cur)))
(add-ephemeral-stepping-trap! cur (repl-next-resumer msg)
#:into? #t #:instruction? #t)
(throw 'quit)))
(define-stack-command (next repl)
"next
Step until control reaches a different source location in the current frame.
Step until control reaches a different source location in the current frame."
(let ((msg (format #f "Step into ~a" cur)))
(add-ephemeral-stepping-trap! cur (repl-next-resumer msg)
#:into? #f #:instruction? #f)
(throw 'quit)))
(define-stack-command (next-instruction repl)
"next-instruction
Step until control reaches a different instruction in the current frame.
Step until control reaches a different VM instruction in the current frame."
(let ((msg (format #f "Step into ~a" cur)))
(add-ephemeral-stepping-trap! cur (repl-next-resumer msg)
#:into? #f #:instruction? #t)
(throw 'quit)))
(define-meta-command (tracepoint repl (form))
"tracepoint PROCEDURE
Add a tracepoint to PROCEDURE.
A tracepoint will print out the procedure and its arguments, when it is
called, and its return value(s) when it returns."
(let ((proc (repl-eval repl (repl-parse repl form))))
(if (not (procedure? proc))
(error "Not a procedure: ~a" proc)
(let ((idx (add-trace-at-procedure-call! proc)))
(format #t "Trap ~a: ~a.~%" idx (trap-name idx))))))
(define-meta-command (traps repl)
"traps
Show the set of currently attached traps.
Show the set of currently attached traps (breakpoints and tracepoints)."
(let ((traps (list-traps)))
(if (null? traps)
(format #t "No traps set.~%")
(for-each (lambda (idx)
(format #t " ~a: ~a~a~%"
idx (trap-name idx)
(if (trap-enabled? idx) "" " (disabled)")))
traps))))
(define-meta-command (delete repl idx)
"delete IDX
Delete a trap.
Delete a trap."
(if (not (integer? idx))
(error "expected a trap index (a non-negative integer)" idx)
(delete-trap! idx)))
(define-meta-command (disable repl idx)
"disable IDX
Disable a trap.
Disable a trap."
(if (not (integer? idx))
(error "expected a trap index (a non-negative integer)" idx)
(disable-trap! idx)))
(define-meta-command (enable repl idx)
"enable IDX
Enable a trap.
Enable a trap."
(if (not (integer? idx))
(error "expected a trap index (a non-negative integer)" idx)
(enable-trap! idx)))
(define-stack-command (registers repl)
"registers
Print registers.
Print the registers of the current frame."
(print-registers cur))
(define-meta-command (width repl #:optional x)
"width [X]
Set debug output width.
Set the number of screen columns in the output from `backtrace' and
`locals'."
(terminal-width x)
(format #t "Set screen width to ~a columns.~%" (terminal-width)))
;;;
;;; Inspection commands
;;;
(define-meta-command (inspect repl (form))
"inspect EXP
Inspect the result(s) of evaluating EXP."
(call-with-values (repl-prepare-eval-thunk repl (repl-parse repl form))
(lambda args
(for-each %inspect args))))
(define-meta-command (pretty-print repl (form))
"pretty-print EXP
Pretty-print the result(s) of evaluating EXP."
(call-with-values (repl-prepare-eval-thunk repl (repl-parse repl form))
(lambda args
(for-each
(lambda (x)
(run-hook before-print-hook x)
(pp x))
args))))
;;;
;;; System commands
;;;
(define-meta-command (gc repl)
"gc
Garbage collection."
(gc))
(define-meta-command (statistics repl)
"statistics
Display statistics."
(let ((this-tms (times))
(this-gcs (gc-stats))
(last-tms (repl-tm-stats repl))
(last-gcs (repl-gc-stats repl)))
GC times
(let ((this-times (assq-ref this-gcs 'gc-times))
(last-times (assq-ref last-gcs 'gc-times)))
(display-diff-stat "GC times:" #t this-times last-times "times")
(newline))
;; Memory size
(let ((this-heap (assq-ref this-gcs 'heap-size))
(this-free (assq-ref this-gcs 'heap-free-size)))
(display-stat-title "Memory size:" "current" "limit")
(display-stat "heap" #f (- this-heap this-free) this-heap "bytes")
(newline))
;; Cells collected
(let ((this-alloc (assq-ref this-gcs 'heap-total-allocated))
(last-alloc (assq-ref last-gcs 'heap-total-allocated)))
(display-stat-title "Bytes allocated:" "diff" "total")
(display-diff-stat "allocated" #f this-alloc last-alloc "bytes")
(newline))
GC time taken
(let ((this-total (assq-ref this-gcs 'gc-time-taken))
(last-total (assq-ref last-gcs 'gc-time-taken)))
(display-stat-title "GC time taken:" "diff" "total")
(display-time-stat "total" this-total last-total)
(newline))
;; Process time spent
(let ((this-utime (tms:utime this-tms))
(last-utime (tms:utime last-tms))
(this-stime (tms:stime this-tms))
(last-stime (tms:stime last-tms))
(this-cutime (tms:cutime this-tms))
(last-cutime (tms:cutime last-tms))
(this-cstime (tms:cstime this-tms))
(last-cstime (tms:cstime last-tms)))
(display-stat-title "Process time spent:" "diff" "total")
(display-time-stat "user" this-utime last-utime)
(display-time-stat "system" this-stime last-stime)
(display-time-stat "child user" this-cutime last-cutime)
(display-time-stat "child system" this-cstime last-cstime)
(newline))
;; Save statistics
;; Save statistics
(set! (repl-tm-stats repl) this-tms)
(set! (repl-gc-stats repl) this-gcs)))
(define (display-stat title flag field1 field2 unit)
(let ((fmt (format #f "~~20~AA ~~10@A /~~10@A ~~A~~%" (if flag "" "@"))))
(format #t fmt title field1 field2 unit)))
(define (display-stat-title title field1 field2)
(display-stat title #t field1 field2 ""))
(define (display-diff-stat title flag this last unit)
(display-stat title flag (- this last) this unit))
(define (display-time-stat title this last)
(define (conv num)
(format #f "~10,2F" (exact->inexact (/ num internal-time-units-per-second))))
(display-stat title #f (conv (- this last)) (conv this) "s"))
(define (display-mips-stat title this-time this-clock last-time last-clock)
(define (mips time clock)
(if (= time 0) "----" (format #f "~10,2F" (/ clock time 1000000.0))))
(display-stat title #f
(mips (- this-time last-time) (- this-clock last-clock))
(mips this-time this-clock) "mips"))
| null | https://raw.githubusercontent.com/8c6794b6/guile-tjit/9566e480af2ff695e524984992626426f393414f/module/system/repl/command.scm | scheme | This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
either
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
License along with this library; if not, write to the Free Software
Code:
read error, already signalled; pass.
the abort handler
Help commands
`warranty', `copying' and `version' are "hidden" meta-commands, only
accessible via `show'. They have an entry in *command-infos* but not
in *command-table*.
Would be nice to evaluate in the current language, but the REPL
option parser doesn't permit that, currently.
Module commands
Compile commands
Profile commands
FIXME: doc options, or somehow deal with them better
Debug commands
Capture the dynamic environment with this prompt thing. The result
is a procedure that takes a frame and number of values returned.
Call frame->stack-vector before reinstating the
continuation, so that we catch the %stacks fluid at
the time of capture.
Capture the dynamic environment with this prompt thing. The
result is a procedure that takes a frame.
Call frame->stack-vector before reinstating the
continuation, so that we catch the %stacks fluid
at the time of capture.
Inspection commands
System commands
Memory size
Cells collected
Process time spent
Save statistics
Save statistics | Repl commands
Copyright ( C ) 2001 , 2009 , 2010 , 2011 , 2012 , 2013 Free Software Foundation , Inc.
version 3 of the License , or ( at your option ) any later version .
You should have received a copy of the GNU Lesser General Public
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA
02110 - 1301 USA
(define-module (system repl command)
#:use-module (system base syntax)
#:use-module (system base pmatch)
#:use-module (system base compile)
#:use-module (system repl common)
#:use-module (system repl debug)
#:use-module (system vm disassembler)
#:use-module (system vm loader)
#:use-module (system vm program)
#:use-module (system vm trap-state)
#:use-module (system vm vm)
#:autoload (system base language) (lookup-language language-reader)
#:autoload (system vm trace) (call-with-trace)
#:use-module (ice-9 format)
#:use-module (ice-9 session)
#:use-module (ice-9 documentation)
#:use-module (ice-9 and-let-star)
#:use-module (ice-9 rdelim)
#:use-module (ice-9 control)
#:use-module ((ice-9 pretty-print) #:select ((pretty-print . pp)))
#:use-module ((system vm inspect) #:select ((inspect . %inspect)))
#:use-module (rnrs bytevectors)
#:use-module (statprof)
#:export (meta-command define-meta-command))
Meta command interface
(define *command-table*
'((help (help h) (show) (apropos a) (describe d))
(module (module m) (import use) (load l) (reload re) (binding b) (in))
(language (language L))
(compile (compile c) (compile-file cc)
(expand exp) (optimize opt)
(disassemble x) (disassemble-file xx))
(profile (time t) (profile pr) (trace tr))
(debug (backtrace bt) (up) (down) (frame fr)
(procedure proc) (locals) (error-message error)
(break br bp) (break-at-source break-at bs)
(step s) (step-instruction si)
(next n) (next-instruction ni)
(finish)
(tracepoint tp)
(traps) (delete del) (disable) (enable)
(registers regs))
(inspect (inspect i) (pretty-print pp))
(system (gc) (statistics stat) (option o)
(quit q continue cont))))
(define *show-table*
'((show (warranty w) (copying c) (version v))))
(define (group-name g) (car g))
(define (group-commands g) (cdr g))
(define *command-infos* (make-hash-table))
(define (command-name c) (car c))
(define (command-abbrevs c) (cdr c))
(define (command-info c) (hashq-ref *command-infos* (command-name c)))
(define (command-procedure c) (command-info-procedure (command-info c)))
(define (command-doc c) (procedure-documentation (command-procedure c)))
(define (make-command-info proc arguments-reader)
(cons proc arguments-reader))
(define (command-info-procedure info)
(car info))
(define (command-info-arguments-reader info)
(cdr info))
(define (command-usage c)
(let ((doc (command-doc c)))
(substring doc 0 (string-index doc #\newline))))
(define (command-summary c)
(let* ((doc (command-doc c))
(start (1+ (string-index doc #\newline))))
(cond ((string-index doc #\newline start)
=> (lambda (end) (substring doc start end)))
(else (substring doc start)))))
(define (lookup-group name)
(assq name *command-table*))
(define* (lookup-command key #:optional (table *command-table*))
(let loop ((groups table) (commands '()))
(cond ((and (null? groups) (null? commands)) #f)
((null? commands)
(loop (cdr groups) (cdar groups)))
((memq key (car commands)) (car commands))
(else (loop groups (cdr commands))))))
(define* (display-group group #:optional (abbrev? #t))
(format #t "~:(~A~) Commands~:[~; [abbrev]~]:~2%" (group-name group) abbrev?)
(for-each (lambda (c)
(display-summary (command-usage c)
(if abbrev? (command-abbrevs c) '())
(command-summary c)))
(group-commands group))
(newline))
(define (display-command command)
(display "Usage: ")
(display (command-doc command))
(newline))
(define (display-summary usage abbrevs summary)
(let* ((usage-len (string-length usage))
(abbrevs (if (pair? abbrevs)
(format #f "[,~A~{ ,~A~}]" (car abbrevs) (cdr abbrevs))
""))
(abbrevs-len (string-length abbrevs)))
(format #t " ,~A~A~A - ~A\n"
usage
(cond
((> abbrevs-len 32)
(error "abbrevs too long" abbrevs))
((> (+ usage-len abbrevs-len) 32)
(format #f "~%~v_" (+ 2 (- 32 abbrevs-len))))
(else
(format #f "~v_" (- 32 abbrevs-len usage-len))))
abbrevs
summary)))
(define (read-command repl)
(catch #t
(lambda () (read))
(lambda (key . args)
(pmatch args
((,subr ,msg ,args . ,rest)
(format #t "Throw to key `~a' while reading command:\n" key)
(display-error #f (current-output-port) subr msg args rest))
(else
(format #t "Throw to key `~a' with args `~s' while reading command.\n"
key args)))
(force-output)
*unspecified*)))
(define (read-command-arguments c repl)
((command-info-arguments-reader (command-info c)) repl))
(define (meta-command repl)
(let ((command (read-command repl)))
(cond
((not (symbol? command))
(format #t "Meta-command not a symbol: ~s~%" command))
((lookup-command command)
=> (lambda (c)
(and=> (read-command-arguments c repl)
(lambda (args) (apply (command-procedure c) repl args)))))
(else
(format #t "Unknown meta command: ~A~%" command)))))
(define (add-meta-command! name category proc argument-reader)
(hashq-set! *command-infos* name (make-command-info proc argument-reader))
(if category
(let ((entry (assq category *command-table*)))
(if entry
(set-cdr! entry (append (cdr entry) (list (list name))))
(set! *command-table*
(append *command-table*
(list (list category (list name)))))))))
(define-syntax define-meta-command
(syntax-rules ()
((_ ((name category) repl (expression0 ...) . datums) docstring b0 b1 ...)
(add-meta-command!
'name
'category
(lambda* (repl expression0 ... . datums)
docstring
b0 b1 ...)
(lambda (repl)
(define (handle-read-error form-name key args)
(pmatch args
((,subr ,msg ,args . ,rest)
(format #t "Throw to key `~a' while reading ~@[argument `~A' of ~]command `~A':\n"
key form-name 'name)
(display-error #f (current-output-port) subr msg args rest))
(else
(format #t "Throw to key `~a' with args `~s' while reading ~@[ argument `~A' of ~]command `~A'.\n"
key args form-name 'name)))
(abort))
(% (let* ((expression0
(catch #t
(lambda ()
(repl-reader
""
(lambda* (#:optional (port (current-input-port)))
((language-reader (repl-language repl))
port (current-module)))))
(lambda (k . args)
(handle-read-error 'expression0 k args))))
...)
(append
(list expression0 ...)
(catch #t
(lambda ()
(let ((port (open-input-string (read-line))))
(let lp ((out '()))
(let ((x (read port)))
(if (eof-object? x)
(reverse out)
(lp (cons x out)))))))
(lambda (k . args)
(handle-read-error #f k args)))))
((_ ((name category) repl . datums) docstring b0 b1 ...)
(define-meta-command ((name category) repl () . datums)
docstring b0 b1 ...))
((_ (name repl (expression0 ...) . datums) docstring b0 b1 ...)
(define-meta-command ((name #f) repl (expression0 ...) . datums)
docstring b0 b1 ...))
((_ (name repl . datums) docstring b0 b1 ...)
(define-meta-command ((name #f) repl () . datums)
docstring b0 b1 ...))))
(define-meta-command (help repl . args)
"help [all | GROUP | [-c] COMMAND]
Show help.
With one argument, tries to look up the argument as a group name, giving
help on that group if successful. Otherwise tries to look up the
argument as a command, giving help on the command.
If there is a command whose name is also a group name, use the ,help
-c COMMAND form to give help on the command instead of the group.
Without any argument, a list of help commands and command groups
are displayed."
(pmatch args
(()
(display-group (lookup-group 'help))
(display "Command Groups:\n\n")
(display-summary "help all" #f "List all commands")
(for-each (lambda (g)
(let* ((name (symbol->string (group-name g)))
(usage (string-append "help " name))
(header (string-append "List " name " commands")))
(display-summary usage #f header)))
(cdr *command-table*))
(newline)
(display
"Type `,help -c COMMAND' to show documentation of a particular command.")
(newline))
((all)
(for-each display-group *command-table*))
((,group) (guard (lookup-group group))
(display-group (lookup-group group)))
((,command) (guard (lookup-command command))
(display-command (lookup-command command)))
((-c ,command) (guard (lookup-command command))
(display-command (lookup-command command)))
((,command)
(format #t "Unknown command or group: ~A~%" command))
((-c ,command)
(format #t "Unknown command: ~A~%" command))
(else
(format #t "Bad arguments: ~A~%" args))))
(define-meta-command (show repl . args)
"show [TOPIC]
Gives information about Guile.
currently supported topics are `warranty' (or `w'), `copying' (or `c'),
and `version' (or `v').
Without any argument, a list of topics is displayed."
(pmatch args
(()
(display-group (car *show-table*) #f)
(newline))
((,topic) (guard (lookup-command topic *show-table*))
((command-procedure (lookup-command topic *show-table*)) repl))
((,command)
(format #t "Unknown topic: ~A~%" command))
(else
(format #t "Bad arguments: ~A~%" args))))
(define-meta-command (warranty repl)
"show warranty
Details on the lack of warranty."
(display *warranty*)
(newline))
(define-meta-command (copying repl)
"show copying
Show the LGPLv3."
(display *copying*)
(newline))
(define-meta-command (version repl)
"show version
Version information."
(display *version*)
(newline))
(define-meta-command (apropos repl regexp)
"apropos REGEXP
Find bindings/modules/packages."
(apropos (->string regexp)))
(define-meta-command (describe repl (form))
"describe OBJ
Show description/documentation."
(display
(object-documentation
(let ((input (repl-parse repl form)))
(if (symbol? input)
(module-ref (current-module) input)
(repl-eval repl input)))))
(newline))
(define-meta-command (option repl . args)
"option [NAME] [EXP]
List/show/set options."
(pmatch args
(()
(for-each (lambda (spec)
(format #t " ~A~24t~A\n" (car spec) (cadr spec)))
(repl-options repl)))
((,name)
(display (repl-option-ref repl name))
(newline))
((,name ,exp)
(repl-option-set! repl name (eval exp (current-module))))))
(define-meta-command (quit repl)
"quit
Quit this session."
(throw 'quit))
(define-meta-command (module repl . args)
"module [MODULE]
Change modules / Show current module."
(pmatch args
(() (puts (module-name (current-module))))
((,mod-name) (guard (list? mod-name))
(set-current-module (resolve-module mod-name)))
(,mod-name (set-current-module (resolve-module mod-name)))))
(define-meta-command (import repl . args)
"import [MODULE ...]
Import modules / List those imported."
(let ()
(define (use name)
(let ((mod (resolve-interface name)))
(if mod
(module-use! (current-module) mod)
(format #t "No such module: ~A~%" name))))
(if (null? args)
(for-each puts (map module-name (module-uses (current-module))))
(for-each use args))))
(define-meta-command (load repl file)
"load FILE
Load a file in the current module."
(load (->string file)))
(define-meta-command (reload repl . args)
"reload [MODULE]
Reload the given module, or the current module if none was given."
(pmatch args
(() (reload-module (current-module)))
((,mod-name) (guard (list? mod-name))
(reload-module (resolve-module mod-name)))
(,mod-name (reload-module (resolve-module mod-name)))))
(define-meta-command (binding repl)
"binding
List current bindings."
(module-for-each (lambda (k v) (format #t "~23A ~A\n" k v))
(current-module)))
(define-meta-command (in repl module command-or-expression . args)
"in MODULE COMMAND-OR-EXPRESSION
Evaluate an expression or command in the context of module."
(let ((m (resolve-module module #:ensure #f)))
(if m
(pmatch command-or-expression
(('unquote ,command) (guard (lookup-command command))
(save-module-excursion
(lambda ()
(set-current-module m)
(apply (command-procedure (lookup-command command)) repl args))))
(,expression
(guard (null? args))
(repl-print repl (eval expression m)))
(else
(format #t "Invalid arguments to `in': expected a single expression or a command.\n")))
(format #t "No such module: ~s\n" module))))
Language commands
(define-meta-command (language repl name)
"language LANGUAGE
Change languages."
(let ((lang (lookup-language name))
(cur (repl-language repl)))
(format #t "Happy hacking with ~a! To switch back, type `,L ~a'.\n"
(language-title lang) (language-name cur))
(current-language lang)
(set! (repl-language repl) lang)))
(define (load-image x)
(let ((thunk (load-thunk-from-memory x)))
(find-mapped-elf-image (program-code thunk))))
(define-meta-command (compile repl (form))
"compile EXP
Generate compiled code."
(let ((x (repl-compile repl (repl-parse repl form))))
(cond ((bytevector? x) (disassemble-image (load-image x)))
(else (repl-print repl x)))))
(define-meta-command (compile-file repl file . opts)
"compile-file FILE
Compile a file."
(compile-file (->string file) #:opts opts))
(define-meta-command (expand repl (form))
"expand EXP
Expand any macros in a form."
(let ((x (repl-expand repl (repl-parse repl form))))
(run-hook before-print-hook x)
(pp x)))
(define-meta-command (optimize repl (form))
"optimize EXP
Run the optimizer on a piece of code and print the result."
(let ((x (repl-optimize repl (repl-parse repl form))))
(run-hook before-print-hook x)
(pp x)))
(define-meta-command (disassemble repl (form))
"disassemble EXP
Disassemble a compiled procedure."
(let ((obj (repl-eval repl (repl-parse repl form))))
(cond
((program? obj)
(disassemble-program obj))
((bytevector? obj)
(disassemble-image (load-image obj)))
(else
(format #t
"Argument to ,disassemble not a procedure or a bytevector: ~a~%"
obj)))))
(define-meta-command (disassemble-file repl file)
"disassemble-file FILE
Disassemble a file."
(disassemble-file (->string file)))
(define-meta-command (time repl (form))
"time EXP
Time execution."
(let* ((gc-start (gc-run-time))
(real-start (get-internal-real-time))
(run-start (get-internal-run-time))
(result (repl-eval repl (repl-parse repl form)))
(run-end (get-internal-run-time))
(real-end (get-internal-real-time))
(gc-end (gc-run-time)))
(define (diff start end)
(/ (- end start) 1.0 internal-time-units-per-second))
(repl-print repl result)
(format #t ";; ~,6Fs real time, ~,6Fs run time. ~,6Fs spent in GC.\n"
(diff real-start real-end)
(diff run-start run-end)
(diff gc-start gc-end))
result))
(define-meta-command (profile repl (form) . opts)
"profile EXP
Profile execution."
FIXME opts
(apply statprof
(repl-prepare-eval-thunk repl (repl-parse repl form))
opts))
(define-meta-command (trace repl (form) . opts)
"trace EXP
Trace execution."
(apply call-with-trace
(repl-prepare-eval-thunk repl (repl-parse repl form))
(cons* #:width (terminal-width) opts)))
(define-syntax define-stack-command
(lambda (x)
(syntax-case x ()
((_ (name repl . args) docstring body body* ...)
#`(define-meta-command (name repl . args)
docstring
(let ((debug (repl-debug repl)))
(if debug
(letrec-syntax
((#,(datum->syntax #'repl 'frames)
(identifier-syntax (debug-frames debug)))
(#,(datum->syntax #'repl 'message)
(identifier-syntax (debug-error-message debug)))
(#,(datum->syntax #'repl 'index)
(identifier-syntax
(id (debug-index debug))
((set! id exp) (set! (debug-index debug) exp))))
(#,(datum->syntax #'repl 'cur)
(identifier-syntax
(vector-ref #,(datum->syntax #'repl 'frames)
#,(datum->syntax #'repl 'index)))))
body body* ...)
(format #t "Nothing to debug.~%"))))))))
(define-stack-command (backtrace repl #:optional count
#:key (width (terminal-width)) full?)
"backtrace [COUNT] [#:width W] [#:full? F]
Print a backtrace.
Print a backtrace of all stack frames, or innermost COUNT frames.
If COUNT is negative, the last COUNT frames will be shown."
(print-frames frames
#:count count
#:width width
#:full? full?))
(define-stack-command (up repl #:optional (count 1))
"up [COUNT]
Select a calling stack frame.
Select and print stack frames that called this one.
An argument says how many frames up to go."
(cond
((or (not (integer? count)) (<= count 0))
(format #t "Invalid argument to `up': expected a positive integer for COUNT.~%"))
((>= (+ count index) (vector-length frames))
(cond
((= index (1- (vector-length frames)))
(format #t "Already at outermost frame.\n"))
(else
(set! index (1- (vector-length frames)))
(print-frame cur #:index index))))
(else
(set! index (+ count index))
(print-frame cur #:index index))))
(define-stack-command (down repl #:optional (count 1))
"down [COUNT]
Select a called stack frame.
Select and print stack frames called by this one.
An argument says how many frames down to go."
(cond
((or (not (integer? count)) (<= count 0))
(format #t "Invalid argument to `down': expected a positive integer for COUNT.~%"))
((< (- index count) 0)
(cond
((zero? index)
(format #t "Already at innermost frame.\n"))
(else
(set! index 0)
(print-frame cur #:index index))))
(else
(set! index (- index count))
(print-frame cur #:index index))))
(define-stack-command (frame repl #:optional idx)
"frame [IDX]
Show a frame.
Show the selected frame.
With an argument, select a frame by index, then show it."
(cond
(idx
(cond
((or (not (integer? idx)) (< idx 0))
(format #t "Invalid argument to `frame': expected a non-negative integer for IDX.~%"))
((< idx (vector-length frames))
(set! index idx)
(print-frame cur #:index index))
(else
(format #t "No such frame.~%"))))
(else (print-frame cur #:index index))))
(define-stack-command (locals repl #:key (width (terminal-width)))
"locals
Show local variables.
Show locally-bound variables in the selected frame."
(print-locals cur #:width width))
(define-stack-command (error-message repl)
"error-message
Show error message.
Display the message associated with the error that started the current
debugging REPL."
(format #t "~a~%" (if (string? message) message "No error message")))
(define-meta-command (break repl (form))
"break PROCEDURE
Break on calls to PROCEDURE.
Starts a recursive prompt when PROCEDURE is called."
(let ((proc (repl-eval repl (repl-parse repl form))))
(if (not (procedure? proc))
(error "Not a procedure: ~a" proc)
(let ((idx (add-trap-at-procedure-call! proc)))
(format #t "Trap ~a: ~a.~%" idx (trap-name idx))))))
(define-meta-command (break-at-source repl file line)
"break-at-source FILE LINE
Break when control reaches the given source location.
Starts a recursive prompt when control reaches line LINE of file FILE.
Note that the given source location must be inside a procedure."
(let ((file (if (symbol? file) (symbol->string file) file)))
(let ((idx (add-trap-at-source-location! file line)))
(format #t "Trap ~a: ~a.~%" idx (trap-name idx)))))
(define (repl-pop-continuation-resumer repl msg)
(% (call-with-values
(lambda ()
(abort
(lambda (k)
(lambda (frame . values)
(k frame
(frame->stack-vector
(frame-previous frame))
values)))))
(lambda (from stack values)
(format #t "~a~%" msg)
(if (null? values)
(format #t "No return values.~%")
(begin
(format #t "Return values:~%")
(for-each (lambda (x) (repl-print repl x)) values)))
((module-ref (resolve-interface '(system repl repl)) 'start-repl)
#:debug (make-debug stack 0 msg))))))
(define-stack-command (finish repl)
"finish
Run until the current frame finishes.
Resume execution, breaking when the current frame finishes."
(let ((handler (repl-pop-continuation-resumer
repl (format #f "Return from ~a" cur))))
(add-ephemeral-trap-at-frame-finish! cur handler)
(throw 'quit)))
(define (repl-next-resumer msg)
(% (let ((stack (abort
(lambda (k)
(lambda (frame)
(k (frame->stack-vector frame)))))))
(format #t "~a~%" msg)
((module-ref (resolve-interface '(system repl repl)) 'start-repl)
#:debug (make-debug stack 0 msg)))))
(define-stack-command (step repl)
"step
Step until control reaches a different source location.
Step until control reaches a different source location."
(let ((msg (format #f "Step into ~a" cur)))
(add-ephemeral-stepping-trap! cur (repl-next-resumer msg)
#:into? #t #:instruction? #f)
(throw 'quit)))
(define-stack-command (step-instruction repl)
"step-instruction
Step until control reaches a different instruction.
Step until control reaches a different VM instruction."
(let ((msg (format #f "Step into ~a" cur)))
(add-ephemeral-stepping-trap! cur (repl-next-resumer msg)
#:into? #t #:instruction? #t)
(throw 'quit)))
(define-stack-command (next repl)
"next
Step until control reaches a different source location in the current frame.
Step until control reaches a different source location in the current frame."
(let ((msg (format #f "Step into ~a" cur)))
(add-ephemeral-stepping-trap! cur (repl-next-resumer msg)
#:into? #f #:instruction? #f)
(throw 'quit)))
(define-stack-command (next-instruction repl)
"next-instruction
Step until control reaches a different instruction in the current frame.
Step until control reaches a different VM instruction in the current frame."
(let ((msg (format #f "Step into ~a" cur)))
(add-ephemeral-stepping-trap! cur (repl-next-resumer msg)
#:into? #f #:instruction? #t)
(throw 'quit)))
(define-meta-command (tracepoint repl (form))
"tracepoint PROCEDURE
Add a tracepoint to PROCEDURE.
A tracepoint will print out the procedure and its arguments, when it is
called, and its return value(s) when it returns."
(let ((proc (repl-eval repl (repl-parse repl form))))
(if (not (procedure? proc))
(error "Not a procedure: ~a" proc)
(let ((idx (add-trace-at-procedure-call! proc)))
(format #t "Trap ~a: ~a.~%" idx (trap-name idx))))))
(define-meta-command (traps repl)
"traps
Show the set of currently attached traps.
Show the set of currently attached traps (breakpoints and tracepoints)."
(let ((traps (list-traps)))
(if (null? traps)
(format #t "No traps set.~%")
(for-each (lambda (idx)
(format #t " ~a: ~a~a~%"
idx (trap-name idx)
(if (trap-enabled? idx) "" " (disabled)")))
traps))))
(define-meta-command (delete repl idx)
"delete IDX
Delete a trap.
Delete a trap."
(if (not (integer? idx))
(error "expected a trap index (a non-negative integer)" idx)
(delete-trap! idx)))
(define-meta-command (disable repl idx)
"disable IDX
Disable a trap.
Disable a trap."
(if (not (integer? idx))
(error "expected a trap index (a non-negative integer)" idx)
(disable-trap! idx)))
(define-meta-command (enable repl idx)
"enable IDX
Enable a trap.
Enable a trap."
(if (not (integer? idx))
(error "expected a trap index (a non-negative integer)" idx)
(enable-trap! idx)))
(define-stack-command (registers repl)
"registers
Print registers.
Print the registers of the current frame."
(print-registers cur))
(define-meta-command (width repl #:optional x)
"width [X]
Set debug output width.
Set the number of screen columns in the output from `backtrace' and
`locals'."
(terminal-width x)
(format #t "Set screen width to ~a columns.~%" (terminal-width)))
(define-meta-command (inspect repl (form))
"inspect EXP
Inspect the result(s) of evaluating EXP."
(call-with-values (repl-prepare-eval-thunk repl (repl-parse repl form))
(lambda args
(for-each %inspect args))))
(define-meta-command (pretty-print repl (form))
"pretty-print EXP
Pretty-print the result(s) of evaluating EXP."
(call-with-values (repl-prepare-eval-thunk repl (repl-parse repl form))
(lambda args
(for-each
(lambda (x)
(run-hook before-print-hook x)
(pp x))
args))))
(define-meta-command (gc repl)
"gc
Garbage collection."
(gc))
(define-meta-command (statistics repl)
"statistics
Display statistics."
(let ((this-tms (times))
(this-gcs (gc-stats))
(last-tms (repl-tm-stats repl))
(last-gcs (repl-gc-stats repl)))
GC times
(let ((this-times (assq-ref this-gcs 'gc-times))
(last-times (assq-ref last-gcs 'gc-times)))
(display-diff-stat "GC times:" #t this-times last-times "times")
(newline))
(let ((this-heap (assq-ref this-gcs 'heap-size))
(this-free (assq-ref this-gcs 'heap-free-size)))
(display-stat-title "Memory size:" "current" "limit")
(display-stat "heap" #f (- this-heap this-free) this-heap "bytes")
(newline))
(let ((this-alloc (assq-ref this-gcs 'heap-total-allocated))
(last-alloc (assq-ref last-gcs 'heap-total-allocated)))
(display-stat-title "Bytes allocated:" "diff" "total")
(display-diff-stat "allocated" #f this-alloc last-alloc "bytes")
(newline))
GC time taken
(let ((this-total (assq-ref this-gcs 'gc-time-taken))
(last-total (assq-ref last-gcs 'gc-time-taken)))
(display-stat-title "GC time taken:" "diff" "total")
(display-time-stat "total" this-total last-total)
(newline))
(let ((this-utime (tms:utime this-tms))
(last-utime (tms:utime last-tms))
(this-stime (tms:stime this-tms))
(last-stime (tms:stime last-tms))
(this-cutime (tms:cutime this-tms))
(last-cutime (tms:cutime last-tms))
(this-cstime (tms:cstime this-tms))
(last-cstime (tms:cstime last-tms)))
(display-stat-title "Process time spent:" "diff" "total")
(display-time-stat "user" this-utime last-utime)
(display-time-stat "system" this-stime last-stime)
(display-time-stat "child user" this-cutime last-cutime)
(display-time-stat "child system" this-cstime last-cstime)
(newline))
(set! (repl-tm-stats repl) this-tms)
(set! (repl-gc-stats repl) this-gcs)))
(define (display-stat title flag field1 field2 unit)
(let ((fmt (format #f "~~20~AA ~~10@A /~~10@A ~~A~~%" (if flag "" "@"))))
(format #t fmt title field1 field2 unit)))
(define (display-stat-title title field1 field2)
(display-stat title #t field1 field2 ""))
(define (display-diff-stat title flag this last unit)
(display-stat title flag (- this last) this unit))
(define (display-time-stat title this last)
(define (conv num)
(format #f "~10,2F" (exact->inexact (/ num internal-time-units-per-second))))
(display-stat title #f (conv (- this last)) (conv this) "s"))
(define (display-mips-stat title this-time this-clock last-time last-clock)
(define (mips time clock)
(if (= time 0) "----" (format #f "~10,2F" (/ clock time 1000000.0))))
(display-stat title #f
(mips (- this-time last-time) (- this-clock last-clock))
(mips this-time this-clock) "mips"))
|
d9f7234a554f8eee030cfab059c35890a3ad28ba30cf51d7e1761f0f58ce8c56 | JacquesCarette/Drasil | Derivations.hs | module Drasil.GamePhysics.Derivations where
import Language.Drasil (eqSymb, ModelExprC(..), ExprC(..), ModelExpr, LiteralC(..))
import Drasil.GamePhysics.Unitals (timeT, time_1, time_2, velo_1, velo_2)
import qualified Data.Drasil.Quantities.Physics as QP (force, time, velocity,
acceleration, chgInVelocity)
import qualified Data.Drasil.Quantities.PhysicalProperties as QPP (mass)
impulseVDerivEqn1 :: ModelExpr
impulseVDerivEqn1 = sy QP.force $= sy QPP.mass `mulRe` sy QP.acceleration
$= sy QPP.mass `mulRe` deriv (sy QP.velocity) QP.time
impulseVDerivEqn2 :: ModelExpr -- TODO: Why does defint take a symbol as an argument? Shouldn't it be a UID?
impulseVDerivEqn2 = defint (eqSymb timeT) (sy time_1) (sy time_2) (sy QP.force) $=
sy QPP.mass `mulRe` defint (eqSymb QP.velocity) (sy velo_1) (sy velo_2) (exactDbl 1)
impulseVDerivEqn3 :: ModelExpr
impulseVDerivEqn3 = defint (eqSymb timeT) (sy time_1) (sy time_2) (sy QP.force)
$= (sy QPP.mass `mulRe` sy velo_2) $- (sy QPP.mass `mulRe` sy velo_1)
$= sy QPP.mass `mulRe` sy QP.chgInVelocity
impulseVDerivEqns :: [ModelExpr]
impulseVDerivEqns = [impulseVDerivEqn1, impulseVDerivEqn2, impulseVDerivEqn3]
| null | https://raw.githubusercontent.com/JacquesCarette/Drasil/d9d9e7ac87131ccaae889029481cd34e0c0ad773/code/drasil-example/gamephysics/lib/Drasil/GamePhysics/Derivations.hs | haskell | TODO: Why does defint take a symbol as an argument? Shouldn't it be a UID? | module Drasil.GamePhysics.Derivations where
import Language.Drasil (eqSymb, ModelExprC(..), ExprC(..), ModelExpr, LiteralC(..))
import Drasil.GamePhysics.Unitals (timeT, time_1, time_2, velo_1, velo_2)
import qualified Data.Drasil.Quantities.Physics as QP (force, time, velocity,
acceleration, chgInVelocity)
import qualified Data.Drasil.Quantities.PhysicalProperties as QPP (mass)
impulseVDerivEqn1 :: ModelExpr
impulseVDerivEqn1 = sy QP.force $= sy QPP.mass `mulRe` sy QP.acceleration
$= sy QPP.mass `mulRe` deriv (sy QP.velocity) QP.time
impulseVDerivEqn2 = defint (eqSymb timeT) (sy time_1) (sy time_2) (sy QP.force) $=
sy QPP.mass `mulRe` defint (eqSymb QP.velocity) (sy velo_1) (sy velo_2) (exactDbl 1)
impulseVDerivEqn3 :: ModelExpr
impulseVDerivEqn3 = defint (eqSymb timeT) (sy time_1) (sy time_2) (sy QP.force)
$= (sy QPP.mass `mulRe` sy velo_2) $- (sy QPP.mass `mulRe` sy velo_1)
$= sy QPP.mass `mulRe` sy QP.chgInVelocity
impulseVDerivEqns :: [ModelExpr]
impulseVDerivEqns = [impulseVDerivEqn1, impulseVDerivEqn2, impulseVDerivEqn3]
|
2243ce524c0e91bdb5c8a00e2e2b2e5a1544bf13bed41a2d161904eb819b178e | hasktorch/hasktorch | StdArray.hs |
# LANGUAGE DataKinds #
# LANGUAGE PolyKinds #
# LANGUAGE TemplateHaskell #
# LANGUAGE QuasiQuotes #
# LANGUAGE ScopedTypeVariables #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TypeFamilies #
# LANGUAGE FlexibleInstances #
module Torch.Internal.Unmanaged.Type.StdArray where
import qualified Language.C.Inline.Cpp as C
import qualified Language.C.Inline.Cpp.Unsafe as C
import qualified Language.C.Inline.Context as C
import qualified Language.C.Types as C
import qualified Data.Map as Map
import Foreign.C.String
import Foreign.C.Types
import Foreign
import Torch.Internal.Type
import Torch.Internal.Class
C.context $ C.cppCtx <> mempty { C.ctxTypesTable = typeTable }
C.include "<array>"
newStdArrayBool2
:: IO (Ptr (StdArray '(CBool,2)))
newStdArrayBool2 =
[C.throwBlock| std::array<bool,2>* { return new std::array<bool,2>(
);
}|]
newStdArrayBool2_bb
:: CBool
-> CBool
-> IO (Ptr (StdArray '(CBool,2)))
newStdArrayBool2_bb b0 b1 =
[C.throwBlock| std::array<bool,2>* { return new std::array<bool,2>({$(bool b0),$(bool b1)}); }|]
instance CppTuple2 (Ptr (StdArray '(CBool,2))) where
type A (Ptr (StdArray '(CBool,2))) = CBool
type B (Ptr (StdArray '(CBool,2))) = CBool
get0 v = [C.throwBlock| bool { return std::get<0>(*$(std::array<bool,2>* v));}|]
get1 v = [C.throwBlock| bool { return std::get<1>(*$(std::array<bool,2>* v));}|]
newStdArrayBool3
:: IO (Ptr (StdArray '(CBool,3)))
newStdArrayBool3 =
[C.throwBlock| std::array<bool,3>* { return new std::array<bool,3>(
);
}|]
newStdArrayBool3_bbb
:: CBool
-> CBool
-> CBool
-> IO (Ptr (StdArray '(CBool,3)))
newStdArrayBool3_bbb b0 b1 b2 =
[C.throwBlock| std::array<bool,3>* { return new std::array<bool,3>({$(bool b0),$(bool b1),$(bool b2)}); }|]
instance CppTuple2 (Ptr (StdArray '(CBool,3))) where
type A (Ptr (StdArray '(CBool,3))) = CBool
type B (Ptr (StdArray '(CBool,3))) = CBool
get0 v = [C.throwBlock| bool { return std::get<0>(*$(std::array<bool,3>* v));}|]
get1 v = [C.throwBlock| bool { return std::get<1>(*$(std::array<bool,3>* v));}|]
instance CppTuple3 (Ptr (StdArray '(CBool,3))) where
type C (Ptr (StdArray '(CBool,3))) = CBool
get2 v = [C.throwBlock| bool { return std::get<2>(*$(std::array<bool,3>* v));}|]
newStdArrayBool4
:: IO (Ptr (StdArray '(CBool,4)))
newStdArrayBool4 =
[C.throwBlock| std::array<bool,4>* { return new std::array<bool,4>(
);
}|]
newStdArrayBool4_bbbb
:: CBool
-> CBool
-> CBool
-> CBool
-> IO (Ptr (StdArray '(CBool,4)))
newStdArrayBool4_bbbb b0 b1 b2 b3 =
[C.throwBlock| std::array<bool,4>* { return new std::array<bool,4>({$(bool b0),$(bool b1),$(bool b2),$(bool b3)}); }|]
instance CppTuple2 (Ptr (StdArray '(CBool,4))) where
type A (Ptr (StdArray '(CBool,4))) = CBool
type B (Ptr (StdArray '(CBool,4))) = CBool
get0 v = [C.throwBlock| bool { return std::get<0>(*$(std::array<bool,4>* v));}|]
get1 v = [C.throwBlock| bool { return std::get<1>(*$(std::array<bool,4>* v));}|]
instance CppTuple3 (Ptr (StdArray '(CBool,4))) where
type C (Ptr (StdArray '(CBool,4))) = CBool
get2 v = [C.throwBlock| bool { return std::get<2>(*$(std::array<bool,4>* v));}|]
instance CppTuple4 (Ptr (StdArray '(CBool,4))) where
type D (Ptr (StdArray '(CBool,4))) = CBool
get3 v = [C.throwBlock| bool { return std::get<3>(*$(std::array<bool,4>* v));}|]
| null | https://raw.githubusercontent.com/hasktorch/hasktorch/6233c173e1dd9fd7218fd13b104da15fc457f67e/libtorch-ffi/src/Torch/Internal/Unmanaged/Type/StdArray.hs | haskell | # LANGUAGE OverloadedStrings # |
# LANGUAGE DataKinds #
# LANGUAGE PolyKinds #
# LANGUAGE TemplateHaskell #
# LANGUAGE QuasiQuotes #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE FlexibleInstances #
module Torch.Internal.Unmanaged.Type.StdArray where
import qualified Language.C.Inline.Cpp as C
import qualified Language.C.Inline.Cpp.Unsafe as C
import qualified Language.C.Inline.Context as C
import qualified Language.C.Types as C
import qualified Data.Map as Map
import Foreign.C.String
import Foreign.C.Types
import Foreign
import Torch.Internal.Type
import Torch.Internal.Class
C.context $ C.cppCtx <> mempty { C.ctxTypesTable = typeTable }
C.include "<array>"
newStdArrayBool2
:: IO (Ptr (StdArray '(CBool,2)))
newStdArrayBool2 =
[C.throwBlock| std::array<bool,2>* { return new std::array<bool,2>(
);
}|]
newStdArrayBool2_bb
:: CBool
-> CBool
-> IO (Ptr (StdArray '(CBool,2)))
newStdArrayBool2_bb b0 b1 =
[C.throwBlock| std::array<bool,2>* { return new std::array<bool,2>({$(bool b0),$(bool b1)}); }|]
instance CppTuple2 (Ptr (StdArray '(CBool,2))) where
type A (Ptr (StdArray '(CBool,2))) = CBool
type B (Ptr (StdArray '(CBool,2))) = CBool
get0 v = [C.throwBlock| bool { return std::get<0>(*$(std::array<bool,2>* v));}|]
get1 v = [C.throwBlock| bool { return std::get<1>(*$(std::array<bool,2>* v));}|]
newStdArrayBool3
:: IO (Ptr (StdArray '(CBool,3)))
newStdArrayBool3 =
[C.throwBlock| std::array<bool,3>* { return new std::array<bool,3>(
);
}|]
newStdArrayBool3_bbb
:: CBool
-> CBool
-> CBool
-> IO (Ptr (StdArray '(CBool,3)))
newStdArrayBool3_bbb b0 b1 b2 =
[C.throwBlock| std::array<bool,3>* { return new std::array<bool,3>({$(bool b0),$(bool b1),$(bool b2)}); }|]
instance CppTuple2 (Ptr (StdArray '(CBool,3))) where
type A (Ptr (StdArray '(CBool,3))) = CBool
type B (Ptr (StdArray '(CBool,3))) = CBool
get0 v = [C.throwBlock| bool { return std::get<0>(*$(std::array<bool,3>* v));}|]
get1 v = [C.throwBlock| bool { return std::get<1>(*$(std::array<bool,3>* v));}|]
instance CppTuple3 (Ptr (StdArray '(CBool,3))) where
type C (Ptr (StdArray '(CBool,3))) = CBool
get2 v = [C.throwBlock| bool { return std::get<2>(*$(std::array<bool,3>* v));}|]
newStdArrayBool4
:: IO (Ptr (StdArray '(CBool,4)))
newStdArrayBool4 =
[C.throwBlock| std::array<bool,4>* { return new std::array<bool,4>(
);
}|]
newStdArrayBool4_bbbb
:: CBool
-> CBool
-> CBool
-> CBool
-> IO (Ptr (StdArray '(CBool,4)))
newStdArrayBool4_bbbb b0 b1 b2 b3 =
[C.throwBlock| std::array<bool,4>* { return new std::array<bool,4>({$(bool b0),$(bool b1),$(bool b2),$(bool b3)}); }|]
instance CppTuple2 (Ptr (StdArray '(CBool,4))) where
type A (Ptr (StdArray '(CBool,4))) = CBool
type B (Ptr (StdArray '(CBool,4))) = CBool
get0 v = [C.throwBlock| bool { return std::get<0>(*$(std::array<bool,4>* v));}|]
get1 v = [C.throwBlock| bool { return std::get<1>(*$(std::array<bool,4>* v));}|]
instance CppTuple3 (Ptr (StdArray '(CBool,4))) where
type C (Ptr (StdArray '(CBool,4))) = CBool
get2 v = [C.throwBlock| bool { return std::get<2>(*$(std::array<bool,4>* v));}|]
instance CppTuple4 (Ptr (StdArray '(CBool,4))) where
type D (Ptr (StdArray '(CBool,4))) = CBool
get3 v = [C.throwBlock| bool { return std::get<3>(*$(std::array<bool,4>* v));}|]
|
1438627d122dde0c4b20f0276a9cc635e9eedf284bd63b216d67035b635650d3 | nmunro/mtg-api | subtypes.lisp | (defpackage mtg-api/subtypes
(:use :cl)
(:export #:fetch
#:make-subtypes-api))
(in-package :mtg-api/subtypes)
(defclass subtypes-api-v1 (mtg-api/base:api-v1)
((resource :initarg :formats :initform "subtypes" :reader resource))
(:documentation "Represents a list of subtypes using V1 of the API"))
(defmethod print-object ((object subtypes-api-v1) stream)
(print-unreadable-object (object stream)
(format stream "~A" (resource object))))
(defclass subtypes-api (subtypes-api-v1)
()
(:documentation "A convenience class to represent the current version of the API"))
(defun make-subtypes-api ()
(make-instance 'subtypes-api))
(defmethod fetch ((obj subtypes-api))
(let ((url (format nil "~A/~A" (mtg-api/base:url obj) (resource obj))))
(loop for format in (gethash (resource obj) (jonathan:parse (dexador:get url) :as :hash-table)) collect format)))
| null | https://raw.githubusercontent.com/nmunro/mtg-api/f796794cde8a483278f22d7ac7e6c46302685ec1/src/types/subtypes.lisp | lisp | (defpackage mtg-api/subtypes
(:use :cl)
(:export #:fetch
#:make-subtypes-api))
(in-package :mtg-api/subtypes)
(defclass subtypes-api-v1 (mtg-api/base:api-v1)
((resource :initarg :formats :initform "subtypes" :reader resource))
(:documentation "Represents a list of subtypes using V1 of the API"))
(defmethod print-object ((object subtypes-api-v1) stream)
(print-unreadable-object (object stream)
(format stream "~A" (resource object))))
(defclass subtypes-api (subtypes-api-v1)
()
(:documentation "A convenience class to represent the current version of the API"))
(defun make-subtypes-api ()
(make-instance 'subtypes-api))
(defmethod fetch ((obj subtypes-api))
(let ((url (format nil "~A/~A" (mtg-api/base:url obj) (resource obj))))
(loop for format in (gethash (resource obj) (jonathan:parse (dexador:get url) :as :hash-table)) collect format)))
| |
243d45f7500effcf7a4b4cc84156b62949ee729d4a692d2091d2d89dbdeb7b3e | charlieg/Sparser | status-printer.lisp | ;;; -*- Mode:LISP; Syntax:Common-Lisp; Package:SPARSER -*-
copyright ( c ) 1995 -- all rights reserved
;;;
;;; File: "status printer"
Module : " : titles : "
version : February 1995
initiated 2/28/95
(in-package :sparser)
(define-special-printing-routine-for-category qualified-title
:full ((format stream "#<qualified-title ~A,~A>"
(string/title (value-of 'title obj))
(string/title-qualifier (value-of 'qualifier obj))))
:short ((format stream "#<~A,~A>"
(string/title (value-of 'title obj))
(string/title-qualifier (value-of 'qualifier obj)))))
(defun string/qualified-title (qt)
(concatenate 'string
(string/title (value-of 'title qt))
", "
(string/title-qualifier (value-of 'qualifier qt))))
| null | https://raw.githubusercontent.com/charlieg/Sparser/b9bb7d01d2e40f783f3214fc104062db3d15e608/Sparser/code/s/grammar/model/core/titles/status-printer.lisp | lisp | -*- Mode:LISP; Syntax:Common-Lisp; Package:SPARSER -*-
File: "status printer" | copyright ( c ) 1995 -- all rights reserved
Module : " : titles : "
version : February 1995
initiated 2/28/95
(in-package :sparser)
(define-special-printing-routine-for-category qualified-title
:full ((format stream "#<qualified-title ~A,~A>"
(string/title (value-of 'title obj))
(string/title-qualifier (value-of 'qualifier obj))))
:short ((format stream "#<~A,~A>"
(string/title (value-of 'title obj))
(string/title-qualifier (value-of 'qualifier obj)))))
(defun string/qualified-title (qt)
(concatenate 'string
(string/title (value-of 'title qt))
", "
(string/title-qualifier (value-of 'qualifier qt))))
|
8be1af35fab408482ad88c94cafa21715be02e9170de7d4d355306ceff1e04bd | well-typed-lightbulbs/ocaml-esp32 | morematch.ml | (* TEST
include testing
*)
(**************************************************************)
(* This suite tests the pattern-matching compiler *)
(* it should just compile and run. *)
(* While compiling the following messages are normal: *)
(**************************************************************)
let test msg f arg r =
if f arg <> r then begin
prerr_endline msg ;
failwith "Malaise"
end
;;
type t = A | B | C | D | E | F
;;
let f x = match x with
| A | B | C -> 1
| D | E -> 2
| F -> 3;;
test "un" f C 1 ;
test "un" f D 2 ;
test "un" f F 3 ; ()
;;
let g x = match x with
1 -> 1
| 2 -> 2
| 3 -> 3
| 4 | 5 -> 4
| 6 -> 5
| 7 | 8 -> 6
| 9 -> 7
| _ -> assert false
;;
test "deux" g 5 4 ;
test "deux" g 6 5 ;
test "deux" g 9 7 ; ()
;;
let g x = match x with
1 -> 1
| 2 -> 2
| 3 -> 3
| 4 | 5 -> 4
| 6 -> 5
| 7 | 8 -> 6
| 9 -> 7
| _ -> 8;;
test "trois" g 10 8
;;
let g x= match x with
1 -> 1
| 2 -> 2
| 3 -> 3
| 4 | 5 -> 4
| 6 -> 5
| 4|5|7 -> 100
| 7 | 8 -> 6
| 9 -> 7
| _ -> 8
;;
test "quatre" g 4 4 ;
test "quatre" g 7 100 ; ()
;;
let h x =
match x with
(1,1) -> 1
| (2|3), 1 -> 2
| 2,(2|3) -> 3
| (4,4) -> 5
| _ -> 100
;;
test "cinq" h (2,2) 3 ;
test "cinq" h (2,1) 2 ;
test "cinq" h (2,4) 100 ; ()
;;
idem hh ( 2,5 )
let hh x = match x with
| 1,1 -> 1
| 2,1 -> 2
| (2|3),(1|2|3|4) -> 3
| 2,5 -> 4
| (4,4) -> 5
| _ -> 100
;;
let hhh x = match x with
| 1,1 -> 1
| (2|3),1 -> 2
| 2,2 -> 3
| _ -> 100
;;
let h x =
match x with
(1,1) -> 1
| 3,1 -> 2
| 2,(2|3) -> 3
| (4,4) -> 5
| _ -> 100
;;
let h x = match x with
1 -> 1
| 2|3 -> 2
| 4 -> 4
| 5 -> 5
| 6|7 -> 6
| 8 -> 8
| _ -> 100
;;
let f x = match x with
| ((1|2),(3|4))|((3|4),(1|2)) -> 1
| (3,(5|6)) -> 2
| _ -> 3
;;
test "six" f (1,3) 1 ;
test "six" f (3,2) 1 ;
test "six" f (3,5) 2 ;
test "six" f (3,7) 3 ; ()
;;
type tt = {a : bool list ; b : bool}
let f = function
| {a=([]|[true])} -> 1
| {a=false::_}|{b=(true|false)} -> 2
;;
test "sept" f {a=[] ; b = true} 1 ;
test "sept" f {a=[true] ; b = false} 1 ;
test "sept" f {a=[false ; true] ; b = true} 2 ;
test "sept" f {a=[false] ; b = false} 2 ; ()
;;
let f = function
| (([]|[true]),_) -> 1
| (false::_,_)|(_,(true|false)) -> 2
;;
test "huit" f ([],true) 1 ;
test "huit" f ([true],false) 1 ;
test "huit" f ([false ; true], true) 2 ;
test "huit" f ([false], false) 2 ; ()
;;
let split_cases = function
| `Nil | `Cons _ as x -> `A x
| `Snoc _ as x -> `B x
;;
test "oubli" split_cases `Nil (`A `Nil);
test "oubli" split_cases (`Cons 1) (`A (`Cons 1));
test "oubli" split_cases (`Snoc 1) (`B (`Snoc 1)) ; ()
;;
type t1 = A of int | B of int
let f1 = function
| (A x | B x) -> x
;;
test "neuf" f1 (A 1) 1 ;
test "neuf" f1 (B 1) 1 ;
;;
type coucou = A of int | B of int * int | C
;;
let g = function
| (A x | B (_,x)) -> x
| C -> 0
;;
test "dix" g (A 1) 1 ;
test "dix" g (B (1,2)) 2 ;
;;
let h = function
| ([x]|[1 ; x ]|[1 ; 2 ; x]) -> x
| _ -> 0
;;
test "encore" h [1] 1 ;
test "encore" h [1;2] 2 ;
test "encore" h [1;2;3] 3 ;
test "encore" h [0 ; 0] 0 ; ()
;;
let f = function
| (x,(0 as y)) | (y,x) -> y-x
;;
test "foo1" f (1,0) (-1);
test "foo1" f (1,2) (-1)
;;
let f = function (([]|[_]) as x)|(_::([] as x))|(_::_::x) -> x
;;
test "zob" f [] [] ;
test "zob" f [1] [1] ;
test "zob" f [1;2;3] [3]
;;
type zob = A | B | C | D of zob * int | E of zob * zob
let rec f = function
| (A | B | C) -> A
| D (x,i) -> D (f x,i)
| E (x,_) -> D (f x,0)
;;
test "fin" f B A ;
test "fin" f (D (C,1)) (D (A,1)) ;
test "fin" f (E (C,A)) (D (A,0)) ; ()
;;
type length =
Char of int | Pixel of int | Percent of int | No of string | Default
let length = function
| Char n -> n | Pixel n -> n
| _ -> 0
;;
test "length" length (Char 10) 10 ;
test "length" length (Pixel 20) 20 ;
test "length" length Default 0 ;
test "length" length (Percent 100) 0 ; ()
;;
let length2 = function
| Char n -> n | Percent n -> n
| _ -> 0
;;
test "length2" length2 (Char 10) 10 ;
test "length2" length2 (Pixel 20) 0 ;
test "length2" length2 Default 0 ;
test "length2" length2(Percent 100) 100 ; ()
;;
let length3 = function
| Char _ | No _ -> true
| _ -> false
;;
test "length3" length3 (Char 10) true ;
test "length3" length3 (No "") true ;
test "length3" length3 (Pixel 20) false ;
test "length3" length3 Default false ;
test "length3" length3(Percent 100) false ; ()
;;
type hevea = A | B | C
let h x = match x with
| A -> 1
| B|C -> 2
;;
test "hevea" h A 1 ;
test "hevea" h B 2 ;
test "hevea" h B 2 ; ()
;;
type lambda =
Lvar of int
| Lconst of int
| Lapply of lambda * lambda list
| Lfunction of bool * int list * lambda
| Llet of bool * int * lambda * lambda
| Lletrec of (int * lambda) list * lambda
| Lprim of string * lambda list
| Lswitch of lambda * lambda_switch
| Lstaticfail
| Lcatch of lambda * lambda
| Lstaticraise of int * lambda list
| Lstaticcatch of lambda * (int * int list) * lambda
| Ltrywith of lambda * int * lambda
| Lifthenelse of lambda * lambda * lambda
| Lsequence of lambda * lambda
| Lwhile of lambda * lambda
| Lfor of int * lambda * lambda * bool * lambda
| Lassign of int * lambda
| Lsend of lambda * lambda * lambda list
| Levent of lambda * lambda_event
| Lifused of int * lambda
and lambda_switch =
{ sw_numconsts: int; (* Number of integer cases *)
Integer cases
sw_numblocks: int; (* Number of tag block cases *)
sw_blocks: (int * lambda) list; (* Tag block cases *)
sw_checked: bool ; (* True if bound checks needed *)
sw_nofail: bool} (* True if should not fail *)
and lambda_event =
{ lev_loc: int;
lev_kind: bool ;
lev_repr: int ref option;
lev_env: int list }
let rec approx_present v l = true
let rec lower_bind v arg lam = match lam with
| Lifthenelse (cond, ifso, ifnot) -> 1
| Lswitch (ls,({sw_consts=[i,act] ; sw_blocks = []} as _sw))
when not (approx_present v ls) -> 2
| Lswitch (ls,({sw_consts=[] ; sw_blocks = [i,act]} as _sw))
when not (approx_present v ls) -> 3
| Llet (true , vv, lv, l) -> 4
| _ -> 5
;;
test "lower_bind" (lower_bind 0 0) (Llet (true,0, Lvar 1, Lvar 2)) 4 ;
test "lower_bind" (lower_bind 0 0) (Lvar 0) 5 ;
test "lower_bind" (lower_bind 0 0) (Lifthenelse (Lvar 0, Lvar 1, Lvar 2)) 1
;;
type field_kind =
Fvar of field_kind option ref
| Fpresent
| Fabsent
let unify_kind (k1, k2) = match k1, k2 with
(Fvar r, (Fvar _ | Fpresent)) -> 1
| (Fpresent, Fvar r) -> 2
| (Fpresent, Fpresent) -> 3
| _ -> 4
let r = ref (Some Fpresent)
;;
test "unify" unify_kind (Fvar r, Fpresent) 1 ;
test "unify" unify_kind (Fvar r, Fvar r) 1 ;
test "unify" unify_kind (Fvar r, Fabsent) 4 ;
test "unify" unify_kind (Fpresent, Fvar r) 2 ;
test "unify" unify_kind (Fpresent, Fpresent) 3 ;
test "unify" unify_kind (Fabsent, Fpresent) 4 ; ()
;;
type youyou = A | B | C | D of youyou
let foo (k1, k2) = match k1,k2 with
| D _, (A|D _) -> 1
| (A|B),D _ -> 2
| C,_ -> 3
| _, (A|B|C) -> 4
;;
test "foo2" foo (D A,A) 1 ;
test "foo2" foo (D A,B) 4 ;
test "foo2" foo (A,A) 4 ; ()
;;
type yaya = A | B
;;
let yaya = function
| A,_,_ -> 1
| _,A,_ -> 2
| B,B,_ -> 3
| A,_,(100|103) -> 5
;;
test "yaya" yaya (A,A,0) 1 ;
test "yaya" yaya (B,A,0) 2 ;
test "yaya" yaya (B,B,100) 3 ; ()
;;
let yoyo = function
| [],_,_ -> 1
| _,[],_ -> 2
| _::_,_::_,_ -> 3
| [],_,(100|103|104) -> 5
| [],_,(100|103) -> 6
| [],_,(1000|1001|1002|20000) -> 7
;;
test "yoyo" yoyo ([],[],0) 1 ;
test "yoyo" yoyo ([1],[],0) 2 ;
test "yoyo" yoyo ([1],[1],100) 3 ; ()
;;
let youyou = function
| (100|103|104) -> 1
| (100|103|101) -> 2
| (1000|1001|1002|20000) -> 3
| _ -> -1
;;
test "youyou" youyou 100 1 ;
test "youyou" youyou 101 2 ;
test "youyou" youyou 1000 3
;;
type autre =
| C | D | E of autre | F of autre * autre | H of autre | I | J | K of string
let rec autre = function
| C,_,_ -> 1
| _,C,_ -> 2
| D,D,_ -> 3
| (D|F (_,_)|H _|K _),_,_ -> 4
| (_, (D|I|E _|F (_, _)|H _|K _), _) -> 8
| (J,J,((C|D) as x |E x|F (_,x))) | (J,_,((C|J) as x)) -> autre (x,x,x)
| (J, J, (I|H _|K _)) -> 9
| I,_,_ -> 6
| E _,_,_ -> 7
;;
test "autre" autre (J,J,F (D,D)) 3 ;
test "autre" autre (J,J,D) 3 ;
test "autre" autre (J,J,I) 9 ;
test "autre" autre (H I,I,I) 4 ;
test "autre" autre (J,J,H I) 9 ; ()
;;
type youpi = YA | YB | YC
and hola = X | Y | Z | T of hola | U of hola | V of hola
let xyz = function
| YA,_,_ -> 1
| _,YA,_ -> 2
| YB,YB,_ -> 3
| ((YB|YC), (YB|YC), (X|Y|Z|V _|T _)) -> 6
| _,_,(X|U _) -> 8
| _,_,Y -> 5
;;
test "xyz" xyz (YC,YC,X) 6 ;
test "xyz" xyz (YC,YB,U X) 8 ;
test "xyz" xyz (YB,YC,X) 6 ; ()
;;
(* This test is for the compiler itself *)
let eq (x,y) = x=y
;;
test "eq" eq ("coucou", "coucou") true ; ()
;;
(* Test guards, non trivial *)
let is_none = function
| None -> true
| _ -> false
let guard x = match x with
| (Some _, _) when is_none (snd x) -> 1
| (Some (pc, _), Some pc') when pc = pc' -> 2
| _ -> 3
;;
test "guard" guard (Some (1,1),None) 1 ;
test "guard" guard (Some (1,1),Some 1) 2 ;
test "guard" guard (Some (2,1),Some 1) 3 ; ()
;;
let orstring = function
| ("A"|"B"|"C") -> 2
| "D" -> 3
| _ -> 4
;;
test "orstring" orstring "A" 2 ;
test "orstring" orstring "B" 2 ;
test "orstring" orstring "C" 2 ;
test "orstring" orstring "D" 3 ;
test "orstring" orstring "E" 4 ; ()
;;
type var_t = [`Variant of [ `Some of string | `None | `Foo] ]
let crash (pat:var_t) =
match pat with
| `Variant (`Some tag) -> tag
| `Variant (`None) -> "none"
| _ -> "foo"
;;
test "crash" crash (`Variant `None) "none" ;
test "crash" crash (`Variant (`Some "coucou")) "coucou" ;
test "crash" crash (`Variant (`Foo)) "foo" ; ()
;;
let flatguard c =
let x,y = c in
match x,y with
| (1,2)|(2,3) when y=2 -> 1
| (1,_)|(_,3) -> 2
| _ -> 3
;;
test "flatguard" flatguard (1,2) 1 ;
test "flatguard" flatguard (1,3) 2 ;
test "flatguard" flatguard (2,3) 2 ;
test "flatguard" flatguard (2,4) 3 ; ()
;;
's bugs
type f =
| ABSENT
| FILE
| SYMLINK
| DIRECTORY
type r =
| Unchanged
| Deleted
| Modified
| PropsChanged
| Created
let replicaContent2shortString rc =
let (typ, status) = rc in
match typ, status with
_, Unchanged -> " "
| ABSENT, Deleted -> "deleted "
| FILE, Created -> "new file"
| FILE, Modified -> "changed "
| FILE, PropsChanged -> "props "
| SYMLINK, Created -> "new link"
| SYMLINK, Modified -> "chgd lnk"
| DIRECTORY, Created -> "new dir "
| DIRECTORY, Modified -> "chgd dir"
| DIRECTORY, PropsChanged -> "props "
(* Cases that can't happen... *)
| ABSENT, (Created | Modified | PropsChanged)
| SYMLINK, PropsChanged
| (FILE|SYMLINK|DIRECTORY), Deleted
-> "assert false"
;;
test "jerome_constr"
replicaContent2shortString (ABSENT, Unchanged) " " ;
test "jerome_constr"
replicaContent2shortString (ABSENT, Deleted) "deleted " ;
test "jerome_constr"
replicaContent2shortString (FILE, Modified) "changed " ;
test "jerome_constr"
replicaContent2shortString (DIRECTORY, PropsChanged) "props " ;
test "jerome_constr"
replicaContent2shortString (FILE, Deleted) "assert false" ;
test "jerome_constr"
replicaContent2shortString (SYMLINK, Deleted) "assert false" ;
test "jerome_constr"
replicaContent2shortString (SYMLINK, PropsChanged) "assert false" ;
test "jerome_constr"
replicaContent2shortString (DIRECTORY, Deleted) "assert false" ;
test "jerome_constr"
replicaContent2shortString (ABSENT, Created) "assert false" ;
test "jerome_constr"
replicaContent2shortString (ABSENT, Modified) "assert false" ;
test "jerome_constr"
replicaContent2shortString (ABSENT, PropsChanged) "assert false" ;
;;
let replicaContent2shortString rc =
let (typ, status) = rc in
match typ, status with
_, `Unchanged -> " "
| `ABSENT, `Deleted -> "deleted "
| `FILE, `Created -> "new file"
| `FILE, `Modified -> "changed "
| `FILE, `PropsChanged -> "props "
| `SYMLINK, `Created -> "new link"
| `SYMLINK, `Modified -> "chgd lnk"
| `DIRECTORY, `Created -> "new dir "
| `DIRECTORY, `Modified -> "chgd dir"
| `DIRECTORY, `PropsChanged -> "props "
(* Cases that can't happen... *)
| `ABSENT, (`Created | `Modified | `PropsChanged)
| `SYMLINK, `PropsChanged
| (`FILE|`SYMLINK|`DIRECTORY), `Deleted
-> "assert false"
;;
test "jerome_variant"
replicaContent2shortString (`ABSENT, `Unchanged) " " ;
test "jerome_variant"
replicaContent2shortString (`ABSENT, `Deleted) "deleted " ;
test "jerome_variant"
replicaContent2shortString (`FILE, `Modified) "changed " ;
test "jerome_variant"
replicaContent2shortString (`DIRECTORY, `PropsChanged) "props " ;
test "jerome_variant"
replicaContent2shortString (`FILE, `Deleted) "assert false" ;
test "jerome_variant"
replicaContent2shortString (`SYMLINK, `Deleted) "assert false" ;
test "jerome_variant"
replicaContent2shortString (`SYMLINK, `PropsChanged) "assert false" ;
test "jerome_variant"
replicaContent2shortString (`DIRECTORY, `Deleted) "assert false" ;
test "jerome_variant"
replicaContent2shortString (`ABSENT, `Created) "assert false" ;
test "jerome_variant"
replicaContent2shortString (`ABSENT, `Modified) "assert false" ;
test "jerome_variant"
replicaContent2shortString (`ABSENT, `PropsChanged) "assert false" ;
;;
bug 319
type ab = A of int | B of int
type cd = C | D
let ohl = function
| (A (p) | B (p)), C -> p
| (A (p) | B (p)), D -> p
;;
test "ohl" ohl (A 0,C) 0 ;
test "ohl" ohl (B 0,D) 0 ; ()
;;
bug 324
type pottier =
| A
| B
;;
let pottier x =
match x with
| (( (A, 1) | (B, 2)),A) -> false
| _ -> true
;;
test "pottier" pottier ((B,2),A) false ;
test "pottier" pottier ((B,2),B) true ;
test "pottier" pottier ((A,2),A) true ; ()
;;
bug 325 in bytecode compiler
let coquery q = match q with
| y,0,([modu;defs]| [defs;modu;_]) -> y+defs-modu
| _ -> 0
;;
test "coquery" coquery (1,0,[1 ; 2 ; 3]) 0 ;
test "coquery" coquery (1,0,[1 ; 2]) 2 ; ()
;;
Two other variable in or - pat tests
Two other variable in or-pat tests
*)
type vars = A of int | B of (int * int) | C
;;
let vars1 = function
| (A x | B (_,x)) -> x
| C -> 0
;;
test "vars1" vars1 (A 1) 1 ;
test "vars1" vars1 (B (1,2)) 2 ; ()
;;
let vars2 = function
| ([x]|[1 ; x ]|[1 ; 2 ; x]) -> x
| _ -> 0
;;
test"vars2" vars2 [1] 1 ;
test"vars2" vars2 [1;2] 2 ;
test"vars2" vars2 [1;2;3] 3 ;
test"vars2" vars2 [0 ; 0] 0 ; ()
;;
(* Bug 342 *)
type eber = {x:int; y: int; z:bool}
let eber = function
| {x=a; z=true}
| {y=a; z=false} -> a
;;
test "eber" eber {x=0 ; y=1 ; z=true} 0 ;
test "eber" eber {x=1 ; y=0 ; z=false} 0 ; ()
;;
(* Chaining interval tests *)
let escaped = function
| '\"' | '\\' | '\n' | '\t' -> 2
| c -> 1
;;
test "escaped" escaped '\"' 2 ;
test "escaped" escaped '\\' 2 ;
test "escaped" escaped '\n' 2 ;
test "escaped" escaped '\t' 2 ;
test "escaped" escaped '\000' 1 ;
test "escaped" escaped ' ' 1 ;
test "escaped" escaped '\000' 1 ;
test "escaped" escaped '[' 1 ;
test "escaped" escaped ']' 1 ;
test "escaped" escaped '!' 1 ;
test "escaped" escaped '#' 1 ;
()
;;
For compilation speed ( due to )
exception Unknown_Reply of int
type command_reply =
RPL_TRYAGAIN
| RPL_TRACEEND
| RPL_TRACELOG
| RPL_ADMINEMAIL
| RPL_ADMINLOC2
| RPL_ADMINLOC1
| RPL_ADMINME
| RPL_LUSERME
| RPL_LUSERCHANNELS
| RPL_LUSERUNKNOWN
| RPL_LUSEROP
| RPL_LUSERCLIENT
| RPL_STATSDLINE
| RPL_STATSDEBUG
| RPL_STATSDEFINE
| RPL_STATSBLINE
| RPL_STATSPING
| RPL_STATSSLINE
| RPL_STATSHLINE
| RPL_STATSOLINE
| RPL_STATSUPTIME
| RPL_STATSLLINE
| RPL_STATSVLINE
| RPL_SERVLISTEND
| RPL_SERVLIST
| RPL_SERVICE
| RPL_ENDOFSERVICES
| RPL_SERVICEINFO
| RPL_UMODEIS
| RPL_ENDOFSTATS
| RPL_STATSYLINE
| RPL_STATSQLINE
| RPL_STATSKLINE
| RPL_STATSILINE
| RPL_STATSNLINE
| RPL_STATSCLINE
| RPL_STATSCOMMANDS
| RPL_STATSLINKINFO
| RPL_TRACERECONNECT
| RPL_TRACECLASS
| RPL_TRACENEWTYPE
| RPL_TRACESERVICE
| RPL_TRACESERVER
| RPL_TRACEUSER
| RPL_TRACEOPERATOR
| RPL_TRACEUNKNOWN
| RPL_TRACEHANDSHAKE
| RPL_TRACECONNECTING
| RPL_TRACELINK
| RPL_NOUSERS
| RPL_ENDOFUSERS
| RPL_USERS
| RPL_USERSSTART
| RPL_TIME
| RPL_NOTOPERANYMORE
| RPL_MYPORTIS
| RPL_YOURESERVICE
| RPL_REHASHING
| RPL_YOUREOPER
| RPL_ENDOFMOTD
| RPL_MOTDSTART
| RPL_ENDOFINFO
| RPL_INFOSTART
| RPL_MOTD
| RPL_INFO
| RPL_ENDOFBANLIST
| RPL_BANLIST
| RPL_ENDOFLINKS
| RPL_LINKS
| RPL_CLOSEEND
| RPL_CLOSING
| RPL_KILLDONE
| RPL_ENDOFNAMES
| RPL_NAMREPLY
| RPL_ENDOFWHO
| RPL_WHOREPLY
| RPL_VERSION
| RPL_SUMMONING
| RPL_INVITING
| RPL_TOPIC
| RPL_NOTOPIC
| RPL_CHANNELMODEIS
| RPL_LISTEND
| RPL_LIST
| RPL_LISTSTART
| RPL_WHOISCHANNELS
| RPL_ENDOFWHOIS
| RPL_WHOISIDLE
| RPL_WHOISCHANOP
| RPL_ENDOFWHOWAS
| RPL_WHOWASUSER
| RPL_WHOISOPERATOR
| RPL_WHOISSERVER
| RPL_WHOISUSER
| RPL_NOWAWAY
| RPL_UNAWAY
| RPL_TEXT
| RPL_ISON
| RPL_USERHOST
| RPL_AWAY
| RPL_NONE
let get_command_reply n =
match n with
263 -> RPL_TRYAGAIN
| 319 -> RPL_WHOISCHANNELS
| 318 -> RPL_ENDOFWHOIS
| 317 -> RPL_WHOISIDLE
| 316 -> RPL_WHOISCHANOP
| 369 -> RPL_ENDOFWHOWAS
| 314 -> RPL_WHOWASUSER
| 313 -> RPL_WHOISOPERATOR
| 312 -> RPL_WHOISSERVER
| 311 -> RPL_WHOISUSER
| 262 -> RPL_TRACEEND
| 261 -> RPL_TRACELOG
| 259 -> RPL_ADMINEMAIL
| 258 -> RPL_ADMINLOC2
| 257 -> RPL_ADMINLOC1
| 256 -> RPL_ADMINME
| 255 -> RPL_LUSERME
| 254 -> RPL_LUSERCHANNELS
| 253 -> RPL_LUSERUNKNOWN
| 252 -> RPL_LUSEROP
| 251 -> RPL_LUSERCLIENT
| 250 -> RPL_STATSDLINE
| 249 -> RPL_STATSDEBUG
| 248 -> RPL_STATSDEFINE
| 247 -> RPL_STATSBLINE
| 246 -> RPL_STATSPING
| 245 -> RPL_STATSSLINE
| 244 -> RPL_STATSHLINE
| 243 -> RPL_STATSOLINE
| 242 -> RPL_STATSUPTIME
| 241 -> RPL_STATSLLINE
| 240 -> RPL_STATSVLINE
| 235 -> RPL_SERVLISTEND
| 234 -> RPL_SERVLIST
| 233 -> RPL_SERVICE
| 232 -> RPL_ENDOFSERVICES
| 231 -> RPL_SERVICEINFO
| 221 -> RPL_UMODEIS
| 219 -> RPL_ENDOFSTATS
| 218 -> RPL_STATSYLINE
| 217 -> RPL_STATSQLINE
| 216 -> RPL_STATSKLINE
| 215 -> RPL_STATSILINE
| 214 -> RPL_STATSNLINE
| 213 -> RPL_STATSCLINE
| 212 -> RPL_STATSCOMMANDS
| 211 -> RPL_STATSLINKINFO
| 210 -> RPL_TRACERECONNECT
| 209 -> RPL_TRACECLASS
| 208 -> RPL_TRACENEWTYPE
| 207 -> RPL_TRACESERVICE
| 206 -> RPL_TRACESERVER
| 205 -> RPL_TRACEUSER
| 204 -> RPL_TRACEOPERATOR
| 203 -> RPL_TRACEUNKNOWN
| 202 -> RPL_TRACEHANDSHAKE
| 201 -> RPL_TRACECONNECTING
| 200 -> RPL_TRACELINK
| 395 -> RPL_NOUSERS
| 394 -> RPL_ENDOFUSERS
| 393 -> RPL_USERS
| 392 -> RPL_USERSSTART
| 391 -> RPL_TIME
| 385 -> RPL_NOTOPERANYMORE
| 384 -> RPL_MYPORTIS
| 383 -> RPL_YOURESERVICE
| 382 -> RPL_REHASHING
| 381 -> RPL_YOUREOPER
| 376 -> RPL_ENDOFMOTD
| 375 -> RPL_MOTDSTART
| 374 -> RPL_ENDOFINFO
| 373 -> RPL_INFOSTART
| 372 -> RPL_MOTD
| 371 -> RPL_INFO
| 368 -> RPL_ENDOFBANLIST
| 367 -> RPL_BANLIST
| 365 -> RPL_ENDOFLINKS
| 364 -> RPL_LINKS
| 363 -> RPL_CLOSEEND
| 362 -> RPL_CLOSING
| 361 -> RPL_KILLDONE
| 366 -> RPL_ENDOFNAMES
| 353 -> RPL_NAMREPLY
| 315 -> RPL_ENDOFWHO
| 352 -> RPL_WHOREPLY
| 351 -> RPL_VERSION
| 342 -> RPL_SUMMONING
| 341 -> RPL_INVITING
| 332 -> RPL_TOPIC
| 331 -> RPL_NOTOPIC
| 324 -> RPL_CHANNELMODEIS
| 323 -> RPL_LISTEND
| 322 -> RPL_LIST
| 321 -> RPL_LISTSTART
| 306 -> RPL_NOWAWAY
| 305 -> RPL_UNAWAY
| 304 -> RPL_TEXT
| 303 -> RPL_ISON
| 302 -> RPL_USERHOST
| 301 -> RPL_AWAY
| 300 -> RPL_NONE
| _ -> raise (Unknown_Reply n)
(* Bug 454 *)
type habert_a=
| A of habert_c
| B of habert_c
and habert_c= {lvar:int; lassoc: habert_c;lnb:int}
let habert=function
| (A {lnb=i}|B {lnb=i}) when i=0 -> 1
| A {lassoc=({lnb=j});lnb=i} -> 2
| _ -> 3
;;
let rec ex0 = {lvar=0 ; lnb=0 ; lassoc=ex1}
and ex1 = {lvar=1 ; lnb=1 ; lassoc=ex0} in
test "habert" habert (A ex0) 1 ;
test "habert" habert (B ex0) 1 ;
test "habert" habert (A ex1) 2 ;
test "habert" habert (B ex1) 3 ;
Problems with interval test in arithmetic mod 2 ^ 31 , bug # 359
From
type type_expr = [
| `TTuple of type_expr list
| `TConstr of type_expr list
| `TVar of string
| `TVariant of string list
| `TBlock of int
| `TCopy of type_expr
]
and recurs_type_expr = [
| `TTuple of type_expr list
| `TConstr of type_expr list
| `TVariant of string list
]
let rec maf te =
match te with
| `TCopy te -> 1
| `TVar _ -> 2
| `TBlock _ -> 2
| #recurs_type_expr as desc ->
let te =
(match desc with
`TTuple tl ->
4
| `TConstr tl ->
5
| `TVariant (row) ->
6
)
in
te
;;
let base = `TBlock 0
;;
test "maf" maf (`TCopy base) 1 ;
test "maf" maf (`TVar "test") 2 ;
test "maf" maf (`TBlock 0) 2 ;
test "maf" maf (`TTuple []) 4 ;
test "maf" maf (`TConstr []) 5 ;
test "maf" maf (`TVariant []) 6
;;
PR#3517
Using ` ` get_args '' in place or an ad - hoc ` ` matcher '' function for tuples .
Has made the compiler [ 3.05 ] to fail .
Using ``get_args'' in place or an ad-hoc ``matcher'' function for tuples.
Has made the compiler [3.05] to fail.
*)
type t_seb = Uin | Uout
;;
let rec seb = function
| ((i, Uin) | (i, Uout)), Uout -> 1
| ((j, Uin) | (j, Uout)), Uin -> 2
;;
test "seb" seb ((0,Uin),Uout) 1 ;
test "seb" seb ((0,Uout),Uin) 2 ;
()
;;
Talk with
- type ' b is still open ? ?
- better case generation , accept intervals of size 1 when ok_inter is
false ( in Switch )
- type 'b is still open ??
- better case generation, accept intervals of size 1 when ok_inter is
false (in Switch)
*)
type ('a, 'b) t_j = A of 'a | B of 'b * 'a | C
let f = function
| A (`A|`C) -> 0
| B (`B,`D) -> 1
| C -> 2
let g x = try f x with Match_failure _ -> 3
let _ =
test "jacques" g (A `A) 0 ;
test "jacques" g (A `C) 0 ;
test "jacques" g (B (`B,`D)) 1 ;
test "jacaues" g C 2 ;
test " jacques " g ( B ( ` A,`D ) ) 3 ; ( * type incorrect expected behavior ?
()
Compilation bug , segfault , because of incorrect compilation
of unused match case .. - > " 11 "
Compilation bug, segfault, because of incorrect compilation
of unused match case .. -> "11"
*)
type t_l = A | B
let f = function
| _, _, _, _, _, _, _, _, _, _, _, _, _, B, _, _ -> "0"
| _, _, _, B, A, _, _, _, _, _, _, _, _, _, _, _ -> "1"
| _, _, _, B, _, A, _, _, A, _, _, _, _, _, _, _ -> "2"
| _, _, _, _, _, _, _, _, _, _, B, A, _, A, _, _ -> "3"
| _, _, _, _, _, _, _, B, _, _, _, _, B, _, A, A -> "4"
| A, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ -> "5"
| _, _, _, _, _, _, _, B, _, B, _, _, _, _, _, _ -> "6"
| _, B, _, _, _, _, _, _, _, _, _, _, _, _, _, _ -> "7"
| _, A, A, _, A, _, B, _, _, _, _, _, _, _, _, B -> "8"
| _, _, _, _, B, _, _, _, _, _, _, _, _, _, B, _ -> "9"
| _, _, _, _, _, _, _, _, _, _, _, B, _, _, _, _ -> "10"
| _, _, _, _, _, A, _, _, _, _, B, _, _, _, _, _ -> "11"
| B, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ -> "12"
| _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ -> "13"
let _ =
test "luc" f (B, A, A, A, A, A, A, A, A, A, A, B, A, A, A, A) "10" ;
test "luc" f (B, A, A, A, A, A, A, A, A, A, A, A, A, A, A, A) "12" ;
()
By , compilation raised some assert false i make_failactionneg
By Gilles Peskine, compilation raised some assert false i make_failactionneg
*)
type bg = [
| `False
| `True
]
type vg = [
| `A
| `B
| `U of int
| `V of int
]
type tg = {
v : vg;
x : bg;
}
let predg x = true
let rec gilles o = match o with
| {v = (`U data | `V data); x = `False} when predg o -> 1
| {v = (`A|`B) ; x = `False}
| {v = (`U _ | `V _); x = `False}
| {v = _ ; x = `True}
-> 2
(*
Match in trywith should always have a default case
*)
exception Found of string * int
exception Error of string
let lucexn e =
try
try raise e with Error msg -> msg
with Found (s,r) -> s^Int.to_string r
let () =
test "lucexn1" lucexn (Error "coucou") "coucou" ;
test "lucexn2" lucexn (Found ("int: ",0)) "int: 0" ;
()
(*
PR#5758: different representations of floats
*)
let pr5758 x str =
match (x, str) with
| (1. , "A") -> "Matched A"
| (1.0, "B") -> "Matched B"
| (1. , "C") -> "Matched C"
| result ->
match result with
| (1., "A") -> "Failed match A then later matched"
| _ -> "Failed twice"
;;
let () =
test "pr5758" (pr5758 1.) "A" "Matched A"
;;
| null | https://raw.githubusercontent.com/well-typed-lightbulbs/ocaml-esp32/c24fcbfbee0e3aa6bb71c9b467c60c6bac326cc7/testsuite/tests/basic-more/morematch.ml | ocaml | TEST
include testing
************************************************************
This suite tests the pattern-matching compiler
it should just compile and run.
While compiling the following messages are normal:
************************************************************
Number of integer cases
Number of tag block cases
Tag block cases
True if bound checks needed
True if should not fail
This test is for the compiler itself
Test guards, non trivial
Cases that can't happen...
Cases that can't happen...
Bug 342
Chaining interval tests
Bug 454
Match in trywith should always have a default case
PR#5758: different representations of floats
|
let test msg f arg r =
if f arg <> r then begin
prerr_endline msg ;
failwith "Malaise"
end
;;
type t = A | B | C | D | E | F
;;
let f x = match x with
| A | B | C -> 1
| D | E -> 2
| F -> 3;;
test "un" f C 1 ;
test "un" f D 2 ;
test "un" f F 3 ; ()
;;
let g x = match x with
1 -> 1
| 2 -> 2
| 3 -> 3
| 4 | 5 -> 4
| 6 -> 5
| 7 | 8 -> 6
| 9 -> 7
| _ -> assert false
;;
test "deux" g 5 4 ;
test "deux" g 6 5 ;
test "deux" g 9 7 ; ()
;;
let g x = match x with
1 -> 1
| 2 -> 2
| 3 -> 3
| 4 | 5 -> 4
| 6 -> 5
| 7 | 8 -> 6
| 9 -> 7
| _ -> 8;;
test "trois" g 10 8
;;
let g x= match x with
1 -> 1
| 2 -> 2
| 3 -> 3
| 4 | 5 -> 4
| 6 -> 5
| 4|5|7 -> 100
| 7 | 8 -> 6
| 9 -> 7
| _ -> 8
;;
test "quatre" g 4 4 ;
test "quatre" g 7 100 ; ()
;;
let h x =
match x with
(1,1) -> 1
| (2|3), 1 -> 2
| 2,(2|3) -> 3
| (4,4) -> 5
| _ -> 100
;;
test "cinq" h (2,2) 3 ;
test "cinq" h (2,1) 2 ;
test "cinq" h (2,4) 100 ; ()
;;
idem hh ( 2,5 )
let hh x = match x with
| 1,1 -> 1
| 2,1 -> 2
| (2|3),(1|2|3|4) -> 3
| 2,5 -> 4
| (4,4) -> 5
| _ -> 100
;;
let hhh x = match x with
| 1,1 -> 1
| (2|3),1 -> 2
| 2,2 -> 3
| _ -> 100
;;
let h x =
match x with
(1,1) -> 1
| 3,1 -> 2
| 2,(2|3) -> 3
| (4,4) -> 5
| _ -> 100
;;
let h x = match x with
1 -> 1
| 2|3 -> 2
| 4 -> 4
| 5 -> 5
| 6|7 -> 6
| 8 -> 8
| _ -> 100
;;
let f x = match x with
| ((1|2),(3|4))|((3|4),(1|2)) -> 1
| (3,(5|6)) -> 2
| _ -> 3
;;
test "six" f (1,3) 1 ;
test "six" f (3,2) 1 ;
test "six" f (3,5) 2 ;
test "six" f (3,7) 3 ; ()
;;
type tt = {a : bool list ; b : bool}
let f = function
| {a=([]|[true])} -> 1
| {a=false::_}|{b=(true|false)} -> 2
;;
test "sept" f {a=[] ; b = true} 1 ;
test "sept" f {a=[true] ; b = false} 1 ;
test "sept" f {a=[false ; true] ; b = true} 2 ;
test "sept" f {a=[false] ; b = false} 2 ; ()
;;
let f = function
| (([]|[true]),_) -> 1
| (false::_,_)|(_,(true|false)) -> 2
;;
test "huit" f ([],true) 1 ;
test "huit" f ([true],false) 1 ;
test "huit" f ([false ; true], true) 2 ;
test "huit" f ([false], false) 2 ; ()
;;
let split_cases = function
| `Nil | `Cons _ as x -> `A x
| `Snoc _ as x -> `B x
;;
test "oubli" split_cases `Nil (`A `Nil);
test "oubli" split_cases (`Cons 1) (`A (`Cons 1));
test "oubli" split_cases (`Snoc 1) (`B (`Snoc 1)) ; ()
;;
type t1 = A of int | B of int
let f1 = function
| (A x | B x) -> x
;;
test "neuf" f1 (A 1) 1 ;
test "neuf" f1 (B 1) 1 ;
;;
type coucou = A of int | B of int * int | C
;;
let g = function
| (A x | B (_,x)) -> x
| C -> 0
;;
test "dix" g (A 1) 1 ;
test "dix" g (B (1,2)) 2 ;
;;
let h = function
| ([x]|[1 ; x ]|[1 ; 2 ; x]) -> x
| _ -> 0
;;
test "encore" h [1] 1 ;
test "encore" h [1;2] 2 ;
test "encore" h [1;2;3] 3 ;
test "encore" h [0 ; 0] 0 ; ()
;;
let f = function
| (x,(0 as y)) | (y,x) -> y-x
;;
test "foo1" f (1,0) (-1);
test "foo1" f (1,2) (-1)
;;
let f = function (([]|[_]) as x)|(_::([] as x))|(_::_::x) -> x
;;
test "zob" f [] [] ;
test "zob" f [1] [1] ;
test "zob" f [1;2;3] [3]
;;
type zob = A | B | C | D of zob * int | E of zob * zob
let rec f = function
| (A | B | C) -> A
| D (x,i) -> D (f x,i)
| E (x,_) -> D (f x,0)
;;
test "fin" f B A ;
test "fin" f (D (C,1)) (D (A,1)) ;
test "fin" f (E (C,A)) (D (A,0)) ; ()
;;
type length =
Char of int | Pixel of int | Percent of int | No of string | Default
let length = function
| Char n -> n | Pixel n -> n
| _ -> 0
;;
test "length" length (Char 10) 10 ;
test "length" length (Pixel 20) 20 ;
test "length" length Default 0 ;
test "length" length (Percent 100) 0 ; ()
;;
let length2 = function
| Char n -> n | Percent n -> n
| _ -> 0
;;
test "length2" length2 (Char 10) 10 ;
test "length2" length2 (Pixel 20) 0 ;
test "length2" length2 Default 0 ;
test "length2" length2(Percent 100) 100 ; ()
;;
let length3 = function
| Char _ | No _ -> true
| _ -> false
;;
test "length3" length3 (Char 10) true ;
test "length3" length3 (No "") true ;
test "length3" length3 (Pixel 20) false ;
test "length3" length3 Default false ;
test "length3" length3(Percent 100) false ; ()
;;
type hevea = A | B | C
let h x = match x with
| A -> 1
| B|C -> 2
;;
test "hevea" h A 1 ;
test "hevea" h B 2 ;
test "hevea" h B 2 ; ()
;;
type lambda =
Lvar of int
| Lconst of int
| Lapply of lambda * lambda list
| Lfunction of bool * int list * lambda
| Llet of bool * int * lambda * lambda
| Lletrec of (int * lambda) list * lambda
| Lprim of string * lambda list
| Lswitch of lambda * lambda_switch
| Lstaticfail
| Lcatch of lambda * lambda
| Lstaticraise of int * lambda list
| Lstaticcatch of lambda * (int * int list) * lambda
| Ltrywith of lambda * int * lambda
| Lifthenelse of lambda * lambda * lambda
| Lsequence of lambda * lambda
| Lwhile of lambda * lambda
| Lfor of int * lambda * lambda * bool * lambda
| Lassign of int * lambda
| Lsend of lambda * lambda * lambda list
| Levent of lambda * lambda_event
| Lifused of int * lambda
and lambda_switch =
Integer cases
and lambda_event =
{ lev_loc: int;
lev_kind: bool ;
lev_repr: int ref option;
lev_env: int list }
let rec approx_present v l = true
let rec lower_bind v arg lam = match lam with
| Lifthenelse (cond, ifso, ifnot) -> 1
| Lswitch (ls,({sw_consts=[i,act] ; sw_blocks = []} as _sw))
when not (approx_present v ls) -> 2
| Lswitch (ls,({sw_consts=[] ; sw_blocks = [i,act]} as _sw))
when not (approx_present v ls) -> 3
| Llet (true , vv, lv, l) -> 4
| _ -> 5
;;
test "lower_bind" (lower_bind 0 0) (Llet (true,0, Lvar 1, Lvar 2)) 4 ;
test "lower_bind" (lower_bind 0 0) (Lvar 0) 5 ;
test "lower_bind" (lower_bind 0 0) (Lifthenelse (Lvar 0, Lvar 1, Lvar 2)) 1
;;
type field_kind =
Fvar of field_kind option ref
| Fpresent
| Fabsent
let unify_kind (k1, k2) = match k1, k2 with
(Fvar r, (Fvar _ | Fpresent)) -> 1
| (Fpresent, Fvar r) -> 2
| (Fpresent, Fpresent) -> 3
| _ -> 4
let r = ref (Some Fpresent)
;;
test "unify" unify_kind (Fvar r, Fpresent) 1 ;
test "unify" unify_kind (Fvar r, Fvar r) 1 ;
test "unify" unify_kind (Fvar r, Fabsent) 4 ;
test "unify" unify_kind (Fpresent, Fvar r) 2 ;
test "unify" unify_kind (Fpresent, Fpresent) 3 ;
test "unify" unify_kind (Fabsent, Fpresent) 4 ; ()
;;
type youyou = A | B | C | D of youyou
let foo (k1, k2) = match k1,k2 with
| D _, (A|D _) -> 1
| (A|B),D _ -> 2
| C,_ -> 3
| _, (A|B|C) -> 4
;;
test "foo2" foo (D A,A) 1 ;
test "foo2" foo (D A,B) 4 ;
test "foo2" foo (A,A) 4 ; ()
;;
type yaya = A | B
;;
let yaya = function
| A,_,_ -> 1
| _,A,_ -> 2
| B,B,_ -> 3
| A,_,(100|103) -> 5
;;
test "yaya" yaya (A,A,0) 1 ;
test "yaya" yaya (B,A,0) 2 ;
test "yaya" yaya (B,B,100) 3 ; ()
;;
let yoyo = function
| [],_,_ -> 1
| _,[],_ -> 2
| _::_,_::_,_ -> 3
| [],_,(100|103|104) -> 5
| [],_,(100|103) -> 6
| [],_,(1000|1001|1002|20000) -> 7
;;
test "yoyo" yoyo ([],[],0) 1 ;
test "yoyo" yoyo ([1],[],0) 2 ;
test "yoyo" yoyo ([1],[1],100) 3 ; ()
;;
let youyou = function
| (100|103|104) -> 1
| (100|103|101) -> 2
| (1000|1001|1002|20000) -> 3
| _ -> -1
;;
test "youyou" youyou 100 1 ;
test "youyou" youyou 101 2 ;
test "youyou" youyou 1000 3
;;
type autre =
| C | D | E of autre | F of autre * autre | H of autre | I | J | K of string
let rec autre = function
| C,_,_ -> 1
| _,C,_ -> 2
| D,D,_ -> 3
| (D|F (_,_)|H _|K _),_,_ -> 4
| (_, (D|I|E _|F (_, _)|H _|K _), _) -> 8
| (J,J,((C|D) as x |E x|F (_,x))) | (J,_,((C|J) as x)) -> autre (x,x,x)
| (J, J, (I|H _|K _)) -> 9
| I,_,_ -> 6
| E _,_,_ -> 7
;;
test "autre" autre (J,J,F (D,D)) 3 ;
test "autre" autre (J,J,D) 3 ;
test "autre" autre (J,J,I) 9 ;
test "autre" autre (H I,I,I) 4 ;
test "autre" autre (J,J,H I) 9 ; ()
;;
type youpi = YA | YB | YC
and hola = X | Y | Z | T of hola | U of hola | V of hola
let xyz = function
| YA,_,_ -> 1
| _,YA,_ -> 2
| YB,YB,_ -> 3
| ((YB|YC), (YB|YC), (X|Y|Z|V _|T _)) -> 6
| _,_,(X|U _) -> 8
| _,_,Y -> 5
;;
test "xyz" xyz (YC,YC,X) 6 ;
test "xyz" xyz (YC,YB,U X) 8 ;
test "xyz" xyz (YB,YC,X) 6 ; ()
;;
let eq (x,y) = x=y
;;
test "eq" eq ("coucou", "coucou") true ; ()
;;
let is_none = function
| None -> true
| _ -> false
let guard x = match x with
| (Some _, _) when is_none (snd x) -> 1
| (Some (pc, _), Some pc') when pc = pc' -> 2
| _ -> 3
;;
test "guard" guard (Some (1,1),None) 1 ;
test "guard" guard (Some (1,1),Some 1) 2 ;
test "guard" guard (Some (2,1),Some 1) 3 ; ()
;;
let orstring = function
| ("A"|"B"|"C") -> 2
| "D" -> 3
| _ -> 4
;;
test "orstring" orstring "A" 2 ;
test "orstring" orstring "B" 2 ;
test "orstring" orstring "C" 2 ;
test "orstring" orstring "D" 3 ;
test "orstring" orstring "E" 4 ; ()
;;
type var_t = [`Variant of [ `Some of string | `None | `Foo] ]
let crash (pat:var_t) =
match pat with
| `Variant (`Some tag) -> tag
| `Variant (`None) -> "none"
| _ -> "foo"
;;
test "crash" crash (`Variant `None) "none" ;
test "crash" crash (`Variant (`Some "coucou")) "coucou" ;
test "crash" crash (`Variant (`Foo)) "foo" ; ()
;;
let flatguard c =
let x,y = c in
match x,y with
| (1,2)|(2,3) when y=2 -> 1
| (1,_)|(_,3) -> 2
| _ -> 3
;;
test "flatguard" flatguard (1,2) 1 ;
test "flatguard" flatguard (1,3) 2 ;
test "flatguard" flatguard (2,3) 2 ;
test "flatguard" flatguard (2,4) 3 ; ()
;;
's bugs
type f =
| ABSENT
| FILE
| SYMLINK
| DIRECTORY
type r =
| Unchanged
| Deleted
| Modified
| PropsChanged
| Created
let replicaContent2shortString rc =
let (typ, status) = rc in
match typ, status with
_, Unchanged -> " "
| ABSENT, Deleted -> "deleted "
| FILE, Created -> "new file"
| FILE, Modified -> "changed "
| FILE, PropsChanged -> "props "
| SYMLINK, Created -> "new link"
| SYMLINK, Modified -> "chgd lnk"
| DIRECTORY, Created -> "new dir "
| DIRECTORY, Modified -> "chgd dir"
| DIRECTORY, PropsChanged -> "props "
| ABSENT, (Created | Modified | PropsChanged)
| SYMLINK, PropsChanged
| (FILE|SYMLINK|DIRECTORY), Deleted
-> "assert false"
;;
test "jerome_constr"
replicaContent2shortString (ABSENT, Unchanged) " " ;
test "jerome_constr"
replicaContent2shortString (ABSENT, Deleted) "deleted " ;
test "jerome_constr"
replicaContent2shortString (FILE, Modified) "changed " ;
test "jerome_constr"
replicaContent2shortString (DIRECTORY, PropsChanged) "props " ;
test "jerome_constr"
replicaContent2shortString (FILE, Deleted) "assert false" ;
test "jerome_constr"
replicaContent2shortString (SYMLINK, Deleted) "assert false" ;
test "jerome_constr"
replicaContent2shortString (SYMLINK, PropsChanged) "assert false" ;
test "jerome_constr"
replicaContent2shortString (DIRECTORY, Deleted) "assert false" ;
test "jerome_constr"
replicaContent2shortString (ABSENT, Created) "assert false" ;
test "jerome_constr"
replicaContent2shortString (ABSENT, Modified) "assert false" ;
test "jerome_constr"
replicaContent2shortString (ABSENT, PropsChanged) "assert false" ;
;;
let replicaContent2shortString rc =
let (typ, status) = rc in
match typ, status with
_, `Unchanged -> " "
| `ABSENT, `Deleted -> "deleted "
| `FILE, `Created -> "new file"
| `FILE, `Modified -> "changed "
| `FILE, `PropsChanged -> "props "
| `SYMLINK, `Created -> "new link"
| `SYMLINK, `Modified -> "chgd lnk"
| `DIRECTORY, `Created -> "new dir "
| `DIRECTORY, `Modified -> "chgd dir"
| `DIRECTORY, `PropsChanged -> "props "
| `ABSENT, (`Created | `Modified | `PropsChanged)
| `SYMLINK, `PropsChanged
| (`FILE|`SYMLINK|`DIRECTORY), `Deleted
-> "assert false"
;;
test "jerome_variant"
replicaContent2shortString (`ABSENT, `Unchanged) " " ;
test "jerome_variant"
replicaContent2shortString (`ABSENT, `Deleted) "deleted " ;
test "jerome_variant"
replicaContent2shortString (`FILE, `Modified) "changed " ;
test "jerome_variant"
replicaContent2shortString (`DIRECTORY, `PropsChanged) "props " ;
test "jerome_variant"
replicaContent2shortString (`FILE, `Deleted) "assert false" ;
test "jerome_variant"
replicaContent2shortString (`SYMLINK, `Deleted) "assert false" ;
test "jerome_variant"
replicaContent2shortString (`SYMLINK, `PropsChanged) "assert false" ;
test "jerome_variant"
replicaContent2shortString (`DIRECTORY, `Deleted) "assert false" ;
test "jerome_variant"
replicaContent2shortString (`ABSENT, `Created) "assert false" ;
test "jerome_variant"
replicaContent2shortString (`ABSENT, `Modified) "assert false" ;
test "jerome_variant"
replicaContent2shortString (`ABSENT, `PropsChanged) "assert false" ;
;;
bug 319
type ab = A of int | B of int
type cd = C | D
let ohl = function
| (A (p) | B (p)), C -> p
| (A (p) | B (p)), D -> p
;;
test "ohl" ohl (A 0,C) 0 ;
test "ohl" ohl (B 0,D) 0 ; ()
;;
bug 324
type pottier =
| A
| B
;;
let pottier x =
match x with
| (( (A, 1) | (B, 2)),A) -> false
| _ -> true
;;
test "pottier" pottier ((B,2),A) false ;
test "pottier" pottier ((B,2),B) true ;
test "pottier" pottier ((A,2),A) true ; ()
;;
bug 325 in bytecode compiler
let coquery q = match q with
| y,0,([modu;defs]| [defs;modu;_]) -> y+defs-modu
| _ -> 0
;;
test "coquery" coquery (1,0,[1 ; 2 ; 3]) 0 ;
test "coquery" coquery (1,0,[1 ; 2]) 2 ; ()
;;
Two other variable in or - pat tests
Two other variable in or-pat tests
*)
type vars = A of int | B of (int * int) | C
;;
let vars1 = function
| (A x | B (_,x)) -> x
| C -> 0
;;
test "vars1" vars1 (A 1) 1 ;
test "vars1" vars1 (B (1,2)) 2 ; ()
;;
let vars2 = function
| ([x]|[1 ; x ]|[1 ; 2 ; x]) -> x
| _ -> 0
;;
test"vars2" vars2 [1] 1 ;
test"vars2" vars2 [1;2] 2 ;
test"vars2" vars2 [1;2;3] 3 ;
test"vars2" vars2 [0 ; 0] 0 ; ()
;;
type eber = {x:int; y: int; z:bool}
let eber = function
| {x=a; z=true}
| {y=a; z=false} -> a
;;
test "eber" eber {x=0 ; y=1 ; z=true} 0 ;
test "eber" eber {x=1 ; y=0 ; z=false} 0 ; ()
;;
let escaped = function
| '\"' | '\\' | '\n' | '\t' -> 2
| c -> 1
;;
test "escaped" escaped '\"' 2 ;
test "escaped" escaped '\\' 2 ;
test "escaped" escaped '\n' 2 ;
test "escaped" escaped '\t' 2 ;
test "escaped" escaped '\000' 1 ;
test "escaped" escaped ' ' 1 ;
test "escaped" escaped '\000' 1 ;
test "escaped" escaped '[' 1 ;
test "escaped" escaped ']' 1 ;
test "escaped" escaped '!' 1 ;
test "escaped" escaped '#' 1 ;
()
;;
For compilation speed ( due to )
exception Unknown_Reply of int
type command_reply =
RPL_TRYAGAIN
| RPL_TRACEEND
| RPL_TRACELOG
| RPL_ADMINEMAIL
| RPL_ADMINLOC2
| RPL_ADMINLOC1
| RPL_ADMINME
| RPL_LUSERME
| RPL_LUSERCHANNELS
| RPL_LUSERUNKNOWN
| RPL_LUSEROP
| RPL_LUSERCLIENT
| RPL_STATSDLINE
| RPL_STATSDEBUG
| RPL_STATSDEFINE
| RPL_STATSBLINE
| RPL_STATSPING
| RPL_STATSSLINE
| RPL_STATSHLINE
| RPL_STATSOLINE
| RPL_STATSUPTIME
| RPL_STATSLLINE
| RPL_STATSVLINE
| RPL_SERVLISTEND
| RPL_SERVLIST
| RPL_SERVICE
| RPL_ENDOFSERVICES
| RPL_SERVICEINFO
| RPL_UMODEIS
| RPL_ENDOFSTATS
| RPL_STATSYLINE
| RPL_STATSQLINE
| RPL_STATSKLINE
| RPL_STATSILINE
| RPL_STATSNLINE
| RPL_STATSCLINE
| RPL_STATSCOMMANDS
| RPL_STATSLINKINFO
| RPL_TRACERECONNECT
| RPL_TRACECLASS
| RPL_TRACENEWTYPE
| RPL_TRACESERVICE
| RPL_TRACESERVER
| RPL_TRACEUSER
| RPL_TRACEOPERATOR
| RPL_TRACEUNKNOWN
| RPL_TRACEHANDSHAKE
| RPL_TRACECONNECTING
| RPL_TRACELINK
| RPL_NOUSERS
| RPL_ENDOFUSERS
| RPL_USERS
| RPL_USERSSTART
| RPL_TIME
| RPL_NOTOPERANYMORE
| RPL_MYPORTIS
| RPL_YOURESERVICE
| RPL_REHASHING
| RPL_YOUREOPER
| RPL_ENDOFMOTD
| RPL_MOTDSTART
| RPL_ENDOFINFO
| RPL_INFOSTART
| RPL_MOTD
| RPL_INFO
| RPL_ENDOFBANLIST
| RPL_BANLIST
| RPL_ENDOFLINKS
| RPL_LINKS
| RPL_CLOSEEND
| RPL_CLOSING
| RPL_KILLDONE
| RPL_ENDOFNAMES
| RPL_NAMREPLY
| RPL_ENDOFWHO
| RPL_WHOREPLY
| RPL_VERSION
| RPL_SUMMONING
| RPL_INVITING
| RPL_TOPIC
| RPL_NOTOPIC
| RPL_CHANNELMODEIS
| RPL_LISTEND
| RPL_LIST
| RPL_LISTSTART
| RPL_WHOISCHANNELS
| RPL_ENDOFWHOIS
| RPL_WHOISIDLE
| RPL_WHOISCHANOP
| RPL_ENDOFWHOWAS
| RPL_WHOWASUSER
| RPL_WHOISOPERATOR
| RPL_WHOISSERVER
| RPL_WHOISUSER
| RPL_NOWAWAY
| RPL_UNAWAY
| RPL_TEXT
| RPL_ISON
| RPL_USERHOST
| RPL_AWAY
| RPL_NONE
let get_command_reply n =
match n with
263 -> RPL_TRYAGAIN
| 319 -> RPL_WHOISCHANNELS
| 318 -> RPL_ENDOFWHOIS
| 317 -> RPL_WHOISIDLE
| 316 -> RPL_WHOISCHANOP
| 369 -> RPL_ENDOFWHOWAS
| 314 -> RPL_WHOWASUSER
| 313 -> RPL_WHOISOPERATOR
| 312 -> RPL_WHOISSERVER
| 311 -> RPL_WHOISUSER
| 262 -> RPL_TRACEEND
| 261 -> RPL_TRACELOG
| 259 -> RPL_ADMINEMAIL
| 258 -> RPL_ADMINLOC2
| 257 -> RPL_ADMINLOC1
| 256 -> RPL_ADMINME
| 255 -> RPL_LUSERME
| 254 -> RPL_LUSERCHANNELS
| 253 -> RPL_LUSERUNKNOWN
| 252 -> RPL_LUSEROP
| 251 -> RPL_LUSERCLIENT
| 250 -> RPL_STATSDLINE
| 249 -> RPL_STATSDEBUG
| 248 -> RPL_STATSDEFINE
| 247 -> RPL_STATSBLINE
| 246 -> RPL_STATSPING
| 245 -> RPL_STATSSLINE
| 244 -> RPL_STATSHLINE
| 243 -> RPL_STATSOLINE
| 242 -> RPL_STATSUPTIME
| 241 -> RPL_STATSLLINE
| 240 -> RPL_STATSVLINE
| 235 -> RPL_SERVLISTEND
| 234 -> RPL_SERVLIST
| 233 -> RPL_SERVICE
| 232 -> RPL_ENDOFSERVICES
| 231 -> RPL_SERVICEINFO
| 221 -> RPL_UMODEIS
| 219 -> RPL_ENDOFSTATS
| 218 -> RPL_STATSYLINE
| 217 -> RPL_STATSQLINE
| 216 -> RPL_STATSKLINE
| 215 -> RPL_STATSILINE
| 214 -> RPL_STATSNLINE
| 213 -> RPL_STATSCLINE
| 212 -> RPL_STATSCOMMANDS
| 211 -> RPL_STATSLINKINFO
| 210 -> RPL_TRACERECONNECT
| 209 -> RPL_TRACECLASS
| 208 -> RPL_TRACENEWTYPE
| 207 -> RPL_TRACESERVICE
| 206 -> RPL_TRACESERVER
| 205 -> RPL_TRACEUSER
| 204 -> RPL_TRACEOPERATOR
| 203 -> RPL_TRACEUNKNOWN
| 202 -> RPL_TRACEHANDSHAKE
| 201 -> RPL_TRACECONNECTING
| 200 -> RPL_TRACELINK
| 395 -> RPL_NOUSERS
| 394 -> RPL_ENDOFUSERS
| 393 -> RPL_USERS
| 392 -> RPL_USERSSTART
| 391 -> RPL_TIME
| 385 -> RPL_NOTOPERANYMORE
| 384 -> RPL_MYPORTIS
| 383 -> RPL_YOURESERVICE
| 382 -> RPL_REHASHING
| 381 -> RPL_YOUREOPER
| 376 -> RPL_ENDOFMOTD
| 375 -> RPL_MOTDSTART
| 374 -> RPL_ENDOFINFO
| 373 -> RPL_INFOSTART
| 372 -> RPL_MOTD
| 371 -> RPL_INFO
| 368 -> RPL_ENDOFBANLIST
| 367 -> RPL_BANLIST
| 365 -> RPL_ENDOFLINKS
| 364 -> RPL_LINKS
| 363 -> RPL_CLOSEEND
| 362 -> RPL_CLOSING
| 361 -> RPL_KILLDONE
| 366 -> RPL_ENDOFNAMES
| 353 -> RPL_NAMREPLY
| 315 -> RPL_ENDOFWHO
| 352 -> RPL_WHOREPLY
| 351 -> RPL_VERSION
| 342 -> RPL_SUMMONING
| 341 -> RPL_INVITING
| 332 -> RPL_TOPIC
| 331 -> RPL_NOTOPIC
| 324 -> RPL_CHANNELMODEIS
| 323 -> RPL_LISTEND
| 322 -> RPL_LIST
| 321 -> RPL_LISTSTART
| 306 -> RPL_NOWAWAY
| 305 -> RPL_UNAWAY
| 304 -> RPL_TEXT
| 303 -> RPL_ISON
| 302 -> RPL_USERHOST
| 301 -> RPL_AWAY
| 300 -> RPL_NONE
| _ -> raise (Unknown_Reply n)
type habert_a=
| A of habert_c
| B of habert_c
and habert_c= {lvar:int; lassoc: habert_c;lnb:int}
let habert=function
| (A {lnb=i}|B {lnb=i}) when i=0 -> 1
| A {lassoc=({lnb=j});lnb=i} -> 2
| _ -> 3
;;
let rec ex0 = {lvar=0 ; lnb=0 ; lassoc=ex1}
and ex1 = {lvar=1 ; lnb=1 ; lassoc=ex0} in
test "habert" habert (A ex0) 1 ;
test "habert" habert (B ex0) 1 ;
test "habert" habert (A ex1) 2 ;
test "habert" habert (B ex1) 3 ;
Problems with interval test in arithmetic mod 2 ^ 31 , bug # 359
From
type type_expr = [
| `TTuple of type_expr list
| `TConstr of type_expr list
| `TVar of string
| `TVariant of string list
| `TBlock of int
| `TCopy of type_expr
]
and recurs_type_expr = [
| `TTuple of type_expr list
| `TConstr of type_expr list
| `TVariant of string list
]
let rec maf te =
match te with
| `TCopy te -> 1
| `TVar _ -> 2
| `TBlock _ -> 2
| #recurs_type_expr as desc ->
let te =
(match desc with
`TTuple tl ->
4
| `TConstr tl ->
5
| `TVariant (row) ->
6
)
in
te
;;
let base = `TBlock 0
;;
test "maf" maf (`TCopy base) 1 ;
test "maf" maf (`TVar "test") 2 ;
test "maf" maf (`TBlock 0) 2 ;
test "maf" maf (`TTuple []) 4 ;
test "maf" maf (`TConstr []) 5 ;
test "maf" maf (`TVariant []) 6
;;
PR#3517
Using ` ` get_args '' in place or an ad - hoc ` ` matcher '' function for tuples .
Has made the compiler [ 3.05 ] to fail .
Using ``get_args'' in place or an ad-hoc ``matcher'' function for tuples.
Has made the compiler [3.05] to fail.
*)
type t_seb = Uin | Uout
;;
let rec seb = function
| ((i, Uin) | (i, Uout)), Uout -> 1
| ((j, Uin) | (j, Uout)), Uin -> 2
;;
test "seb" seb ((0,Uin),Uout) 1 ;
test "seb" seb ((0,Uout),Uin) 2 ;
()
;;
Talk with
- type ' b is still open ? ?
- better case generation , accept intervals of size 1 when ok_inter is
false ( in Switch )
- type 'b is still open ??
- better case generation, accept intervals of size 1 when ok_inter is
false (in Switch)
*)
type ('a, 'b) t_j = A of 'a | B of 'b * 'a | C
let f = function
| A (`A|`C) -> 0
| B (`B,`D) -> 1
| C -> 2
let g x = try f x with Match_failure _ -> 3
let _ =
test "jacques" g (A `A) 0 ;
test "jacques" g (A `C) 0 ;
test "jacques" g (B (`B,`D)) 1 ;
test "jacaues" g C 2 ;
test " jacques " g ( B ( ` A,`D ) ) 3 ; ( * type incorrect expected behavior ?
()
Compilation bug , segfault , because of incorrect compilation
of unused match case .. - > " 11 "
Compilation bug, segfault, because of incorrect compilation
of unused match case .. -> "11"
*)
type t_l = A | B
let f = function
| _, _, _, _, _, _, _, _, _, _, _, _, _, B, _, _ -> "0"
| _, _, _, B, A, _, _, _, _, _, _, _, _, _, _, _ -> "1"
| _, _, _, B, _, A, _, _, A, _, _, _, _, _, _, _ -> "2"
| _, _, _, _, _, _, _, _, _, _, B, A, _, A, _, _ -> "3"
| _, _, _, _, _, _, _, B, _, _, _, _, B, _, A, A -> "4"
| A, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ -> "5"
| _, _, _, _, _, _, _, B, _, B, _, _, _, _, _, _ -> "6"
| _, B, _, _, _, _, _, _, _, _, _, _, _, _, _, _ -> "7"
| _, A, A, _, A, _, B, _, _, _, _, _, _, _, _, B -> "8"
| _, _, _, _, B, _, _, _, _, _, _, _, _, _, B, _ -> "9"
| _, _, _, _, _, _, _, _, _, _, _, B, _, _, _, _ -> "10"
| _, _, _, _, _, A, _, _, _, _, B, _, _, _, _, _ -> "11"
| B, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ -> "12"
| _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ -> "13"
let _ =
test "luc" f (B, A, A, A, A, A, A, A, A, A, A, B, A, A, A, A) "10" ;
test "luc" f (B, A, A, A, A, A, A, A, A, A, A, A, A, A, A, A) "12" ;
()
By , compilation raised some assert false i make_failactionneg
By Gilles Peskine, compilation raised some assert false i make_failactionneg
*)
type bg = [
| `False
| `True
]
type vg = [
| `A
| `B
| `U of int
| `V of int
]
type tg = {
v : vg;
x : bg;
}
let predg x = true
let rec gilles o = match o with
| {v = (`U data | `V data); x = `False} when predg o -> 1
| {v = (`A|`B) ; x = `False}
| {v = (`U _ | `V _); x = `False}
| {v = _ ; x = `True}
-> 2
exception Found of string * int
exception Error of string
let lucexn e =
try
try raise e with Error msg -> msg
with Found (s,r) -> s^Int.to_string r
let () =
test "lucexn1" lucexn (Error "coucou") "coucou" ;
test "lucexn2" lucexn (Found ("int: ",0)) "int: 0" ;
()
let pr5758 x str =
match (x, str) with
| (1. , "A") -> "Matched A"
| (1.0, "B") -> "Matched B"
| (1. , "C") -> "Matched C"
| result ->
match result with
| (1., "A") -> "Failed match A then later matched"
| _ -> "Failed twice"
;;
let () =
test "pr5758" (pr5758 1.) "A" "Matched A"
;;
|
05d23493fe42747884dbf2265d023a4596934037e8801d8fd8bdd075d3788843 | chaoxu/fancy-walks | 95.hs |
import Data.List hiding (union)
import Data.Ord
minus (x:xs) (y:ys) = case (compare x y) of
LT -> x : minus xs (y:ys)
EQ -> minus xs ys
GT -> minus (x:xs) ys
minus xs _ = xs
union (x:xs) (y:ys) = case (compare x y) of
LT -> x : union xs (y:ys)
EQ -> x : union xs ys
GT -> y : union (x:xs) ys
union xs ys = xs ++ ys
primes = 2 : primes'
where
primes' = 3 : ([5,7..] `minus` join [[p*p,p*p+2*p..] | p <- primes'])
join ((x:xs):t) = x : union xs (join (pairs t))
pairs ((x:xs):ys:t) = (x : union xs ys) : pairs t
factors n = go n primes
where
go n (x:xs)
| x * x > n = [n]
| n `mod` x == 0 = x : go (n `div` x) (x:xs)
| otherwise = go n xs
divsum n = (+(-n)) . product . map (sum . scanl (*) 1 ) . group . factors $ n
limit = 1000000
chain n = go [n]
where
go lst@ ~(x:_)
| x' < n || x' > limit = []
| x' == n = lst
| x' `elem` lst = []
| otherwise = go (x':lst)
where
x' = divsum x
problem_95 = minimum $ maximumBy (comparing length) [lst | x <- [2..limit], let lst = chain x, not (null lst)]
main = print problem_95
| null | https://raw.githubusercontent.com/chaoxu/fancy-walks/952fcc345883181144131f839aa61e36f488998d/projecteuler.net/95.hs | haskell |
import Data.List hiding (union)
import Data.Ord
minus (x:xs) (y:ys) = case (compare x y) of
LT -> x : minus xs (y:ys)
EQ -> minus xs ys
GT -> minus (x:xs) ys
minus xs _ = xs
union (x:xs) (y:ys) = case (compare x y) of
LT -> x : union xs (y:ys)
EQ -> x : union xs ys
GT -> y : union (x:xs) ys
union xs ys = xs ++ ys
primes = 2 : primes'
where
primes' = 3 : ([5,7..] `minus` join [[p*p,p*p+2*p..] | p <- primes'])
join ((x:xs):t) = x : union xs (join (pairs t))
pairs ((x:xs):ys:t) = (x : union xs ys) : pairs t
factors n = go n primes
where
go n (x:xs)
| x * x > n = [n]
| n `mod` x == 0 = x : go (n `div` x) (x:xs)
| otherwise = go n xs
divsum n = (+(-n)) . product . map (sum . scanl (*) 1 ) . group . factors $ n
limit = 1000000
chain n = go [n]
where
go lst@ ~(x:_)
| x' < n || x' > limit = []
| x' == n = lst
| x' `elem` lst = []
| otherwise = go (x':lst)
where
x' = divsum x
problem_95 = minimum $ maximumBy (comparing length) [lst | x <- [2..limit], let lst = chain x, not (null lst)]
main = print problem_95
| |
a967a57e304383d8cad6f3f76f3a8b165db71760d7765e5dec09aff599ab636f | nubank/midje-nrepl | inhibit_tests.clj | (ns midje-nrepl.middleware.inhibit-tests
(:require [clojure.test :refer [*load-tests*]]
[nrepl.transport :as transport :refer [Transport]]))
(defn- done? [{:keys [status]}]
(contains? status :done))
(defn- transport-proxy [transport load-tests?]
(reify Transport
(recv [_]
(transport/recv transport))
(recv [_ timeout]
(transport/recv transport timeout))
(send [_ message]
(when (done? message)
(alter-var-root #'*load-tests* (constantly load-tests?)))
(transport/send transport message))))
(defn- forward-with-transport-proxy [{:keys [transport load-tests?] :as message} base-handler]
(let [current-value-of-*load-tests* *load-tests*]
(alter-var-root #'*load-tests* (constantly load-tests?))
(base-handler (assoc message :transport (transport-proxy transport current-value-of-*load-tests*)))))
(defn- evaluate-without-running-tests [{:keys [load-tests?] :as message} base-handler]
(update message :session
swap! assoc #'*load-tests* load-tests?)
(base-handler message))
(defn handle-inhibit-tests [{:keys [op] :as message} base-handler]
(let [message (update message :load-tests? (fnil #(Boolean/parseBoolean %) "false"))]
(case op
"eval" (evaluate-without-running-tests message base-handler)
("refresh" "refresh-all" "warm-ast-cache") (forward-with-transport-proxy message base-handler))))
| null | https://raw.githubusercontent.com/nubank/midje-nrepl/b4d505f346114db88ad5b5c6b3c8f0af4e0136fc/src/midje_nrepl/middleware/inhibit_tests.clj | clojure | (ns midje-nrepl.middleware.inhibit-tests
(:require [clojure.test :refer [*load-tests*]]
[nrepl.transport :as transport :refer [Transport]]))
(defn- done? [{:keys [status]}]
(contains? status :done))
(defn- transport-proxy [transport load-tests?]
(reify Transport
(recv [_]
(transport/recv transport))
(recv [_ timeout]
(transport/recv transport timeout))
(send [_ message]
(when (done? message)
(alter-var-root #'*load-tests* (constantly load-tests?)))
(transport/send transport message))))
(defn- forward-with-transport-proxy [{:keys [transport load-tests?] :as message} base-handler]
(let [current-value-of-*load-tests* *load-tests*]
(alter-var-root #'*load-tests* (constantly load-tests?))
(base-handler (assoc message :transport (transport-proxy transport current-value-of-*load-tests*)))))
(defn- evaluate-without-running-tests [{:keys [load-tests?] :as message} base-handler]
(update message :session
swap! assoc #'*load-tests* load-tests?)
(base-handler message))
(defn handle-inhibit-tests [{:keys [op] :as message} base-handler]
(let [message (update message :load-tests? (fnil #(Boolean/parseBoolean %) "false"))]
(case op
"eval" (evaluate-without-running-tests message base-handler)
("refresh" "refresh-all" "warm-ast-cache") (forward-with-transport-proxy message base-handler))))
| |
ac2a30d58e8b9c45dc2994f7b3cb16a8ce06453f4d4b25c8a925370556d647a0 | mondemand/mondemand-server | mondemand_server_config.erl | -module (mondemand_server_config).
-export ([ all/0,
listener_config/1,
num_dispatchers/1,
dispatch_config/1,
backends_to_start/1,
applications_to_start/1,
web_config/1,
backend_config/2,
mappings_config/1
]).
-include_lib ("mondemand/include/mondemand.hrl").
-include ("mondemand_server_internal.hrl").
all () ->
% the application needs to be loaded in order to see the variables for
% some reason
application:load (mondemand_server),
application:get_all_env (mondemand_server).
listener_config (Config) ->
find_in_config (listener, Config).
num_dispatchers (Config) ->
case find_in_config (num_dispatchers, Config) of
undefined -> erlang:system_info(schedulers);
C when is_integer (C) -> C
end.
dispatch_config (Config) ->
dispatch (Config).
applications_to_start (Config) ->
lists:append ([ Mod:required_apps()
|| Mod
<- backends_to_start (Config) ]).
backends_to_start (Config) ->
case dispatch (Config) of
undefined -> exit (no_dispatch_list);
#mds_dispatch { annotation_msg = A,
log_msg = L,
perf_msg = P,
stats_msg = S,
trace_msg = T } ->
% determine the unique list of modules to start from the dispatch list
lists:usort(lists:flatten([A,L,P,S,T]))
end.
web_config (Config) ->
case find_in_config (web, Config) of
undefined -> undefined;
C ->
InitialWebDispatch = find_in_config (dispatch , C, []),
BackendWebDispatch = backend_web_configs (Config),
lists:keystore ( dispatch, 1, C,
{dispatch,
calculate_web_dispatch (InitialWebDispatch ++ BackendWebDispatch)
}
)
end.
backend_web_configs (Config) ->
lists:foldl (fun (Mod, Acc) ->
case find_in_config (Mod, Config) of
undefined -> Acc;
E ->
case find_in_config (dispatch, E) of
undefined -> Acc;
D -> D ++ Acc
end
end
end,
[],
backends_to_start (Config)).
backend_config (BackendModule, Config) ->
find_in_config (BackendModule, Config).
mappings_config (Config) ->
case find_in_config (mappings, Config) of
undefined -> undefined;
C ->
Directory = find_in_config (directory, C, "."),
ReloadSeconds = find_in_config (reload_seconds, C, 60),
{Directory, ReloadSeconds}
end.
%%--------------------------------------------------------------------
Internal functions
%%--------------------------------------------------------------------
find_in_config (Key, Proplist) ->
proplists:get_value (Key, Proplist).
find_in_config (Key, Proplist, Default) ->
proplists:get_value (Key, Proplist, Default).
dispatch (Config) ->
dispatch_to_record (find_in_config (dispatch, Config)).
%% functions to order a webmachine dispatch from
%% -loading-webmachine.html
path_spec_priority ('*') -> 3;
path_spec_priority (X) when is_atom (X) -> 2;
path_spec_priority (X) when is_list (X) -> 1.
dispatch_specificity ({ PathSpecA, _, _ }, { PathSpecB, _, _ }) ->
case erlang:length (PathSpecA) - erlang:length (PathSpecB) of
X when X > 0 ->
true;
X when X < 0 ->
false;
_ ->
PrioPathSpecA = [ path_spec_priority (X) || X <- PathSpecA ],
PrioPathSpecB = [ path_spec_priority (X) || X <- PathSpecB ],
case PrioPathSpecA =< PrioPathSpecB of
false ->
false;
true ->
FullPathSpecA = [ { path_spec_priority (X), X } || X <- PathSpecA ],
FullPathSpecB = [ { path_spec_priority (X), X } || X <- PathSpecB ],
FullPathSpecA =< FullPathSpecB
end
end.
calculate_web_dispatch (InitialDispatch) ->
lists:sort (fun dispatch_specificity/2, InitialDispatch).
% In the application environment variable the dispatch list is of the form
%
% { dispatch,
% [
% { "*", [ mondemand_backend_all_journaller ] }, % add to all
% { "MonDemand::StatsMsg", [ mondemand_backend_stats_file,
% mondemand_backend_stats_aggregator,
% mondemand_backend_lwes ] },
{ " MonDemand::LogMsg " , [ mondemand_backend_log_file ,
% mondemand_backend_lwes ] },
% { "MonDemand::TraceMsg", [ mondemand_backend_trace_file ] },
{ " MonDemand::PerfMsg " , [ mondemand_backend_lwes ,
% mondemand_backend_performance_collate] }
% ]
% }
%
% This will be turned into a flattened structure, and all registered names
% will be added to lists. This should allow for the quickest dispatch.
%
% The final structure for the above will be something like
%
# md_dispatch { annotation = [ mondemand_backend_all_journaller ] ,
% log = [ mondemand_backend_log_file,
% mondemand_backend_lwes,
% mondemand_backend_all_journaller ],
% perf = [ mondemand_backend_lwes,
% mondemand_backend_all_journaller ],
% stats = [ mondemand_backend_stats_file,
% mondemand_backend_stats_aggregator,
% mondemand_backend_lwes,
% mondemand_backend_all_journaller ],
% trace = [ mondemand_backend_trace_file,
% mondemand_backend_all_journaller ]
% }
%
% Dispatching then becomes a lookup in this record followed by
% invoking the process function for each.
dispatch_to_record (undefined) -> undefined;
dispatch_to_record (Dispatch) when is_list (Dispatch) ->
case find_all_for_type (<<"*">>, Dispatch) of
{ok, Wildcard} ->
Annotation = find_all_for_type(?MD_ANNOTATION_EVENT, Dispatch),
Log = find_all_for_type (?MD_LOG_EVENT, Dispatch),
Perf = find_all_for_type (?MD_PERF_EVENT, Dispatch),
Stats = find_all_for_type (?MD_STATS_EVENT, Dispatch),
Trace = find_all_for_type (?MD_TRACE_EVENT, Dispatch),
case {Annotation,Log,Perf,Stats,Trace} of
{{ok,A},{ok,L},{ok,P},{ok,S},{ok,T}} ->
#mds_dispatch {
annotation_msg = A ++ Wildcard,
log_msg = L ++ Wildcard,
perf_msg = P ++ Wildcard,
stats_msg = S ++ Wildcard,
trace_msg = T ++ Wildcard
};
O ->
{error, {malformed_dispatch, O}}
end;
{error, E} ->
{error, {malformed_wildcard, E}}
end.
Given a Dispatch list of the form [ { EventType , [ handlers ] } ] , this will
allow for the EventType to be either a binary , list or atom , and enforce
% that it's in the list of valid mondemand events, or is the wildcard "*"
find_all_for_type (T, Dispatch) when is_binary (T) ->
case lists:filter (
fun (undefined) -> false;
(_) -> true
end,
[ find_dispatch_entry (erlang:binary_to_atom(T,latin1), Dispatch),
find_dispatch_entry (T, Dispatch),
find_dispatch_entry (binary_to_list(T), Dispatch) ]
) of
[One] -> {ok, One};
[] -> {ok, []};
_ -> {error, multiple_entries_for_type}
end.
find_dispatch_entry (K, Dispatch) ->
case lists:keyfind (K,1,Dispatch) of
false -> undefined;
{_,L} -> L
end.
%%--------------------------------------------------------------------
%%% Test functions
%%--------------------------------------------------------------------
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
dispatch_config_test_ () ->
[
?_assertEqual (
#mds_dispatch { annotation_msg = [j],
log_msg = [l, sl, j],
trace_msg = [tf, j],
perf_msg = [l, c, j],
stats_msg = [l, sa, sl, j] },
dispatch_to_record ([{"*",[j]},
{"MonDemand::StatsMsg", [l, sa, sl]},
{"MonDemand::LogMsg", [l, sl]},
{"MonDemand::TraceMsg",[tf]},
{"MonDemand::PerfMsg", [l, c]}
])
),
?_assertEqual (
{error, {malformed_wildcard, multiple_entries_for_type}},
dispatch_to_record([{"*",[a]},{'*',[b]}])),
?_assertEqual (
{error,{malformed_dispatch,{{ok,[]},
{ok,[]},
{ok,[]},
{error,multiple_entries_for_type},
{ok,[]}}}},
dispatch_to_record([{"*",[a]},
{'MonDemand::StatsMsg',[b]},
{"MonDemand::StatsMsg",[c]}]))
].
-endif.
| null | https://raw.githubusercontent.com/mondemand/mondemand-server/88e57ef8ece8a0069a747620f4585104cb560840/src/mondemand_server_config.erl | erlang | the application needs to be loaded in order to see the variables for
some reason
determine the unique list of modules to start from the dispatch list
--------------------------------------------------------------------
--------------------------------------------------------------------
functions to order a webmachine dispatch from
-loading-webmachine.html
In the application environment variable the dispatch list is of the form
{ dispatch,
[
{ "*", [ mondemand_backend_all_journaller ] }, % add to all
{ "MonDemand::StatsMsg", [ mondemand_backend_stats_file,
mondemand_backend_stats_aggregator,
mondemand_backend_lwes ] },
mondemand_backend_lwes ] },
{ "MonDemand::TraceMsg", [ mondemand_backend_trace_file ] },
mondemand_backend_performance_collate] }
]
}
This will be turned into a flattened structure, and all registered names
will be added to lists. This should allow for the quickest dispatch.
The final structure for the above will be something like
log = [ mondemand_backend_log_file,
mondemand_backend_lwes,
mondemand_backend_all_journaller ],
perf = [ mondemand_backend_lwes,
mondemand_backend_all_journaller ],
stats = [ mondemand_backend_stats_file,
mondemand_backend_stats_aggregator,
mondemand_backend_lwes,
mondemand_backend_all_journaller ],
trace = [ mondemand_backend_trace_file,
mondemand_backend_all_journaller ]
}
Dispatching then becomes a lookup in this record followed by
invoking the process function for each.
that it's in the list of valid mondemand events, or is the wildcard "*"
--------------------------------------------------------------------
Test functions
-------------------------------------------------------------------- | -module (mondemand_server_config).
-export ([ all/0,
listener_config/1,
num_dispatchers/1,
dispatch_config/1,
backends_to_start/1,
applications_to_start/1,
web_config/1,
backend_config/2,
mappings_config/1
]).
-include_lib ("mondemand/include/mondemand.hrl").
-include ("mondemand_server_internal.hrl").
all () ->
application:load (mondemand_server),
application:get_all_env (mondemand_server).
listener_config (Config) ->
find_in_config (listener, Config).
num_dispatchers (Config) ->
case find_in_config (num_dispatchers, Config) of
undefined -> erlang:system_info(schedulers);
C when is_integer (C) -> C
end.
dispatch_config (Config) ->
dispatch (Config).
applications_to_start (Config) ->
lists:append ([ Mod:required_apps()
|| Mod
<- backends_to_start (Config) ]).
backends_to_start (Config) ->
case dispatch (Config) of
undefined -> exit (no_dispatch_list);
#mds_dispatch { annotation_msg = A,
log_msg = L,
perf_msg = P,
stats_msg = S,
trace_msg = T } ->
lists:usort(lists:flatten([A,L,P,S,T]))
end.
web_config (Config) ->
case find_in_config (web, Config) of
undefined -> undefined;
C ->
InitialWebDispatch = find_in_config (dispatch , C, []),
BackendWebDispatch = backend_web_configs (Config),
lists:keystore ( dispatch, 1, C,
{dispatch,
calculate_web_dispatch (InitialWebDispatch ++ BackendWebDispatch)
}
)
end.
backend_web_configs (Config) ->
lists:foldl (fun (Mod, Acc) ->
case find_in_config (Mod, Config) of
undefined -> Acc;
E ->
case find_in_config (dispatch, E) of
undefined -> Acc;
D -> D ++ Acc
end
end
end,
[],
backends_to_start (Config)).
backend_config (BackendModule, Config) ->
find_in_config (BackendModule, Config).
mappings_config (Config) ->
case find_in_config (mappings, Config) of
undefined -> undefined;
C ->
Directory = find_in_config (directory, C, "."),
ReloadSeconds = find_in_config (reload_seconds, C, 60),
{Directory, ReloadSeconds}
end.
Internal functions
find_in_config (Key, Proplist) ->
proplists:get_value (Key, Proplist).
find_in_config (Key, Proplist, Default) ->
proplists:get_value (Key, Proplist, Default).
dispatch (Config) ->
dispatch_to_record (find_in_config (dispatch, Config)).
path_spec_priority ('*') -> 3;
path_spec_priority (X) when is_atom (X) -> 2;
path_spec_priority (X) when is_list (X) -> 1.
dispatch_specificity ({ PathSpecA, _, _ }, { PathSpecB, _, _ }) ->
case erlang:length (PathSpecA) - erlang:length (PathSpecB) of
X when X > 0 ->
true;
X when X < 0 ->
false;
_ ->
PrioPathSpecA = [ path_spec_priority (X) || X <- PathSpecA ],
PrioPathSpecB = [ path_spec_priority (X) || X <- PathSpecB ],
case PrioPathSpecA =< PrioPathSpecB of
false ->
false;
true ->
FullPathSpecA = [ { path_spec_priority (X), X } || X <- PathSpecA ],
FullPathSpecB = [ { path_spec_priority (X), X } || X <- PathSpecB ],
FullPathSpecA =< FullPathSpecB
end
end.
calculate_web_dispatch (InitialDispatch) ->
lists:sort (fun dispatch_specificity/2, InitialDispatch).
{ " MonDemand::LogMsg " , [ mondemand_backend_log_file ,
{ " MonDemand::PerfMsg " , [ mondemand_backend_lwes ,
# md_dispatch { annotation = [ mondemand_backend_all_journaller ] ,
dispatch_to_record (undefined) -> undefined;
dispatch_to_record (Dispatch) when is_list (Dispatch) ->
case find_all_for_type (<<"*">>, Dispatch) of
{ok, Wildcard} ->
Annotation = find_all_for_type(?MD_ANNOTATION_EVENT, Dispatch),
Log = find_all_for_type (?MD_LOG_EVENT, Dispatch),
Perf = find_all_for_type (?MD_PERF_EVENT, Dispatch),
Stats = find_all_for_type (?MD_STATS_EVENT, Dispatch),
Trace = find_all_for_type (?MD_TRACE_EVENT, Dispatch),
case {Annotation,Log,Perf,Stats,Trace} of
{{ok,A},{ok,L},{ok,P},{ok,S},{ok,T}} ->
#mds_dispatch {
annotation_msg = A ++ Wildcard,
log_msg = L ++ Wildcard,
perf_msg = P ++ Wildcard,
stats_msg = S ++ Wildcard,
trace_msg = T ++ Wildcard
};
O ->
{error, {malformed_dispatch, O}}
end;
{error, E} ->
{error, {malformed_wildcard, E}}
end.
Given a Dispatch list of the form [ { EventType , [ handlers ] } ] , this will
allow for the EventType to be either a binary , list or atom , and enforce
find_all_for_type (T, Dispatch) when is_binary (T) ->
case lists:filter (
fun (undefined) -> false;
(_) -> true
end,
[ find_dispatch_entry (erlang:binary_to_atom(T,latin1), Dispatch),
find_dispatch_entry (T, Dispatch),
find_dispatch_entry (binary_to_list(T), Dispatch) ]
) of
[One] -> {ok, One};
[] -> {ok, []};
_ -> {error, multiple_entries_for_type}
end.
find_dispatch_entry (K, Dispatch) ->
case lists:keyfind (K,1,Dispatch) of
false -> undefined;
{_,L} -> L
end.
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
dispatch_config_test_ () ->
[
?_assertEqual (
#mds_dispatch { annotation_msg = [j],
log_msg = [l, sl, j],
trace_msg = [tf, j],
perf_msg = [l, c, j],
stats_msg = [l, sa, sl, j] },
dispatch_to_record ([{"*",[j]},
{"MonDemand::StatsMsg", [l, sa, sl]},
{"MonDemand::LogMsg", [l, sl]},
{"MonDemand::TraceMsg",[tf]},
{"MonDemand::PerfMsg", [l, c]}
])
),
?_assertEqual (
{error, {malformed_wildcard, multiple_entries_for_type}},
dispatch_to_record([{"*",[a]},{'*',[b]}])),
?_assertEqual (
{error,{malformed_dispatch,{{ok,[]},
{ok,[]},
{ok,[]},
{error,multiple_entries_for_type},
{ok,[]}}}},
dispatch_to_record([{"*",[a]},
{'MonDemand::StatsMsg',[b]},
{"MonDemand::StatsMsg",[c]}]))
].
-endif.
|
411c38d3b34643c74bf6e3cd2e7ff6caf0d4e1e6e5eaa07807660b5b30353095 | returntocorp/ocaml-tree-sitter-core | Src_file.mli | (*
Representation of an input file.
*)
type info = {
path to the file or to an informal descriptor
such as ' < stdin > '
such as '<stdin>' *)
path: string option; (* path to the file, if applicable *)
}
type t = private {
info: info;
Lines represent the contents of the input split after the line
terminator ' \n ' .
If the input ends with ' \n ' , then the last line will be " " i.e.
all the lines are LF - terminated except the last one .
The empty file contains one empty line .
the strings in the array results in the original data .
Lines represent the contents of the input split after the line
terminator '\n'.
If the input ends with '\n', then the last line will be "" i.e.
all the lines are LF-terminated except the last one.
The empty file contains one empty line.
Concatenating the strings in the array results in the original data.
*)
lines: string array;
}
val info : t -> info
(* Return the number of lines (O(1)) *)
val get_num_lines : t -> int
(*
Load an input file. It gets resolved into lines and columns.
*)
val load_file : string -> t
(*
Load source code from a string. The optional 'src_file' is for
pointing to the source file in error messages.
If 'src_file' is unspecified, the name of the file in error messages
can be set with 'src_name' without having to be a valid path.
The default for 'src_name' is "<source>".
*)
val load_string : ?src_name:string -> ?src_file:string -> string -> t
Return the substring corresponding to the specified region .
It may or may not coincide with the boundaries of a token .
Arguments : start , end _ where end _ is the position of the first character
* after * the selection region .
Return the substring corresponding to the specified region.
It may or may not coincide with the boundaries of a token.
Arguments: start, end_ where end_ is the position of the first character
*after* the selection region.
*)
val get_region : t -> Loc.pos -> Loc.pos -> string
Get the specified line from the line array .
The first line ( row ) is numbered 0 .
If the requested line is out of range , the empty string is returned .
Get the specified line from the line array.
The first line (row) is numbered 0.
If the requested line is out of range, the empty string is returned.
*)
val safe_get_row : t -> int -> string
| null | https://raw.githubusercontent.com/returntocorp/ocaml-tree-sitter-core/b2404ed27e053f23745f7b2335405134dcb087ab/src/run/lib/Src_file.mli | ocaml |
Representation of an input file.
path to the file, if applicable
Return the number of lines (O(1))
Load an input file. It gets resolved into lines and columns.
Load source code from a string. The optional 'src_file' is for
pointing to the source file in error messages.
If 'src_file' is unspecified, the name of the file in error messages
can be set with 'src_name' without having to be a valid path.
The default for 'src_name' is "<source>".
|
type info = {
path to the file or to an informal descriptor
such as ' < stdin > '
such as '<stdin>' *)
}
type t = private {
info: info;
Lines represent the contents of the input split after the line
terminator ' \n ' .
If the input ends with ' \n ' , then the last line will be " " i.e.
all the lines are LF - terminated except the last one .
The empty file contains one empty line .
the strings in the array results in the original data .
Lines represent the contents of the input split after the line
terminator '\n'.
If the input ends with '\n', then the last line will be "" i.e.
all the lines are LF-terminated except the last one.
The empty file contains one empty line.
Concatenating the strings in the array results in the original data.
*)
lines: string array;
}
val info : t -> info
val get_num_lines : t -> int
val load_file : string -> t
val load_string : ?src_name:string -> ?src_file:string -> string -> t
Return the substring corresponding to the specified region .
It may or may not coincide with the boundaries of a token .
Arguments : start , end _ where end _ is the position of the first character
* after * the selection region .
Return the substring corresponding to the specified region.
It may or may not coincide with the boundaries of a token.
Arguments: start, end_ where end_ is the position of the first character
*after* the selection region.
*)
val get_region : t -> Loc.pos -> Loc.pos -> string
Get the specified line from the line array .
The first line ( row ) is numbered 0 .
If the requested line is out of range , the empty string is returned .
Get the specified line from the line array.
The first line (row) is numbered 0.
If the requested line is out of range, the empty string is returned.
*)
val safe_get_row : t -> int -> string
|
6d735039b9f42a9432c9191ebfebac8c2cf5213142b885c99f7cd57c85870a22 | GaloisInc/daedalus | ErrorTrie.hs | module Daedalus.Interp.ErrorTrie where
import Data.Map(Map)
import qualified Data.Map as Map
import Data.ByteString.Short(fromShort)
import qualified RTS.ParseError as RTS
import qualified Daedalus.RTS.HasInputs as RTS
import Daedalus.RTS.Input
import Daedalus.RTS.JSON
import Daedalus.Interp.DebugAnnot
import Daedalus.Interp.Env
data ErrorTrie = ErrorTrie (Maybe ParseError) (Map CallSite ErrorTrie)
emptyErrorTrie :: ErrorTrie
emptyErrorTrie = ErrorTrie Nothing mempty
insertError :: [DebugAnnot] -> ParseError -> ErrorTrie -> ErrorTrie
insertError path err (ErrorTrie here there) =
case path of
[] -> ErrorTrie newHere there
where
newHere =
case here of
Nothing -> Just err
Just other -> Just (RTS.mergeError err other)
e : more ->
case e of
TextAnnot {} -> insertError more err (ErrorTrie here there)
ScopeAnnot {} -> insertError more err (ErrorTrie here there)
CallAnnot site ->
let remote = Map.findWithDefault emptyErrorTrie site there
newRemote = insertError more err remote
in ErrorTrie here (Map.insert site newRemote there)
parseErrorToTrie :: ParseError -> ErrorTrie
parseErrorToTrie = foldr insert emptyErrorTrie
. zipWith addNum [ 0 .. ]
. RTS.parseErrorToList
where
insert e t = insertError (reverse (RTS.peStack e)) e t
addNum n e = e { RTS.peNumber = n }
parseErrorTrieToJSON :: ParseError -> JSON
parseErrorTrieToJSON top =
jsObject
[ ("tree", jsTrie (parseErrorToTrie top))
, ("inputs", jsObject [ (fromShort k, jsText v)
| (k,v) <- Map.toList (RTS.getInputs top)
])
]
where
jsTrie (ErrorTrie here there) =
jsObject
[ ("errors", jsErrMb here)
, ("frames", jsMap there)
]
jsMap mp =
jsArray
[ jsObject
[ ("frame",toJSON k)
, ("nest", jsTrie v)
]
| (k,v) <- Map.toList mp ]
jsErrMb mb =
jsArray
case mb of
Nothing -> []
Just es -> [ jsErr e | e <- RTS.parseErrorToList es ]
jsErr pe =
jsObject
[ ("error", jsString (RTS.peMsg pe))
, ("input", toJSON (inputName (RTS.peInput pe)))
, ("offset", toJSON (inputOffset (RTS.peInput pe)))
, ("grammar", toJSON (RTS.peGrammar pe))
, ("trace", toJSON (RTS.peITrace pe))
, ("stack", toJSON (RTS.peStack pe))
, ("number", toJSON (RTS.peNumber pe))
]
| null | https://raw.githubusercontent.com/GaloisInc/daedalus/3f180d29441960e35386654ec79a2b205bddc157/src/Daedalus/Interp/ErrorTrie.hs | haskell | module Daedalus.Interp.ErrorTrie where
import Data.Map(Map)
import qualified Data.Map as Map
import Data.ByteString.Short(fromShort)
import qualified RTS.ParseError as RTS
import qualified Daedalus.RTS.HasInputs as RTS
import Daedalus.RTS.Input
import Daedalus.RTS.JSON
import Daedalus.Interp.DebugAnnot
import Daedalus.Interp.Env
data ErrorTrie = ErrorTrie (Maybe ParseError) (Map CallSite ErrorTrie)
emptyErrorTrie :: ErrorTrie
emptyErrorTrie = ErrorTrie Nothing mempty
insertError :: [DebugAnnot] -> ParseError -> ErrorTrie -> ErrorTrie
insertError path err (ErrorTrie here there) =
case path of
[] -> ErrorTrie newHere there
where
newHere =
case here of
Nothing -> Just err
Just other -> Just (RTS.mergeError err other)
e : more ->
case e of
TextAnnot {} -> insertError more err (ErrorTrie here there)
ScopeAnnot {} -> insertError more err (ErrorTrie here there)
CallAnnot site ->
let remote = Map.findWithDefault emptyErrorTrie site there
newRemote = insertError more err remote
in ErrorTrie here (Map.insert site newRemote there)
parseErrorToTrie :: ParseError -> ErrorTrie
parseErrorToTrie = foldr insert emptyErrorTrie
. zipWith addNum [ 0 .. ]
. RTS.parseErrorToList
where
insert e t = insertError (reverse (RTS.peStack e)) e t
addNum n e = e { RTS.peNumber = n }
parseErrorTrieToJSON :: ParseError -> JSON
parseErrorTrieToJSON top =
jsObject
[ ("tree", jsTrie (parseErrorToTrie top))
, ("inputs", jsObject [ (fromShort k, jsText v)
| (k,v) <- Map.toList (RTS.getInputs top)
])
]
where
jsTrie (ErrorTrie here there) =
jsObject
[ ("errors", jsErrMb here)
, ("frames", jsMap there)
]
jsMap mp =
jsArray
[ jsObject
[ ("frame",toJSON k)
, ("nest", jsTrie v)
]
| (k,v) <- Map.toList mp ]
jsErrMb mb =
jsArray
case mb of
Nothing -> []
Just es -> [ jsErr e | e <- RTS.parseErrorToList es ]
jsErr pe =
jsObject
[ ("error", jsString (RTS.peMsg pe))
, ("input", toJSON (inputName (RTS.peInput pe)))
, ("offset", toJSON (inputOffset (RTS.peInput pe)))
, ("grammar", toJSON (RTS.peGrammar pe))
, ("trace", toJSON (RTS.peITrace pe))
, ("stack", toJSON (RTS.peStack pe))
, ("number", toJSON (RTS.peNumber pe))
]
| |
5be7a40d63668bcd453220f42d571cbed2368ab88255d1e4b8d89c5eec7e33f1 | scymtym/clim.flamegraph | repaint-based-pane.lisp | (cl:in-package #:clim.flamegraph.view.timeline)
(defclass timeline-pane (clim:application-pane)
((%dx :accessor dx)
(%dy :accessor dy))
(:default-initargs
:display-time nil))
(defmethod clim:compose-space ((pane timeline-pane) &key width height)
(clim:make-space-requirement :width 3000 :height 3000))
(defmethod clim:move-sheet ((sheet timeline-pane) dx dy)
(setf (dx sheet) dx (dy sheet) dy)
(when (clim:sheet-viewable-p sheet)
(climi::dispatch-repaint sheet (clim:sheet-region sheet))))
(defmethod clim:redisplay-frame-pane ((frame clim:application-frame)
(pane timeline-pane)
&key force-p)
(declare (ignore force-p))
( clim : draw - circle * pane 50 50 40 )
)
(defmethod clim:handle-repaint ((pane timeline-pane) (event t))
(clim:with-drawing-options (pane :clipping-region (clim:make-rectangle* 0 0 100 100))
(clim:with-translation (pane 0 (dy pane))
(clim:draw-text* pane (format nil "~D ~D" (dx pane) (dy pane)) 50 50 :align-x :center :align-y :center)))
(clim:with-drawing-options (pane :clipping-region (clim:make-rectangle* 100 0 500 500))
(clim:with-drawing-options (pane :transformation (clim:make-translation-transformation
(+ 100 (dx pane)) (dy pane)))
(clim:draw-design pane (clim:sheet-region pane) :ink clim:+green+)
(loop :for i :from 0 :to 1000 :by 100
:do (clim:draw-circle* pane i 50 40 :ink clim:+red+)
(clim:draw-text* pane (format nil "~D" i) i 50 :align-x :center :align-y :center)))))
#+no (defmethod clim:handle-repaint ((pane timeline-pane) (event t))
(let ((climi::*inhibit-dispatch-repaint* t))
(let ((old (clim:sheet-transformation pane))
(old-region (clim:sheet-region pane)))
(setf (clim:sheet-transformation pane) clim:+identity-transformation+
( clim : sheet - region pane ) ( clim : make - rectangle * 0 0 100 100 )
)
(unwind-protect
(clim:with-drawing-options (pane :clipping-region (clim:make-rectangle* 0 0 100 100))
(clim:draw-design pane (clim:sheet-region pane) :ink clim:+blue+)
(clim:draw-text* pane "foo" 20 20))
(setf (clim:sheet-transformation pane) old
(clim:sheet-region pane) old-region
)))
(let ((old (clim:sheet-region pane)))
(setf (clim:sheet-region pane) (clim:region-difference
old
(clim:untransform-region
(clim:sheet-transformation pane)
(clim:make-rectangle* 0 0 100 100))))
(unwind-protect
clim : with - drawing - options ( pane : clipping - region )
(loop :for i :from 100 :to 1000 :by 100
:do (clim:draw-circle* pane i 50 40 :ink clim:+red+)))
(setf (clim:sheet-region pane) old)))))
| null | https://raw.githubusercontent.com/scymtym/clim.flamegraph/03b5e4f08b53af86a98afa975a8e7a29d0ddd3a7/src/view/timeline/repaint-based-pane.lisp | lisp | (cl:in-package #:clim.flamegraph.view.timeline)
(defclass timeline-pane (clim:application-pane)
((%dx :accessor dx)
(%dy :accessor dy))
(:default-initargs
:display-time nil))
(defmethod clim:compose-space ((pane timeline-pane) &key width height)
(clim:make-space-requirement :width 3000 :height 3000))
(defmethod clim:move-sheet ((sheet timeline-pane) dx dy)
(setf (dx sheet) dx (dy sheet) dy)
(when (clim:sheet-viewable-p sheet)
(climi::dispatch-repaint sheet (clim:sheet-region sheet))))
(defmethod clim:redisplay-frame-pane ((frame clim:application-frame)
(pane timeline-pane)
&key force-p)
(declare (ignore force-p))
( clim : draw - circle * pane 50 50 40 )
)
(defmethod clim:handle-repaint ((pane timeline-pane) (event t))
(clim:with-drawing-options (pane :clipping-region (clim:make-rectangle* 0 0 100 100))
(clim:with-translation (pane 0 (dy pane))
(clim:draw-text* pane (format nil "~D ~D" (dx pane) (dy pane)) 50 50 :align-x :center :align-y :center)))
(clim:with-drawing-options (pane :clipping-region (clim:make-rectangle* 100 0 500 500))
(clim:with-drawing-options (pane :transformation (clim:make-translation-transformation
(+ 100 (dx pane)) (dy pane)))
(clim:draw-design pane (clim:sheet-region pane) :ink clim:+green+)
(loop :for i :from 0 :to 1000 :by 100
:do (clim:draw-circle* pane i 50 40 :ink clim:+red+)
(clim:draw-text* pane (format nil "~D" i) i 50 :align-x :center :align-y :center)))))
#+no (defmethod clim:handle-repaint ((pane timeline-pane) (event t))
(let ((climi::*inhibit-dispatch-repaint* t))
(let ((old (clim:sheet-transformation pane))
(old-region (clim:sheet-region pane)))
(setf (clim:sheet-transformation pane) clim:+identity-transformation+
( clim : sheet - region pane ) ( clim : make - rectangle * 0 0 100 100 )
)
(unwind-protect
(clim:with-drawing-options (pane :clipping-region (clim:make-rectangle* 0 0 100 100))
(clim:draw-design pane (clim:sheet-region pane) :ink clim:+blue+)
(clim:draw-text* pane "foo" 20 20))
(setf (clim:sheet-transformation pane) old
(clim:sheet-region pane) old-region
)))
(let ((old (clim:sheet-region pane)))
(setf (clim:sheet-region pane) (clim:region-difference
old
(clim:untransform-region
(clim:sheet-transformation pane)
(clim:make-rectangle* 0 0 100 100))))
(unwind-protect
clim : with - drawing - options ( pane : clipping - region )
(loop :for i :from 100 :to 1000 :by 100
:do (clim:draw-circle* pane i 50 40 :ink clim:+red+)))
(setf (clim:sheet-region pane) old)))))
| |
5324295b66a96f6744710978a7b6aad19fc38c9301dc8e568a184782a076f6c1 | igorhvr/bedlam | macros.scm | (module iasylum/macros
(code->macroexpanded-code file->macroexpanded-code)
;;; FIXXXME Will fail badly in many cases. Handling if namespaces is irresponsible to say the least, but it is still useful in some cases.
(define code->macroexpanded-code
(lambda* (code (warn-me-that-this-is-really-unsafe: warn-me-that-this-is-really-unsafe #t) (kill-namespaces: kill-namespaces #f))
(when warn-me-that-this-is-really-unsafe
(log-warn "code->macroexpanded-code called. It will fail badly (and silently, conceivably) in many cases. Handling of namespaces is irresponsible to say the least, but it is still useful in limited scenarios. Consider yourself warned, and here be dragons!"))
(let ((previous-prefixing (vector-length-prefixing #f)))
(and-let* ((dirty-code (iasylum-write-string (caddr (sc-expand code))))
(bar-code (irregex-replace/all "#%" dirty-code ""))
(namespaced-code (irregex-replace/all '(seq "|") bar-code ""))
(result-with-invalid-empty-list
(if kill-namespaces
(irregex-replace/all '(seq "@" (+ (- any ":")) "::") namespaced-code "")
namespaced-code)
)
(step-1 (irregex-replace/all
'(seq "(lambda ()")
result-with-invalid-empty-list "e68e8d18-13be-4b0d-8cac-6a0045cd99df"))
(step-2 (irregex-replace/all '(seq "()")
step-1 "(list)"))
(step-3 (irregex-replace/all '(seq "e68e8d18-13be-4b0d-8cac-6a0045cd99df")
step-2 "(lambda ()"))
(step-4 (irregex-replace/all '(seq "(lambda #t")
step-3 "(lambda ()"))
(step-5 (irregex-replace/all '(seq whitespace "#(")
step-4 " '#("))
(final-result (with-input-from-string step-5 (lambda () (read)))))
(begin
(vector-length-prefixing previous-prefixing)
final-result)))))
(define (fix-empty-lists str)
(let* ((step-1 (irregex-replace/all '(seq "'()") str "(empty-list)"))
(step-2 (irregex-replace/all '(seq "(quote ())") str "(empty-list)")))
step-2))
(define (file->macroexpanded-code fname)
(let ((cd (with-input-from-string
(fix-empty-lists (file->string fname))
(lambda () (let loop ()
(let ((obj (read)))
(if (eof-object? obj) '()
(cons obj (loop)))))))))
(map code->macroexpanded-code cd)))
)
| null | https://raw.githubusercontent.com/igorhvr/bedlam/b62e0d047105bb0473bdb47c58b23f6ca0f79a4e/iasylum/macros.scm | scheme | FIXXXME Will fail badly in many cases. Handling if namespaces is irresponsible to say the least, but it is still useful in some cases. | (module iasylum/macros
(code->macroexpanded-code file->macroexpanded-code)
(define code->macroexpanded-code
(lambda* (code (warn-me-that-this-is-really-unsafe: warn-me-that-this-is-really-unsafe #t) (kill-namespaces: kill-namespaces #f))
(when warn-me-that-this-is-really-unsafe
(log-warn "code->macroexpanded-code called. It will fail badly (and silently, conceivably) in many cases. Handling of namespaces is irresponsible to say the least, but it is still useful in limited scenarios. Consider yourself warned, and here be dragons!"))
(let ((previous-prefixing (vector-length-prefixing #f)))
(and-let* ((dirty-code (iasylum-write-string (caddr (sc-expand code))))
(bar-code (irregex-replace/all "#%" dirty-code ""))
(namespaced-code (irregex-replace/all '(seq "|") bar-code ""))
(result-with-invalid-empty-list
(if kill-namespaces
(irregex-replace/all '(seq "@" (+ (- any ":")) "::") namespaced-code "")
namespaced-code)
)
(step-1 (irregex-replace/all
'(seq "(lambda ()")
result-with-invalid-empty-list "e68e8d18-13be-4b0d-8cac-6a0045cd99df"))
(step-2 (irregex-replace/all '(seq "()")
step-1 "(list)"))
(step-3 (irregex-replace/all '(seq "e68e8d18-13be-4b0d-8cac-6a0045cd99df")
step-2 "(lambda ()"))
(step-4 (irregex-replace/all '(seq "(lambda #t")
step-3 "(lambda ()"))
(step-5 (irregex-replace/all '(seq whitespace "#(")
step-4 " '#("))
(final-result (with-input-from-string step-5 (lambda () (read)))))
(begin
(vector-length-prefixing previous-prefixing)
final-result)))))
(define (fix-empty-lists str)
(let* ((step-1 (irregex-replace/all '(seq "'()") str "(empty-list)"))
(step-2 (irregex-replace/all '(seq "(quote ())") str "(empty-list)")))
step-2))
(define (file->macroexpanded-code fname)
(let ((cd (with-input-from-string
(fix-empty-lists (file->string fname))
(lambda () (let loop ()
(let ((obj (read)))
(if (eof-object? obj) '()
(cons obj (loop)))))))))
(map code->macroexpanded-code cd)))
)
|
feaf0652b406060d1bde62b6cb0ba5d3c4fb14cceea9aacf8592235bab7e758b | simonmichael/hledger | Account.hs | # LANGUAGE CPP #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
{-|
An 'Account' has a name, a list of subaccounts, an optional parent
account, and subaccounting-excluding and -including balances.
-}
module Hledger.Data.Account
( nullacct
, accountsFromPostings
, accountTree
, showAccounts
, showAccountsBoringFlag
, printAccounts
, lookupAccount
, parentAccounts
, accountsLevels
, mapAccounts
, anyAccounts
, filterAccounts
, sumAccounts
, clipAccounts
, clipAccountsAndAggregate
, pruneAccounts
, flattenAccounts
, accountSetDeclarationInfo
, sortAccountNamesByDeclaration
, sortAccountTreeByAmount
) where
import qualified Data.HashSet as HS
import qualified Data.HashMap.Strict as HM
import Data.List (find, foldl', sortOn)
import Data.List.Extra (groupOn)
import qualified Data.Map as M
import Data.Ord (Down(..))
import Safe (headMay)
import Text.Printf (printf)
import Hledger.Data.AccountName (expandAccountName, clipOrEllipsifyAccountName)
import Hledger.Data.Amount
import Hledger.Data.Types
-- deriving instance Show Account
instance Show Account where
show Account{..} = printf "Account %s (boring:%s, postings:%d, ebalance:%s, ibalance:%s)"
aname
(if aboring then "y" else "n" :: String)
anumpostings
(wbUnpack $ showMixedAmountB noColour aebalance)
(wbUnpack $ showMixedAmountB noColour aibalance)
instance Eq Account where
(==) a b = aname a == aname b -- quick equality test for speed
-- and
-- [ aname a == aname b
-- , aparent a = = aparent b -- avoid infinite recursion
-- , asubs a == asubs b
-- , aebalance a == aebalance b
-- , aibalance a == aibalance b
-- ]
nullacct = Account
{ aname = ""
, adeclarationinfo = Nothing
, asubs = []
, aparent = Nothing
, aboring = False
, anumpostings = 0
, aebalance = nullmixedamt
, aibalance = nullmixedamt
}
| Derive 1 . an account tree and 2 . each account 's total exclusive
-- and inclusive changes from a list of postings.
-- This is the core of the balance command (and of *ledger).
-- The accounts are returned as a list in flattened tree order,
-- and also reference each other as a tree.
( The first account is the root of the tree . )
accountsFromPostings :: [Posting] -> [Account]
accountsFromPostings ps =
let
summed = foldr (\p -> HM.insertWith addAndIncrement (paccount p) (1, pamount p)) mempty ps
where addAndIncrement (n, a) (m, b) = (n + m, a `maPlus` b)
acctstree = accountTree "root" $ HM.keys summed
acctswithebals = mapAccounts setnumpsebalance acctstree
where setnumpsebalance a = a{anumpostings=numps, aebalance=total}
where (numps, total) = HM.lookupDefault (0, nullmixedamt) (aname a) summed
acctswithibals = sumAccounts acctswithebals
acctswithparents = tieAccountParents acctswithibals
acctsflattened = flattenAccounts acctswithparents
in
acctsflattened
-- | Convert a list of account names to a tree of Account objects,
-- with just the account names filled in.
-- A single root account with the given name is added.
accountTree :: AccountName -> [AccountName] -> Account
accountTree rootname as = nullacct{aname=rootname, asubs=map (uncurry accountTree') $ M.assocs m }
where
T m = treeFromPaths $ map expandAccountName as :: FastTree AccountName
accountTree' a (T m') =
nullacct{
aname=a
,asubs=map (uncurry accountTree') $ M.assocs m'
}
| An efficient - to - build tree suggested by , probably
-- better than accountNameTreeFrom.
newtype FastTree a = T (M.Map a (FastTree a))
deriving (Show, Eq, Ord)
mergeTrees :: (Ord a) => FastTree a -> FastTree a -> FastTree a
mergeTrees (T m) (T m') = T (M.unionWith mergeTrees m m')
treeFromPath :: [a] -> FastTree a
treeFromPath [] = T M.empty
treeFromPath (x:xs) = T (M.singleton x (treeFromPath xs))
treeFromPaths :: (Ord a) => [[a]] -> FastTree a
treeFromPaths = foldl' mergeTrees (T M.empty) . map treeFromPath
-- | Tie the knot so all subaccounts' parents are set correctly.
tieAccountParents :: Account -> Account
tieAccountParents = tie Nothing
where
tie parent a@Account{..} = a'
where
a' = a{aparent=parent, asubs=map (tie (Just a')) asubs}
-- | Get this account's parent accounts, from the nearest up to the root.
parentAccounts :: Account -> [Account]
parentAccounts Account{aparent=Nothing} = []
parentAccounts Account{aparent=Just a} = a:parentAccounts a
-- | List the accounts at each level of the account tree.
accountsLevels :: Account -> [[Account]]
accountsLevels = takeWhile (not . null) . iterate (concatMap asubs) . (:[])
-- | Map a (non-tree-structure-modifying) function over this and sub accounts.
mapAccounts :: (Account -> Account) -> Account -> Account
mapAccounts f a = f a{asubs = map (mapAccounts f) $ asubs a}
-- | Is the predicate true on any of this account or its subaccounts ?
anyAccounts :: (Account -> Bool) -> Account -> Bool
anyAccounts p a
| p a = True
| otherwise = any (anyAccounts p) $ asubs a
-- | Add subaccount-inclusive balances to an account tree.
sumAccounts :: Account -> Account
sumAccounts a
| null $ asubs a = a{aibalance=aebalance a}
| otherwise = a{aibalance=ibal, asubs=subs}
where
subs = map sumAccounts $ asubs a
ibal = maSum $ aebalance a : map aibalance subs
-- | Remove all subaccounts below a certain depth.
clipAccounts :: Int -> Account -> Account
clipAccounts 0 a = a{asubs=[]}
clipAccounts d a = a{asubs=subs}
where
subs = map (clipAccounts (d-1)) $ asubs a
-- | Remove subaccounts below the specified depth, aggregating their balance at the depth limit
-- (accounts at the depth limit will have any sub-balances merged into their exclusive balance).
-- If the depth is Nothing, return the original accounts
clipAccountsAndAggregate :: Maybe Int -> [Account] -> [Account]
clipAccountsAndAggregate Nothing as = as
clipAccountsAndAggregate (Just d) as = combined
where
clipped = [a{aname=clipOrEllipsifyAccountName (Just d) $ aname a} | a <- as]
combined = [a{aebalance=maSum $ map aebalance same}
| same@(a:_) <- groupOn aname clipped]
test cases , assuming d=1 :
assets : cash 1 1
assets : checking 1 1
- >
as : [ assets : cash 1 1 , assets : checking 1 1 ]
clipped : [ assets 1 1 , assets 1 1 ]
combined : [ assets 2 2 ]
assets 0 2
assets : cash 1 1
assets : checking 1 1
- >
as : [ assets 0 2 , assets : cash 1 1 , assets : checking 1 1 ]
clipped : [ assets 0 2 , assets 1 1 , assets 1 1 ]
combined : [ assets 2 2 ]
assets 0 2
assets : bank 1 2
assets : bank : checking 1 1
- >
as : [ assets 0 2 , assets : bank 1 2 , assets : bank : checking 1 1 ]
clipped : [ assets 0 2 , assets 1 2 , assets 1 1 ]
combined : [ assets 2 2 ]
test cases, assuming d=1:
assets:cash 1 1
assets:checking 1 1
->
as: [assets:cash 1 1, assets:checking 1 1]
clipped: [assets 1 1, assets 1 1]
combined: [assets 2 2]
assets 0 2
assets:cash 1 1
assets:checking 1 1
->
as: [assets 0 2, assets:cash 1 1, assets:checking 1 1]
clipped: [assets 0 2, assets 1 1, assets 1 1]
combined: [assets 2 2]
assets 0 2
assets:bank 1 2
assets:bank:checking 1 1
->
as: [assets 0 2, assets:bank 1 2, assets:bank:checking 1 1]
clipped: [assets 0 2, assets 1 2, assets 1 1]
combined: [assets 2 2]
-}
-- | Remove all leaf accounts and subtrees matching a predicate.
pruneAccounts :: (Account -> Bool) -> Account -> Maybe Account
pruneAccounts p = headMay . prune
where
prune a
| null prunedsubs = if p a then [] else [a']
| otherwise = [a']
where
prunedsubs = concatMap prune $ asubs a
a' = a{asubs=prunedsubs}
-- | Flatten an account tree into a list, which is sometimes
-- convenient. Note since accounts link to their parents/subs, the
-- tree's structure remains intact and can still be used. It's a tree/list!
flattenAccounts :: Account -> [Account]
flattenAccounts a = squish a []
where squish a' as = a' : Prelude.foldr squish as (asubs a')
-- | Filter an account tree (to a list).
filterAccounts :: (Account -> Bool) -> Account -> [Account]
filterAccounts p a
| p a = a : concatMap (filterAccounts p) (asubs a)
| otherwise = concatMap (filterAccounts p) (asubs a)
-- | Sort each group of siblings in an account tree by inclusive amount,
so that the accounts with largest normal balances are listed first .
-- The provided normal balance sign determines whether normal balances
-- are negative or positive, affecting the sort order. Ie,
-- if balances are normally negative, then the most negative balances
sort first , and vice versa .
sortAccountTreeByAmount :: NormalSign -> Account -> Account
sortAccountTreeByAmount normalsign = mapAccounts $ \a -> a{asubs=sortSubs $ asubs a}
where
sortSubs = case normalsign of
NormallyPositive -> sortOn (\a -> (Down $ amt a, aname a))
NormallyNegative -> sortOn (\a -> (amt a, aname a))
amt = mixedAmountStripPrices . aibalance
| Add extra info for this account derived from the Journal 's
-- account directives, if any (comment, tags, declaration order..).
accountSetDeclarationInfo :: Journal -> Account -> Account
accountSetDeclarationInfo j a@Account{..} =
a{ adeclarationinfo=lookup aname $ jdeclaredaccounts j }
-- | Sort account names by the order in which they were declared in
-- the journal, at each level of the account tree (ie within each
-- group of siblings). Undeclared accounts are sorted last and
-- alphabetically.
This is hledger 's default sort for reports organised by account .
-- The account list is converted to a tree temporarily, adding any
-- missing parents; these can be kept (suitable for a tree-mode report)
-- or removed (suitable for a flat-mode report).
--
sortAccountNamesByDeclaration :: Journal -> Bool -> [AccountName] -> [AccountName]
sortAccountNamesByDeclaration j keepparents as =
(if keepparents then id else filter (`HS.member` HS.fromList as)) $ -- maybe discard missing parents that were added
map aname $ -- keep just the names
drop 1 $ -- drop the root node that was added
flattenAccounts $ -- convert to an account list
sortAccountTreeByDeclaration $ -- sort by declaration order (and name)
mapAccounts (accountSetDeclarationInfo j) $ -- add declaration order info
accountTree "root" -- convert to an account tree
as
-- | Sort each group of siblings in an account tree by declaration order, then account name.
So each group will contain first the declared accounts ,
-- in the same order as their account directives were parsed,
-- and then the undeclared accounts, sorted by account name.
sortAccountTreeByDeclaration :: Account -> Account
sortAccountTreeByDeclaration a
| null $ asubs a = a
| otherwise = a{asubs=
sortOn accountDeclarationOrderAndName $
map sortAccountTreeByDeclaration $ asubs a
}
accountDeclarationOrderAndName :: Account -> (Int, AccountName)
accountDeclarationOrderAndName a = (adeclarationorder', aname a)
where
adeclarationorder' = maybe maxBound adideclarationorder $ adeclarationinfo a
-- | Search an account list by name.
lookupAccount :: AccountName -> [Account] -> Maybe Account
lookupAccount a = find ((==a).aname)
-- debug helpers
printAccounts :: Account -> IO ()
printAccounts = putStrLn . showAccounts
showAccounts = unlines . map showAccountDebug . flattenAccounts
showAccountsBoringFlag = unlines . map (show . aboring) . flattenAccounts
showAccountDebug a = printf "%-25s %4s %4s %s"
(aname a)
(wbUnpack . showMixedAmountB noColour $ aebalance a)
(wbUnpack . showMixedAmountB noColour $ aibalance a)
(if aboring a then "b" else " " :: String)
| null | https://raw.githubusercontent.com/simonmichael/hledger/b46cb8a7f77df569373d0b2b12bba5e97eff76c7/hledger-lib/Hledger/Data/Account.hs | haskell | # LANGUAGE OverloadedStrings #
|
An 'Account' has a name, a list of subaccounts, an optional parent
account, and subaccounting-excluding and -including balances.
deriving instance Show Account
quick equality test for speed
and
[ aname a == aname b
, aparent a = = aparent b -- avoid infinite recursion
, asubs a == asubs b
, aebalance a == aebalance b
, aibalance a == aibalance b
]
and inclusive changes from a list of postings.
This is the core of the balance command (and of *ledger).
The accounts are returned as a list in flattened tree order,
and also reference each other as a tree.
| Convert a list of account names to a tree of Account objects,
with just the account names filled in.
A single root account with the given name is added.
better than accountNameTreeFrom.
| Tie the knot so all subaccounts' parents are set correctly.
| Get this account's parent accounts, from the nearest up to the root.
| List the accounts at each level of the account tree.
| Map a (non-tree-structure-modifying) function over this and sub accounts.
| Is the predicate true on any of this account or its subaccounts ?
| Add subaccount-inclusive balances to an account tree.
| Remove all subaccounts below a certain depth.
| Remove subaccounts below the specified depth, aggregating their balance at the depth limit
(accounts at the depth limit will have any sub-balances merged into their exclusive balance).
If the depth is Nothing, return the original accounts
| Remove all leaf accounts and subtrees matching a predicate.
| Flatten an account tree into a list, which is sometimes
convenient. Note since accounts link to their parents/subs, the
tree's structure remains intact and can still be used. It's a tree/list!
| Filter an account tree (to a list).
| Sort each group of siblings in an account tree by inclusive amount,
The provided normal balance sign determines whether normal balances
are negative or positive, affecting the sort order. Ie,
if balances are normally negative, then the most negative balances
account directives, if any (comment, tags, declaration order..).
| Sort account names by the order in which they were declared in
the journal, at each level of the account tree (ie within each
group of siblings). Undeclared accounts are sorted last and
alphabetically.
The account list is converted to a tree temporarily, adding any
missing parents; these can be kept (suitable for a tree-mode report)
or removed (suitable for a flat-mode report).
maybe discard missing parents that were added
keep just the names
drop the root node that was added
convert to an account list
sort by declaration order (and name)
add declaration order info
convert to an account tree
| Sort each group of siblings in an account tree by declaration order, then account name.
in the same order as their account directives were parsed,
and then the undeclared accounts, sorted by account name.
| Search an account list by name.
debug helpers | # LANGUAGE CPP #
# LANGUAGE RecordWildCards #
module Hledger.Data.Account
( nullacct
, accountsFromPostings
, accountTree
, showAccounts
, showAccountsBoringFlag
, printAccounts
, lookupAccount
, parentAccounts
, accountsLevels
, mapAccounts
, anyAccounts
, filterAccounts
, sumAccounts
, clipAccounts
, clipAccountsAndAggregate
, pruneAccounts
, flattenAccounts
, accountSetDeclarationInfo
, sortAccountNamesByDeclaration
, sortAccountTreeByAmount
) where
import qualified Data.HashSet as HS
import qualified Data.HashMap.Strict as HM
import Data.List (find, foldl', sortOn)
import Data.List.Extra (groupOn)
import qualified Data.Map as M
import Data.Ord (Down(..))
import Safe (headMay)
import Text.Printf (printf)
import Hledger.Data.AccountName (expandAccountName, clipOrEllipsifyAccountName)
import Hledger.Data.Amount
import Hledger.Data.Types
instance Show Account where
show Account{..} = printf "Account %s (boring:%s, postings:%d, ebalance:%s, ibalance:%s)"
aname
(if aboring then "y" else "n" :: String)
anumpostings
(wbUnpack $ showMixedAmountB noColour aebalance)
(wbUnpack $ showMixedAmountB noColour aibalance)
instance Eq Account where
nullacct = Account
{ aname = ""
, adeclarationinfo = Nothing
, asubs = []
, aparent = Nothing
, aboring = False
, anumpostings = 0
, aebalance = nullmixedamt
, aibalance = nullmixedamt
}
| Derive 1 . an account tree and 2 . each account 's total exclusive
( The first account is the root of the tree . )
accountsFromPostings :: [Posting] -> [Account]
accountsFromPostings ps =
let
summed = foldr (\p -> HM.insertWith addAndIncrement (paccount p) (1, pamount p)) mempty ps
where addAndIncrement (n, a) (m, b) = (n + m, a `maPlus` b)
acctstree = accountTree "root" $ HM.keys summed
acctswithebals = mapAccounts setnumpsebalance acctstree
where setnumpsebalance a = a{anumpostings=numps, aebalance=total}
where (numps, total) = HM.lookupDefault (0, nullmixedamt) (aname a) summed
acctswithibals = sumAccounts acctswithebals
acctswithparents = tieAccountParents acctswithibals
acctsflattened = flattenAccounts acctswithparents
in
acctsflattened
accountTree :: AccountName -> [AccountName] -> Account
accountTree rootname as = nullacct{aname=rootname, asubs=map (uncurry accountTree') $ M.assocs m }
where
T m = treeFromPaths $ map expandAccountName as :: FastTree AccountName
accountTree' a (T m') =
nullacct{
aname=a
,asubs=map (uncurry accountTree') $ M.assocs m'
}
| An efficient - to - build tree suggested by , probably
newtype FastTree a = T (M.Map a (FastTree a))
deriving (Show, Eq, Ord)
mergeTrees :: (Ord a) => FastTree a -> FastTree a -> FastTree a
mergeTrees (T m) (T m') = T (M.unionWith mergeTrees m m')
treeFromPath :: [a] -> FastTree a
treeFromPath [] = T M.empty
treeFromPath (x:xs) = T (M.singleton x (treeFromPath xs))
treeFromPaths :: (Ord a) => [[a]] -> FastTree a
treeFromPaths = foldl' mergeTrees (T M.empty) . map treeFromPath
tieAccountParents :: Account -> Account
tieAccountParents = tie Nothing
where
tie parent a@Account{..} = a'
where
a' = a{aparent=parent, asubs=map (tie (Just a')) asubs}
parentAccounts :: Account -> [Account]
parentAccounts Account{aparent=Nothing} = []
parentAccounts Account{aparent=Just a} = a:parentAccounts a
accountsLevels :: Account -> [[Account]]
accountsLevels = takeWhile (not . null) . iterate (concatMap asubs) . (:[])
mapAccounts :: (Account -> Account) -> Account -> Account
mapAccounts f a = f a{asubs = map (mapAccounts f) $ asubs a}
anyAccounts :: (Account -> Bool) -> Account -> Bool
anyAccounts p a
| p a = True
| otherwise = any (anyAccounts p) $ asubs a
sumAccounts :: Account -> Account
sumAccounts a
| null $ asubs a = a{aibalance=aebalance a}
| otherwise = a{aibalance=ibal, asubs=subs}
where
subs = map sumAccounts $ asubs a
ibal = maSum $ aebalance a : map aibalance subs
clipAccounts :: Int -> Account -> Account
clipAccounts 0 a = a{asubs=[]}
clipAccounts d a = a{asubs=subs}
where
subs = map (clipAccounts (d-1)) $ asubs a
clipAccountsAndAggregate :: Maybe Int -> [Account] -> [Account]
clipAccountsAndAggregate Nothing as = as
clipAccountsAndAggregate (Just d) as = combined
where
clipped = [a{aname=clipOrEllipsifyAccountName (Just d) $ aname a} | a <- as]
combined = [a{aebalance=maSum $ map aebalance same}
| same@(a:_) <- groupOn aname clipped]
test cases , assuming d=1 :
assets : cash 1 1
assets : checking 1 1
- >
as : [ assets : cash 1 1 , assets : checking 1 1 ]
clipped : [ assets 1 1 , assets 1 1 ]
combined : [ assets 2 2 ]
assets 0 2
assets : cash 1 1
assets : checking 1 1
- >
as : [ assets 0 2 , assets : cash 1 1 , assets : checking 1 1 ]
clipped : [ assets 0 2 , assets 1 1 , assets 1 1 ]
combined : [ assets 2 2 ]
assets 0 2
assets : bank 1 2
assets : bank : checking 1 1
- >
as : [ assets 0 2 , assets : bank 1 2 , assets : bank : checking 1 1 ]
clipped : [ assets 0 2 , assets 1 2 , assets 1 1 ]
combined : [ assets 2 2 ]
test cases, assuming d=1:
assets:cash 1 1
assets:checking 1 1
->
as: [assets:cash 1 1, assets:checking 1 1]
clipped: [assets 1 1, assets 1 1]
combined: [assets 2 2]
assets 0 2
assets:cash 1 1
assets:checking 1 1
->
as: [assets 0 2, assets:cash 1 1, assets:checking 1 1]
clipped: [assets 0 2, assets 1 1, assets 1 1]
combined: [assets 2 2]
assets 0 2
assets:bank 1 2
assets:bank:checking 1 1
->
as: [assets 0 2, assets:bank 1 2, assets:bank:checking 1 1]
clipped: [assets 0 2, assets 1 2, assets 1 1]
combined: [assets 2 2]
-}
pruneAccounts :: (Account -> Bool) -> Account -> Maybe Account
pruneAccounts p = headMay . prune
where
prune a
| null prunedsubs = if p a then [] else [a']
| otherwise = [a']
where
prunedsubs = concatMap prune $ asubs a
a' = a{asubs=prunedsubs}
flattenAccounts :: Account -> [Account]
flattenAccounts a = squish a []
where squish a' as = a' : Prelude.foldr squish as (asubs a')
filterAccounts :: (Account -> Bool) -> Account -> [Account]
filterAccounts p a
| p a = a : concatMap (filterAccounts p) (asubs a)
| otherwise = concatMap (filterAccounts p) (asubs a)
so that the accounts with largest normal balances are listed first .
sort first , and vice versa .
sortAccountTreeByAmount :: NormalSign -> Account -> Account
sortAccountTreeByAmount normalsign = mapAccounts $ \a -> a{asubs=sortSubs $ asubs a}
where
sortSubs = case normalsign of
NormallyPositive -> sortOn (\a -> (Down $ amt a, aname a))
NormallyNegative -> sortOn (\a -> (amt a, aname a))
amt = mixedAmountStripPrices . aibalance
| Add extra info for this account derived from the Journal 's
accountSetDeclarationInfo :: Journal -> Account -> Account
accountSetDeclarationInfo j a@Account{..} =
a{ adeclarationinfo=lookup aname $ jdeclaredaccounts j }
This is hledger 's default sort for reports organised by account .
sortAccountNamesByDeclaration :: Journal -> Bool -> [AccountName] -> [AccountName]
sortAccountNamesByDeclaration j keepparents as =
as
So each group will contain first the declared accounts ,
sortAccountTreeByDeclaration :: Account -> Account
sortAccountTreeByDeclaration a
| null $ asubs a = a
| otherwise = a{asubs=
sortOn accountDeclarationOrderAndName $
map sortAccountTreeByDeclaration $ asubs a
}
accountDeclarationOrderAndName :: Account -> (Int, AccountName)
accountDeclarationOrderAndName a = (adeclarationorder', aname a)
where
adeclarationorder' = maybe maxBound adideclarationorder $ adeclarationinfo a
lookupAccount :: AccountName -> [Account] -> Maybe Account
lookupAccount a = find ((==a).aname)
printAccounts :: Account -> IO ()
printAccounts = putStrLn . showAccounts
showAccounts = unlines . map showAccountDebug . flattenAccounts
showAccountsBoringFlag = unlines . map (show . aboring) . flattenAccounts
showAccountDebug a = printf "%-25s %4s %4s %s"
(aname a)
(wbUnpack . showMixedAmountB noColour $ aebalance a)
(wbUnpack . showMixedAmountB noColour $ aibalance a)
(if aboring a then "b" else " " :: String)
|
3b1fe1bd85ae994ed55bd5abd59016c77a9f35f9477a67993a61574f408e5db0 | MastodonC/kixi.hecuba | projects.clj | (ns kixi.hecuba.api.projects
(:require
[clojure.core.match :refer (match)]
[cheshire.core :as json]
[clojure.tools.logging :as log]
[kixi.hecuba.security :refer (has-admin? has-programme-manager? has-project-manager? has-user?) :as sec]
[kixi.hecuba.api :refer (decode-body authorized?) :as api]
[liberator.core :refer (defresource)]
[liberator.representation :refer (ring-response)]
[kixi.hecuba.storage.db :as db]
[kixi.hecuba.storage.uuid :refer (uuid-str)]
[kixi.hecuba.web-paths :as p]
[kixi.hecuba.data.users :as users]
[kixi.hecuba.data.projects :as projects]
[kixi.hecuba.data.programmes :as programmes]
[schema.core :as s]
[kixi.amon-schema :as schema]
[kixi.hecuba.data.entities.search :as search]))
(def ^:private programme-projects-index (p/index-path-string :programme-projects-index))
(def ^:private programme-projects-resource (p/resource-path-string :programme-projects-resource))
(def ^:private projects-index (p/index-path-string :projects-index))
(def ^:private project-resource (p/resource-path-string :project-resource))
(def ^:private project-properties-index (p/index-path-string :project-properties-index))
(defn- programme_id-from [ctx]
(get-in ctx [:request :route-params :programme_id]))
(defn- project_id-from [ctx]
(get-in ctx [:request :route-params :project_id]))
(defn filter-by-allowed-ids [allowed-programme-ids allowed-project-ids projects]
(filter #(or (some #{(:programme_id %)} allowed-programme-ids)
(some #{(:project_id %)} allowed-project-ids)) projects))
(defmulti enrich-by-role (fn [role resource] role))
(defmethod enrich-by-role :kixi.hecuba.security/user [_ resource]
(assoc resource :editable false))
(defmethod enrich-by-role :kixi.hecuba.security/programme-manager [_ resource]
(assoc resource :editable true))
(defmethod enrich-by-role :kixi.hecuba.security/project-manager [_ resource]
(assoc resource :editable true))
(defn filter-by-ids-and-roles
([allowed-programmes allowed-projects projects]
(let [allowed-programme-ids (into #{} (keys allowed-programmes))
allowed-project-ids (into #{} (keys allowed-projects))]
(->> projects
(map (fn [project]
(let [programme-id (:programme_id project)
project-id (:project_id project)]
(cond
(some #{programme-id} allowed-programme-ids) (enrich-by-role (get allowed-programmes programme-id) project)
(some #{project-id} allowed-project-ids) (enrich-by-role (get allowed-projects project-id) project)))))
(remove nil?))))
([allowed-programmes allowed-projects projects store]
(db/with-session [session (:hecuba-session store)]
(let [allowed-programme-ids (into #{} (keys allowed-programmes))
allowed-project-ids (into #{} (keys allowed-projects))
all-programmes (programmes/get-all session)]
(->> projects
(map (fn [project]
(let [programme-id (:programme_id project)
project-id (:project_id project)]
(cond
(some #{programme-id} allowed-programme-ids) (enrich-by-role (get allowed-programmes programme-id) project)
(some #{project-id} allowed-project-ids) (enrich-by-role (get allowed-projects project-id) project)
(-> (filter #(= programme-id (:programme_id %)) all-programmes) first :public_access) (assoc project :editable false)))))
(remove nil?))))))
(defn allowed?*
([programme-id project-id allowed-programmes allowed-projects role request-method store]
;; Specific project
(log/infof "allowed?* programme-id: %s project-id: %s allowed-programmes: %s allowed-projects: %s role: %s request-method: %s"
programme-id project-id allowed-programmes allowed-projects role request-method)
(db/with-session [session (:hecuba-session store)]
(let [programme (programmes/get-by-id session programme-id)
project (projects/get-by-id session project-id)]
(match [(has-admin? role)
(has-programme-manager? programme-id allowed-programmes)
(has-project-manager? project-id allowed-projects)
(has-user? programme-id allowed-programmes project-id allowed-projects)
request-method]
[true _ _ _ _] [true {::item (assoc project :editable true)}]
[_ true _ _ _] [true {::item (assoc project :editable true)}]
[_ _ true _ _] [true {::item (assoc project :editable true)}]
[_ _ _ true :get][true {::item (if (:public_access programme)
project
(filter-by-ids-and-roles allowed-programmes allowed-projects [project] store))}]
:else false))))
([programme-id allowed-programmes allowed-projects role request-method store]
;; All projects for a programme-id
(log/infof "allowed?* programme-id: %s allowed-programmes: %s allowed-projects: %s role: %s request-method: %s"
programme-id allowed-programmes allowed-projects role request-method)
(db/with-session [session (:hecuba-session store)]
(let [programme (programmes/get-by-id session programme-id)
all-projects (projects/get-all session programme-id)]
(match [(has-admin? role)
(has-programme-manager? programme-id allowed-programmes)
request-method]
[true _ _] [true {::items (mapv #(assoc % :editable true) all-projects)}]
[_ true _] [true {::items (mapv #(assoc % :editable true) all-projects)}]
[_ _ :get] [true {::items (if (:public_access programme)
all-projects
(filter-by-ids-and-roles allowed-programmes allowed-projects all-projects store))}]
:else false))))
([allowed-programmes allowed-projects role request-method store]
;; All projects
(log/infof "allowed?* allowed-programmes: %s allowed-projects: %s role: %s request-method: %s"
allowed-programmes allowed-projects role request-method)
(db/with-session [session (:hecuba-session store)]
(let [all-projects (projects/get-all session)]
(match [(has-admin? role)
request-method]
[true _] [true {::items (mapv #(assoc % :editable true) all-projects)}]
[_ :get] [true {::items (filter-by-ids-and-roles allowed-programmes allowed-projects all-projects store)}]
:else false)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Index
(defn index-allowed? [store]
(fn [ctx]
(let [{:keys [request-method session params]} (:request ctx)
{:keys [programmes projects role]} (sec/current-authentication session)
programme_id (programme_id-from ctx)]
(if programme_id
(allowed?* programme_id programmes projects role request-method store)
(allowed?* programmes projects role request-method store)))))
(defn index-malformed? [ctx]
(let [request (:request ctx)
{:keys [route-params request-method]} request]
(case request-method
:post (let [project (decode-body request)]
(if (s/check schema/BaseProject project)
true
[false {:project project}]))
false)))
(defn index-handle-ok [store ctx]
(db/with-session [session (:hecuba-session store)]
(let [request (:request ctx)
web-session (-> ctx :request :session)
programme_id (-> (:route-params request) :programme_id)
items (::items ctx)]
(->> items
(map #(-> %
(assoc :href (format programme-projects-resource (:programme_id %) (:project_id %))
:properties (format project-properties-index (:project_id %)))))
(api/render-items request)))))
(defn index-post! [store ctx]
(db/with-session [session (:hecuba-session store)]
(let [username (sec/session-username (-> ctx :request :session))
user_id (:id (users/get-by-username session username))
project (:project ctx)
project_id (uuid-str)]
(projects/insert session (assoc project :user_id user_id :project_id project_id))
{::project_id project_id})))
;; FIXME: Should return programmes/%s/projects/%s
(defn index-handle-created [ctx]
(let [request (:request ctx)
location (format project-resource (::project_id ctx))]
(ring-response {:headers {"Location" location}
:body (json/encode {:location location
:status "OK"
:version "4"})})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
Resource
(defn resource-allowed? [store ctx]
(let [{:keys [body request-method session params]} (:request ctx)
{:keys [programmes projects role]} (sec/current-authentication session)
programme_id (programme_id-from ctx)
project_id (project_id-from ctx)]
(if (and programme_id project_id)
(allowed?* programme_id project_id programmes projects role request-method store)
true)))
(defn resource-malformed? [ctx]
(let [request (:request ctx)
{:keys [route-params request-method]} request]
(case request-method
:post (let [project (decode-body request)
{:keys [name]} project]
(if name
[false {:project project}]
true))
:put (let [project (decode-body request)
{:keys [programme_id project_id]} route-params]
(if (and programme_id project_id)
[false {:project (assoc project :id project_id)}]
true))
false)))
(defn resource-put! [store ctx]
(db/with-session [session (:hecuba-session store)]
(let [{:keys [request project]} ctx
username (sec/session-username (-> ctx :request :session))]
(when project
(projects/update session (:id project) (assoc project :user_id username))))))
(defn resource-exists? [store ctx]
(db/with-session [session (:hecuba-session store)]
(when-let [item (projects/get-by-id session (project_id-from ctx))]
{::item item})))
(defn resource-handle-ok [ctx]
(api/render-item ctx
(as-> (::item ctx) item
(assoc item
:properties (format project-properties-index (:project_id item)))
(dissoc item :user_id))))
(defn resource-delete! [store ctx]
(db/with-session [session (:hecuba-session store)]
(let [existing-project (::item ctx)
{:keys [project_id]} existing-project
response (projects/delete project_id session)
{:keys [entities]} response]
(doseq [entity_id entities]
(search/delete-by-id entity_id (:search-session store)))
"Delete Accepted")))
(defresource index [store]
:allowed-methods #{:get :post}
:available-media-types ["application/json" "application/edn"]
:known-content-type? #{"application/edn" "application/json"}
:authorized? (authorized? store)
:allowed? (index-allowed? store)
:malformed? index-malformed?
:handle-ok (partial index-handle-ok store)
:post! (partial index-post! store)
:handle-created (partial index-handle-created))
;; Liberator resource for a single project (GET / PUT / DELETE).
;; NOTE(review): resource-malformed? carries a :post branch, but :post
;; is not in :allowed-methods here, so that branch appears unreachable
;; -- confirm before relying on it.
(defresource resource [store]
  :allowed-methods #{:get :put :delete}
  :available-media-types ["application/json" "application/edn"]
  :known-content-type? #{"application/edn" "application/json"}
  :authorized? (authorized? store)
  :allowed? (partial resource-allowed? store)
  :exists? (partial resource-exists? store)
  :malformed? resource-malformed?
  :put! (partial resource-put! store)
  :delete! (partial resource-delete! store)
  :handle-ok (partial resource-handle-ok))
| null | https://raw.githubusercontent.com/MastodonC/kixi.hecuba/467400bbe670e74420a2711f7d49e869ab2b3e21/src/clj/kixi/hecuba/api/projects.clj | clojure | Specific project
All projects for a programme-id
All projects
Index
FIXME: Should return programmes/%s/projects/%s
| (ns kixi.hecuba.api.projects
(:require
[clojure.core.match :refer (match)]
[cheshire.core :as json]
[clojure.tools.logging :as log]
[kixi.hecuba.security :refer (has-admin? has-programme-manager? has-project-manager? has-user?) :as sec]
[kixi.hecuba.api :refer (decode-body authorized?) :as api]
[liberator.core :refer (defresource)]
[liberator.representation :refer (ring-response)]
[kixi.hecuba.storage.db :as db]
[kixi.hecuba.storage.uuid :refer (uuid-str)]
[kixi.hecuba.web-paths :as p]
[kixi.hecuba.data.users :as users]
[kixi.hecuba.data.projects :as projects]
[kixi.hecuba.data.programmes :as programmes]
[schema.core :as s]
[kixi.amon-schema :as schema]
[kixi.hecuba.data.entities.search :as search]))
;; URI templates (format strings) used to build hrefs in responses.
(def ^:private programme-projects-index (p/index-path-string :programme-projects-index))
(def ^:private programme-projects-resource (p/resource-path-string :programme-projects-resource))
(def ^:private projects-index (p/index-path-string :projects-index))
(def ^:private project-resource (p/resource-path-string :project-resource))
(def ^:private project-properties-index (p/index-path-string :project-properties-index))
(defn- programme_id-from
  "Pull the :programme_id route parameter out of a liberator context."
  [ctx]
  (-> ctx :request :route-params :programme_id))
(defn- project_id-from
  "Pull the :project_id route parameter out of a liberator context."
  [ctx]
  (-> ctx :request :route-params :project_id))
(defn filter-by-allowed-ids
  "Keep only those projects whose :programme_id or :project_id appears
  in the corresponding allowed-id collection."
  [allowed-programme-ids allowed-project-ids projects]
  (letfn [(visible? [{:keys [programme_id project_id]}]
            (or (some #{programme_id} allowed-programme-ids)
                (some #{project_id} allowed-project-ids)))]
    (filter visible? projects)))
;; Annotate a resource with whether the caller may edit it, dispatching
;; on the caller's role keyword for that programme/project.
(defmulti enrich-by-role (fn [role resource] role))

;; Plain users get read-only access.
(defmethod enrich-by-role :kixi.hecuba.security/user [_ resource]
  (assoc resource :editable false))

;; Programme managers may edit.
(defmethod enrich-by-role :kixi.hecuba.security/programme-manager [_ resource]
  (assoc resource :editable true))

;; Project managers may edit.
(defmethod enrich-by-role :kixi.hecuba.security/project-manager [_ resource]
  (assoc resource :editable true))
(defn filter-by-ids-and-roles
  ;; Keep only the projects the caller may see, annotating each with
  ;; :editable via enrich-by-role.  allowed-programmes/allowed-projects
  ;; map ids -> role keywords.  Projects matching neither grant map
  ;; produce nil and are removed.
  ([allowed-programmes allowed-projects projects]
   (let [allowed-programme-ids (into #{} (keys allowed-programmes))
         allowed-project-ids (into #{} (keys allowed-projects))]
     (->> projects
          (map (fn [project]
                 (let [programme-id (:programme_id project)
                       project-id (:project_id project)]
                   (cond
                     (some #{programme-id} allowed-programme-ids) (enrich-by-role (get allowed-programmes programme-id) project)
                     (some #{project-id} allowed-project-ids) (enrich-by-role (get allowed-projects project-id) project)))))
          (remove nil?))))
  ;; As above, but with a store: a project whose programme has
  ;; :public_access is additionally visible (read-only) even without an
  ;; explicit grant.
  ([allowed-programmes allowed-projects projects store]
   (db/with-session [session (:hecuba-session store)]
     (let [allowed-programme-ids (into #{} (keys allowed-programmes))
           allowed-project-ids (into #{} (keys allowed-projects))
           all-programmes (programmes/get-all session)]
       (->> projects
            (map (fn [project]
                   (let [programme-id (:programme_id project)
                         project-id (:project_id project)]
                     (cond
                       (some #{programme-id} allowed-programme-ids) (enrich-by-role (get allowed-programmes programme-id) project)
                       (some #{project-id} allowed-project-ids) (enrich-by-role (get allowed-projects project-id) project)
                       ;; public programme: visible but never editable
                       (-> (filter #(= programme-id (:programme_id %)) all-programmes) first :public_access) (assoc project :editable false)))))
            (remove nil?))))))
(defn allowed?*
  ;; Authorisation core shared by the liberator decisions.  Each arity
  ;; logs its inputs, then uses core.match over the caller's role and
  ;; per-programme / per-project grants; on success it returns
  ;; [true {::item ...}] / [true {::items ...}] so the matched data
  ;; rides along in the liberator context.
  ;;
  ;; Single project: admins and (programme/project) managers always get
  ;; it, marked editable; plain users may GET it -- unfiltered when the
  ;; programme has :public_access, otherwise filtered/annotated.
  ([programme-id project-id allowed-programmes allowed-projects role request-method store]
   (log/infof "allowed?* programme-id: %s project-id: %s allowed-programmes: %s allowed-projects: %s role: %s request-method: %s"
              programme-id project-id allowed-programmes allowed-projects role request-method)
   (db/with-session [session (:hecuba-session store)]
     (let [programme (programmes/get-by-id session programme-id)
           project (projects/get-by-id session project-id)]
       (match [(has-admin? role)
               (has-programme-manager? programme-id allowed-programmes)
               (has-project-manager? project-id allowed-projects)
               (has-user? programme-id allowed-programmes project-id allowed-projects)
               request-method]
              [true _ _ _ _] [true {::item (assoc project :editable true)}]
              [_ true _ _ _] [true {::item (assoc project :editable true)}]
              [_ _ true _ _] [true {::item (assoc project :editable true)}]
              [_ _ _ true :get] [true {::item (if (:public_access programme)
                                                project
                                                (filter-by-ids-and-roles allowed-programmes allowed-projects [project] store))}]
              :else false))))
  ;; All projects of one programme: admins/programme managers get
  ;; everything editable; anyone else may GET either the whole list
  ;; (public programme) or the filtered/annotated subset.
  ([programme-id allowed-programmes allowed-projects role request-method store]
   (log/infof "allowed?* programme-id: %s allowed-programmes: %s allowed-projects: %s role: %s request-method: %s"
              programme-id allowed-programmes allowed-projects role request-method)
   (db/with-session [session (:hecuba-session store)]
     (let [programme (programmes/get-by-id session programme-id)
           all-projects (projects/get-all session programme-id)]
       (match [(has-admin? role)
               (has-programme-manager? programme-id allowed-programmes)
               request-method]
              [true _ _] [true {::items (mapv #(assoc % :editable true) all-projects)}]
              [_ true _] [true {::items (mapv #(assoc % :editable true) all-projects)}]
              [_ _ :get] [true {::items (if (:public_access programme)
                                          all-projects
                                          (filter-by-ids-and-roles allowed-programmes allowed-projects all-projects store))}]
              :else false))))
  ;; Every project in the system: admins see all (editable); anyone
  ;; else may GET the filtered/annotated subset.
  ([allowed-programmes allowed-projects role request-method store]
   (log/infof "allowed?* allowed-programmes: %s allowed-projects: %s role: %s request-method: %s"
              allowed-programmes allowed-projects role request-method)
   (db/with-session [session (:hecuba-session store)]
     (let [all-projects (projects/get-all session)]
       (match [(has-admin? role)
               request-method]
              [true _] [true {::items (mapv #(assoc % :editable true) all-projects)}]
              [_ :get] [true {::items (filter-by-ids-and-roles allowed-programmes allowed-projects all-projects store)}]
              :else false)))))
(defn index-allowed? [store]
  ;; Liberator :allowed? hook for the project index.  Dispatches to the
  ;; programme-scoped arity of allowed?* when the route carries a
  ;; programme id, otherwise to the system-wide arity.
  (fn [ctx]
    (let [{:keys [request-method session]} (:request ctx)
          {:keys [programmes projects role]} (sec/current-authentication session)
          programme-id (programme_id-from ctx)]
      (if programme-id
        (allowed?* programme-id programmes projects role request-method store)
        (allowed?* programmes projects role request-method store)))))
(defn index-malformed? [ctx]
  ;; POST bodies must conform to schema/BaseProject; all other methods
  ;; are well-formed by definition.  On success the decoded body is
  ;; stashed under :project for index-post!.
  (let [{:keys [request-method] :as request} (:request ctx)]
    (if (= :post request-method)
      (let [body (decode-body request)]
        (if (s/check schema/BaseProject body)
          true
          [false {:project body}]))
      false)))
(defn index-handle-ok
  "Liberator :handle-ok for the project index.  The allowed? decision
  has already placed the visible projects under ::items; each one is
  decorated with its resource href and the URI of its properties index
  before rendering.  (Drops the unused `web-session` and `programme_id`
  bindings the original carried.)"
  [store ctx]
  (db/with-session [session (:hecuba-session store)]   ; session held for the render; not referenced directly
    (let [request (:request ctx)
          items   (::items ctx)]
      (->> items
           (map #(assoc %
                        :href (format programme-projects-resource
                                      (:programme_id %) (:project_id %))
                        :properties (format project-properties-index
                                            (:project_id %))))
           (api/render-items request)))))
(defn index-post! [store ctx]
  ;; Create a new project: generate a fresh uuid for it, record the
  ;; creating user's id, and hand the new id back to liberator via
  ;; ::project_id for index-handle-created.
  (db/with-session [session (:hecuba-session store)]
    (let [username   (sec/session-username (-> ctx :request :session))
          creator-id (:id (users/get-by-username session username))
          new-id     (uuid-str)
          document   (assoc (:project ctx) :user_id creator-id :project_id new-id)]
      (projects/insert session document)
      {::project_id new-id})))
;; 201 Created handler for POSTs to the index: returns the new
;; project's location both as a Location header and in the JSON body.
;; NOTE(review): the location is built from project-resource rather
;; than the programme-scoped programme-projects-resource -- the file
;; carries a FIXME ("Should return programmes/%s/projects/%s") to that
;; effect; confirm which URI clients expect before changing.
(defn index-handle-created [ctx]
  (let [request (:request ctx)          ; bound but unused
        location (format project-resource (::project_id ctx))]
    (ring-response {:headers {"Location" location}
                    :body (json/encode {:location location
                                        :status "OK"
                                        :version "4"})})))
Resource
(defn resource-allowed? [store ctx]
  ;; Authorise access to a single project.  Pulls the caller's grants
  ;; and role from the session and defers to the three-id arity of
  ;; allowed?*, which also stashes ::item in the context.  When either
  ;; route id is missing the request is let through so a later decision
  ;; point can reject it instead.
  (let [{:keys [body request-method session params]} (:request ctx)   ; body/params unused
        {:keys [programmes projects role]} (sec/current-authentication session)
        programme_id (programme_id-from ctx)
        project_id (project_id-from ctx)]
    (if (and programme_id project_id)
      (allowed?* programme_id project_id programmes projects role request-method store)
      true)))
(defn resource-malformed? [ctx]
  ;; Validate the request body before it reaches put!.  :put requires
  ;; both route ids and stashes the decoded body (with :id set from the
  ;; route's project_id) under :project for resource-put!.
  ;; NOTE(review): the :post branch appears unreachable -- the single
  ;; project resource only allows #{:get :put :delete}; confirm before
  ;; removing it.
  (let [request (:request ctx)
        {:keys [route-params request-method]} request]
    (case request-method
      :post (let [project (decode-body request)
                  {:keys [name]} project]
              (if name
                [false {:project project}]
                true))
      :put (let [project (decode-body request)
                 {:keys [programme_id project_id]} route-params]
             (if (and programme_id project_id)
               [false {:project (assoc project :id project_id)}]
               true))
      false)))
;; Persist an updated project.  resource-malformed? has already decoded
;; the request body (with :id set from the route's project_id) and
;; stashed it under :project in the context; a missing :project makes
;; this a no-op.
;; NOTE(review): index-post! stores the looked-up user *id* under
;; :user_id, whereas this stores the *username* -- looks inconsistent;
;; confirm which value consumers of :user_id expect.
(defn resource-put! [store ctx]
  (db/with-session [session (:hecuba-session store)]
    (let [{:keys [request project]} ctx          ; `request` is bound but unused here
          username (sec/session-username (-> ctx :request :session))]
      (when project
        ;; projects/update takes the project id plus the full document.
        (projects/update session (:id project) (assoc project :user_id username))))))
(defn resource-exists? [store ctx]
  ;; Look the project up by the id in the route.  A truthy return makes
  ;; liberator treat the resource as existing and merges ::item into the
  ;; context for the later handlers.
  (db/with-session [session (:hecuba-session store)]
    (let [found (projects/get-by-id session (project_id-from ctx))]
      (when found
        {::item found}))))
(defn resource-handle-ok [ctx]
  ;; Render a single project: expose the URI of its properties index and
  ;; strip the internal :user_id field before serialisation.
  (let [project    (::item ctx)
        properties (format project-properties-index (:project_id project))]
    (api/render-item ctx
                     (-> project
                         (assoc :properties properties)
                         (dissoc :user_id)))))
;; Delete a project (loaded into ::item by resource-exists?) together
;; with its search-index entries.  projects/delete returns a map whose
;; :entities key lists the ids of entities removed along with the
;; project; each one is also purged from the search index so stale
;; documents do not linger.  The returned string becomes the response
;; body.
(defn resource-delete! [store ctx]
  (db/with-session [session (:hecuba-session store)]
    (let [existing-project (::item ctx)
          {:keys [project_id]} existing-project
          response (projects/delete project_id session)
          {:keys [entities]} response]
      (doseq [entity_id entities]
        (search/delete-by-id entity_id (:search-session store)))
      "Delete Accepted")))
;; Liberator resource for the project index (GET list / POST create).
;; Decision functions are defined earlier in this namespace:
;;  - authorized?          authenticates the caller
;;  - index-allowed?       authorises via allowed?* and stashes ::items
;;  - index-malformed?     schema-checks POST bodies, stashes :project
;;  - index-post!          inserts the project, stashes ::project_id
;;  - index-handle-created builds the Location header + JSON body
(defresource index [store]
  :allowed-methods #{:get :post}
  :available-media-types ["application/json" "application/edn"]
  :known-content-type? #{"application/edn" "application/json"}
  :authorized? (authorized? store)
  :allowed? (index-allowed? store)
  :malformed? index-malformed?
  :handle-ok (partial index-handle-ok store)
  :post! (partial index-post! store)
  :handle-created (partial index-handle-created))
;; Liberator resource for a single project (GET / PUT / DELETE).
;; NOTE(review): resource-malformed? carries a :post branch, but :post
;; is not in :allowed-methods here, so that branch appears unreachable
;; -- confirm before relying on it.
(defresource resource [store]
  :allowed-methods #{:get :put :delete}
  :available-media-types ["application/json" "application/edn"]
  :known-content-type? #{"application/edn" "application/json"}
  :authorized? (authorized? store)
  :allowed? (partial resource-allowed? store)
  :exists? (partial resource-exists? store)
  :malformed? resource-malformed?
  :put! (partial resource-put! store)
  :delete! (partial resource-delete! store)
  :handle-ok (partial resource-handle-ok))
|
13401a3c2fc3af786d0fcca227f7ce89d8c9340f78e4ad776842133f43275190 | ztmr/egtm | egtm_util.erl | %%
%% $Id: $
%%
%% Module: egtm_util -- description
Created : 07 - APR-2012 15:31
%% Author: tmr
%%
Copyright 2012 , IDEA Systems .
%%
%% This program is free software: you can redistribute
%% it and/or modify it under the terms of the GNU Affero
General Public License as published by the Free Software
Foundation , either version 3 of the License ,
%% or (at your option) any later version.
%%
%% This program is distributed in the hope that it will
%% be useful, but WITHOUT ANY WARRANTY; without even
%% the implied warranty of MERCHANTABILITY or FITNESS
%% FOR A PARTICULAR PURPOSE. See the GNU Affero General
%% Public License for more details.
%%
%% You should have received a copy of the GNU Affero
%% General Public License along with this program.
%% If not, see </>.
@doc EGTM Utilities .
-module (egtm_util).
-export ([
stringify/1,
gforeach/0, gforeach/1,
foreach/3, foreach/2,
set_term/1, transaction/1, lock/2, lock/3,
longstring_set/4, longstring_set/3,
longstring_get/2, longstring_kill/2
]).
-include_lib ("egtm.hrl").
%% @doc Convert any datatype to its string representation.
%% Lists are assumed to already be strings and returned unchanged; any
%% other term is pretty-printed with io_lib:format/2.  The result is
%% flattened so callers really do receive a flat string(), as the spec
%% promises -- io_lib:format/2 on its own returns a deep iolist.
-spec stringify (Data::any ()) -> Result::string ().
stringify (X) when is_list (X) -> X;
stringify (X) -> lists:flatten (io_lib:format ("~p", [X])).
%% @doc Sets terminal characteristics.  At the moment only `NOCENABLE'
%% is supported: the MUMPS command `u $p:nocenable' is handed to
%% egtm:xecute/1 (= `U $P:NOCENABLE' on the principal device).
-spec set_term (Flag::nocenable) -> ok.
set_term ('nocenable') -> egtm:xecute ("u $p:nocenable").
@doc implementation based on ` egtm : order ( ) '
%% and `egtm:get()'.
%% `Gvn' is a name of MUMPS global array.
%% `Subs' is a list of keys (subscripts) `[S1,...,SN]
so that S(N+1 ) will be used to ` egtm : order ( ) ' over .
` Fun ' argument is function of arity 2 ( Gvn , Subs )
or arity 3 ( Gvn , Subs , ResultAccumulator ) .
%% If the `Fun' argument is not specified, all the
%% records will be written on standard output via
%% `io:format()'.
%%
%% Example:
%% ```
%% egtm_util:foreach ("^Foo", [1,2,3], fun (G, S) ->
%% io:format ("~p ~p -> ~p", [G, S, egtm:get (G, S)]) end).
%% '''
%% ...is equivalent of MUMPS code similar to this:
%% ```
N X S X= " " F S X=$O(^Foo(1,2,3,X ) ) Q : X= " " D
%% . W $NA(^(X)),"=",$G(^(X)),!
%% '''
-spec foreach (Gvn::global_name (),
               Subs::subscripts ()) ->
  nomatch | {ok, AccumulatedData::list ()}.
%% Default traversal: print every visited node as "Gvn[Subs]=Value" and
%% accumulate the last subscript of each node.
foreach (Gvn, Subs) ->
  foreach (Gvn, Subs, fun (G,S) ->
    io:format ("~s~p=~p~n",
      [G, S, egtm:get (G, S)]), lists:last (S) end).
%% @equiv foreach (Gvn, Subs)
-spec foreach (Gvn::global_name (),
               Subs::subscripts (),
               Fun::function ()) ->
  nomatch | {ok, AccumulatedData::list ()}.
%% Arity-2 callbacks collect each Fun(G, S) result; the internal fold
%% prepends, so the accumulated list is reversed back into visit order.
foreach (Gvn, Subs, Fun) when is_function (Fun, 2) ->
  case foreach (Gvn, Subs, fun (G, S, R) -> [Fun (G, S)|R] end) of
    {ok, Res} -> {ok, lists:reverse (Res)};
    Whatever -> Whatever
  end;
%% Arity-3 callbacks fold with a caller-visible accumulator.  egtm:data/2
%% is checked first so traversal of a nonexistent node yields nomatch.
foreach (Gvn, Subs, Fun) when is_function (Fun, 3) ->
  case egtm:data (Gvn, Subs) > 0 of
    true -> foreach_internal (Gvn, Subs, length (Subs), [], Fun, []);
    false -> nomatch
  end.
%% Walk sibling subscripts at depth SubHLen+1 under the fixed prefix
%% SubH using $Order.  SubT is the current sibling key ("" initially);
%% egtm:order/2 returns the *full* subscript list of the next node, so
%% the element just past the prefix is the next sibling.  Fun/3 folds
%% each visited node into Res.
foreach_internal (Gvn, SubH, SubHLen, SubT, Fun, Res) ->
  NewSubs = egtm:order (Gvn, SubH++[SubT]),
  NewSubT = lists:nth (SubHLen+1, NewSubs),
  case NewSubT of
    [] ->
      %% An empty key means $Order wrapped around: traversal finished.
      {ok, Res};
    _ ->
      foreach_internal (Gvn, SubH, SubHLen, NewSubT,
                        Fun, Fun (Gvn, NewSubs, Res))
  end.
%% @equiv gforeach (PrintAllGvnsFunction)
%% Default: print every global as "Gvn=Value" and collect the names.
gforeach () ->
  gforeach (fun (G) ->
    io:format ("~s=~p~n", [G, egtm:get (G)]), G end).
@doc Global Variable Name .
%% Uses special case of MUMPS `$Order'
%% (GT.M-specific trick!) to iterate over
%% all global variables available.
%%
` Fun ' is function of arity one or two .
First argument passed to ` Fun ' is the
name of global variable and the second
%% (optional) is result accumulator variable.
%%
When used with the ` Fun ' of arity of one ,
%% the accumulator is automatically collected
%% as the list of all the results of each
%% `Fun' call.
%%
%% Examples:
%% ```
%% erl> egtm_util:gforeach ().
%% ^%EUnit=[]
%% ^ZFOO="1"
^ZTMR="1 "
%% {ok,["^%EUnit","^ZFOO","^ZTMR"]}
%%
%% erl> egtm_util:gforeach (fun (G) -> G end).
%% {ok,["^%EUnit","^ZFOO","^ZTMR"]}
%%
%% erl> egtm_util:gforeach (fun (G) -> egtm:data (G) end).
%% {ok,[10,1,11]}
%%
erl > egtm_util : gforeach ( fun ( G , Res ) - > { ok , R } = egtm_util : foreach ( G , [ ] ) , Res end ) .
%% ^%EUnit["perf"]=[]
%% ^ZTMR["1"]="2"
%% ^ZTMR["2"]="3"
%% ^ZTMR["3"]="4"
%% {ok,[]}
%%
%% erl> egtm_util:gforeach (fun (G, Res) ->
%% {ok, R} = egtm_util:foreach (G, [],
%% fun (G, S, A) -> [{S,egtm:get (G, S)}|A] end),
[ { G , R}|Res ] end ) .
%% {ok,[{"^ZTMR",[{["3"],"4"},{["2"],"3"},{["1"],"2"}]},
%% {"^ZFOO",[]},
%% {"^%EUnit",[{["perf"],[]}]}]}
%% '''
%% Arity-1 callback: collect each Fun(Gvn) result in visit order (the
%% fold below prepends, hence the final reverse).
gforeach (Fun) when is_function (Fun, 1) ->
  case gforeach (fun (G, R) -> [Fun (G)|R] end) of
    {ok, Res} -> {ok, lists:reverse (Res)};
    Whatever -> Whatever
  end;
%% Arity-2 callback: a plain fold with a caller-visible accumulator,
%% starting from the first global name (egtm:gorder/0).
gforeach (Fun) when is_function (Fun, 2) ->
  gforeach_internal (egtm:gorder (), Fun, []).

%% Step through global names with egtm:order/1 until it yields "" (no
%% more globals), threading the accumulator through Fun.
gforeach_internal ([], _Fun, Res) -> {ok, Res};
gforeach_internal (Gvn, Fun, Res) ->
  gforeach_internal (egtm:order (Gvn), Fun, Fun (Gvn, Res)).
%% Normalise the many shapes a transaction body can return into
%% {ok, commit | rollback, Status}:
%%   ok / true        -> rewritten to {ok, done}          -> commit
%%   false            -> rewritten to {error, unknown}    -> rollback
%%   {ok, Status}     -> commit
%%   {'EXIT', Error}  -> body crashed (caught by transaction/1) -> rollback
%%   {error, Status}  -> rollback
%%   anything else    -> rollback, carrying the raw term as Status
tp_finish (ok) -> tp_finish (true);
tp_finish (true) -> tp_finish ({ok, done});
tp_finish (false) -> tp_finish ({error, unknown});
tp_finish ({ok, Status}) ->
  egtm:tcommit (), {ok, commit, Status};
tp_finish ({'EXIT', Error}) ->
  tp_finish ({error, {exception, Error}});
tp_finish ({error, Status}) ->
  egtm:trollback (), {ok, rollback, Status};
tp_finish (Status) ->
  egtm:trollback (), {ok, rollback, Status}.
%% @doc Transaction processing (TP) support.
%% The only parameter of `Fun' is function
%% that is to be run within a transaction
%% block.
%%
%% Example:
%% ```
%% case transaction (fun () ->
%% egtm:lock (Gvn, Subs),
egtm : kill ( , Subs ) ,
%% egtm:set (Gvn, Subs, Value),
%% egtm:unlock (Gvn, Subs)
%% end) of
%%
{ ok , commit , { ok , Res } ;
%% Whatever -> {error, Whatever}
%% end
%% '''
... is a Erlang equivalent of MUMPS code similar to
( if ` Gvn="^Foo " ' , ` Subs=[1,2,3 ] ' and ` Value="abc " ' ):
%% ```TS L +^Foo(1,2,3) K ^Foo(1,2,3) S ^Foo(1,2,3)="abc" L -^Foo(1,2,3) TC'''
-spec transaction (Fun::function ()) ->
  {ok, commit | rollback, Status::any ()}.
%% Start a GT.M transaction, run Fun (crashes are caught -- hence the
%% `catch'), then commit or roll back according to its return value;
%% see tp_finish/1 for the mapping of return shapes.
transaction (Fun) when is_function (Fun, 0) ->
  egtm:tstart (), tp_finish (catch (Fun ())).
%% @doc Run a function `Fun' within a lock-block
%% on `Gvn' global with `Subs' subscripts.
%% The lock is now released even when `Fun' exits abnormally: the
%% original unlocked only on the success path, so an exception inside
%% the critical section leaked the GT.M lock.  try/after guarantees
%% egtm:unlock/2 runs before the exception propagates.
-spec lock (Gvn::global_name (), Subs::subscripts (),
            Fun::function ()) -> Result::any ().
lock (Gvn, Subs, Fun) when is_function (Fun) ->
  egtm:lock (Gvn, Subs),
  try
    Fun ()
  after
    egtm:unlock (Gvn, Subs)
  end.
%% @equiv lock (Gvn, Subs, Fun)
-spec lock (Gvn::global_name (), Fun::function ()) -> Result::any ().
%% Lock the whole global (no subscripts) around Fun.
lock (Gvn, Fun) when is_function (Fun) ->
  lock (Gvn, [], Fun).
@doc Longstring get / set support .
%% `longstring_set' and `longstring_get' usually operates
%% on strings with sizes larger than maximal GT.M string
%% (1MB) limit.
%% The mechanism is based on cutting strings into chunks
of ( optionally ) specified length ` BlockSize ' and putting
%% them into subindex of `Subs' subscripts in `Gvn' global.
%%
` BlockSize ' can not be less than 1 and if specified
%% incorrectly or completely missing, it is expected
to be 4000 by default .
%%
%% Example (`BlockSize=5'):
%% ```
erl > egtm_util : longstring_set ( " ^Foo " , [ " a","b " ] , " hello world from erlang and gt.m ! " , 5 ) .
%% {ok,done}
%%
%% erl> egtm_util:longstring_get ("^Foo", ["a","b"]).
" hello world from erlang and gt.m ! "
%%
%% erl> egtm_util:foreach ("^Foo", ["a","b"]).
%% ^Foo["a","b","1"]="hello"
%% ^Foo["a","b","2"]=" worl"
%% ^Foo["a","b","3"]="d fro"
%% ^Foo["a","b","4"]="m erl"
%% ^Foo["a","b","5"]="ang a"
%% ^Foo["a","b","6"]="nd gt"
%% ^Foo["a","b","7"]=".m!"
%% '''
-spec longstring_set (Gvn::global_name (),
                      Subs::subscripts (),
                      Text::string (),
                      BlockSize::integer ()) ->
  {ok, Result::any ()} | {error, Reason::any ()}.
%% Store Text as BlockSize-sized chunks under Subs (see module doc).
%% Runs inside a transaction with the node locked; the plain value
%% stored directly at Subs (if any) is preserved across the rewrite.
longstring_set (Gvn, Subs, Text, BlockSize) when is_integer (BlockSize) ->
  %% Fall back to the default block size for nonsensical sizes (< 1).
  BS = (fun (N) when N < 1 -> ?EGTM_LONGSTRING_BLOCKSIZE;
            (N) -> N end)(BlockSize),
  case transaction (fun () ->
         egtm:lock (Gvn, Subs),
         %% Remember the direct value at Subs so it can be restored
         %% after the chunk subtree is rewritten.
         Orig = egtm:get (Gvn, Subs),
         egtm:kill (Gvn, Subs),
         %% Normalise whatever we were given into a flat char list.
         Data = case Text of
           undefined -> []; null -> [];
           Val when is_atom (Val) -> atom_to_list (Val);
           Val when is_binary (Val) -> binary_to_list (Val);
           Val when is_list (Val) -> Val;
           Val -> lists:flatten (io_lib:format ("~p", [Val]))
         end,
         %% BUGFIX: chunk the *normalised* Data, not the raw Text.
         %% Passing Text meant atom/binary input crashed later in
         %% lists:split/2 even though it had just been converted above.
         longstring_set_internal (Gvn, Subs, Data,
                                  length (Data), BS, 1),
         case Orig of
           [] -> ok;
           _ -> egtm:set (Gvn, Subs, Orig)
         end,
         egtm:unlock (Gvn, Subs)
       end) of
    {ok, commit, Res} -> {ok, Res};
    Whatever -> {error, Whatever}
  end.
%% @equiv longstring_set (Gvn, Subs, Text, 4000)
-spec longstring_set (Gvn::global_name (),
                      Subs::subscripts (),
                      Text::string ()) ->
  {ok, Result::any ()} | {error, Reason::any ()}.
%% Store Text using the default chunk size ?EGTM_LONGSTRING_BLOCKSIZE.
longstring_set (Gvn, Subs, Text) ->
  longstring_set (Gvn, Subs, Text, ?EGTM_LONGSTRING_BLOCKSIZE).
%% Recursively write Text in BS-sized chunks under Subs++[N], N=1,2,...
%% TextLen may go negative on the final round: the `TextLen < BS' guard
%% then takes the whole remainder, and the next call matches the
%% empty-list base clause.
longstring_set_internal (_, _, [], _, _, _) -> ok;
longstring_set_internal (Gvn, Subs, Text, TextLen, BS, N) ->
  {H, T} = case TextLen < BS of
    true -> {Text, []};
    false -> lists:split (BS, Text)
  end,
  egtm:set (Gvn, Subs++[N], H),
  longstring_set_internal (Gvn, Subs, T, TextLen-BS, BS, N+1).
%% @equiv longstring_set (Gvn, Subs, Text)
-spec longstring_get (Gvn::global_name (),
                      Subs::subscripts ()) ->
  Result::string ().
%% Reassemble a long string previously written with longstring_set/3,4
%% by visiting each chunk node below Subs and concatenating the values.
%% Yields "" when no chunk nodes exist.
longstring_get (Gvn, Subs) ->
  Fetch = fun (G, S) -> egtm:get (G, S) end,
  case egtm_util:foreach (Gvn, Subs, Fetch) of
    nomatch -> [];
    {ok, Chunks} -> lists:flatten (Chunks)
  end.
%% @equiv longstring_set (Gvn, Subs, [])
-spec longstring_kill (Gvn::global_name (),
                       Subs::subscripts ()) ->
  {ok, Result::any ()} | {error, Reason::any ()}.
%% Remove a long string's chunk nodes by storing the empty string
%% (longstring_set/4 kills the subtree and then writes no chunks).
longstring_kill (Gvn, Subs) ->
  longstring_set (Gvn, Subs, []).
EUnit Tests
-ifdef (TEST).
-include_lib ("eunit/include/eunit.hrl").
%% Round-trip EUnit test for the longstring API: long and short
%% payloads, overwrite, empty string, and the guarantee that a plain
%% value stored directly at Subs survives longstring writes and kills.
longstring_test () ->
  egtm:start (),
  RndStr = egtm_util_eunit:rnd_str_fun (),
  {Gvn, Subs} = {"^EUnit", [RndStr (8), "longstring"]},
  TextLong = RndStr (10240), TextShort = RndStr (1024),
  longstring_set (Gvn, Subs, TextLong),
  ?assertEqual (TextLong, longstring_get (Gvn, Subs)),
  longstring_set (Gvn, Subs, TextShort),
  ?assertEqual (TextShort, longstring_get (Gvn, Subs)),
  longstring_set (Gvn, Subs, ""),
  ?assertEqual ("", longstring_get (Gvn, Subs)),
  %% A direct value at Subs must be preserved by longstring operations.
  egtm:set (Gvn, Subs, TextShort),
  longstring_set (Gvn, Subs, TextLong),
  ?assertEqual (TextLong, longstring_get (Gvn, Subs)),
  ?assertEqual (TextShort, egtm:get (Gvn, Subs)),
  longstring_kill (Gvn, Subs),
  ?assertEqual ("", longstring_get (Gvn, Subs)),
  ?assertEqual (TextShort, egtm:get (Gvn, Subs)),
  egtm:stop (),
  ok.
%% TODO: foreach/2,3 are currently untested; this is a placeholder.
foreach_test () ->
  ok.
-endif.
vim : fdm = syntax : fdn=3 : tw=74 : ts=2 : syn = erlang
| null | https://raw.githubusercontent.com/ztmr/egtm/06f4be66cef2ff702c579d9d9ad30cf23e95076d/src/egtm_util.erl | erlang |
$Id: $
Module: egtm_util -- description
Author: tmr
This program is free software: you can redistribute
it and/or modify it under the terms of the GNU Affero
or (at your option) any later version.
This program is distributed in the hope that it will
be useful, but WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General
Public License for more details.
You should have received a copy of the GNU Affero
General Public License along with this program.
If not, see </>.
@doc Convert any datatype to string.
@doc Sets terminal characteristics. At the moment,
only `NOCENABLE' is currently supported. (= `U $P:NOCENABLE').
and `egtm:get()'.
`Gvn' is a name of MUMPS global array.
`Subs' is a list of keys (subscripts) `[S1,...,SN]
If the `Fun' argument is not specified, all the
records will be written on standard output via
`io:format()'.
Example:
```
egtm_util:foreach ("^Foo", [1,2,3], fun (G, S) ->
io:format ("~p ~p -> ~p", [G, S, egtm:get (G, S)]) end).
'''
...is equivalent of MUMPS code similar to this:
```
. W $NA(^(X)),"=",$G(^(X)),!
'''
@equiv foreach (Gvn, Subs)
@equiv gforeach (PrintAllGvnsFunction)
Uses special case of MUMPS `$Order'
(GT.M-specific trick!) to iterate over
all global variables available.
(optional) is result accumulator variable.
the accumulator is automatically collected
as the list of all the results of each
`Fun' call.
Examples:
```
erl> egtm_util:gforeach ().
^%EUnit=[]
^ZFOO="1"
{ok,["^%EUnit","^ZFOO","^ZTMR"]}
erl> egtm_util:gforeach (fun (G) -> G end).
{ok,["^%EUnit","^ZFOO","^ZTMR"]}
erl> egtm_util:gforeach (fun (G) -> egtm:data (G) end).
{ok,[10,1,11]}
^%EUnit["perf"]=[]
^ZTMR["1"]="2"
^ZTMR["2"]="3"
^ZTMR["3"]="4"
{ok,[]}
erl> egtm_util:gforeach (fun (G, Res) ->
{ok, R} = egtm_util:foreach (G, [],
fun (G, S, A) -> [{S,egtm:get (G, S)}|A] end),
{ok,[{"^ZTMR",[{["3"],"4"},{["2"],"3"},{["1"],"2"}]},
{"^ZFOO",[]},
{"^%EUnit",[{["perf"],[]}]}]}
'''
@doc Transaction processing (TP) support.
The only parameter of `Fun' is function
that is to be run within a transaction
block.
Example:
```
case transaction (fun () ->
egtm:lock (Gvn, Subs),
egtm:set (Gvn, Subs, Value),
egtm:unlock (Gvn, Subs)
end) of
Whatever -> {error, Whatever}
end
'''
```TS L +^Foo(1,2,3) K ^Foo(1,2,3) S ^Foo(1,2,3)="abc" L -^Foo(1,2,3) TC'''
@doc Run a function `Fun' within a lock-block
on `Gvn' global with `Subs' subscripts.
@equiv lock (Gvn, Subs, Fun)
`longstring_set' and `longstring_get' usually operates
on strings with sizes larger than maximal GT.M string
(1MB) limit.
The mechanism is based on cutting strings into chunks
them into subindex of `Subs' subscripts in `Gvn' global.
incorrectly or completely missing, it is expected
Example (`BlockSize=5'):
```
{ok,done}
erl> egtm_util:longstring_get ("^Foo", ["a","b"]).
erl> egtm_util:foreach ("^Foo", ["a","b"]).
^Foo["a","b","1"]="hello"
^Foo["a","b","2"]=" worl"
^Foo["a","b","3"]="d fro"
^Foo["a","b","4"]="m erl"
^Foo["a","b","5"]="ang a"
^Foo["a","b","6"]="nd gt"
^Foo["a","b","7"]=".m!"
'''
@equiv longstring_set (Gvn, Subs, Text)
@equiv longstring_set (Gvn, Subs, []) | Created : 07 - APR-2012 15:31
Copyright 2012 , IDEA Systems .
General Public License as published by the Free Software
Foundation , either version 3 of the License ,
@doc EGTM Utilities .
-module (egtm_util).
-export ([
stringify/1,
gforeach/0, gforeach/1,
foreach/3, foreach/2,
set_term/1, transaction/1, lock/2, lock/3,
longstring_set/4, longstring_set/3,
longstring_get/2, longstring_kill/2
]).
-include_lib ("egtm.hrl").
-spec stringify (Data::any ()) -> Result::string ().
stringify (X) when is_list (X) -> X;
stringify (X) -> io_lib:format ("~p", [X]).
-spec set_term (Flag::nocenable) -> ok.
set_term ('nocenable') -> egtm:xecute ("u $p:nocenable").
@doc implementation based on ` egtm : order ( ) '
so that S(N+1 ) will be used to ` egtm : order ( ) ' over .
` Fun ' argument is function of arity 2 ( Gvn , Subs )
or arity 3 ( Gvn , Subs , ResultAccumulator ) .
N X S X= " " F S X=$O(^Foo(1,2,3,X ) ) Q : X= " " D
-spec foreach (Gvn::global_name (),
Subs::subscripts ()) ->
nomatch | {ok, AccumulatedData::list ()}.
foreach (Gvn, Subs) ->
foreach (Gvn, Subs, fun (G,S) ->
io:format ("~s~p=~p~n",
[G, S, egtm:get (G, S)]), lists:last (S) end).
-spec foreach (Gvn::global_name (),
Subs::subscripts (),
Fun::function ()) ->
nomatch | {ok, AccumulatedData::list ()}.
foreach (Gvn, Subs, Fun) when is_function (Fun, 2) ->
case foreach (Gvn, Subs, fun (G, S, R) -> [Fun (G, S)|R] end) of
{ok, Res} -> {ok, lists:reverse (Res)};
Whatever -> Whatever
end;
foreach (Gvn, Subs, Fun) when is_function (Fun, 3) ->
case egtm:data (Gvn, Subs) > 0 of
true -> foreach_internal (Gvn, Subs, length (Subs), [], Fun, []);
false -> nomatch
end.
foreach_internal (Gvn, SubH, SubHLen, SubT, Fun, Res) ->
NewSubs = egtm:order (Gvn, SubH++[SubT]),
NewSubT = lists:nth (SubHLen+1, NewSubs),
case NewSubT of
[] ->
{ok, Res};
_ ->
foreach_internal (Gvn, SubH, SubHLen, NewSubT,
Fun, Fun (Gvn, NewSubs, Res))
end.
gforeach () ->
gforeach (fun (G) ->
io:format ("~s=~p~n", [G, egtm:get (G)]), G end).
@doc Global Variable Name .
` Fun ' is function of arity one or two .
First argument passed to ` Fun ' is the
name of global variable and the second
When used with the ` Fun ' of arity of one ,
^ZTMR="1 "
erl > egtm_util : gforeach ( fun ( G , Res ) - > { ok , R } = egtm_util : foreach ( G , [ ] ) , Res end ) .
[ { G , R}|Res ] end ) .
gforeach (Fun) when is_function (Fun, 1) ->
case gforeach (fun (G, R) -> [Fun (G)|R] end) of
{ok, Res} -> {ok, lists:reverse (Res)};
Whatever -> Whatever
end;
gforeach (Fun) when is_function (Fun, 2) ->
gforeach_internal (egtm:gorder (), Fun, []).
gforeach_internal ([], _Fun, Res) -> {ok, Res};
gforeach_internal (Gvn, Fun, Res) ->
gforeach_internal (egtm:order (Gvn), Fun, Fun (Gvn, Res)).
tp_finish (ok) -> tp_finish (true);
tp_finish (true) -> tp_finish ({ok, done});
tp_finish (false) -> tp_finish ({error, unknown});
tp_finish ({ok, Status}) ->
egtm:tcommit (), {ok, commit, Status};
tp_finish ({'EXIT', Error}) ->
tp_finish ({error, {exception, Error}});
tp_finish ({error, Status}) ->
egtm:trollback (), {ok, rollback, Status};
tp_finish (Status) ->
egtm:trollback (), {ok, rollback, Status}.
egtm : kill ( , Subs ) ,
{ ok , commit , { ok , Res } ;
... is a Erlang equivalent of MUMPS code similar to
( if ` Gvn="^Foo " ' , ` Subs=[1,2,3 ] ' and ` Value="abc " ' ):
-spec transaction (Fun::function ()) ->
{ok, commit | rollback, Status::any ()}.
transaction (Fun) when is_function (Fun, 0) ->
egtm:tstart (), tp_finish (catch (Fun ())).
-spec lock (Gvn::global_name (), Subs::subscripts (),
Fun::function ()) -> Result::any ().
lock (Gvn, Subs, Fun) when is_function (Fun) ->
egtm:lock (Gvn, Subs),
Res = Fun (),
egtm:unlock (Gvn, Subs),
Res.
-spec lock (Gvn::global_name (), Fun::function ()) -> Result::any ().
lock (Gvn, Fun) when is_function (Fun) ->
lock (Gvn, [], Fun).
@doc Longstring get / set support .
of ( optionally ) specified length ` BlockSize ' and putting
` BlockSize ' can not be less than 1 and if specified
to be 4000 by default .
erl > egtm_util : longstring_set ( " ^Foo " , [ " a","b " ] , " hello world from erlang and gt.m ! " , 5 ) .
" hello world from erlang and gt.m ! "
-spec longstring_set (Gvn::global_name (),
Subs::subscripts (),
Text::string (),
BlockSize::integer ()) ->
{ok, Result::any ()} | {error, Reason::any ()}.
longstring_set (Gvn, Subs, Text, BlockSize) when is_integer (BlockSize) ->
BS = (fun (N) when N < 1 -> ?EGTM_LONGSTRING_BLOCKSIZE;
(N) -> N end)(BlockSize),
case transaction (fun () ->
egtm:lock (Gvn, Subs),
Orig = egtm:get (Gvn, Subs),
egtm:kill (Gvn, Subs),
Data = case Text of
undefined -> []; null -> [];
Val when is_atom (Val) -> atom_to_list (Val);
Val when is_binary (Val) -> binary_to_list (Val);
Val when is_list (Val) -> Val;
Val -> lists:flatten (io_lib:format ("~p", [Val]))
end,
longstring_set_internal (Gvn, Subs, Text,
length (Data), BS, 1),
case Orig of
[] -> ok;
_ -> egtm:set (Gvn, Subs, Orig)
end,
egtm:unlock (Gvn, Subs)
end) of
{ok, commit, Res} -> {ok, Res};
Whatever -> {error, Whatever}
end.
@equiv longstring_set ( Gvn , Subs , Text , 4000 )
-spec longstring_set (Gvn::global_name (),
Subs::subscripts (),
Text::string ()) ->
{ok, Result::any ()} | {error, Reason::any ()}.
longstring_set (Gvn, Subs, Text) ->
longstring_set (Gvn, Subs, Text, ?EGTM_LONGSTRING_BLOCKSIZE).
longstring_set_internal (_, _, [], _, _, _) -> ok;
longstring_set_internal (Gvn, Subs, Text, TextLen, BS, N) ->
{H, T} = case TextLen < BS of
true -> {Text, []};
false -> lists:split (BS, Text)
end,
egtm:set (Gvn, Subs++[N], H),
longstring_set_internal (Gvn, Subs, T, TextLen-BS, BS, N+1).
-spec longstring_get (Gvn::global_name (),
Subs::subscripts ()) ->
Result::string ().
longstring_get (Gvn, Subs) ->
case egtm_util:foreach (Gvn, Subs,
fun (G,S) -> egtm:get (G,S)
end) of
{ok, Res} -> lists:flatten (Res);
nomatch -> []
end.
-spec longstring_kill (Gvn::global_name (),
Subs::subscripts ()) ->
{ok, Result::any ()} | {error, Reason::any ()}.
longstring_kill (Gvn, Subs) ->
longstring_set (Gvn, Subs, []).
EUnit Tests
-ifdef (TEST).
-include_lib ("eunit/include/eunit.hrl").
longstring_test () ->
egtm:start (),
RndStr = egtm_util_eunit:rnd_str_fun (),
{Gvn, Subs} = {"^EUnit", [RndStr (8), "longstring"]},
TextLong = RndStr (10240), TextShort = RndStr (1024),
longstring_set (Gvn, Subs, TextLong),
?assertEqual (TextLong, longstring_get (Gvn, Subs)),
longstring_set (Gvn, Subs, TextShort),
?assertEqual (TextShort, longstring_get (Gvn, Subs)),
longstring_set (Gvn, Subs, ""),
?assertEqual ("", longstring_get (Gvn, Subs)),
egtm:set (Gvn, Subs, TextShort),
longstring_set (Gvn, Subs, TextLong),
?assertEqual (TextLong, longstring_get (Gvn, Subs)),
?assertEqual (TextShort, egtm:get (Gvn, Subs)),
longstring_kill (Gvn, Subs),
?assertEqual ("", longstring_get (Gvn, Subs)),
?assertEqual (TextShort, egtm:get (Gvn, Subs)),
egtm:stop (),
ok.
foreach_test () ->
ok.
-endif.
vim : fdm = syntax : fdn=3 : tw=74 : ts=2 : syn = erlang
|
4307fc12e7216e2b6f4a6304f9408d602b49edbc665c0dca4a57f448634e325f | racket/web-server | cookie-parse.rkt | #lang racket/base
(require web-server/http/request-structs
net/cookies/common
net/cookies/server
web-server/private/util
racket/match
racket/contract)
(provide (contract-out
[struct client-cookie
([name (and/c string? cookie-name?)]
[value (and/c string? cookie-value?)]
[domain (or/c #f domain-value?)]
[path (or/c #f path/extension-value?)])]
[request-cookies (-> request?
(listof client-cookie?))]
))
;; A cookie as received from the client in a Cookie request header.
;; request-cookies below always fills domain and path with #f (the
;; Cookie request header carries only name=value pairs); the fields
;; exist for callers that want to attach that context themselves.
(define-struct client-cookie
  (name value domain path)
  #:prefab)
;; Strip one layer of surrounding double quotes from a cookie value;
;; anything not fully wrapped in quotes is returned untouched.
(define (handle-quoted-value raw)
  (match raw
    [(regexp #rx"^\"(.*)\"$" (list _ unquoted)) unquoted]
    [_ raw]))
;; Parse every Cookie header of a request into client-cookie structs.
;; Multiple Cookie headers are supported: each is split into name/value
;; pairs by cookie-header->alist, both halves are decoded from bytes to
;; UTF-8 strings, values are stripped of surrounding quotes, and
;; domain/path are left as #f (this parser never fills them).
(define (request-cookies req)
  (for/fold ([cookies-so-far null])
            ([this-header (in-list (request-headers/raw req))]
             #:when (bytes-ci=? #"Cookie"
                                (header-field this-header)))
    (append cookies-so-far
            (for/list ([pr (in-list (cookie-header->alist
                                     (header-value this-header)))])
              (client-cookie (bytes->string/utf-8 (car pr))
                             (handle-quoted-value (bytes->string/utf-8 (cdr pr)))
                             #f
                             #f)))))
| null | https://raw.githubusercontent.com/racket/web-server/f718800b5b3f407f7935adf85dfa663c4bba1651/web-server-lib/web-server/http/cookie-parse.rkt | racket | #lang racket/base
(require web-server/http/request-structs
net/cookies/common
net/cookies/server
web-server/private/util
racket/match
racket/contract)
(provide (contract-out
[struct client-cookie
([name (and/c string? cookie-name?)]
[value (and/c string? cookie-value?)]
[domain (or/c #f domain-value?)]
[path (or/c #f path/extension-value?)])]
[request-cookies (-> request?
(listof client-cookie?))]
))
(define-struct client-cookie
(name value domain path)
#:prefab)
(define handle-quoted-value
(match-lambda
[(regexp #rx"^\"(.*)\"$" (list _ inner))
inner]
[val val]))
(define (request-cookies req)
(for/fold ([cookies-so-far null])
([this-header (in-list (request-headers/raw req))]
#:when (bytes-ci=? #"Cookie"
(header-field this-header)))
(append cookies-so-far
(for/list ([pr (in-list (cookie-header->alist
(header-value this-header)))])
(client-cookie (bytes->string/utf-8 (car pr))
(handle-quoted-value (bytes->string/utf-8 (cdr pr)))
#f
#f)))))
| |
7e6d50367b888673166e730907b92316aaa5184d056619ce82e642c058bed62d | maximedenes/native-coq | nametab.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Errors
open Util
open Compat
open Pp
open Names
open Libnames
open Nameops
open Declarations
exception GlobalizationError of qualid
exception GlobalizationConstantError of qualid
(* Raise [GlobalizationError q] attached to source location [loc]. *)
let error_global_not_found_loc loc q =
Loc.raise loc (GlobalizationError q)
(* Raise [GlobalizationConstantError q] attached to source location [loc]. *)
let error_global_constant_not_found_loc loc q =
Loc.raise loc (GlobalizationConstantError q)
(* Location-less variant of [error_global_not_found_loc]. *)
let error_global_not_found q = raise (GlobalizationError q)
(* Kinds of global names *)
type ltac_constant = kernel_name
(* The visibility can be registered either
- for all suffixes not shorter then a given int - when the object
is loaded inside a module
or
- for a precise suffix, when the module containing (the module
containing ...) the object is open (imported)
*)
type visibility = Until of int | Exactly of int
(* Data structure for nametabs *******************************************)
(* This module type will be instantiated by [full_path] of [dir_path] *)
(* The [repr] function is assumed to return the reversed list of idents. *)
module type UserName = sig
type t
val to_string : t -> string
val repr : t -> identifier * module_ident list
end
(* A ['a t] is a map from [user_name] to ['a], with possible lookup by
   partially qualified names of type [qualid].  The mapping of
   partially qualified names to ['a] is determined by the [visibility]
   parameter of [push].

   The [shortest_qualid] function, given a user_name Coq.A.B.x, tries
   to find the shortest among x, B.x, A.B.x and Coq.A.B.x that denotes
   the same object. *)
module type NAMETREE = sig
type 'a t
type user_name
val empty : 'a t
val push : visibility -> user_name -> 'a -> 'a t -> 'a t
val locate : qualid -> 'a t -> 'a
val find : user_name -> 'a t -> 'a
val exists : user_name -> 'a t -> bool
val user_name : qualid -> 'a t -> user_name
val shortest_qualid : Idset.t -> user_name -> 'a t -> qualid
val find_prefixes : qualid -> 'a t -> 'a list
end
module Make(U:UserName) : NAMETREE with type user_name = U.t
=
struct
type user_name = U.t
type 'a path_status =
Nothing
| Relative of user_name * 'a
| Absolute of user_name * 'a
(* Dictionaries of short names *)
type 'a nametree = ('a path_status * 'a nametree ModIdmap.t)
type 'a t = 'a nametree Idmap.t
let empty = Idmap.empty
(* [push_until] is used to register [Until vis] visibility,
   [push_exactly] to register [Exactly vis], and [push] chooses
   the right one. *)
let rec push_until uname o level (current,dirmap) = function
| modid :: path ->
let mc =
try ModIdmap.find modid dirmap
with Not_found -> (Nothing, ModIdmap.empty)
in
let this =
if level <= 0 then
match current with
| Absolute (n,_) ->
(* This is an absolute name, we must keep it
otherwise it may become unaccessible forever *)
Flags.if_warn
msg_warning (str ("Trying to mask the absolute name \""
^ U.to_string n ^ "\"!"));
current
| Nothing
| Relative _ -> Relative (uname,o)
else current
in
let ptab' = push_until uname o (level-1) mc path in
(this, ModIdmap.add modid ptab' dirmap)
| [] ->
match current with
| Absolute (uname',o') ->
if o'=o then begin
assert (uname=uname');
current, dirmap
we are putting the same thing for the second time :)
end
else
(* This is an absolute name, we must keep it otherwise it may
become unaccessible forever *)
(* But ours is also absolute! This is an error! *)
error ("Cannot mask the absolute name \""
^ U.to_string uname' ^ "\"!")
| Nothing
| Relative _ -> Absolute (uname,o), dirmap
let rec push_exactly uname o level (current,dirmap) = function
| modid :: path ->
let mc =
try ModIdmap.find modid dirmap
with Not_found -> (Nothing, ModIdmap.empty)
in
if level = 0 then
let this =
match current with
| Absolute (n,_) ->
(* This is an absolute name, we must keep it
otherwise it may become unaccessible forever *)
Flags.if_warn
msg_warning (str ("Trying to mask the absolute name \""
^ U.to_string n ^ "\"!"));
current
| Nothing
| Relative _ -> Relative (uname,o)
in
(this, dirmap)
else (* not right level *)
let ptab' = push_exactly uname o (level-1) mc path in
(current, ModIdmap.add modid ptab' dirmap)
| [] ->
anomaly "Prefix longer than path! Impossible!"
let push visibility uname o tab =
let id,dir = U.repr uname in
let ptab =
try Idmap.find id tab
with Not_found -> (Nothing, ModIdmap.empty)
in
let ptab' = match visibility with
| Until i -> push_until uname o (i-1) ptab dir
| Exactly i -> push_exactly uname o (i-1) ptab dir
in
Idmap.add id ptab' tab
let rec search (current,modidtab) = function
| modid :: path -> search (ModIdmap.find modid modidtab) path
| [] -> current
let find_node qid tab =
let (dir,id) = repr_qualid qid in
search (Idmap.find id tab) (repr_dirpath dir)
let locate qid tab =
let o = match find_node qid tab with
| Absolute (uname,o) | Relative (uname,o) -> o
| Nothing -> raise Not_found
in
o
let user_name qid tab =
let uname = match find_node qid tab with
| Absolute (uname,o) | Relative (uname,o) -> uname
| Nothing -> raise Not_found
in
uname
let find uname tab =
let id,l = U.repr uname in
match search (Idmap.find id tab) l with
Absolute (_,o) -> o
| _ -> raise Not_found
let exists uname tab =
try
let _ = find uname tab in
true
with
Not_found -> false
let shortest_qualid ctx uname tab =
let id,dir = U.repr uname in
let hidden = Idset.mem id ctx in
let rec find_uname pos dir (path,tab) = match path with
| Absolute (u,_) | Relative (u,_)
when u=uname && not(pos=[] && hidden) -> List.rev pos
| _ ->
match dir with
[] -> raise Not_found
| id::dir -> find_uname (id::pos) dir (ModIdmap.find id tab)
in
let ptab = Idmap.find id tab in
let found_dir = find_uname [] dir ptab in
make_qualid (make_dirpath found_dir) id
let push_node node l =
match node with
| Absolute (_,o) | Relative (_,o) when not (List.mem o l) -> o::l
| _ -> l
let rec flatten_idmap tab l =
ModIdmap.fold (fun _ (current,idtab) l ->
flatten_idmap idtab (push_node current l)) tab l
let rec search_prefixes (current,modidtab) = function
| modid :: path -> search_prefixes (ModIdmap.find modid modidtab) path
| [] -> List.rev (flatten_idmap modidtab (push_node current []))
let find_prefixes qid tab =
try
let (dir,id) = repr_qualid qid in
search_prefixes (Idmap.find id tab) (repr_dirpath dir)
with Not_found -> []
end
(* Global name tables *************************************************)
module SpTab = Make (struct
type t = full_path
let to_string = string_of_path
let repr sp =
let dir,id = repr_path sp in
id, (repr_dirpath dir)
end)
type ccitab = extended_global_reference SpTab.t
let the_ccitab = ref (SpTab.empty : ccitab)
type kntab = kernel_name SpTab.t
let the_tactictab = ref (SpTab.empty : kntab)
type mptab = module_path SpTab.t
let the_modtypetab = ref (SpTab.empty : mptab)
module DirTab = Make(struct
type t = dir_path
let to_string = string_of_dirpath
let repr dir = match repr_dirpath dir with
| [] -> anomaly "Empty dirpath"
| id::l -> (id,l)
end)
(* If we have a (closed) module M having a submodule N, than N does not
have the entry in [the_dirtab]. *)
type dirtab = global_dir_reference DirTab.t
let the_dirtab = ref (DirTab.empty : dirtab)
(* Reversed name tables ***************************************************)
(* This table translates extended_global_references back to section paths *)
module Globrevtab = Map.Make(ExtRefOrdered)
type globrevtab = full_path Globrevtab.t
let the_globrevtab = ref (Globrevtab.empty : globrevtab)
type mprevtab = dir_path MPmap.t
let the_modrevtab = ref (MPmap.empty : mprevtab)
type mptrevtab = full_path MPmap.t
let the_modtyperevtab = ref (MPmap.empty : mptrevtab)
type knrevtab = full_path KNmap.t
let the_tacticrevtab = ref (KNmap.empty : knrevtab)
(* Push functions *********************************************************)
(* This is for permanent constructions (never discharged -- but with
   possibly limited visibility, i.e. Theorem, Lemma, Definition, Axiom,
   Parameter, but also Remark and Fact). *)
let push_xref visibility sp xref =
match visibility with
| Until _ ->
the_ccitab := SpTab.push visibility sp xref !the_ccitab;
the_globrevtab := Globrevtab.add xref sp !the_globrevtab
| _ ->
begin
if SpTab.exists sp !the_ccitab then
match SpTab.find sp !the_ccitab with
| TrueGlobal( ConstRef _) | TrueGlobal( IndRef _) |
TrueGlobal( ConstructRef _) as xref ->
the_ccitab := SpTab.push visibility sp xref !the_ccitab;
| _ ->
the_ccitab := SpTab.push visibility sp xref !the_ccitab;
else
the_ccitab := SpTab.push visibility sp xref !the_ccitab;
end
let push_cci visibility sp ref =
push_xref visibility sp (TrueGlobal ref)
This is for Syntactic Definitions
let push_syndef visibility sp kn =
push_xref visibility sp (SynDef kn)
let push = push_cci
let push_modtype vis sp kn =
the_modtypetab := SpTab.push vis sp kn !the_modtypetab;
the_modtyperevtab := MPmap.add kn sp !the_modtyperevtab
(* This is for tactic definition names *)
let push_tactic vis sp kn =
the_tactictab := SpTab.push vis sp kn !the_tactictab;
the_tacticrevtab := KNmap.add kn sp !the_tacticrevtab
(* This is to remember absolute Section/Module names and to avoid redundancy *)
let push_dir vis dir dir_ref =
the_dirtab := DirTab.push vis dir dir_ref !the_dirtab;
match dir_ref with
DirModule (_,(mp,_)) -> the_modrevtab := MPmap.add mp dir !the_modrevtab
| _ -> ()
(* Locate functions *******************************************************)
(* Resolve a qualid to an extended reference (true global or syntactic
   definition); use this when syntactic definitions are allowed. *)
let locate_extended qid = SpTab.locate qid !the_ccitab
(* Resolve a qualid to a true global only; raises [Not_found] when the
   qualid denotes a syntactic definition. *)
let locate qid = match locate_extended qid with
| TrueGlobal ref -> ref
| SynDef _ -> raise Not_found
(* Fully qualified path under which [qid] is registered. *)
let full_name_cci qid = SpTab.user_name qid !the_ccitab
(* Dual of [locate]: only succeeds on syntactic definitions. *)
let locate_syndef qid = match locate_extended qid with
| TrueGlobal _ -> raise Not_found
| SynDef kn -> kn
let locate_modtype qid = SpTab.locate qid !the_modtypetab
let full_name_modtype qid = SpTab.user_name qid !the_modtypetab
let locate_tactic qid = SpTab.locate qid !the_tactictab
let locate_dir qid = DirTab.locate qid !the_dirtab
let locate_module qid =
match locate_dir qid with
| DirModule (_,(mp,_)) -> mp
| _ -> raise Not_found
let full_name_module qid =
match locate_dir qid with
| DirModule (dir,_) -> dir
| _ -> raise Not_found
let locate_section qid =
match locate_dir qid with
| DirOpenSection (dir, _)
| DirClosedSection dir -> dir
| _ -> raise Not_found
let locate_all qid =
List.fold_right (fun a l -> match a with TrueGlobal a -> a::l | _ -> l)
(SpTab.find_prefixes qid !the_ccitab) []
let locate_extended_all qid = SpTab.find_prefixes qid !the_ccitab
(* Derived functions *)
let locate_constant qid =
match locate_extended qid with
| TrueGlobal (ConstRef kn) -> kn
| _ -> raise Not_found
let global_of_path sp =
match SpTab.find sp !the_ccitab with
| TrueGlobal ref -> ref
| _ -> raise Not_found
let extended_global_of_path sp = SpTab.find sp !the_ccitab
(* Resolve a located reference [r] to a true global.  A syntactic
   definition (notation) is reported as a user error at [loc]; an
   unresolvable qualid raises the standard globalization error. *)
let global r =
let (loc,qid) = qualid_of_reference r in
try match locate_extended qid with
| TrueGlobal ref -> ref
| SynDef _ ->
user_err_loc (loc,"global",
str "Unexpected reference to a notation: " ++
pr_qualid qid)
with Not_found ->
error_global_not_found_loc loc qid
(* Exists functions ********************************************************)
let exists_cci sp = SpTab.exists sp !the_ccitab
let exists_dir dir = DirTab.exists dir !the_dirtab
let exists_section = exists_dir
let exists_module = exists_dir
let exists_modtype sp = SpTab.exists sp !the_modtypetab
(* Reverse locate functions ***********************************************)
let path_of_global ref =
match ref with
| VarRef id -> make_path empty_dirpath id
| _ -> Globrevtab.find (TrueGlobal ref) !the_globrevtab
let dirpath_of_global ref =
fst (repr_path (path_of_global ref))
let basename_of_global ref =
snd (repr_path (path_of_global ref))
let path_of_syndef kn =
Globrevtab.find (SynDef kn) !the_globrevtab
let dirpath_of_module mp =
MPmap.find mp !the_modrevtab
let path_of_tactic kn =
KNmap.find kn !the_tacticrevtab
Shortest qualid functions * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
let shortest_qualid_of_global ctx ref =
match ref with
| VarRef id -> make_qualid empty_dirpath id
| _ ->
let sp = Globrevtab.find (TrueGlobal ref) !the_globrevtab in
SpTab.shortest_qualid ctx sp !the_ccitab
let shortest_qualid_of_syndef ctx kn =
let sp = path_of_syndef kn in
SpTab.shortest_qualid ctx sp !the_ccitab
let shortest_qualid_of_module mp =
let dir = MPmap.find mp !the_modrevtab in
DirTab.shortest_qualid Idset.empty dir !the_dirtab
let shortest_qualid_of_modtype kn =
let sp = MPmap.find kn !the_modtyperevtab in
SpTab.shortest_qualid Idset.empty sp !the_modtypetab
let shortest_qualid_of_tactic kn =
let sp = KNmap.find kn !the_tacticrevtab in
SpTab.shortest_qualid Idset.empty sp !the_tactictab
let pr_global_env env ref =
(* The let-in matters here: streams are evaluated lazily, so we must
   force the evaluation in order to catch a possible exception (as
   happens in the debugger). *)
let s = string_of_qualid (shortest_qualid_of_global env ref) in
(str s)
let global_inductive r =
match global r with
| IndRef ind -> ind
| ref ->
user_err_loc (loc_of_reference r,"global_inductive",
pr_reference r ++ spc () ++ str "is not an inductive type")
(********************************************************************)
(********************************************************************)
(* Registration of tables as a global table and rollback *)
type frozen = ccitab * dirtab * kntab * kntab
* globrevtab * mprevtab * knrevtab * knrevtab
(* Reset every name table (forward and reverse) to its empty value. *)
let init () =
the_ccitab := SpTab.empty;
the_dirtab := DirTab.empty;
the_modtypetab := SpTab.empty;
the_tactictab := SpTab.empty;
the_globrevtab := Globrevtab.empty;
the_modrevtab := MPmap.empty;
the_modtyperevtab := MPmap.empty;
the_tacticrevtab := KNmap.empty
(* Snapshot the current contents of all tables as a [frozen] tuple. *)
let freeze () =
!the_ccitab,
!the_dirtab,
!the_modtypetab,
!the_tactictab,
!the_globrevtab,
!the_modrevtab,
!the_modtyperevtab,
!the_tacticrevtab
(* Restore all tables from a snapshot produced by [freeze]. *)
let unfreeze (ccit,dirt,mtyt,tact,globr,modr,mtyr,tacr) =
the_ccitab := ccit;
the_dirtab := dirt;
the_modtypetab := mtyt;
the_tactictab := tact;
the_globrevtab := globr;
the_modrevtab := modr;
the_modtyperevtab := mtyr;
the_tacticrevtab := tacr
(* Register the tables with the summary mechanism so they roll back
   correctly with the rest of the global state. *)
let _ =
Summary.declare_summary "names"
{ Summary.freeze_function = freeze;
Summary.unfreeze_function = unfreeze;
Summary.init_function = init }
(* Deprecated synonyms *)
let extended_locate = locate_extended
let absolute_reference = global_of_path
| null | https://raw.githubusercontent.com/maximedenes/native-coq/3623a4d9fe95c165f02f7119c0e6564a83a9f4c9/library/nametab.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
Kinds of global names
The visibility can be registered either
- for all suffixes not shorter then a given int - when the object
is loaded inside a module
or
- for a precise suffix, when the module containing (the module
containing ...) the object is open (imported)
Data structure for nametabs ******************************************
This module type will be instantiated by [full_path] of [dir_path]
The [repr] function is assumed to return the reversed list of idents.
Dictionaries of short names
This is an absolute name, we must keep it
otherwise it may become unaccessible forever
This is an absolute name, we must keep it otherwise it may
become unaccessible forever
But ours is also absolute! This is an error!
This is an absolute name, we must keep it
otherwise it may become unaccessible forever
not right level
Global name tables ************************************************
If we have a (closed) module M having a submodule N, than N does not
have the entry in [the_dirtab].
Reversed name tables **************************************************
This table translates extended_global_references back to section paths
Push functions ********************************************************
This is for tactic definition names
This is to remember absolute Section/Module names and to avoid redundancy
Locate functions ******************************************************
This should be used when syntactic definitions are allowed
This should be used when no syntactic definitions is expected
Derived functions
Exists functions *******************************************************
Reverse locate functions **********************************************
******************************************************************
******************************************************************
Registration of tables as a global table and rollback
Deprecated synonyms | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Errors
open Util
open Compat
open Pp
open Names
open Libnames
open Nameops
open Declarations
exception GlobalizationError of qualid
exception GlobalizationConstantError of qualid
let error_global_not_found_loc loc q =
Loc.raise loc (GlobalizationError q)
let error_global_constant_not_found_loc loc q =
Loc.raise loc (GlobalizationConstantError q)
let error_global_not_found q = raise (GlobalizationError q)
type ltac_constant = kernel_name
type visibility = Until of int | Exactly of int
module type UserName = sig
type t
val to_string : t -> string
val repr : t -> identifier * module_ident list
end
A [ ' a t ] is a map from [ user_name ] to [ ' a ] , with possible lookup by
partially qualified names of type [ qualid ] . The mapping of
partially qualified names to [ ' a ] is determined by the [ visibility ]
parameter of [ push ] .
The [ shortest_qualid ] function given a user_name Coq . A.B.x , tries
to find the shortest among x , B.x , A.B.x and Coq . A.B.x that denotes
the same object .
partially qualified names of type [qualid]. The mapping of
partially qualified names to ['a] is determined by the [visibility]
parameter of [push].
The [shortest_qualid] function given a user_name Coq.A.B.x, tries
to find the shortest among x, B.x, A.B.x and Coq.A.B.x that denotes
the same object.
*)
module type NAMETREE = sig
type 'a t
type user_name
val empty : 'a t
val push : visibility -> user_name -> 'a -> 'a t -> 'a t
val locate : qualid -> 'a t -> 'a
val find : user_name -> 'a t -> 'a
val exists : user_name -> 'a t -> bool
val user_name : qualid -> 'a t -> user_name
val shortest_qualid : Idset.t -> user_name -> 'a t -> qualid
val find_prefixes : qualid -> 'a t -> 'a list
end
module Make(U:UserName) : NAMETREE with type user_name = U.t
=
struct
type user_name = U.t
type 'a path_status =
Nothing
| Relative of user_name * 'a
| Absolute of user_name * 'a
type 'a nametree = ('a path_status * 'a nametree ModIdmap.t)
type 'a t = 'a nametree Idmap.t
let empty = Idmap.empty
[ push_until ] is used to register [ Until vis ] visibility and
[ push_exactly ] to [ Exactly vis ] and [ push_tree ] chooses the right one
[push_exactly] to [Exactly vis] and [push_tree] chooses the right one*)
let rec push_until uname o level (current,dirmap) = function
| modid :: path ->
let mc =
try ModIdmap.find modid dirmap
with Not_found -> (Nothing, ModIdmap.empty)
in
let this =
if level <= 0 then
match current with
| Absolute (n,_) ->
Flags.if_warn
msg_warning (str ("Trying to mask the absolute name \""
^ U.to_string n ^ "\"!"));
current
| Nothing
| Relative _ -> Relative (uname,o)
else current
in
let ptab' = push_until uname o (level-1) mc path in
(this, ModIdmap.add modid ptab' dirmap)
| [] ->
match current with
| Absolute (uname',o') ->
if o'=o then begin
assert (uname=uname');
current, dirmap
we are putting the same thing for the second time :)
end
else
error ("Cannot mask the absolute name \""
^ U.to_string uname' ^ "\"!")
| Nothing
| Relative _ -> Absolute (uname,o), dirmap
let rec push_exactly uname o level (current,dirmap) = function
| modid :: path ->
let mc =
try ModIdmap.find modid dirmap
with Not_found -> (Nothing, ModIdmap.empty)
in
if level = 0 then
let this =
match current with
| Absolute (n,_) ->
Flags.if_warn
msg_warning (str ("Trying to mask the absolute name \""
^ U.to_string n ^ "\"!"));
current
| Nothing
| Relative _ -> Relative (uname,o)
in
(this, dirmap)
let ptab' = push_exactly uname o (level-1) mc path in
(current, ModIdmap.add modid ptab' dirmap)
| [] ->
anomaly "Prefix longer than path! Impossible!"
let push visibility uname o tab =
let id,dir = U.repr uname in
let ptab =
try Idmap.find id tab
with Not_found -> (Nothing, ModIdmap.empty)
in
let ptab' = match visibility with
| Until i -> push_until uname o (i-1) ptab dir
| Exactly i -> push_exactly uname o (i-1) ptab dir
in
Idmap.add id ptab' tab
let rec search (current,modidtab) = function
| modid :: path -> search (ModIdmap.find modid modidtab) path
| [] -> current
let find_node qid tab =
let (dir,id) = repr_qualid qid in
search (Idmap.find id tab) (repr_dirpath dir)
let locate qid tab =
let o = match find_node qid tab with
| Absolute (uname,o) | Relative (uname,o) -> o
| Nothing -> raise Not_found
in
o
let user_name qid tab =
let uname = match find_node qid tab with
| Absolute (uname,o) | Relative (uname,o) -> uname
| Nothing -> raise Not_found
in
uname
let find uname tab =
let id,l = U.repr uname in
match search (Idmap.find id tab) l with
Absolute (_,o) -> o
| _ -> raise Not_found
let exists uname tab =
try
let _ = find uname tab in
true
with
Not_found -> false
let shortest_qualid ctx uname tab =
let id,dir = U.repr uname in
let hidden = Idset.mem id ctx in
let rec find_uname pos dir (path,tab) = match path with
| Absolute (u,_) | Relative (u,_)
when u=uname && not(pos=[] && hidden) -> List.rev pos
| _ ->
match dir with
[] -> raise Not_found
| id::dir -> find_uname (id::pos) dir (ModIdmap.find id tab)
in
let ptab = Idmap.find id tab in
let found_dir = find_uname [] dir ptab in
make_qualid (make_dirpath found_dir) id
let push_node node l =
match node with
| Absolute (_,o) | Relative (_,o) when not (List.mem o l) -> o::l
| _ -> l
let rec flatten_idmap tab l =
ModIdmap.fold (fun _ (current,idtab) l ->
flatten_idmap idtab (push_node current l)) tab l
let rec search_prefixes (current,modidtab) = function
| modid :: path -> search_prefixes (ModIdmap.find modid modidtab) path
| [] -> List.rev (flatten_idmap modidtab (push_node current []))
let find_prefixes qid tab =
try
let (dir,id) = repr_qualid qid in
search_prefixes (Idmap.find id tab) (repr_dirpath dir)
with Not_found -> []
end
module SpTab = Make (struct
type t = full_path
let to_string = string_of_path
let repr sp =
let dir,id = repr_path sp in
id, (repr_dirpath dir)
end)
type ccitab = extended_global_reference SpTab.t
let the_ccitab = ref (SpTab.empty : ccitab)
type kntab = kernel_name SpTab.t
let the_tactictab = ref (SpTab.empty : kntab)
type mptab = module_path SpTab.t
let the_modtypetab = ref (SpTab.empty : mptab)
module DirTab = Make(struct
type t = dir_path
let to_string = string_of_dirpath
let repr dir = match repr_dirpath dir with
| [] -> anomaly "Empty dirpath"
| id::l -> (id,l)
end)
type dirtab = global_dir_reference DirTab.t
let the_dirtab = ref (DirTab.empty : dirtab)
module Globrevtab = Map.Make(ExtRefOrdered)
type globrevtab = full_path Globrevtab.t
let the_globrevtab = ref (Globrevtab.empty : globrevtab)
type mprevtab = dir_path MPmap.t
let the_modrevtab = ref (MPmap.empty : mprevtab)
type mptrevtab = full_path MPmap.t
let the_modtyperevtab = ref (MPmap.empty : mptrevtab)
type knrevtab = full_path KNmap.t
let the_tacticrevtab = ref (KNmap.empty : knrevtab)
This is for permanent constructions ( never discharged -- but with
possibly limited visibility , i.e. Theorem , Lemma , Definition , Axiom ,
Parameter but also Remark and Fact )
possibly limited visibility, i.e. Theorem, Lemma, Definition, Axiom,
Parameter but also Remark and Fact) *)
let push_xref visibility sp xref =
match visibility with
| Until _ ->
the_ccitab := SpTab.push visibility sp xref !the_ccitab;
the_globrevtab := Globrevtab.add xref sp !the_globrevtab
| _ ->
begin
if SpTab.exists sp !the_ccitab then
match SpTab.find sp !the_ccitab with
| TrueGlobal( ConstRef _) | TrueGlobal( IndRef _) |
TrueGlobal( ConstructRef _) as xref ->
the_ccitab := SpTab.push visibility sp xref !the_ccitab;
| _ ->
the_ccitab := SpTab.push visibility sp xref !the_ccitab;
else
the_ccitab := SpTab.push visibility sp xref !the_ccitab;
end
let push_cci visibility sp ref =
push_xref visibility sp (TrueGlobal ref)
This is for Syntactic Definitions
let push_syndef visibility sp kn =
push_xref visibility sp (SynDef kn)
let push = push_cci
let push_modtype vis sp kn =
the_modtypetab := SpTab.push vis sp kn !the_modtypetab;
the_modtyperevtab := MPmap.add kn sp !the_modtyperevtab
let push_tactic vis sp kn =
the_tactictab := SpTab.push vis sp kn !the_tactictab;
the_tacticrevtab := KNmap.add kn sp !the_tacticrevtab
let push_dir vis dir dir_ref =
the_dirtab := DirTab.push vis dir dir_ref !the_dirtab;
match dir_ref with
DirModule (_,(mp,_)) -> the_modrevtab := MPmap.add mp dir !the_modrevtab
| _ -> ()
let locate_extended qid = SpTab.locate qid !the_ccitab
let locate qid = match locate_extended qid with
| TrueGlobal ref -> ref
| SynDef _ -> raise Not_found
let full_name_cci qid = SpTab.user_name qid !the_ccitab
let locate_syndef qid = match locate_extended qid with
| TrueGlobal _ -> raise Not_found
| SynDef kn -> kn
let locate_modtype qid = SpTab.locate qid !the_modtypetab
let full_name_modtype qid = SpTab.user_name qid !the_modtypetab
let locate_tactic qid = SpTab.locate qid !the_tactictab
let locate_dir qid = DirTab.locate qid !the_dirtab
let locate_module qid =
match locate_dir qid with
| DirModule (_,(mp,_)) -> mp
| _ -> raise Not_found
let full_name_module qid =
match locate_dir qid with
| DirModule (dir,_) -> dir
| _ -> raise Not_found
let locate_section qid =
match locate_dir qid with
| DirOpenSection (dir, _)
| DirClosedSection dir -> dir
| _ -> raise Not_found
let locate_all qid =
List.fold_right (fun a l -> match a with TrueGlobal a -> a::l | _ -> l)
(SpTab.find_prefixes qid !the_ccitab) []
let locate_extended_all qid = SpTab.find_prefixes qid !the_ccitab
let locate_constant qid =
match locate_extended qid with
| TrueGlobal (ConstRef kn) -> kn
| _ -> raise Not_found
let global_of_path sp =
match SpTab.find sp !the_ccitab with
| TrueGlobal ref -> ref
| _ -> raise Not_found
let extended_global_of_path sp = SpTab.find sp !the_ccitab
let global r =
let (loc,qid) = qualid_of_reference r in
try match locate_extended qid with
| TrueGlobal ref -> ref
| SynDef _ ->
user_err_loc (loc,"global",
str "Unexpected reference to a notation: " ++
pr_qualid qid)
with Not_found ->
error_global_not_found_loc loc qid
let exists_cci sp = SpTab.exists sp !the_ccitab
let exists_dir dir = DirTab.exists dir !the_dirtab
let exists_section = exists_dir
let exists_module = exists_dir
let exists_modtype sp = SpTab.exists sp !the_modtypetab
let path_of_global ref =
match ref with
| VarRef id -> make_path empty_dirpath id
| _ -> Globrevtab.find (TrueGlobal ref) !the_globrevtab
let dirpath_of_global ref =
fst (repr_path (path_of_global ref))
let basename_of_global ref =
snd (repr_path (path_of_global ref))
let path_of_syndef kn =
Globrevtab.find (SynDef kn) !the_globrevtab
let dirpath_of_module mp =
MPmap.find mp !the_modrevtab
let path_of_tactic kn =
KNmap.find kn !the_tacticrevtab
Shortest qualid functions * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
let shortest_qualid_of_global ctx ref =
match ref with
| VarRef id -> make_qualid empty_dirpath id
| _ ->
let sp = Globrevtab.find (TrueGlobal ref) !the_globrevtab in
SpTab.shortest_qualid ctx sp !the_ccitab
let shortest_qualid_of_syndef ctx kn =
let sp = path_of_syndef kn in
SpTab.shortest_qualid ctx sp !the_ccitab
let shortest_qualid_of_module mp =
let dir = MPmap.find mp !the_modrevtab in
DirTab.shortest_qualid Idset.empty dir !the_dirtab
let shortest_qualid_of_modtype kn =
let sp = MPmap.find kn !the_modtyperevtab in
SpTab.shortest_qualid Idset.empty sp !the_modtypetab
let shortest_qualid_of_tactic kn =
let sp = KNmap.find kn !the_tacticrevtab in
SpTab.shortest_qualid Idset.empty sp !the_tactictab
let pr_global_env env ref =
Il est important let - in , car les streams s'évaluent
paresseusement : l'évaluation pour capturer
l'éventuelle levée d'une exception ( le cas échoit dans le debugger )
paresseusement : il faut forcer l'évaluation pour capturer
l'éventuelle levée d'une exception (le cas échoit dans le debugger) *)
let s = string_of_qualid (shortest_qualid_of_global env ref) in
(str s)
let global_inductive r =
match global r with
| IndRef ind -> ind
| ref ->
user_err_loc (loc_of_reference r,"global_inductive",
pr_reference r ++ spc () ++ str "is not an inductive type")
type frozen = ccitab * dirtab * kntab * kntab
* globrevtab * mprevtab * knrevtab * knrevtab
let init () =
the_ccitab := SpTab.empty;
the_dirtab := DirTab.empty;
the_modtypetab := SpTab.empty;
the_tactictab := SpTab.empty;
the_globrevtab := Globrevtab.empty;
the_modrevtab := MPmap.empty;
the_modtyperevtab := MPmap.empty;
the_tacticrevtab := KNmap.empty
let freeze () =
!the_ccitab,
!the_dirtab,
!the_modtypetab,
!the_tactictab,
!the_globrevtab,
!the_modrevtab,
!the_modtyperevtab,
!the_tacticrevtab
let unfreeze (ccit,dirt,mtyt,tact,globr,modr,mtyr,tacr) =
the_ccitab := ccit;
the_dirtab := dirt;
the_modtypetab := mtyt;
the_tactictab := tact;
the_globrevtab := globr;
the_modrevtab := modr;
the_modtyperevtab := mtyr;
the_tacticrevtab := tacr
let _ =
Summary.declare_summary "names"
{ Summary.freeze_function = freeze;
Summary.unfreeze_function = unfreeze;
Summary.init_function = init }
let extended_locate = locate_extended
let absolute_reference = global_of_path
|
dd2bab3e3367bb45399f75e621186f5a1c0b6b169b6c472d0931630ed5fc042c | input-output-hk/goblins | test.hs | import Control.Monad (unless)
import Hedgehog
import System.Exit (exitFailure)
import System.IO (hSetEncoding, stderr, stdout, utf8)
import Test.Goblin.Properties
-- | Main testing action
main :: IO ()
main = runTests $ checkSequential <$>
[ Test.Goblin.Properties.tests
]
-- Lifted from `cardano-prelude`:
-- -output-hk/cardano-prelude/blob/d2a4f06827bfa11c021ce719285e8d0bb6ac8e44/test/Test/Cardano/Prelude/Tripping.hs#L141
runTests :: [IO Bool] -> IO ()
runTests tests' = do
ensure UTF-8 . As that 's what hedgehog needs .
hSetEncoding stdout utf8
hSetEncoding stderr utf8
result <- and <$> sequence tests'
unless result exitFailure
| null | https://raw.githubusercontent.com/input-output-hk/goblins/cde90a2b27f79187ca8310b6549331e59595e7ba/test/test.hs | haskell | | Main testing action
Lifted from `cardano-prelude`:
-output-hk/cardano-prelude/blob/d2a4f06827bfa11c021ce719285e8d0bb6ac8e44/test/Test/Cardano/Prelude/Tripping.hs#L141 | import Control.Monad (unless)
import Hedgehog
import System.Exit (exitFailure)
import System.IO (hSetEncoding, stderr, stdout, utf8)
import Test.Goblin.Properties
main :: IO ()
main = runTests $ checkSequential <$>
[ Test.Goblin.Properties.tests
]
runTests :: [IO Bool] -> IO ()
runTests tests' = do
ensure UTF-8 . As that 's what hedgehog needs .
hSetEncoding stdout utf8
hSetEncoding stderr utf8
result <- and <$> sequence tests'
unless result exitFailure
|
610cead05abbd8fac172d7b7ae456475bcac259b29a7f909a72f2aab8da982ed | camlp4/camlp4 | EmptyPrinter.mli | (****************************************************************************)
(* *)
(* OCaml *)
(* *)
(* INRIA Rocquencourt *)
(* *)
Copyright 2006 - 2006 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed under
the terms of the GNU Library General Public License , with the special
(* exception on linking described in LICENSE at the top of the Camlp4 *)
(* source tree. *)
(* *)
(****************************************************************************)
Authors :
* - : initial version
* - Nicolas Pouillard: initial version
*)
module Make (Ast : Sig.Ast) : (Sig.Printer Ast).S;
| null | https://raw.githubusercontent.com/camlp4/camlp4/9b3314ea63288decb857239bd94f0c3342136844/camlp4/Camlp4/Struct/EmptyPrinter.mli | ocaml | **************************************************************************
OCaml
INRIA Rocquencourt
exception on linking described in LICENSE at the top of the Camlp4
source tree.
************************************************************************** | Copyright 2006 - 2006 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed under
the terms of the GNU Library General Public License , with the special
Authors :
* - : initial version
* - Nicolas Pouillard: initial version
*)
module Make (Ast : Sig.Ast) : (Sig.Printer Ast).S;
|
5505bfb6dacc64bcd8123e4af187dddbffccbd1fe08d5258db79a9d8e576fb95 | passy/rss-markdown-proxy | Metrics.hs | module Lib.Metrics
( getMetricsMiddleware
) where
import Lib.Types (Metrics, Port)
import Network.Wai (Middleware)
getMetricsMiddleware :: Port Metrics -> IO Middleware
getMetricsMiddleware = const $ return id
| null | https://raw.githubusercontent.com/passy/rss-markdown-proxy/71f08e8eda84701b23e369affbe59da3d9f5d53d/metrics-noop/Lib/Metrics.hs | haskell | module Lib.Metrics
( getMetricsMiddleware
) where
import Lib.Types (Metrics, Port)
import Network.Wai (Middleware)
getMetricsMiddleware :: Port Metrics -> IO Middleware
getMetricsMiddleware = const $ return id
| |
3a50d8192c11094a770827f9d01aa8151235cf63ffe87be139e706408c8ee491 | zoomhub/zoomhub | APIUser.hs | module ZoomHub.Types.APIUser
( APIUser (..),
)
where
import Data.Text (Text)
data APIUser = APIUser
{ username :: Text,
password :: Text
}
| null | https://raw.githubusercontent.com/zoomhub/zoomhub/6b397f7dbb5abcdbd1c97f7c4a7460326e1498d8/src/ZoomHub/Types/APIUser.hs | haskell | module ZoomHub.Types.APIUser
( APIUser (..),
)
where
import Data.Text (Text)
data APIUser = APIUser
{ username :: Text,
password :: Text
}
| |
e0c1cc66ac9a80ada1e2cafbcc346c3c9dd0fa4ee42c2efe6542be5da0544788 | grin-compiler/ghc-grin | showGHCStg.hs | # LANGUAGE RecordWildCards #
module Main where
import Control.Monad
import Control.Monad.IO.Class
import System.Environment
import Stg.Util
import Stg.ToStg
import qualified GHC.Stg.Syntax as GHC
import qualified GHC.Utils.Outputable as GHC
import qualified GHC.Driver.Session as GHC
import GHC.Paths ( libdir )
import GHC
showSDoc :: GHC.SDoc -> String
showSDoc = GHC.showSDoc GHC.unsafeGlobalDynFlags
main :: IO ()
main = runGhc (Just libdir) . liftIO $ do
stgbins <- getArgs
forM_ stgbins $ \stgbinName -> do
putStrLn $ "reading " ++ stgbinName
extStgModule <- readStgbin stgbinName
let StgModule{..} = toStg extStgModule
putStrLn . showSDoc $ GHC.pprStgTopBindings stgTopBindings
| null | https://raw.githubusercontent.com/grin-compiler/ghc-grin/ebc4dca2e1f5b3581d4b84726730564ce909d786/patched-lambda-to-ghc-stg/mini-ghc-grin/app/showGHCStg.hs | haskell | # LANGUAGE RecordWildCards #
module Main where
import Control.Monad
import Control.Monad.IO.Class
import System.Environment
import Stg.Util
import Stg.ToStg
import qualified GHC.Stg.Syntax as GHC
import qualified GHC.Utils.Outputable as GHC
import qualified GHC.Driver.Session as GHC
import GHC.Paths ( libdir )
import GHC
showSDoc :: GHC.SDoc -> String
showSDoc = GHC.showSDoc GHC.unsafeGlobalDynFlags
main :: IO ()
main = runGhc (Just libdir) . liftIO $ do
stgbins <- getArgs
forM_ stgbins $ \stgbinName -> do
putStrLn $ "reading " ++ stgbinName
extStgModule <- readStgbin stgbinName
let StgModule{..} = toStg extStgModule
putStrLn . showSDoc $ GHC.pprStgTopBindings stgTopBindings
| |
46507165d0ae7442860c18f4d50a9217f5570c429ce64ac6df8c23a51851ac55 | kloimhardt/babashka-scittle-guestbook | guestbook.clj | (require '[clojure.edn :as edn]
'[clojure.java.browse :as browse]
'[clojure.java.io :as io]
'[cognitect.transit :as transit]
'[org.httpkit.server :as srv]
'[hiccup.core :as hp])
(import 'java.io.ByteArrayOutputStream)
(def port 8083)
(def filename "messages.txt")
(defn html [cljs-file]
(hp/html
[:html
[:head
[:meta {:charset "UTF-8"}]
[:meta {:name "viewport" :content "width=device-width, initial-scale=1"}]
[:link {:rel "shortcut icon" :href "data:,"}]
[:link {:rel "apple-touch-icon" :href "data:,"}]
[:link {:rel "stylesheet" :href "@0.9.0/css/bulma.min.css"}]
[:script {:crossorigin nil :src "@17/umd/react.production.min.js"}]
[:script {:crossorigin nil :src "-dom@17/umd/react-dom.production.min.js"}]
[:script {:src "@0.0.1/js/scittle.js" :type "application/javascript"}]
[:script {:src "@0.0.1/js/scittle.reagent.js" :type "application/javascript"}]
[:script {:src "@0.0.1/js/scittle.cljs-ajax.js" :type "application/javascript"}]
[:title "Guestbook"]]
[:body
[:div {:id "content"}]
[:script {:type "application/x-scittle" :src cljs-file}]]]))
(defn home-save-message! [req]
(let [params (transit/read (transit/reader (:body req) :json))
text (prn-str (assoc params :timestamp (java.util.Date.)))]
(spit filename text :append true)
"post success!"))
(defn db-get-messages []
(if (.exists (io/file filename))
(edn/read-string (str "[" (slurp filename) "]"))
[]))
(defn home-message-list [_]
(let [out (ByteArrayOutputStream. 4096)
writer (transit/writer out :json)]
(transit/write writer {:messages (db-get-messages)})
(.toString out)))
(defn home-page [_request cljs-file]
(html cljs-file))
(defn home-routes [{:keys [:request-method :uri] :as req}]
(case [request-method uri]
[:get "/"] {:body (home-page req "guestbook.cljs")
:status 200}
[:get "/messages"] {:headers {"Content-type" "application/transit+json"}
:body (home-message-list req)
:status 200}
[:post "/message"] {:body (home-save-message! req)
:status 200}
[:get "/guestbook.cljs"] {:body (slurp"guestbook.cljs")
:status 200}))
(defn core-http-server []
(srv/run-server home-routes {:port port}))
(let [url (str ":" port "/")]
(core-http-server)
(println "serving" url)
(browse/browse-url url)
@(promise))
| null | https://raw.githubusercontent.com/kloimhardt/babashka-scittle-guestbook/08fc6700cf0023780cc9ae67b2e49a578c7793e2/guestbook.clj | clojure | (require '[clojure.edn :as edn]
'[clojure.java.browse :as browse]
'[clojure.java.io :as io]
'[cognitect.transit :as transit]
'[org.httpkit.server :as srv]
'[hiccup.core :as hp])
(import 'java.io.ByteArrayOutputStream)
(def port 8083)
(def filename "messages.txt")
(defn html [cljs-file]
(hp/html
[:html
[:head
[:meta {:charset "UTF-8"}]
[:meta {:name "viewport" :content "width=device-width, initial-scale=1"}]
[:link {:rel "shortcut icon" :href "data:,"}]
[:link {:rel "apple-touch-icon" :href "data:,"}]
[:link {:rel "stylesheet" :href "@0.9.0/css/bulma.min.css"}]
[:script {:crossorigin nil :src "@17/umd/react.production.min.js"}]
[:script {:crossorigin nil :src "-dom@17/umd/react-dom.production.min.js"}]
[:script {:src "@0.0.1/js/scittle.js" :type "application/javascript"}]
[:script {:src "@0.0.1/js/scittle.reagent.js" :type "application/javascript"}]
[:script {:src "@0.0.1/js/scittle.cljs-ajax.js" :type "application/javascript"}]
[:title "Guestbook"]]
[:body
[:div {:id "content"}]
[:script {:type "application/x-scittle" :src cljs-file}]]]))
(defn home-save-message! [req]
(let [params (transit/read (transit/reader (:body req) :json))
text (prn-str (assoc params :timestamp (java.util.Date.)))]
(spit filename text :append true)
"post success!"))
(defn db-get-messages []
(if (.exists (io/file filename))
(edn/read-string (str "[" (slurp filename) "]"))
[]))
(defn home-message-list [_]
(let [out (ByteArrayOutputStream. 4096)
writer (transit/writer out :json)]
(transit/write writer {:messages (db-get-messages)})
(.toString out)))
(defn home-page [_request cljs-file]
(html cljs-file))
(defn home-routes [{:keys [:request-method :uri] :as req}]
(case [request-method uri]
[:get "/"] {:body (home-page req "guestbook.cljs")
:status 200}
[:get "/messages"] {:headers {"Content-type" "application/transit+json"}
:body (home-message-list req)
:status 200}
[:post "/message"] {:body (home-save-message! req)
:status 200}
[:get "/guestbook.cljs"] {:body (slurp"guestbook.cljs")
:status 200}))
(defn core-http-server []
(srv/run-server home-routes {:port port}))
(let [url (str ":" port "/")]
(core-http-server)
(println "serving" url)
(browse/browse-url url)
@(promise))
| |
0fa12ed8b0738e874af3c3efe2c8549f894e41d0318595a865ac4dbfc577dba7 | RichiH/git-annex | DropKey.hs | git - annex command
-
- Copyright 2010,2016 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- Copyright 2010,2016 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Command.DropKey where
import Command
import qualified Annex
import Logs.Location
import Annex.Content
cmd :: Command
cmd = noCommit $ withGlobalOptions [jsonOption] $
command "dropkey" SectionPlumbing
"drops annexed content for specified keys"
(paramRepeating paramKey)
(seek <$$> optParser)
data DropKeyOptions = DropKeyOptions
{ toDrop :: [String]
, batchOption :: BatchMode
}
optParser :: CmdParamsDesc -> Parser DropKeyOptions
optParser desc = DropKeyOptions
<$> cmdParams desc
<*> parseBatchOption
seek :: DropKeyOptions -> CommandSeek
seek o = do
unlessM (Annex.getState Annex.force) $
giveup "dropkey can cause data loss; use --force if you're sure you want to do this"
withKeys start (toDrop o)
case batchOption o of
Batch -> batchInput parsekey $ batchCommandAction . start
NoBatch -> noop
where
parsekey = maybe (Left "bad key") Right . file2key
start :: Key -> CommandStart
start key = do
showStart' "dropkey" key (mkActionItem key)
next $ perform key
perform :: Key -> CommandPerform
perform key = ifM (inAnnex key)
( lockContentForRemoval key $ \contentlock -> do
removeAnnex contentlock
next $ cleanup key
, next $ return True
)
cleanup :: Key -> CommandCleanup
cleanup key = do
logStatus key InfoMissing
return True
| null | https://raw.githubusercontent.com/RichiH/git-annex/bbcad2b0af8cd9264d0cb86e6ca126ae626171f3/Command/DropKey.hs | haskell | git - annex command
-
- Copyright 2010,2016 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- Copyright 2010,2016 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Command.DropKey where
import Command
import qualified Annex
import Logs.Location
import Annex.Content
cmd :: Command
cmd = noCommit $ withGlobalOptions [jsonOption] $
command "dropkey" SectionPlumbing
"drops annexed content for specified keys"
(paramRepeating paramKey)
(seek <$$> optParser)
data DropKeyOptions = DropKeyOptions
{ toDrop :: [String]
, batchOption :: BatchMode
}
optParser :: CmdParamsDesc -> Parser DropKeyOptions
optParser desc = DropKeyOptions
<$> cmdParams desc
<*> parseBatchOption
seek :: DropKeyOptions -> CommandSeek
seek o = do
unlessM (Annex.getState Annex.force) $
giveup "dropkey can cause data loss; use --force if you're sure you want to do this"
withKeys start (toDrop o)
case batchOption o of
Batch -> batchInput parsekey $ batchCommandAction . start
NoBatch -> noop
where
parsekey = maybe (Left "bad key") Right . file2key
start :: Key -> CommandStart
start key = do
showStart' "dropkey" key (mkActionItem key)
next $ perform key
perform :: Key -> CommandPerform
perform key = ifM (inAnnex key)
( lockContentForRemoval key $ \contentlock -> do
removeAnnex contentlock
next $ cleanup key
, next $ return True
)
cleanup :: Key -> CommandCleanup
cleanup key = do
logStatus key InfoMissing
return True
| |
4aa873a1ef360822f4b6832548c8b2c348d84e332d764f1d6c51698e9991c15a | wireless-net/erlang-nommu | erl_expand_records.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2005 - 2012 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%%
%% Purpose : Expand records into tuples.
%% N.B. Although structs (tagged tuples) are not yet allowed in the
language there is code included in pattern/2 and expr/3 ( commented out )
%% that handles them.
-module(erl_expand_records).
-export([module/2]).
-import(lists, [map/2,foldl/3,foldr/3,sort/1,reverse/1,duplicate/2]).
-record(exprec, {compile=[], % Compile flags
vcount=0, % Variable counter
imports=[], % Imports
records=dict:new(), % Record definitions
trecords=sets:new(), % Typed records
uses_types=false, % Are there -spec or -type in the module
strict_ra=[], % strict record accesses
checked_ra=[] % successfully accessed records
}).
-spec(module(AbsForms, CompileOptions) -> AbsForms when
AbsForms :: [erl_parse:abstract_form()],
CompileOptions :: [compile:option()]).
Is is assumed that Fs is a valid list of forms . It should pass
%% erl_lint without errors.
module(Fs0, Opts0) ->
Opts = compiler_options(Fs0) ++ Opts0,
TRecs = typed_records(Fs0),
UsesTypes = uses_types(Fs0),
St0 = #exprec{compile = Opts, trecords = TRecs, uses_types = UsesTypes},
{Fs,_St} = forms(Fs0, St0),
Fs.
compiler_options(Forms) ->
lists:flatten([C || {attribute,_,compile,C} <- Forms]).
typed_records(Fs) ->
typed_records(Fs, sets:new()).
typed_records([{attribute,_L,type,{{record, Name},_Defs,[]}} | Fs], Trecs) ->
typed_records(Fs, sets:add_element(Name, Trecs));
typed_records([_|Fs], Trecs) ->
typed_records(Fs, Trecs);
typed_records([], Trecs) ->
Trecs.
uses_types([{attribute,_L,spec,_}|_]) -> true;
uses_types([{attribute,_L,type,_}|_]) -> true;
uses_types([{attribute,_L,opaque,_}|_]) -> true;
uses_types([_|Fs]) -> uses_types(Fs);
uses_types([]) -> false.
forms([{attribute,L,record,{Name,Defs}} | Fs], St0) ->
NDefs = normalise_fields(Defs),
St = St0#exprec{records=dict:store(Name, NDefs, St0#exprec.records)},
{Fs1, St1} = forms(Fs, St),
%% Check if we need to keep the record information for usage in types.
case St#exprec.uses_types of
true ->
case sets:is_element(Name, St#exprec.trecords) of
true -> {Fs1, St1};
false -> {[{attribute,L,type,{{record,Name},Defs,[]}}|Fs1], St1}
end;
false ->
{Fs1, St1}
end;
forms([{attribute,L,import,Is} | Fs0], St0) ->
St1 = import(Is, St0),
{Fs,St2} = forms(Fs0, St1),
{[{attribute,L,import,Is} | Fs], St2};
forms([{function,L,N,A,Cs0} | Fs0], St0) ->
{Cs,St1} = clauses(Cs0, St0),
{Fs,St2} = forms(Fs0, St1),
{[{function,L,N,A,Cs} | Fs],St2};
forms([F | Fs0], St0) ->
{Fs,St} = forms(Fs0, St0),
{[F | Fs], St};
forms([], St) -> {[],St}.
clauses([{clause,Line,H0,G0,B0} | Cs0], St0) ->
{H1,St1} = head(H0, St0),
{G1,St2} = guard(G0, St1),
{H,G} = optimize_is_record(H1, G1, St2),
{B,St3} = exprs(B0, St2),
{Cs,St4} = clauses(Cs0, St3),
{[{clause,Line,H,G,B} | Cs],St4};
clauses([], St) -> {[],St}.
head(As, St) -> pattern_list(As, St).
pattern({var,_,'_'}=Var, St) ->
{Var,St};
pattern({var,_,_}=Var, St) ->
{Var,St};
pattern({char,_,_}=Char, St) ->
{Char,St};
pattern({integer,_,_}=Int, St) ->
{Int,St};
pattern({float,_,_}=Float, St) ->
{Float,St};
pattern({atom,_,_}=Atom, St) ->
{Atom,St};
pattern({string,_,_}=String, St) ->
{String,St};
pattern({nil,_}=Nil, St) ->
{Nil,St};
pattern({cons,Line,H,T}, St0) ->
{TH,St1} = pattern(H, St0),
{TT,St2} = pattern(T, St1),
{{cons,Line,TH,TT},St2};
pattern({tuple,Line,Ps}, St0) ->
{TPs,St1} = pattern_list(Ps, St0),
{{tuple,Line,TPs},St1};
pattern({map,Line,Ps}, St0) ->
{TPs,St1} = pattern_list(Ps, St0),
{{map,Line,TPs},St1};
pattern({map_field_exact,Line,K0,V0}, St0) ->
{K,St1} = expr(K0, St0),
{V,St2} = pattern(V0, St1),
{{map_field_exact,Line,K,V},St2};
pattern({struct , Line , Tag , Ps } , St0 ) - >
, St1 } = pattern_list(Ps , St0 ) ,
{ { struct , Line , Tag , TPs},TPsvs , St1 } ;
pattern({record_index,Line,Name,Field}, St) ->
{index_expr(Line, Field, Name, record_fields(Name, St)),St};
pattern({record,Line,Name,Pfs}, St0) ->
Fs = record_fields(Name, St0),
{TMs,St1} = pattern_list(pattern_fields(Fs, Pfs), St0),
{{tuple,Line,[{atom,Line,Name} | TMs]},St1};
pattern({bin,Line,Es0}, St0) ->
{Es1,St1} = pattern_bin(Es0, St0),
{{bin,Line,Es1},St1};
pattern({match,Line,Pat1, Pat2}, St0) ->
{TH,St1} = pattern(Pat2, St0),
{TT,St2} = pattern(Pat1, St1),
{{match,Line,TT,TH},St2};
pattern({op,Line,Op,A0}, St0) ->
{A,St1} = pattern(A0, St0),
{{op,Line,Op,A},St1};
pattern({op,Line,Op,L0,R0}, St0) ->
{L,St1} = pattern(L0, St0),
{R,St2} = pattern(R0, St1),
{{op,Line,Op,L,R},St2}.
pattern_list([P0 | Ps0], St0) ->
{P,St1} = pattern(P0, St0),
{Ps,St2} = pattern_list(Ps0, St1),
{[P | Ps],St2};
pattern_list([], St) -> {[],St}.
guard([G0 | Gs0], St0) ->
{G,St1} = guard_tests(G0, St0),
{Gs,St2} = guard(Gs0, St1),
{[G | Gs],St2};
guard([], St) -> {[],St}.
guard_tests(Gts0, St0) ->
{Gts1,St1} = guard_tests1(Gts0, St0),
{Gts1,St1#exprec{checked_ra = []}}.
guard_tests1([Gt0 | Gts0], St0) ->
{Gt1,St1} = guard_test(Gt0, St0),
{Gts1,St2} = guard_tests1(Gts0, St1),
{[Gt1 | Gts1],St2};
guard_tests1([], St) -> {[],St}.
guard_test(G0, St0) ->
in_guard(fun() ->
{G1,St1} = guard_test1(G0, St0),
strict_record_access(G1, St1)
end).
Normalising guard tests ensures that none of the Boolean operands
%% created by strict_record_access/2 calls any of the old guard tests.
guard_test1({call,Line,{atom,Lt,Tname},As}, St) ->
Test = {atom,Lt,normalise_test(Tname, length(As))},
expr({call,Line,Test,As}, St);
guard_test1(Test, St) ->
expr(Test, St).
normalise_test(atom, 1) -> is_atom;
normalise_test(binary, 1) -> is_binary;
normalise_test(float, 1) -> is_float;
normalise_test(function, 1) -> is_function;
normalise_test(integer, 1) -> is_integer;
normalise_test(list, 1) -> is_list;
normalise_test(number, 1) -> is_number;
normalise_test(pid, 1) -> is_pid;
normalise_test(port, 1) -> is_port;
normalise_test(record, 2) -> is_record;
normalise_test(reference, 1) -> is_reference;
normalise_test(tuple, 1) -> is_tuple;
normalise_test(Name, _) -> Name.
is_in_guard() ->
get(erl_expand_records_in_guard) =/= undefined.
in_guard(F) ->
undefined = put(erl_expand_records_in_guard, true),
Res = F(),
true = erase(erl_expand_records_in_guard),
Res.
record_test(Line , Term , Name , Vs , St ) - >
Generate code for is_record/1 .
record_test(Line, Term, Name, St) ->
case is_in_guard() of
false ->
record_test_in_body(Line, Term, Name, St);
true ->
record_test_in_guard(Line, Term, Name, St)
end.
record_test_in_guard(Line, Term, Name, St) ->
case not_a_tuple(Term) of
true ->
%% In case that later optimization passes have been turned off.
expr({atom,Line,false}, St);
false ->
Fs = record_fields(Name, St),
NLine = neg_line(Line),
expr({call,NLine,{remote,NLine,{atom,NLine,erlang},{atom,NLine,is_record}},
[Term,{atom,Line,Name},{integer,Line,length(Fs)+1}]},
St)
end.
not_a_tuple({atom,_,_}) -> true;
not_a_tuple({integer,_,_}) -> true;
not_a_tuple({float,_,_}) -> true;
not_a_tuple({nil,_}) -> true;
not_a_tuple({cons,_,_,_}) -> true;
not_a_tuple({char,_,_}) -> true;
not_a_tuple({string,_,_}) -> true;
not_a_tuple({record_index,_,_,_}) -> true;
not_a_tuple({bin,_,_}) -> true;
not_a_tuple({op,_,_,_}) -> true;
not_a_tuple({op,_,_,_,_}) -> true;
not_a_tuple(_) -> false.
record_test_in_body(Line, Expr, Name, St0) ->
As may have side effects , we must evaluate it
%% first and bind the value to a new variable.
We must use also handle the case that does not
%% evaluate to a tuple properly.
Fs = record_fields(Name, St0),
{Var,St} = new_var(Line, St0),
NLine = neg_line(Line),
expr({block,Line,
[{match,Line,Var,Expr},
{call,NLine,{remote,NLine,{atom,NLine,erlang},
{atom,NLine,is_record}},
[Var,{atom,Line,Name},{integer,Line,length(Fs)+1}]}]}, St).
exprs([E0 | Es0], St0) ->
{E,St1} = expr(E0, St0),
{Es,St2} = exprs(Es0, St1),
{[E | Es],St2};
exprs([], St) -> {[],St}.
expr({var,_,_}=Var, St) ->
{Var,St};
expr({char,_,_}=Char, St) ->
{Char,St};
expr({integer,_,_}=Int, St) ->
{Int,St};
expr({float,_,_}=Float, St) ->
{Float,St};
expr({atom,_,_}=Atom, St) ->
{Atom,St};
expr({string,_,_}=String, St) ->
{String,St};
expr({nil,_}=Nil, St) ->
{Nil,St};
expr({cons,Line,H0,T0}, St0) ->
{H,St1} = expr(H0, St0),
{T,St2} = expr(T0, St1),
{{cons,Line,H,T},St2};
expr({lc,Line,E0,Qs0}, St0) ->
{Qs1,St1} = lc_tq(Line, Qs0, St0),
{E1,St2} = expr(E0, St1),
{{lc,Line,E1,Qs1},St2};
expr({bc,Line,E0,Qs0}, St0) ->
{Qs1,St1} = lc_tq(Line, Qs0, St0),
{E1,St2} = expr(E0, St1),
{{bc,Line,E1,Qs1},St2};
expr({tuple,Line,Es0}, St0) ->
{Es1,St1} = expr_list(Es0, St0),
{{tuple,Line,Es1},St1};
expr({map,Line,Es0}, St0) ->
{Es1,St1} = expr_list(Es0, St0),
{{map,Line,Es1},St1};
expr({map,Line,Arg0,Es0}, St0) ->
{Arg1,St1} = expr(Arg0, St0),
{Es1,St2} = expr_list(Es0, St1),
{{map,Line,Arg1,Es1},St2};
expr({map_field_assoc,Line,K0,V0}, St0) ->
{K,St1} = expr(K0, St0),
{V,St2} = expr(V0, St1),
{{map_field_assoc,Line,K,V},St2};
expr({map_field_exact,Line,K0,V0}, St0) ->
{K,St1} = expr(K0, St0),
{V,St2} = expr(V0, St1),
{{map_field_exact,Line,K,V},St2};
%%expr({struct,Line,Tag,Es0}, Vs, St0) ->
{ Es1,Esvs , Esus , St1 } = expr_list(Es0 , Vs , St0 ) ,
{ { struct , Line , Tag , Es1},Esvs , Esus , St1 } ;
expr({record_index,Line,Name,F}, St) ->
I = index_expr(Line, F, Name, record_fields(Name, St)),
expr(I, St);
expr({record,Line,Name,Is}, St) ->
expr({tuple,Line,[{atom,Line,Name} |
record_inits(record_fields(Name, St), Is)]},
St);
expr({record_field,Line,R,Name,F}, St) ->
get_record_field(Line, R, F, Name, St);
expr({record,_,R,Name,Us}, St0) ->
{Ue,St1} = record_update(R, Name, record_fields(Name, St0), Us, St0),
expr(Ue, St1);
expr({bin,Line,Es0}, St0) ->
{Es1,St1} = expr_bin(Es0, St0),
{{bin,Line,Es1},St1};
expr({block,Line,Es0}, St0) ->
{Es,St1} = exprs(Es0, St0),
{{block,Line,Es},St1};
expr({'if',Line,Cs0}, St0) ->
{Cs,St1} = clauses(Cs0, St0),
{{'if',Line,Cs},St1};
expr({'case',Line,E0,Cs0}, St0) ->
{E,St1} = expr(E0, St0),
{Cs,St2} = clauses(Cs0, St1),
{{'case',Line,E,Cs},St2};
expr({'receive',Line,Cs0}, St0) ->
{Cs,St1} = clauses(Cs0, St0),
{{'receive',Line,Cs},St1};
expr({'receive',Line,Cs0,To0,ToEs0}, St0) ->
{To,St1} = expr(To0, St0),
{ToEs,St2} = exprs(ToEs0, St1),
{Cs,St3} = clauses(Cs0, St2),
{{'receive',Line,Cs,To,ToEs},St3};
expr({'fun',_,{function,_F,_A}}=Fun, St) ->
{Fun,St};
expr({'fun',_,{function,_M,_F,_A}}=Fun, St) ->
{Fun,St};
expr({'fun',Line,{clauses,Cs0}}, St0) ->
{Cs,St1} = clauses(Cs0, St0),
{{'fun',Line,{clauses,Cs}},St1};
expr({named_fun,Line,Name,Cs0}, St0) ->
{Cs,St1} = clauses(Cs0, St0),
{{named_fun,Line,Name,Cs},St1};
expr({call,Line,{atom,_,is_record},[A,{atom,_,Name}]}, St) ->
record_test(Line, A, Name, St);
expr({call,Line,{remote,_,{atom,_,erlang},{atom,_,is_record}},
[A,{atom,_,Name}]}, St) ->
record_test(Line, A, Name, St);
expr({call,Line,{tuple,_,[{atom,_,erlang},{atom,_,is_record}]},
[A,{atom,_,Name}]}, St) ->
record_test(Line, A, Name, St);
expr({call,Line,{atom,_La,N}=Atom,As0}, St0) ->
{As,St1} = expr_list(As0, St0),
Ar = length(As),
case erl_internal:bif(N, Ar) of
true ->
{{call,Line,Atom,As},St1};
false ->
case imported(N, Ar, St1) of
{yes,_Mod} ->
{{call,Line,Atom,As},St1};
no ->
case {N,Ar} of
{record_info,2} ->
record_info_call(Line, As, St1);
_ ->
{{call,Line,Atom,As},St1}
end
end
end;
expr({call,Line,{remote,Lr,M,F},As0}, St0) ->
{[M1,F1 | As1],St1} = expr_list([M,F | As0], St0),
{{call,Line,{remote,Lr,M1,F1},As1},St1};
expr({call,Line,F,As0}, St0) ->
{[Fun1 | As1],St1} = expr_list([F | As0], St0),
{{call,Line,Fun1,As1},St1};
expr({'try',Line,Es0,Scs0,Ccs0,As0}, St0) ->
{Es1,St1} = exprs(Es0, St0),
{Scs1,St2} = clauses(Scs0, St1),
{Ccs1,St3} = clauses(Ccs0, St2),
{As1,St4} = exprs(As0, St3),
{{'try',Line,Es1,Scs1,Ccs1,As1},St4};
expr({'catch',Line,E0}, St0) ->
{E,St1} = expr(E0, St0),
{{'catch',Line,E},St1};
expr({match,Line,P0,E0}, St0) ->
{E,St1} = expr(E0, St0),
{P,St2} = pattern(P0, St1),
{{match,Line,P,E},St2};
expr({op,Line,'not',A0}, St0) ->
{A,St1} = bool_operand(A0, St0),
{{op,Line,'not',A},St1};
expr({op,Line,Op,A0}, St0) ->
{A,St1} = expr(A0, St0),
{{op,Line,Op,A},St1};
expr({op,Line,Op,L0,R0}, St0) when Op =:= 'and';
Op =:= 'or' ->
{L,St1} = bool_operand(L0, St0),
{R,St2} = bool_operand(R0, St1),
{{op,Line,Op,L,R},St2};
expr({op,Line,Op,L0,R0}, St0) when Op =:= 'andalso';
Op =:= 'orelse' ->
{L,St1} = bool_operand(L0, St0),
{R,St2} = bool_operand(R0, St1),
{{op,Line,Op,L,R},St2#exprec{checked_ra = St1#exprec.checked_ra}};
expr({op,Line,Op,L0,R0}, St0) ->
{L,St1} = expr(L0, St0),
{R,St2} = expr(R0, St1),
{{op,Line,Op,L,R},St2}.
expr_list([E0 | Es0], St0) ->
{E,St1} = expr(E0, St0),
{Es,St2} = expr_list(Es0, St1),
{[E | Es],St2};
expr_list([], St) -> {[],St}.
bool_operand(E0, St0) ->
{E1,St1} = expr(E0, St0),
strict_record_access(E1, St1).
strict_record_access(E, #exprec{strict_ra = []} = St) ->
{E, St};
strict_record_access(E0, St0) ->
#exprec{strict_ra = StrictRA, checked_ra = CheckedRA} = St0,
{New,NC} = lists:foldl(fun ({Key,_L,_R,_Sz}=A, {L,C}) ->
case lists:keymember(Key, 1, C) of
true -> {L,C};
false -> {[A|L],[A|C]}
end
end, {[],CheckedRA}, StrictRA),
E1 = if New =:= [] -> E0; true -> conj(New, E0) end,
St1 = St0#exprec{strict_ra = [], checked_ra = NC},
expr(E1, St1).
%% Make it look nice (?) when compiled with the 'E' flag
%% ('and'/2 is left recursive).
conj([], _E) ->
empty;
conj([{{Name,_Rp},L,R,Sz} | AL], E) ->
NL = neg_line(L),
T1 = {op,NL,'orelse',
{call,NL,
{remote,NL,{atom,NL,erlang},{atom,NL,is_record}},
[R,{atom,NL,Name},{integer,NL,Sz}]},
{atom,NL,fail}},
T2 = case conj(AL, none) of
empty -> T1;
C -> {op,NL,'and',C,T1}
end,
case E of
none ->
case T2 of
{op,_,'and',_,_} ->
T2;
_ ->
%% Wrap the 'orelse' expression in an dummy 'and true' to make
%% sure that the entire guard fails if the 'orelse'
%% expression returns 'fail'. ('orelse' used to verify
%% that its right operand was a boolean, but that is no
%% longer the case.)
{op,NL,'and',T2,{atom,NL,true}}
end;
_ ->
{op,NL,'and',T2,E}
end.
%% lc_tq(Line, Qualifiers, State) ->
%%      {[TransQual],State'}
lc_tq(Line, [{generate,Lg,P0,G0} | Qs0], St0) ->
{G1,St1} = expr(G0, St0),
{P1,St2} = pattern(P0, St1),
{Qs1,St3} = lc_tq(Line, Qs0, St2),
{[{generate,Lg,P1,G1} | Qs1],St3};
lc_tq(Line, [{b_generate,Lg,P0,G0} | Qs0], St0) ->
{G1,St1} = expr(G0, St0),
{P1,St2} = pattern(P0, St1),
{Qs1,St3} = lc_tq(Line, Qs0, St2),
{[{b_generate,Lg,P1,G1} | Qs1],St3};
lc_tq(Line, [F0 | Qs0], St0) ->
%% Allow record/2 and expand out as guard test.
case erl_lint:is_guard_test(F0) of
true ->
{F1,St1} = guard_test(F0, St0),
{Qs1,St2} = lc_tq(Line, Qs0, St1),
{[F1|Qs1],St2};
false ->
{F1,St1} = expr(F0, St0),
{Qs1,St2} = lc_tq(Line, Qs0, St1),
{[F1 | Qs1],St2}
end;
lc_tq(_Line, [], St0) ->
{[],St0#exprec{checked_ra = []}}.
%% normalise_fields([RecDef]) -> [Field].
%% Normalise the field definitions to always have a default value. If
%% none has been given then use 'undefined'.
normalise_fields(Fs) ->
map(fun ({record_field,Lf,Field}) ->
{record_field,Lf,Field,{atom,Lf,undefined}};
({typed_record_field,{record_field,Lf,Field},_Type}) ->
{record_field,Lf,Field,{atom,Lf,undefined}};
({typed_record_field,Field,_Type}) ->
Field;
(F) -> F
end, Fs).
%% record_fields(RecordName, State)
%% find_field(FieldName, Fields)
record_fields(R, St) -> dict:fetch(R, St#exprec.records).
%% Look up the value bound to the named field in a list of
%% record_field forms; returns 'error' when the field is absent.
find_field(Name, Fields) ->
    case Fields of
        [{record_field,_,{atom,_,Name},Value} | _] -> {ok,Value};
        [_ | Rest] -> find_field(Name, Rest);
        [] -> error
    end.
%% field_names(RecFields) -> [Name].
%% Return a list of the field names structures.
field_names(Fs) ->
map(fun ({record_field,_,Field,_Val}) -> Field end, Fs).
%% index_expr(Line, FieldExpr, Name, Fields) -> IndexExpr.
%% Return an expression which evaluates to the index of a
%% field. Currently only handle the case where the field is an
%% atom. This expansion must be passed through expr again.
%% index_expr(Line, FieldExpr, Name, Fields) -> IndexExpr.
%%  Map a field name to its position in the record tuple.
%%  Counting starts at 2 because element 1 is the record tag.
index_expr(Line, {atom,_,F}, _Name, Fs) ->
    {integer,Line,index_expr(F, Fs, 2)}.

index_expr(F, [{record_field,_,{atom,_,F},_} | _], I) -> I;
index_expr(F, [_ | Fs], I) -> index_expr(F, Fs, I+1).
%% get_record_field(Line, RecExpr, FieldExpr, Name, St) -> {Expr,St'}.
%% Return an expression which verifies that the type of record
%% is correct and then returns the value of the field.
%% This expansion must be passed through expr again.
get_record_field(Line, R, Index, Name, St) ->
case strict_record_tests(St#exprec.compile) of
false ->
sloppy_get_record_field(Line, R, Index, Name, St);
true ->
strict_get_record_field(Line, R, Index, Name, St)
end.
strict_get_record_field(Line, R, {atom,_,F}=Index, Name, St0) ->
case is_in_guard() of
false -> %Body context.
{Var,St} = new_var(Line, St0),
Fs = record_fields(Name, St),
I = index_expr(F, Fs, 2),
P = record_pattern(2, I, Var, length(Fs)+1, Line, [{atom,Line,Name}]),
NLine = neg_line(Line),
E = {'case',NLine,R,
[{clause,NLine,[{tuple,NLine,P}],[],[Var]},
{clause,NLine,[{var,NLine,'_'}],[],
[{call,NLine,{remote,NLine,
{atom,NLine,erlang},
{atom,NLine,error}},
[{tuple,NLine,[{atom,NLine,badrecord},{atom,NLine,Name}]}]}]}]},
expr(E, St);
true -> %In a guard.
Fs = record_fields(Name, St0),
I = index_expr(Line, Index, Name, Fs),
{ExpR,St1} = expr(R, St0),
%% Just to make comparison simple:
ExpRp = erl_lint:modify_line(ExpR, fun(_L) -> 0 end),
RA = {{Name,ExpRp},Line,ExpR,length(Fs)+1},
St2 = St1#exprec{strict_ra = [RA | St1#exprec.strict_ra]},
{{call,Line,
{remote,Line,{atom,Line,erlang},{atom,Line,element}},
[I,ExpR]},St2}
end.
record_pattern(I, I, Var, Sz, Line, Acc) ->
record_pattern(I+1, I, Var, Sz, Line, [Var | Acc]);
record_pattern(Cur, I, Var, Sz, Line, Acc) when Cur =< Sz ->
record_pattern(Cur+1, I, Var, Sz, Line, [{var,Line,'_'} | Acc]);
record_pattern(_, _, _, _, _, Acc) -> reverse(Acc).
sloppy_get_record_field(Line, R, Index, Name, St) ->
Fs = record_fields(Name, St),
I = index_expr(Line, Index, Name, Fs),
expr({call,Line,
{remote,Line,{atom,Line,erlang},{atom,Line,element}},
[I,R]}, St).
%% Scan the compile options; the first occurrence of
%% (no_)strict_record_tests wins. Strict tests are the default.
strict_record_tests(Opts) ->
    case Opts of
        [strict_record_tests | _] -> true;
        [no_strict_record_tests | _] -> false;
        [_ | Rest] -> strict_record_tests(Rest);
        [] -> true
    end.
%% Scan the compile options; the first occurrence of
%% (no_)strict_record_updates wins. Sloppy updates are the default.
strict_record_updates(Opts) ->
    case Opts of
        [strict_record_updates | _] -> true;
        [no_strict_record_updates | _] -> false;
        [_ | Rest] -> strict_record_updates(Rest);
        [] -> false
    end.
%% pattern_fields([RecDefField], [Match]) -> [Pattern].
%% Build a list of match patterns for the record tuple elements.
%% This expansion must be passed through pattern again. N.B. We are
%% scanning the record definition field list!
pattern_fields(Fs, Ms) ->
Wildcard = record_wildcard_init(Ms),
map(fun ({record_field,L,{atom,_,F},_}) ->
case find_field(F, Ms) of
{ok,Match} -> Match;
error when Wildcard =:= none -> {var,L,'_'};
error -> Wildcard
end
end, Fs).
%% record_inits([RecDefField], [Init]) -> [InitExpr].
%% Build a list of initialisation expressions for the record tuple
%% elements. This expansion must be passed through expr
%% again. N.B. We are scanning the record definition field list!
record_inits(Fs, Is) ->
WildcardInit = record_wildcard_init(Is),
map(fun ({record_field,_,{atom,_,F},D}) ->
case find_field(F, Is) of
{ok,Init} -> Init;
error when WildcardInit =:= none -> D;
error -> WildcardInit
end
end, Fs).
%% Return the expression bound to the '_' wildcard field of a
%% record pattern/initialisation, or 'none' when there is no
%% wildcard entry.
record_wildcard_init(Inits) ->
    case Inits of
        [{record_field,_,{var,_,'_'},Default} | _] -> Default;
        [_ | Rest] -> record_wildcard_init(Rest);
        [] -> none
    end.
%% record_update(Record, RecordName, [RecDefField], [Update], State) ->
%%      {Expr,State'}
%% Build an expression to update fields in a record returning a new
%% record. Try to be smart and optimise this. This expansion must be
%% passed through expr again.
record_update(R, Name, Fs, Us0, St0) ->
Line = element(2, R),
{Pre,Us,St1} = record_exprs(Us0, St0),
Nf = length(Fs), %# of record fields
Nu = length(Us), %# of update fields
Nc = Nf - Nu, %# of copy fields
%% We need a new variable for the record expression
%% to guarantee that it is only evaluated once.
{Var,St2} = new_var(Line, St1),
StrictUpdates = strict_record_updates(St2#exprec.compile),
%% Try to be intelligent about which method of updating record to use.
{Update,St} =
if
Nu =:= 0 ->
record_match(Var, Name, Line, Fs, Us, St2);
Nu =< Nc, not StrictUpdates -> %Few fields updated
{record_setel(Var, Name, Fs, Us), St2};
true -> %The wide area inbetween
record_match(Var, Name, element(2, hd(Us)), Fs, Us, St2)
end,
{{block,Line,Pre ++ [{match,Line,Var,R},Update]},St}.
%% record_match(Record, RecordName, [RecDefField], [Update], State)
%% Build a 'case' expression to modify record fields.
record_match(R, Name, Lr, Fs, Us, St0) ->
{Ps,News,St1} = record_upd_fs(Fs, Us, St0),
NLr = neg_line(Lr),
{{'case',Lr,R,
[{clause,Lr,[{tuple,Lr,[{atom,Lr,Name} | Ps]}],[],
[{tuple,Lr,[{atom,Lr,Name} | News]}]},
{clause,NLr,[{var,NLr,'_'}],[],
[call_error(NLr, {tuple,NLr,[{atom,NLr,badrecord},{atom,NLr,Name}]})]}
]},
St1}.
record_upd_fs([{record_field,Lf,{atom,_La,F},_Val} | Fs], Us, St0) ->
{P,St1} = new_var(Lf, St0),
{Ps,News,St2} = record_upd_fs(Fs, Us, St1),
case find_field(F, Us) of
{ok,New} -> {[P | Ps],[New | News],St2};
error -> {[P | Ps],[P | News],St2}
end;
record_upd_fs([], _, St) -> {[],[],St}.
%% record_setel(Record, RecordName, [RecDefField], [Update])
%% Build a nested chain of setelement calls to build the
%% updated record tuple.
record_setel(R, Name, Fs, Us0) ->
Us1 = foldl(fun ({record_field,Lf,Field,Val}, Acc) ->
{integer,_,FieldIndex} = I = index_expr(Lf, Field, Name, Fs),
[{FieldIndex,{I,Lf,Val}} | Acc]
end, [], Us0),
Us2 = sort(Us1),
Us = [T || {_,T} <- Us2],
Lr = element(2, hd(Us)),
Wildcards = duplicate(length(Fs), {var,Lr,'_'}),
NLr = neg_line(Lr),
{'case',Lr,R,
[{clause,Lr,[{tuple,Lr,[{atom,Lr,Name} | Wildcards]}],[],
[foldr(fun ({I,Lf,Val}, Acc) ->
{call,Lf,{remote,Lf,{atom,Lf,erlang},
{atom,Lf,setelement}},[I,Acc,Val]} end,
R, Us)]},
{clause,NLr,[{var,NLr,'_'}],[],
[call_error(NLr, {tuple,NLr,[{atom,NLr,badrecord},{atom,NLr,Name}]})]}]}.
%% Expand a call to record_info/2. We have checked that it is not
%% shadowed by an import.
record_info_call(Line, [{atom,_Li,Info},{atom,_Ln,Name}], St) ->
case Info of
size ->
{{integer,Line,1+length(record_fields(Name, St))},St};
fields ->
{make_list(field_names(record_fields(Name, St)), Line),St}
end.
%% Break out expressions from an record update list and bind to new
%% variables. The idea is that we will evaluate all update expressions
%% before starting to update the record.
record_exprs(Us, St) ->
record_exprs(Us, St, [], []).
record_exprs([{record_field,Lf,{atom,_La,_F}=Name,Val}=Field0 | Us], St0, Pre, Fs) ->
case is_simple_val(Val) of
true ->
record_exprs(Us, St0, Pre, [Field0 | Fs]);
false ->
{Var,St} = new_var(Lf, St0),
Bind = {match,Lf,Var,Val},
Field = {record_field,Lf,Name,Var},
record_exprs(Us, St, [Bind | Pre], [Field | Fs])
end;
record_exprs([], St, Pre, Fs) ->
{reverse(Pre),Fs,St}.
%% A value expression is "simple" if it is a variable or a literal
%% term that erl_parse:normalise/1 accepts; simple values need no
%% extra binding before a record update is performed.
is_simple_val({var,_,_}) ->
    true;
is_simple_val(Val) ->
    try erl_parse:normalise(Val) of
        _ -> true
    catch
        error:_ -> false
    end.
%% pattern_bin([Element], State) -> {[Element],[Variable],[UsedVar],State}.
pattern_bin(Es0, St) ->
foldr(fun (E, Acc) -> pattern_element(E, Acc) end, {[],St}, Es0).
pattern_element({bin_element,Line,Expr0,Size,Type}, {Es,St0}) ->
{Expr,St1} = pattern(Expr0, St0),
{[{bin_element,Line,Expr,Size,Type} | Es],St1}.
%% expr_bin([Element], State) -> {[Element],State}.
expr_bin(Es0, St) ->
foldr(fun (E, Acc) -> bin_element(E, Acc) end, {[],St}, Es0).
bin_element({bin_element,Line,Expr,Size,Type}, {Es,St0}) ->
{Expr1,St1} = expr(Expr, St0),
{Size1,St2} = if Size =:= default -> {default,St1};
true -> expr(Size, St1)
end,
{[{bin_element,Line,Expr1,Size1,Type} | Es],St2}.
new_var(L, St0) ->
{New,St1} = new_var_name(St0),
{{var,L,New},St1}.
new_var_name(St) ->
C = St#exprec.vcount,
{list_to_atom("rec" ++ integer_to_list(C)),St#exprec{vcount=C+1}}.
%% Build the abstract-format representation of a proper list from
%% a list of abstract elements, all on the given line.
make_list([], Line) ->
    {nil,Line};
make_list([H | T], Line) ->
    {cons,Line,H,make_list(T, Line)}.
call_error(L, R) ->
{call,L,{remote,L,{atom,L,erlang},{atom,L,error}},[R]}.
import({Mod,Fs}, St) ->
St#exprec{imports=add_imports(Mod, Fs, St#exprec.imports)};
import(_Mod0, St) ->
St.
%% Record each imported {Name,Arity} as coming from Mod.
add_imports(Mod, Fs, Is) ->
    lists:foldl(fun (F, Acc) -> orddict:store(F, Mod, Acc) end, Is, Fs).
%% imported(Fun, Arity, State) -> {yes,Module} | no.
%%  Check whether Fun/Arity was brought in by an -import attribute,
%%  and if so from which module.
imported(F, A, St) ->
    case orddict:find({F,A}, St#exprec.imports) of
        {ok,Mod} -> {yes,Mod};
        error -> no
    end.
%%%
%%% Replace is_record/3 in guards with matching if possible.
%%%
%% optimize_is_record(Head, Guard, State) -> {Head',Guard'}.
%%  If the guard contains is_record/3 tests that must succeed for
%%  the whole guard to succeed, turn those tests into tuple patterns
%%  in the head and remove them from the guard — unless the
%%  no_is_record_optimization compile option is present.
optimize_is_record(H0, G0, #exprec{compile=Opts}) ->
    case opt_rec_vars(G0) of
        [] ->
            {H0,G0};
        Rs0 ->
            case lists:member(no_is_record_optimization, Opts) of
                true ->
                    {H0,G0};
                false ->
                    {H,Rs} = opt_pattern_list(H0, Rs0),
                    G = opt_remove(G0, Rs),
                    {H,G}
            end
    end.
%% opt_rec_vars(Guards) -> Vars.
%% Search through the guard expression, looking for
%%  variables referenced in those is_record/3 calls that
%% will fail the entire guard if they evaluate to 'false'
%%
%% In the following code
%%
%% f(X, Y, Z) when is_record(X, r1) andalso
%% (is_record(Y, r2) orelse is_record(Z, r3))
%%
%% the entire guard will be false if the record test for
%% X fails, and the clause can be rewritten to:
%%
%% f({r1,...}=X, Y, Z) when true andalso
%% (is_record(Y, r2) or is_record(Z, r3))
%%
opt_rec_vars([G|Gs]) ->
Rs = opt_rec_vars_1(G, orddict:new()),
opt_rec_vars(Gs, Rs);
opt_rec_vars([]) -> orddict:new().
opt_rec_vars([G|Gs], Rs0) ->
Rs1 = opt_rec_vars_1(G, orddict:new()),
Rs = ordsets:intersection(Rs0, Rs1),
opt_rec_vars(Gs, Rs);
opt_rec_vars([], Rs) -> Rs.
opt_rec_vars_1([T|Ts], Rs0) ->
Rs = opt_rec_vars_2(T, Rs0),
opt_rec_vars_1(Ts, Rs);
opt_rec_vars_1([], Rs) -> Rs.
%% opt_rec_vars_2(GuardExpr, Vars) -> Vars'.
%%  Collect variables tested with is_record/3 in positions where a
%%  failing test fails the entire guard expression.
opt_rec_vars_2({op,_,'and',A1,A2}, Rs) ->
    opt_rec_vars_1([A1,A2], Rs);
opt_rec_vars_2({op,_,'andalso',A1,A2}, Rs) ->
    opt_rec_vars_1([A1,A2], Rs);
opt_rec_vars_2({op,_,'orelse',Arg,{atom,_,fail}}, Rs) ->
    %% Since the second argument guarantees failure,
    %% it is safe to inspect the first argument.
    opt_rec_vars_2(Arg, Rs);
opt_rec_vars_2({call,_,{remote,_,{atom,_,erlang},{atom,_,is_record}},
                [{var,_,V},{atom,_,Tag},{integer,_,Sz}]}, Rs) ->
    orddict:store(V, {Tag,Sz}, Rs);
opt_rec_vars_2({call,_,{atom,_,is_record},
                [{var,_,V},{atom,_,Tag},{integer,_,Sz}]}, Rs) ->
    orddict:store(V, {Tag,Sz}, Rs);
opt_rec_vars_2(_, Rs) -> Rs.
opt_pattern_list(Ps, Rs) ->
opt_pattern_list(Ps, Rs, []).
opt_pattern_list([P0|Ps], Rs0, Acc) ->
{P,Rs} = opt_pattern(P0, Rs0),
opt_pattern_list(Ps, Rs, [P|Acc]);
opt_pattern_list([], Rs, Acc) ->
{reverse(Acc),Rs}.
opt_pattern({var,_,V}=Var, Rs0) ->
case orddict:find(V, Rs0) of
{ok,{Tag,Sz}} ->
Rs = orddict:store(V, {remove,Tag,Sz}, Rs0),
{opt_var(Var, Tag, Sz),Rs};
_ ->
{Var,Rs0}
end;
opt_pattern({cons,Line,H0,T0}, Rs0) ->
{H,Rs1} = opt_pattern(H0, Rs0),
{T,Rs} = opt_pattern(T0, Rs1),
{{cons,Line,H,T},Rs};
opt_pattern({tuple,Line,Es0}, Rs0) ->
{Es,Rs} = opt_pattern_list(Es0, Rs0),
{{tuple,Line,Es},Rs};
opt_pattern({match,Line,Pa0,Pb0}, Rs0) ->
{Pa,Rs1} = opt_pattern(Pa0, Rs0),
{Pb,Rs} = opt_pattern(Pb0, Rs1),
{{match,Line,Pa,Pb},Rs};
opt_pattern(P, Rs) -> {P,Rs}.
opt_var({var,Line,_}=Var, Tag, Sz) ->
Rp = record_pattern(2, -1, ignore, Sz, Line, [{atom,Line,Tag}]),
{match,Line,{tuple,Line,Rp},Var}.
opt_remove(Gs, Rs) ->
[opt_remove_1(G, Rs) || G <- Gs].
opt_remove_1(Ts, Rs) ->
[opt_remove_2(T, Rs) || T <- Ts].
opt_remove_2({op,L,'and'=Op,A1,A2}, Rs) ->
{op,L,Op,opt_remove_2(A1, Rs),opt_remove_2(A2, Rs)};
opt_remove_2({op,L,'andalso'=Op,A1,A2}, Rs) ->
{op,L,Op,opt_remove_2(A1, Rs),opt_remove_2(A2, Rs)};
opt_remove_2({op,L,'orelse',A1,A2}, Rs) ->
{op,L,'orelse',opt_remove_2(A1, Rs),A2};
opt_remove_2({call,Line,{remote,_,{atom,_,erlang},{atom,_,is_record}},
[{var,_,V},{atom,_,Tag},{integer,_,Sz}]}=A, Rs) ->
case orddict:find(V, Rs) of
{ok,{remove,Tag,Sz}} ->
{atom,Line,true};
_ ->
A
end;
opt_remove_2({call,Line,{atom,_,is_record},
[{var,_,V},{atom,_,Tag},{integer,_,Sz}]}=A, Rs) ->
case orddict:find(V, Rs) of
{ok,{remove,Tag,Sz}} ->
{atom,Line,true};
_ ->
A
end;
opt_remove_2(A, _) -> A.
%% neg_line(Line) -> Line'.
%%  Force the line number to be negative. Negative lines appear to
%%  mark compiler-generated code here (used throughout this module
%%  for synthesized expressions) — presumably so later passes can
%%  distinguish it from user-written code; confirm against the
%%  compiler's handling of negative line numbers.
neg_line(L) ->
    erl_parse:set_line(L, fun(Line) -> -abs(Line) end).
| null | https://raw.githubusercontent.com/wireless-net/erlang-nommu/79f32f81418e022d8ad8e0e447deaea407289926/lib/stdlib/src/erl_expand_records.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
Purpose : Expand records into tuples.
N.B. Although structs (tagged tuples) are not yet allowed in the
that handles them.
Compile flags
Variable counter
Imports
Record definitions
Typed records
Are there -spec or -type in the module
strict record accesses
successfully accessed records
erl_lint without errors.
Check if we need to keep the record information for usage in types.
created by strict_record_access/2 calls any of the old guard tests.
In case that later optimization passes have been turned off.
first and bind the value to a new variable.
evaluate to a tuple properly.
expr({struct,Line,Tag,Es0}, Vs, St0) ->
Make it look nice (?) when compiled with the 'E' flag
('and'/2 is left recursive).
Wrap the 'orelse' expression in an dummy 'and true' to make
sure that the entire guard fails if the 'orelse'
expression returns 'fail'. ('orelse' used to verify
that its right operand was a boolean, but that is no
longer the case.)
Allow record/2 and expand out as guard test.
normalise_fields([RecDef]) -> [Field].
Normalise the field definitions to always have a default value. If
none has been given then use 'undefined'.
field_names(RecFields) -> [Name].
Return a list of the field names structures.
Return an expression which evaluates to the index of a
field. Currently only handle the case where the field is an
atom. This expansion must be passed through expr again.
Return an expression which verifies that the type of record
is correct and then returns the value of the field.
This expansion must be passed through expr again.
Body context.
In a guard.
Just to make comparison simple:
Default.
Default.
pattern_fields([RecDefField], [Match]) -> [Pattern].
Build a list of match patterns for the record tuple elements.
This expansion must be passed through pattern again. N.B. We are
scanning the record definition field list!
record_inits([RecDefField], [Init]) -> [InitExpr].
Build a list of initialisation expressions for the record tuple
elements. This expansion must be passed through expr
again. N.B. We are scanning the record definition field list!
Build an expression to update fields in a record returning a new
record. Try to be smart and optimise this. This expansion must be
passed through expr again.
# of record fields
# of update fields
# of copy fields
We need a new variable for the record expression
to guarantee that it is only evaluated once.
Try to be intelligent about which method of updating record to use.
Few fields updated
The wide area inbetween
Build a 'case' expression to modify record fields.
Build a nested chain of setelement calls to build the
updated record tuple.
Expand a call to record_info/2. We have checked that it is not
shadowed by an import.
Break out expressions from an record update list and bind to new
variables. The idea is that we will evaluate all update expressions
before starting to update the record.
opt_rec_vars(Guards) -> Vars.
Search through the guard expression, looking for
will fail the entire guard if they evaluate to 'false'
In the following code
f(X, Y, Z) when is_record(X, r1) andalso
(is_record(Y, r2) orelse is_record(Z, r3))
the entire guard will be false if the record test for
X fails, and the clause can be rewritten to:
f({r1,...}=X, Y, Z) when true andalso
(is_record(Y, r2) or is_record(Z, r3))
| Copyright Ericsson AB 2005 - 2012 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
language there is code included in pattern/2 and expr/3 ( commented out )
-module(erl_expand_records).
-export([module/2]).
-import(lists, [map/2,foldl/3,foldr/3,sort/1,reverse/1,duplicate/2]).
}).
-spec(module(AbsForms, CompileOptions) -> AbsForms when
AbsForms :: [erl_parse:abstract_form()],
CompileOptions :: [compile:option()]).
Is is assumed that Fs is a valid list of forms . It should pass
%% module(Forms, CompileOptions) -> Forms'.
%%  Entry point: expand all record operations in a module's abstract
%%  code. The input is assumed to have passed erl_lint without errors
%%  (see the comment above).
module(Fs0, Opts0) ->
    %% Options from -compile attributes take precedence over Opts0.
    Opts = compiler_options(Fs0) ++ Opts0,
    TRecs = typed_records(Fs0),
    UsesTypes = uses_types(Fs0),
    St0 = #exprec{compile = Opts, trecords = TRecs, uses_types = UsesTypes},
    {Fs,_St} = forms(Fs0, St0),
    Fs.
compiler_options(Forms) ->
lists:flatten([C || {attribute,_,compile,C} <- Forms]).
typed_records(Fs) ->
typed_records(Fs, sets:new()).
typed_records([{attribute,_L,type,{{record, Name},_Defs,[]}} | Fs], Trecs) ->
typed_records(Fs, sets:add_element(Name, Trecs));
typed_records([_|Fs], Trecs) ->
typed_records(Fs, Trecs);
typed_records([], Trecs) ->
Trecs.
%% True if the module contains any -spec, -type or -opaque attribute.
uses_types([Form | Forms]) ->
    case Form of
        {attribute,_L,spec,_} -> true;
        {attribute,_L,type,_} -> true;
        {attribute,_L,opaque,_} -> true;
        _ -> uses_types(Forms)
    end;
uses_types([]) -> false.
forms([{attribute,L,record,{Name,Defs}} | Fs], St0) ->
NDefs = normalise_fields(Defs),
St = St0#exprec{records=dict:store(Name, NDefs, St0#exprec.records)},
{Fs1, St1} = forms(Fs, St),
case St#exprec.uses_types of
true ->
case sets:is_element(Name, St#exprec.trecords) of
true -> {Fs1, St1};
false -> {[{attribute,L,type,{{record,Name},Defs,[]}}|Fs1], St1}
end;
false ->
{Fs1, St1}
end;
forms([{attribute,L,import,Is} | Fs0], St0) ->
St1 = import(Is, St0),
{Fs,St2} = forms(Fs0, St1),
{[{attribute,L,import,Is} | Fs], St2};
forms([{function,L,N,A,Cs0} | Fs0], St0) ->
{Cs,St1} = clauses(Cs0, St0),
{Fs,St2} = forms(Fs0, St1),
{[{function,L,N,A,Cs} | Fs],St2};
forms([F | Fs0], St0) ->
{Fs,St} = forms(Fs0, St0),
{[F | Fs], St};
forms([], St) -> {[],St}.
clauses([{clause,Line,H0,G0,B0} | Cs0], St0) ->
{H1,St1} = head(H0, St0),
{G1,St2} = guard(G0, St1),
{H,G} = optimize_is_record(H1, G1, St2),
{B,St3} = exprs(B0, St2),
{Cs,St4} = clauses(Cs0, St3),
{[{clause,Line,H,G,B} | Cs],St4};
clauses([], St) -> {[],St}.
head(As, St) -> pattern_list(As, St).
pattern({var,_,'_'}=Var, St) ->
{Var,St};
pattern({var,_,_}=Var, St) ->
{Var,St};
pattern({char,_,_}=Char, St) ->
{Char,St};
pattern({integer,_,_}=Int, St) ->
{Int,St};
pattern({float,_,_}=Float, St) ->
{Float,St};
pattern({atom,_,_}=Atom, St) ->
{Atom,St};
pattern({string,_,_}=String, St) ->
{String,St};
pattern({nil,_}=Nil, St) ->
{Nil,St};
pattern({cons,Line,H,T}, St0) ->
{TH,St1} = pattern(H, St0),
{TT,St2} = pattern(T, St1),
{{cons,Line,TH,TT},St2};
pattern({tuple,Line,Ps}, St0) ->
{TPs,St1} = pattern_list(Ps, St0),
{{tuple,Line,TPs},St1};
pattern({map,Line,Ps}, St0) ->
{TPs,St1} = pattern_list(Ps, St0),
{{map,Line,TPs},St1};
pattern({map_field_exact,Line,K0,V0}, St0) ->
{K,St1} = expr(K0, St0),
{V,St2} = pattern(V0, St1),
{{map_field_exact,Line,K,V},St2};
%% pattern({struct,Line,Tag,Ps}, St0) ->
%%     {TPs,TPsvs,St1} = pattern_list(Ps, St0),
%%     {{struct,Line,Tag,TPs},TPsvs,St1};
pattern({record_index,Line,Name,Field}, St) ->
{index_expr(Line, Field, Name, record_fields(Name, St)),St};
pattern({record,Line,Name,Pfs}, St0) ->
Fs = record_fields(Name, St0),
{TMs,St1} = pattern_list(pattern_fields(Fs, Pfs), St0),
{{tuple,Line,[{atom,Line,Name} | TMs]},St1};
pattern({bin,Line,Es0}, St0) ->
{Es1,St1} = pattern_bin(Es0, St0),
{{bin,Line,Es1},St1};
pattern({match,Line,Pat1, Pat2}, St0) ->
{TH,St1} = pattern(Pat2, St0),
{TT,St2} = pattern(Pat1, St1),
{{match,Line,TT,TH},St2};
pattern({op,Line,Op,A0}, St0) ->
{A,St1} = pattern(A0, St0),
{{op,Line,Op,A},St1};
pattern({op,Line,Op,L0,R0}, St0) ->
{L,St1} = pattern(L0, St0),
{R,St2} = pattern(R0, St1),
{{op,Line,Op,L,R},St2}.
%% pattern_list([Pattern], State) -> {[Pattern],State'}.
%%  Expand every pattern in the list, threading the state through.
pattern_list(Ps, St) ->
    lists:mapfoldl(fun pattern/2, St, Ps).
guard([G0 | Gs0], St0) ->
{G,St1} = guard_tests(G0, St0),
{Gs,St2} = guard(Gs0, St1),
{[G | Gs],St2};
guard([], St) -> {[],St}.
guard_tests(Gts0, St0) ->
{Gts1,St1} = guard_tests1(Gts0, St0),
{Gts1,St1#exprec{checked_ra = []}}.
%% Expand each guard test in a conjunction, threading the state.
guard_tests1(Gts, St) ->
    lists:mapfoldl(fun guard_test/2, St, Gts).
%% guard_test(GuardTest, State) -> {GuardTest',State'}.
%%  Expand one guard test inside the in_guard/1 context, then apply
%%  the strict record access checks accumulated during expansion.
guard_test(G0, St0) ->
    in_guard(fun() ->
                     {G1,St1} = guard_test1(G0, St0),
                     strict_record_access(G1, St1)
             end).
Normalising guard tests ensures that none of the Boolean operands
guard_test1({call,Line,{atom,Lt,Tname},As}, St) ->
Test = {atom,Lt,normalise_test(Tname, length(As))},
expr({call,Line,Test,As}, St);
guard_test1(Test, St) ->
expr(Test, St).
%% Map old-style type test names (atom/1, record/2, ...) to the
%% modern is_* BIF names; anything unrecognised is left unchanged.
normalise_test(Name, Arity) ->
    case {Name, Arity} of
        {atom, 1} -> is_atom;
        {binary, 1} -> is_binary;
        {float, 1} -> is_float;
        {function, 1} -> is_function;
        {integer, 1} -> is_integer;
        {list, 1} -> is_list;
        {number, 1} -> is_number;
        {pid, 1} -> is_pid;
        {port, 1} -> is_port;
        {record, 2} -> is_record;
        {reference, 1} -> is_reference;
        {tuple, 1} -> is_tuple;
        {_, _} -> Name
    end.
is_in_guard() ->
get(erl_expand_records_in_guard) =/= undefined.
%% in_guard(Fun) -> Result.
%%  Run Fun with the process-dictionary flag set that marks guard
%%  context (read by is_in_guard/0). The match on 'undefined'
%%  asserts that calls are never nested; the match on 'true'
%%  asserts the flag is always cleaned up.
in_guard(F) ->
    undefined = put(erl_expand_records_in_guard, true),
    Res = F(),
    true = erase(erl_expand_records_in_guard),
    Res.
%% record_test(Line, Term, Name, Vs, St) ->
%%  Generate code for is_record/1.
%% record_test(Line, Term, Name, State) -> {Expr,State'}.
%%  Expand a record membership test; the expansion differs between
%%  guard context and body context.
record_test(Line, Term, Name, St) ->
    case is_in_guard() of
        false ->
            record_test_in_body(Line, Term, Name, St);
        true ->
            record_test_in_guard(Line, Term, Name, St)
    end.
record_test_in_guard(Line, Term, Name, St) ->
case not_a_tuple(Term) of
true ->
expr({atom,Line,false}, St);
false ->
Fs = record_fields(Name, St),
NLine = neg_line(Line),
expr({call,NLine,{remote,NLine,{atom,NLine,erlang},{atom,NLine,is_record}},
[Term,{atom,Line,Name},{integer,Line,length(Fs)+1}]},
St)
end.
%% Syntactic check: abstract expressions of these shapes can never
%% evaluate to a tuple, so a record test on them is always false.
not_a_tuple(Expr) ->
    case Expr of
        {atom,_,_} -> true;
        {integer,_,_} -> true;
        {float,_,_} -> true;
        {nil,_} -> true;
        {cons,_,_,_} -> true;
        {char,_,_} -> true;
        {string,_,_} -> true;
        {record_index,_,_,_} -> true;
        {bin,_,_} -> true;
        {op,_,_,_} -> true;
        {op,_,_,_,_} -> true;
        _ -> false
    end.
record_test_in_body(Line, Expr, Name, St0) ->
As may have side effects , we must evaluate it
We must use also handle the case that does not
Fs = record_fields(Name, St0),
{Var,St} = new_var(Line, St0),
NLine = neg_line(Line),
expr({block,Line,
[{match,Line,Var,Expr},
{call,NLine,{remote,NLine,{atom,NLine,erlang},
{atom,NLine,is_record}},
[Var,{atom,Line,Name},{integer,Line,length(Fs)+1}]}]}, St).
%% exprs([Expr], State) -> {[Expr],State'}.
%%  Expand a body (sequence of expressions), threading the state.
exprs(Es, St) ->
    lists:mapfoldl(fun expr/2, St, Es).
expr({var,_,_}=Var, St) ->
{Var,St};
expr({char,_,_}=Char, St) ->
{Char,St};
expr({integer,_,_}=Int, St) ->
{Int,St};
expr({float,_,_}=Float, St) ->
{Float,St};
expr({atom,_,_}=Atom, St) ->
{Atom,St};
expr({string,_,_}=String, St) ->
{String,St};
expr({nil,_}=Nil, St) ->
{Nil,St};
expr({cons,Line,H0,T0}, St0) ->
{H,St1} = expr(H0, St0),
{T,St2} = expr(T0, St1),
{{cons,Line,H,T},St2};
expr({lc,Line,E0,Qs0}, St0) ->
{Qs1,St1} = lc_tq(Line, Qs0, St0),
{E1,St2} = expr(E0, St1),
{{lc,Line,E1,Qs1},St2};
expr({bc,Line,E0,Qs0}, St0) ->
{Qs1,St1} = lc_tq(Line, Qs0, St0),
{E1,St2} = expr(E0, St1),
{{bc,Line,E1,Qs1},St2};
expr({tuple,Line,Es0}, St0) ->
{Es1,St1} = expr_list(Es0, St0),
{{tuple,Line,Es1},St1};
expr({map,Line,Es0}, St0) ->
{Es1,St1} = expr_list(Es0, St0),
{{map,Line,Es1},St1};
expr({map,Line,Arg0,Es0}, St0) ->
{Arg1,St1} = expr(Arg0, St0),
{Es1,St2} = expr_list(Es0, St1),
{{map,Line,Arg1,Es1},St2};
expr({map_field_assoc,Line,K0,V0}, St0) ->
{K,St1} = expr(K0, St0),
{V,St2} = expr(V0, St1),
{{map_field_assoc,Line,K,V},St2};
expr({map_field_exact,Line,K0,V0}, St0) ->
{K,St1} = expr(K0, St0),
{V,St2} = expr(V0, St1),
{{map_field_exact,Line,K,V},St2};
%% expr({struct,Line,Tag,Es0}, Vs, St0) ->
%%     {Es1,Esvs,Esus,St1} = expr_list(Es0, Vs, St0),
%%     {{struct,Line,Tag,Es1},Esvs,Esus,St1};
expr({record_index,Line,Name,F}, St) ->
I = index_expr(Line, F, Name, record_fields(Name, St)),
expr(I, St);
expr({record,Line,Name,Is}, St) ->
expr({tuple,Line,[{atom,Line,Name} |
record_inits(record_fields(Name, St), Is)]},
St);
expr({record_field,Line,R,Name,F}, St) ->
get_record_field(Line, R, F, Name, St);
expr({record,_,R,Name,Us}, St0) ->
{Ue,St1} = record_update(R, Name, record_fields(Name, St0), Us, St0),
expr(Ue, St1);
expr({bin,Line,Es0}, St0) ->
{Es1,St1} = expr_bin(Es0, St0),
{{bin,Line,Es1},St1};
expr({block,Line,Es0}, St0) ->
{Es,St1} = exprs(Es0, St0),
{{block,Line,Es},St1};
expr({'if',Line,Cs0}, St0) ->
{Cs,St1} = clauses(Cs0, St0),
{{'if',Line,Cs},St1};
expr({'case',Line,E0,Cs0}, St0) ->
{E,St1} = expr(E0, St0),
{Cs,St2} = clauses(Cs0, St1),
{{'case',Line,E,Cs},St2};
expr({'receive',Line,Cs0}, St0) ->
{Cs,St1} = clauses(Cs0, St0),
{{'receive',Line,Cs},St1};
expr({'receive',Line,Cs0,To0,ToEs0}, St0) ->
{To,St1} = expr(To0, St0),
{ToEs,St2} = exprs(ToEs0, St1),
{Cs,St3} = clauses(Cs0, St2),
{{'receive',Line,Cs,To,ToEs},St3};
expr({'fun',_,{function,_F,_A}}=Fun, St) ->
{Fun,St};
expr({'fun',_,{function,_M,_F,_A}}=Fun, St) ->
{Fun,St};
expr({'fun',Line,{clauses,Cs0}}, St0) ->
{Cs,St1} = clauses(Cs0, St0),
{{'fun',Line,{clauses,Cs}},St1};
expr({named_fun,Line,Name,Cs0}, St0) ->
{Cs,St1} = clauses(Cs0, St0),
{{named_fun,Line,Name,Cs},St1};
expr({call,Line,{atom,_,is_record},[A,{atom,_,Name}]}, St) ->
record_test(Line, A, Name, St);
expr({call,Line,{remote,_,{atom,_,erlang},{atom,_,is_record}},
[A,{atom,_,Name}]}, St) ->
record_test(Line, A, Name, St);
expr({call,Line,{tuple,_,[{atom,_,erlang},{atom,_,is_record}]},
[A,{atom,_,Name}]}, St) ->
record_test(Line, A, Name, St);
expr({call,Line,{atom,_La,N}=Atom,As0}, St0) ->
{As,St1} = expr_list(As0, St0),
Ar = length(As),
case erl_internal:bif(N, Ar) of
true ->
{{call,Line,Atom,As},St1};
false ->
case imported(N, Ar, St1) of
{yes,_Mod} ->
{{call,Line,Atom,As},St1};
no ->
case {N,Ar} of
{record_info,2} ->
record_info_call(Line, As, St1);
_ ->
{{call,Line,Atom,As},St1}
end
end
end;
expr({call,Line,{remote,Lr,M,F},As0}, St0) ->
{[M1,F1 | As1],St1} = expr_list([M,F | As0], St0),
{{call,Line,{remote,Lr,M1,F1},As1},St1};
expr({call,Line,F,As0}, St0) ->
{[Fun1 | As1],St1} = expr_list([F | As0], St0),
{{call,Line,Fun1,As1},St1};
expr({'try',Line,Es0,Scs0,Ccs0,As0}, St0) ->
{Es1,St1} = exprs(Es0, St0),
{Scs1,St2} = clauses(Scs0, St1),
{Ccs1,St3} = clauses(Ccs0, St2),
{As1,St4} = exprs(As0, St3),
{{'try',Line,Es1,Scs1,Ccs1,As1},St4};
expr({'catch',Line,E0}, St0) ->
{E,St1} = expr(E0, St0),
{{'catch',Line,E},St1};
expr({match,Line,P0,E0}, St0) ->
{E,St1} = expr(E0, St0),
{P,St2} = pattern(P0, St1),
{{match,Line,P,E},St2};
expr({op,Line,'not',A0}, St0) ->
{A,St1} = bool_operand(A0, St0),
{{op,Line,'not',A},St1};
expr({op,Line,Op,A0}, St0) ->
{A,St1} = expr(A0, St0),
{{op,Line,Op,A},St1};
expr({op,Line,Op,L0,R0}, St0) when Op =:= 'and';
Op =:= 'or' ->
{L,St1} = bool_operand(L0, St0),
{R,St2} = bool_operand(R0, St1),
{{op,Line,Op,L,R},St2};
expr({op,Line,Op,L0,R0}, St0) when Op =:= 'andalso';
Op =:= 'orelse' ->
{L,St1} = bool_operand(L0, St0),
{R,St2} = bool_operand(R0, St1),
{{op,Line,Op,L,R},St2#exprec{checked_ra = St1#exprec.checked_ra}};
expr({op,Line,Op,L0,R0}, St0) ->
{L,St1} = expr(L0, St0),
{R,St2} = expr(R0, St1),
{{op,Line,Op,L,R},St2}.
%% expr_list([Expr], State) -> {[Expr],State'}.
%%  Expand every expression in the list, threading the expansion
%%  state through from left to right.
expr_list(Es, St) ->
    lists:mapfoldl(fun expr/2, St, Es).
bool_operand(E0, St0) ->
{E1,St1} = expr(E0, St0),
strict_record_access(E1, St1).
%% strict_record_access(Expr, State) -> {Expr,State'}.
%%  Prepend a conjunction of is_record/3 guard tests to E0, one for
%%  each deferred strict record access that has not already been
%%  checked earlier in this guard.
strict_record_access(E, #exprec{strict_ra = []} = St) ->
    {E, St};
strict_record_access(E0, St0) ->
    #exprec{strict_ra = StrictRA, checked_ra = CheckedRA} = St0,
    %% Keep only accesses whose key has not already been checked;
    %% remember every access in checked_ra to avoid duplicate tests.
    {New,NC} = lists:foldl(fun ({Key,_L,_R,_Sz}=A, {L,C}) ->
                                   case lists:keymember(Key, 1, C) of
                                       true -> {L,C};
                                       false -> {[A|L],[A|C]}
                                   end
                           end, {[],CheckedRA}, StrictRA),
    E1 = if New =:= [] -> E0; true -> conj(New, E0) end,
    St1 = St0#exprec{strict_ra = [], checked_ra = NC},
    expr(E1, St1).
%% conj(StrictRecordAccesses, Expr) -> Expr | empty.
%%  Build a guard conjunction of is_record/3 tests, one per deferred
%%  strict record access, and 'and' it onto E.  Generated code uses
%%  negated line numbers (see neg_line/1).
conj([], _E) ->
    empty;
conj([{{Name,_Rp},L,R,Sz} | AL], E) ->
    NL = neg_line(L),
    %% 'orelse fail' forces the whole guard to fail (rather than just
    %% yielding 'false') when the is_record/3 test does not hold.
    T1 = {op,NL,'orelse',
          {call,NL,
           {remote,NL,{atom,NL,erlang},{atom,NL,is_record}},
           [R,{atom,NL,Name},{integer,NL,Sz}]},
          {atom,NL,fail}},
    T2 = case conj(AL, none) of
             empty -> T1;
             C -> {op,NL,'and',C,T1}
         end,
    case E of
        none ->
            case T2 of
                {op,_,'and',_,_} ->
                    T2;
                _ ->
                    %% Wrap a lone test in "and true" so the result
                    %% always has conjunction shape.
                    {op,NL,'and',T2,{atom,NL,true}}
            end;
        _ ->
            {op,NL,'and',T2,E}
    end.
%% lc_tq(Line, Qualifiers, State) ->
%%      {[TransQual],State'}
%%  Expand the qualifiers of a list or binary comprehension.
lc_tq(Line, [{generate,Lg,P0,G0} | Qs0], St0) ->
    {G1,St1} = expr(G0, St0),
    {P1,St2} = pattern(P0, St1),
    {Qs1,St3} = lc_tq(Line, Qs0, St2),
    {[{generate,Lg,P1,G1} | Qs1],St3};
lc_tq(Line, [{b_generate,Lg,P0,G0} | Qs0], St0) ->
    {G1,St1} = expr(G0, St0),
    {P1,St2} = pattern(P0, St1),
    {Qs1,St3} = lc_tq(Line, Qs0, St2),
    {[{b_generate,Lg,P1,G1} | Qs1],St3};
lc_tq(Line, [F0 | Qs0], St0) ->
    %% A filter: expand as a guard test when it is one, otherwise as
    %% an ordinary body expression.
    case erl_lint:is_guard_test(F0) of
        true ->
            {F1,St1} = guard_test(F0, St0),
            {Qs1,St2} = lc_tq(Line, Qs0, St1),
            {[F1|Qs1],St2};
        false ->
            {F1,St1} = expr(F0, St0),
            {Qs1,St2} = lc_tq(Line, Qs0, St1),
            {[F1 | Qs1],St2}
    end;
lc_tq(_Line, [], St0) ->
    %% Reset checked_ra: record tests checked inside the comprehension
    %% do not hold outside of it.
    {[],St0#exprec{checked_ra = []}}.
%% normalise_fields([RecDefField]) -> [{record_field,Line,Field,Init}].
%%  Normalise the field definitions of a record declaration: strip
%%  type annotations and give every field without an explicit
%%  initialiser the default 'undefined'.
normalise_fields(Fs) ->
    [case F of
         {record_field,Lf,Field} ->
             {record_field,Lf,Field,{atom,Lf,undefined}};
         {typed_record_field,{record_field,Lf,Field},_Type} ->
             {record_field,Lf,Field,{atom,Lf,undefined}};
         {typed_record_field,Field,_Type} ->
             Field;
         _ ->
             F
     end || F <- Fs].
%% record_fields(RecordName, State)
%% find_field(FieldName, Fields)
record_fields(R, St) -> dict:fetch(R, St#exprec.records).
%%  Look up the value expression bound to a named field, returning
%%  {ok,Val} or 'error' if the field does not occur.
find_field(_F, []) ->
    error;
find_field(F, [Fd | Rest]) ->
    case Fd of
        {record_field,_,{atom,_,F},Val} -> {ok,Val};
        _ -> find_field(F, Rest)
    end.
%%  Extract the field-name expressions from normalised field
%%  definitions (each must be a 4-tuple record_field).
field_names([{record_field,_,Field,_Val} | Fs]) ->
    [Field | field_names(Fs)];
field_names([]) ->
    [].
%% index_expr(Line, FieldExpr, Name, Fields) -> IndexExpr.
%%  Return the element index of a record field as an AST integer
%%  literal.  Counting starts at 2 because element 1 of the tuple is
%%  the record tag.
index_expr(Line, {atom,_,F}, _Name, Fs) ->
    {integer,Line,index_expr(F, Fs, 2)}.

index_expr(F, [Fd | Fs], I) ->
    case Fd of
        {record_field,_,{atom,_,F},_} -> I;
        _ -> index_expr(F, Fs, I+1)
    end.
%% get_record_field(Line, RecExpr, FieldExpr, Name, St) -> {Expr,St'}.
%%  Expand a record field access, choosing the strict (checked) or
%%  sloppy (unchecked element/2) translation according to the compile
%%  options.
get_record_field(Line, R, Index, Name, St) ->
    case strict_record_tests(St#exprec.compile) of
        false ->
            sloppy_get_record_field(Line, R, Index, Name, St);
        true ->
            strict_get_record_field(Line, R, Index, Name, St)
    end.
%%  Expand R#Name.F with a strict check that R really is a Name
%%  record.  In a body we generate a case that matches the tuple shape
%%  (raising badrecord otherwise); in a guard we generate element/2
%%  and defer an is_record/3 test via strict_ra in the state.
strict_get_record_field(Line, R, {atom,_,F}=Index, Name, St0) ->
    case is_in_guard() of
	false ->				%Body context.
	    {Var,St} = new_var(Line, St0),
	    Fs = record_fields(Name, St),
	    I = index_expr(F, Fs, 2),
	    P = record_pattern(2, I, Var, length(Fs)+1, Line, [{atom,Line,Name}]),
	    NLine = neg_line(Line),
	    E = {'case',NLine,R,
		 [{clause,NLine,[{tuple,NLine,P}],[],[Var]},
		  {clause,NLine,[{var,NLine,'_'}],[],
		   [{call,NLine,{remote,NLine,
				 {atom,NLine,erlang},
				 {atom,NLine,error}},
		     [{tuple,NLine,[{atom,NLine,badrecord},{atom,NLine,Name}]}]}]}]},
	    expr(E, St);
	true ->					%In a guard.
	    Fs = record_fields(Name, St0),
	    I = index_expr(Line, Index, Name, Fs),
	    {ExpR,St1} = expr(R, St0),
	    %% Zero out line numbers so that syntactically equal
	    %% accesses compare equal and are only tested once.
	    ExpRp = erl_lint:modify_line(ExpR, fun(_L) -> 0 end),
	    RA = {{Name,ExpRp},Line,ExpR,length(Fs)+1},
	    St2 = St1#exprec{strict_ra = [RA | St1#exprec.strict_ra]},
	    {{call,Line,
	      {remote,Line,{atom,Line,erlang},{atom,Line,element}},
	      [I,ExpR]},St2}
    end.
%%  Build the pattern list for matching element I of a record tuple:
%%  position I gets Var, all other positions up to Sz get '_'.  Acc
%%  starts with the record tag and is accumulated in reverse.
record_pattern(Cur, I, Var, Sz, Line, Acc) ->
    if
        Cur =:= I ->
            record_pattern(Cur+1, I, Var, Sz, Line, [Var | Acc]);
        Cur =< Sz ->
            record_pattern(Cur+1, I, Var, Sz, Line, [{var,Line,'_'} | Acc]);
        true ->
            lists:reverse(Acc)
    end.
%%  Unchecked translation: R#Name.F becomes element(I, R) with no
%%  verification that R actually is a Name record.
sloppy_get_record_field(Line, R, Index, Name, St) ->
    Fs = record_fields(Name, St),
    I = index_expr(Line, Index, Name, Fs),
    expr({call,Line,
          {remote,Line,{atom,Line,erlang},{atom,Line,element}},
          [I,R]}, St).
%%  Scan the compile options; the first (no_)strict_record_tests
%%  option wins.  Strict record tests are enabled by default.
strict_record_tests([strict_record_tests | _]) -> true;
strict_record_tests([no_strict_record_tests | _]) -> false;
strict_record_tests([_ | Os]) -> strict_record_tests(Os);
strict_record_tests([]) -> true.		%Default.
%%  Scan the compile options; the first (no_)strict_record_updates
%%  option wins.  Strict record updates are disabled by default.
strict_record_updates([strict_record_updates | _]) -> true;
strict_record_updates([no_strict_record_updates | _]) -> false;
strict_record_updates([_ | Os]) -> strict_record_updates(Os);
strict_record_updates([]) -> false.		%Default.
%% pattern_fields([RecDefField], [Match]) -> [Pattern].
%%  Build the field patterns for a record pattern: a field with an
%%  explicit match uses it; otherwise the '_' wildcard match (if any)
%%  or a plain '_' variable is used.
pattern_fields(Fs, Ms) ->
    Wildcard = record_wildcard_init(Ms),
    map(fun ({record_field,L,{atom,_,F},_}) ->
                case find_field(F, Ms) of
                    {ok,Match} -> Match;
                    error when Wildcard =:= none -> {var,L,'_'};
                    error -> Wildcard
                end
        end, Fs).
%% record_inits([RecDefField], [Init]) -> [InitExpr].
%%  Build the initialisation values for record creation: an explicit
%%  init wins, then the '_' wildcard init (if any), then the default
%%  from the record definition.
record_inits(Fs, Is) ->
    WildcardInit = record_wildcard_init(Is),
    map(fun ({record_field,_,{atom,_,F},D}) ->
                case find_field(F, Is) of
                    {ok,Init} -> Init;
                    error when WildcardInit =:= none -> D;
                    error -> WildcardInit
                end
        end, Fs).
%%  Return the value bound to the '_' wildcard field, or 'none' when
%%  no wildcard field occurs.
record_wildcard_init([]) ->
    none;
record_wildcard_init([Field | Rest]) ->
    case Field of
        {record_field,_,{var,_,'_'},Init} -> Init;
        _ -> record_wildcard_init(Rest)
    end.
%% record_update(Record, RecordName, [RecDefField], [Update], State) ->
%%      {Expr,State'}
%%  Expand a record update R#Name{...}.  The record expression is
%%  bound to a fresh variable so it is evaluated exactly once, then
%%  the cheapest safe update strategy is chosen: a full rebuild via
%%  matching, or a setelement/3 chain.
record_update(R, Name, Fs, Us0, St0) ->
    Line = element(2, R),
    {Pre,Us,St1} = record_exprs(Us0, St0),
    Nf = length(Fs),				%# of record fields
    Nu = length(Us),				%# of fields to update
    Nc = Nf - Nu,				%# of fields to copy
    {Var,St2} = new_var(Line, St1),
    StrictUpdates = strict_record_updates(St2#exprec.compile),
    %% Try to be intelligent about which method of updating to use.
    {Update,St} =
	if
	    Nu =:= 0 ->
		%% No fields updated: just verify the record shape.
		record_match(Var, Name, Line, Fs, Us, St2);
	    Nu =< Nc, not StrictUpdates ->	%RHS is checked anyway.
		{record_setel(Var, Name, Fs, Us), St2};
	    true ->
		record_match(Var, Name, element(2, hd(Us)), Fs, Us, St2)
	end,
    {{block,Line,Pre ++ [{match,Line,Var,R},Update]},St}.
%% record_match(Record, RecordName, [RecDefField], [Update], State)
%%  Expand a record update into a case that matches out every field
%%  and rebuilds the whole tuple; the catch-all clause (at a negated
%%  line) raises badrecord.
record_match(R, Name, Lr, Fs, Us, St0) ->
    {Ps,News,St1} = record_upd_fs(Fs, Us, St0),
    NLr = neg_line(Lr),
    {{'case',Lr,R,
      [{clause,Lr,[{tuple,Lr,[{atom,Lr,Name} | Ps]}],[],
        [{tuple,Lr,[{atom,Lr,Name} | News]}]},
       {clause,NLr,[{var,NLr,'_'}],[],
        [call_error(NLr, {tuple,NLr,[{atom,NLr,badrecord},{atom,NLr,Name}]})]}
      ]},
     St1}.
%%  For every record field create a fresh pattern variable, and pick
%%  either the updated value or the matched-out old value as the new
%%  field value.  Returns {Patterns,NewValues,State}.
record_upd_fs([{record_field,Lf,{atom,_La,F},_Val} | Fs], Us, St0) ->
    {P,St1} = new_var(Lf, St0),
    {Ps,News,St2} = record_upd_fs(Fs, Us, St1),
    case find_field(F, Us) of
        {ok,New} -> {[P | Ps],[New | News],St2};
        error -> {[P | Ps],[P | News],St2}
    end;
record_upd_fs([], _, St) -> {[],[],St}.
%% record_setel(Record, RecordName, [RecDefField], [Update])
%%  Expand a record update into a chain of setelement/3 calls guarded
%%  by a single tuple-shape case.  Updates are sorted by field index
%%  so the generated chain is deterministic.
record_setel(R, Name, Fs, Us0) ->
    Us1 = foldl(fun ({record_field,Lf,Field,Val}, Acc) ->
                        {integer,_,FieldIndex} = I = index_expr(Lf, Field, Name, Fs),
                        [{FieldIndex,{I,Lf,Val}} | Acc]
                end, [], Us0),
    Us2 = sort(Us1),
    Us = [T || {_,T} <- Us2],
    Lr = element(2, hd(Us)),
    Wildcards = duplicate(length(Fs), {var,Lr,'_'}),
    NLr = neg_line(Lr),
    %% The single case clause only checks the tuple shape; the
    %% catch-all raises badrecord.
    {'case',Lr,R,
     [{clause,Lr,[{tuple,Lr,[{atom,Lr,Name} | Wildcards]}],[],
       [foldr(fun ({I,Lf,Val}, Acc) ->
                      {call,Lf,{remote,Lf,{atom,Lf,erlang},
                                {atom,Lf,setelement}},[I,Acc,Val]} end,
              R, Us)]},
      {clause,NLr,[{var,NLr,'_'}],[],
       [call_error(NLr, {tuple,NLr,[{atom,NLr,badrecord},{atom,NLr,Name}]})]}]}.
%%  Expand record_info(size | fields, Name) to the corresponding
%%  constant (tuple size includes the record tag).
record_info_call(Line, [{atom,_Li,Info},{atom,_Ln,Name}], St) ->
    case Info of
        size ->
            {{integer,Line,1+length(record_fields(Name, St))},St};
        fields ->
            {make_list(field_names(record_fields(Name, St)), Line),St}
    end.
%%  Bind every non-trivial update value to a fresh variable so that
%%  it is evaluated exactly once and before the record shape test.
%%  Returns {PreBindings,Fields,State}.
record_exprs(Us, St) ->
    record_exprs(Us, St, [], []).

record_exprs([{record_field,Lf,{atom,_La,_F}=Name,Val}=Field0 | Us], St0, Pre, Fs) ->
    case is_simple_val(Val) of
        true ->
            %% Variables and literals are safe to duplicate in place.
            record_exprs(Us, St0, Pre, [Field0 | Fs]);
        false ->
            {Var,St} = new_var(Lf, St0),
            Bind = {match,Lf,Var,Val},
            Field = {record_field,Lf,Name,Var},
            record_exprs(Us, St, [Bind | Pre], [Field | Fs])
    end;
record_exprs([], St, Pre, Fs) ->
    {reverse(Pre),Fs,St}.
%%  A value is "simple" (safe to duplicate without re-evaluation
%%  concerns) if it is a variable or a normalisable literal term.
is_simple_val({var,_,_}) ->
    true;
is_simple_val(Val) ->
    try erl_parse:normalise(Val) of
        _ -> true
    catch
        error:_ -> false
    end.
%% pattern_bin([Element], State) -> {[Element],State}.
%%  Expand the element patterns of a binary pattern; sizes and type
%%  specifiers are left untouched.
pattern_bin(Es0, St) ->
    foldr(fun (E, Acc) -> pattern_element(E, Acc) end, {[],St}, Es0).

pattern_element({bin_element,Line,Expr0,Size,Type}, {Es,St0}) ->
    {Expr,St1} = pattern(Expr0, St0),
    {[{bin_element,Line,Expr,Size,Type} | Es],St1}.
%% expr_bin([Element], State) -> {[Element],State}.
%%  Expand the elements of a binary construction; both the value and
%%  an explicit size expression are expanded.
expr_bin(Es0, St) ->
    foldr(fun (E, Acc) -> bin_element(E, Acc) end, {[],St}, Es0).

bin_element({bin_element,Line,Expr,Size,Type}, {Es,St0}) ->
    {Expr1,St1} = expr(Expr, St0),
    {Size1,St2} = if Size =:= default -> {default,St1};
                     true -> expr(Size, St1)
                  end,
    {[{bin_element,Line,Expr1,Size1,Type} | Es],St2}.
%%  Create a fresh AST variable at line L, unique within this pass
%%  thanks to the vcount counter in the state.
new_var(L, St0) ->
    {New,St1} = new_var_name(St0),
    {{var,L,New},St1}.

%%  Generate the next unique variable name ("recN") and bump the
%%  counter.
new_var_name(St) ->
    C = St#exprec.vcount,
    {list_to_atom("rec" ++ integer_to_list(C)),St#exprec{vcount=C+1}}.
%%  Build an AST list literal from a list of AST terms, all at the
%%  same line number.
make_list([], Line) ->
    {nil,Line};
make_list([H | T], Line) ->
    {cons,Line,H,make_list(T, Line)}.
%%  Build an AST call to erlang:error(R) at line L.
call_error(L, R) ->
    ErrorFun = {remote,L,{atom,L,erlang},{atom,L,error}},
    {call,L,ErrorFun,[R]}.
%%  Record an -import attribute in the state; any other attribute
%%  shape is ignored.
import({Mod,Fs}, St) ->
    St#exprec{imports=add_imports(Mod, Fs, St#exprec.imports)};
import(_Mod0, St) ->
    St.

%%  Add each imported {Name,Arity} -> Module mapping to the orddict.
add_imports(Mod, [F | Fs], Is) ->
    add_imports(Mod, Fs, orddict:store(F, Mod, Is));
add_imports(_, [], Is) -> Is.
%%  Return {yes,Module} if F/A is imported, otherwise 'no'.
imported(F, A, St) ->
    case orddict:find({F,A}, St#exprec.imports) of
        {ok,Mod} -> {yes,Mod};
        error -> no
    end.
%% Replace is_record/3 in guards with matching if possible.
%% optimize_is_record(Head, Guard, State) -> {Head',Guard'}.
%%  If every guard sequence asserts that a variable is a record of a
%%  known type, move that test into the head as a tuple pattern and
%%  drop the now-redundant guard tests.  Can be disabled with the
%%  no_is_record_optimization option.
optimize_is_record(H0, G0, #exprec{compile=Opts}) ->
    case opt_rec_vars(G0) of
        [] ->
            {H0,G0};
        Rs0 ->
            case lists:member(no_is_record_optimization, Opts) of
                true ->
                    {H0,G0};
                false ->
                    {H,Rs} = opt_pattern_list(H0, Rs0),
                    G = opt_remove(G0, Rs),
                    {H,G}
            end
    end.
%% opt_rec_vars(Gs) -> Records.
%%  Search through the guard expression for variables referenced in
%%  those is_record/3 calls that fail the clause when false.
%%  Collect, per variable, the record type asserted by is_record/3
%%  guard tests.  Only the intersection over all guard sequences is
%%  kept, because the head optimization must hold on every path.
opt_rec_vars([G|Gs]) ->
    Rs = opt_rec_vars_1(G, orddict:new()),
    opt_rec_vars(Gs, Rs);
opt_rec_vars([]) -> orddict:new().

opt_rec_vars([G|Gs], Rs0) ->
    Rs1 = opt_rec_vars_1(G, orddict:new()),
    Rs = ordsets:intersection(Rs0, Rs1),
    opt_rec_vars(Gs, Rs);
opt_rec_vars([], Rs) -> Rs.
%%  Fold over the tests of one guard sequence, accumulating record
%%  assertions.
opt_rec_vars_1([T|Ts], Rs0) ->
    Rs = opt_rec_vars_2(T, Rs0),
    opt_rec_vars_1(Ts, Rs);
opt_rec_vars_1([], Rs) -> Rs.
%%  Examine one guard test.  Both operands of 'and'/'andalso' must
%%  hold, so record assertions in either can be collected.
opt_rec_vars_2({op,_,'and',A1,A2}, Rs) ->
    opt_rec_vars_1([A1,A2], Rs);
opt_rec_vars_2({op,_,'andalso',A1,A2}, Rs) ->
    opt_rec_vars_1([A1,A2], Rs);
opt_rec_vars_2({op,_,'orelse',Arg,{atom,_,fail}}, Rs) ->
    %% Since the second argument guarantees failure,
    %% it is safe to inspect the first argument.
    opt_rec_vars_2(Arg, Rs);
opt_rec_vars_2({call,_,{remote,_,{atom,_,erlang},{atom,_,is_record}},
		[{var,_,V},{atom,_,Tag},{integer,_,Sz}]}, Rs) ->
    orddict:store(V, {Tag,Sz}, Rs);
opt_rec_vars_2({call,_,{atom,_,is_record},
		[{var,_,V},{atom,_,Tag},{integer,_,Sz}]}, Rs) ->
    orddict:store(V, {Tag,Sz}, Rs);
opt_rec_vars_2(_, Rs) -> Rs.
%%  Optimize each pattern in the clause head, threading the record
%%  assertion dictionary through.
opt_pattern_list(Ps, Rs) ->
    opt_pattern_list(Ps, Rs, []).

opt_pattern_list([P0|Ps], Rs0, Acc) ->
    {P,Rs} = opt_pattern(P0, Rs0),
    opt_pattern_list(Ps, Rs, [P|Acc]);
opt_pattern_list([], Rs, Acc) ->
    {reverse(Acc),Rs}.
%%  Rewrite a variable pattern that is known (from the guard) to be a
%%  record into a tuple match, marking the variable for removal of
%%  the redundant is_record/3 guard test.  Recurse through compound
%%  patterns; leave all other patterns unchanged.
opt_pattern({var,_,V}=Var, Rs0) ->
    case orddict:find(V, Rs0) of
        {ok,{Tag,Sz}} ->
            Rs = orddict:store(V, {remove,Tag,Sz}, Rs0),
            {opt_var(Var, Tag, Sz),Rs};
        _ ->
            {Var,Rs0}
    end;
opt_pattern({cons,Line,H0,T0}, Rs0) ->
    {H,Rs1} = opt_pattern(H0, Rs0),
    {T,Rs} = opt_pattern(T0, Rs1),
    {{cons,Line,H,T},Rs};
opt_pattern({tuple,Line,Es0}, Rs0) ->
    {Es,Rs} = opt_pattern_list(Es0, Rs0),
    {{tuple,Line,Es},Rs};
opt_pattern({match,Line,Pa0,Pb0}, Rs0) ->
    {Pa,Rs1} = opt_pattern(Pa0, Rs0),
    {Pb,Rs} = opt_pattern(Pb0, Rs1),
    {{match,Line,Pa,Pb},Rs};
opt_pattern(P, Rs) -> {P,Rs}.
%%  Turn Var into "{Tag,_,...,_} = Var" so that the head itself
%%  verifies the record shape (index -1 means no field is bound).
opt_var({var,Line,_}=Var, Tag, Sz) ->
    Rp = record_pattern(2, -1, ignore, Sz, Line, [{atom,Line,Tag}]),
    {match,Line,{tuple,Line,Rp},Var}.
%%  Remove guard tests made redundant by the optimized head, in every
%%  guard sequence.
opt_remove(Gs, Rs) ->
    [opt_remove_1(G, Rs) || G <- Gs].

opt_remove_1(Ts, Rs) ->
    [opt_remove_2(T, Rs) || T <- Ts].
%%  Replace is_record/3 tests that are now guaranteed by the head
%%  pattern with 'true'; recurse into 'and'/'andalso', and into the
%%  first operand of 'orelse' only (the second stays as-is).
opt_remove_2({op,L,'and'=Op,A1,A2}, Rs) ->
    {op,L,Op,opt_remove_2(A1, Rs),opt_remove_2(A2, Rs)};
opt_remove_2({op,L,'andalso'=Op,A1,A2}, Rs) ->
    {op,L,Op,opt_remove_2(A1, Rs),opt_remove_2(A2, Rs)};
opt_remove_2({op,L,'orelse',A1,A2}, Rs) ->
    {op,L,'orelse',opt_remove_2(A1, Rs),A2};
opt_remove_2({call,Line,{remote,_,{atom,_,erlang},{atom,_,is_record}},
	      [{var,_,V},{atom,_,Tag},{integer,_,Sz}]}=A, Rs) ->
    case orddict:find(V, Rs) of
        {ok,{remove,Tag,Sz}} ->
            {atom,Line,true};
        _ ->
            A
    end;
opt_remove_2({call,Line,{atom,_,is_record},
	      [{var,_,V},{atom,_,Tag},{integer,_,Sz}]}=A, Rs) ->
    case orddict:find(V, Rs) of
        {ok,{remove,Tag,Sz}} ->
            {atom,Line,true};
        _ ->
            A
    end;
opt_remove_2(A, _) -> A.
%%  Negate the line number of generated code so that tools (coverage,
%%  debugger) can recognise and ignore it.
neg_line(L) ->
    erl_parse:set_line(L, fun(Line) -> -abs(Line) end).